Moving to own repo without the base couchrest

commit 3894579304, parent e6604a0990
.gitignore (vendored, 3 changes)
@@ -1,4 +1,5 @@
 .DS_Store
 html/*
 pkg
-*.swp
+*.swp
+*.gemspec
README.md (32 changes)
@@ -1,25 +1,25 @@
-# CouchRest: CouchDB, close to the metal
+# CouchRest::ExtendedDocument: CouchDB, not too close to the metal
 
-CouchRest is based on [CouchDB's couch.js test
-library](http://svn.apache.org/repos/asf/couchdb/trunk/share/www/script/couch.js),
-which I find to be concise, clear, and well designed. CouchRest lightly wraps
-CouchDB's HTTP API, managing JSON serialization, and remembering the URI-paths
-to CouchDB's API endpoints so you don't have to.
-
-CouchRest is designed to make a simple base for application and framework-specific object oriented APIs. CouchRest is Object-Mapper agnostic, the parsed JSON it returns from CouchDB shows up as subclasses of Ruby's Hash. Naked JSON, just as it was mean to be.
+CouchRest::ExtendedDocument adds additional functionality to the standard CouchRest Document class such as
+setting properties, callbacks, typecasting, and validations.
 
-Note: CouchRest only support CouchDB 0.9.0 or newer.
+Note: CouchRest only supports CouchDB 0.9.0 or newer.
 
-## Easy Install
+## Install
 
-    $ sudo gem install couchrest
+    $ sudo gem install couchrest_extended_document
 
-### Relax, it's RESTful
+## Usage
 
-CouchRest rests on top of a HTTP abstraction layer using by default Heroku’s excellent REST Client Ruby HTTP wrapper.
-Other adapters can be added to support more http libraries.
+    require 'couchrest/extended_document'
 
-### Running the Specs
+    class Cat < CouchRest::ExtendedDocument
+
+      property :name, :type => String
+
+    end
+
+## Testing
 
 The most complete documentation is the spec/ directory. To validate your
 CouchRest install, from the project root directory run `rake`, or `autotest`
@@ -27,7 +27,7 @@ CouchRest install, from the project root directory run `rake`, or `autotest`
 
 ## Docs
 
-API: [http://rdoc.info/projects/couchrest/couchrest](http://rdoc.info/projects/couchrest/couchrest)
+API: [http://rdoc.info/projects/couchrest/couchrest_extended_document](http://rdoc.info/projects/couchrest/couchrest_extended_document)
 
 Check the wiki for documentation and examples [http://wiki.github.com/couchrest/couchrest](http://wiki.github.com/couchrest/couchrest)
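A minimal usage sketch of what the new README describes, to make the install and usage sections above concrete. It is illustrative only: the server URL and the "cats-example" database name are assumptions, while the calls shown (CouchRest.database!, use_database, property, save) are the ones documented in this README and in the library sources further down this page.

    require 'rubygems'
    require 'couchrest'
    require 'couchrest/extended_document'

    # Assumed: a local CouchDB at 127.0.0.1:5984; "cats-example" is an invented name.
    DB = CouchRest.database!("http://127.0.0.1:5984/cats-example")

    class Cat < CouchRest::ExtendedDocument
      use_database DB

      property :name, :type => String
    end

    kitty = Cat.new(:name => "Felix")
    kitty.save          # PUT the JSON document to CouchDB
    puts kitty.id       # _id and _rev are filled in from CouchDB's response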
Rakefile (14 changes)
@@ -1,6 +1,6 @@
 require 'rake'
 require "rake/rdoctask"
-require File.join(File.expand_path(File.dirname(__FILE__)),'lib','couchrest')
+require File.join(File.expand_path(File.dirname(__FILE__)),'lib','couchrest','extended_document')
 
 begin
   require 'spec/rake/spectask'
@@ -15,18 +15,18 @@ end
 begin
   require 'jeweler'
   Jeweler::Tasks.new do |gemspec|
-    gemspec.name = "couchrest"
-    gemspec.summary = "Lean and RESTful interface to CouchDB."
-    gemspec.description = "CouchRest provides a simple interface on top of CouchDB's RESTful HTTP API, as well as including some utility scripts for managing views and attachments."
+    gemspec.name = "samlown-couchrest_extended_document"
+    gemspec.summary = "Extend CouchRest Document class with useful features."
+    gemspec.description = "CouchRest::ExtendedDocument provides aditional features to the standard CouchRest::Document class such as properties, view designs, callbacks, typecasting and validations."
     gemspec.email = "jchris@apache.org"
-    gemspec.homepage = "http://github.com/couchrest/couchrest"
+    gemspec.homepage = "http://github.com/samlown/couchrest_extended_document"
     gemspec.authors = ["J. Chris Anderson", "Matt Aimonetti", "Marcos Tapajos", "Will Leinweber"]
     gemspec.extra_rdoc_files = %w( README.md LICENSE THANKS.md )
     gemspec.files = %w( LICENSE README.md Rakefile THANKS.md history.txt couchrest.gemspec) + Dir["{examples,lib,spec,utils}/**/*"] - Dir["spec/tmp"]
     gemspec.has_rdoc = true
-    gemspec.add_dependency("rest-client", ">= 0.5")
+    gemspec.add_dependency("samlown-couchrest", ">= 1.0.0")
     gemspec.add_dependency("mime-types", ">= 1.15")
-    gemspec.version = CouchRest::VERSION
+    gemspec.version = CouchRest::ExtendedDocument::VERSION
     gemspec.date = "2008-11-22"
     gemspec.require_path = "lib"
   end
couchrest.gemspec (deleted)
@@ -1,182 +0,0 @@
|
|||
# Generated by jeweler
|
||||
# DO NOT EDIT THIS FILE DIRECTLY
|
||||
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
|
||||
# -*- encoding: utf-8 -*-
|
||||
|
||||
Gem::Specification.new do |s|
|
||||
s.name = %q{couchrest}
|
||||
s.version = "0.37"
|
||||
|
||||
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
|
||||
s.authors = ["J. Chris Anderson", "Matt Aimonetti", "Marcos Tapajos", "Will Leinweber"]
|
||||
s.date = %q{2010-03-30}
|
||||
s.description = %q{CouchRest provides a simple interface on top of CouchDB's RESTful HTTP API, as well as including some utility scripts for managing views and attachments.}
|
||||
s.email = %q{jchris@apache.org}
|
||||
s.extra_rdoc_files = [
|
||||
"LICENSE",
|
||||
"README.md",
|
||||
"THANKS.md"
|
||||
]
|
||||
s.files = [
|
||||
"LICENSE",
|
||||
"README.md",
|
||||
"Rakefile",
|
||||
"THANKS.md",
|
||||
"couchrest.gemspec",
|
||||
"examples/model/example.rb",
|
||||
"examples/word_count/markov",
|
||||
"examples/word_count/views/books/chunked-map.js",
|
||||
"examples/word_count/views/books/united-map.js",
|
||||
"examples/word_count/views/markov/chain-map.js",
|
||||
"examples/word_count/views/markov/chain-reduce.js",
|
||||
"examples/word_count/views/word_count/count-map.js",
|
||||
"examples/word_count/views/word_count/count-reduce.js",
|
||||
"examples/word_count/word_count.rb",
|
||||
"examples/word_count/word_count_query.rb",
|
||||
"examples/word_count/word_count_views.rb",
|
||||
"history.txt",
|
||||
"lib/couchrest.rb",
|
||||
"lib/couchrest/commands/generate.rb",
|
||||
"lib/couchrest/commands/push.rb",
|
||||
"lib/couchrest/core/adapters/restclient.rb",
|
||||
"lib/couchrest/core/database.rb",
|
||||
"lib/couchrest/core/design.rb",
|
||||
"lib/couchrest/core/document.rb",
|
||||
"lib/couchrest/core/http_abstraction.rb",
|
||||
"lib/couchrest/core/response.rb",
|
||||
"lib/couchrest/core/rest_api.rb",
|
||||
"lib/couchrest/core/server.rb",
|
||||
"lib/couchrest/core/view.rb",
|
||||
"lib/couchrest/helper/pager.rb",
|
||||
"lib/couchrest/helper/streamer.rb",
|
||||
"lib/couchrest/helper/upgrade.rb",
|
||||
"lib/couchrest/middlewares/logger.rb",
|
||||
"lib/couchrest/mixins.rb",
|
||||
"lib/couchrest/mixins/attachments.rb",
|
||||
"lib/couchrest/mixins/attribute_protection.rb",
|
||||
"lib/couchrest/mixins/callbacks.rb",
|
||||
"lib/couchrest/mixins/class_proxy.rb",
|
||||
"lib/couchrest/mixins/collection.rb",
|
||||
"lib/couchrest/mixins/design_doc.rb",
|
||||
"lib/couchrest/mixins/document_queries.rb",
|
||||
"lib/couchrest/mixins/extended_attachments.rb",
|
||||
"lib/couchrest/mixins/extended_document_mixins.rb",
|
||||
"lib/couchrest/mixins/properties.rb",
|
||||
"lib/couchrest/mixins/validation.rb",
|
||||
"lib/couchrest/mixins/views.rb",
|
||||
"lib/couchrest/monkeypatches.rb",
|
||||
"lib/couchrest/more/casted_model.rb",
|
||||
"lib/couchrest/more/extended_document.rb",
|
||||
"lib/couchrest/more/property.rb",
|
||||
"lib/couchrest/more/typecast.rb",
|
||||
"lib/couchrest/support/blank.rb",
|
||||
"lib/couchrest/support/class.rb",
|
||||
"lib/couchrest/support/rails.rb",
|
||||
"lib/couchrest/validation/auto_validate.rb",
|
||||
"lib/couchrest/validation/contextual_validators.rb",
|
||||
"lib/couchrest/validation/validation_errors.rb",
|
||||
"lib/couchrest/validation/validators/absent_field_validator.rb",
|
||||
"lib/couchrest/validation/validators/confirmation_validator.rb",
|
||||
"lib/couchrest/validation/validators/format_validator.rb",
|
||||
"lib/couchrest/validation/validators/formats/email.rb",
|
||||
"lib/couchrest/validation/validators/formats/url.rb",
|
||||
"lib/couchrest/validation/validators/generic_validator.rb",
|
||||
"lib/couchrest/validation/validators/length_validator.rb",
|
||||
"lib/couchrest/validation/validators/method_validator.rb",
|
||||
"lib/couchrest/validation/validators/numeric_validator.rb",
|
||||
"lib/couchrest/validation/validators/required_field_validator.rb",
|
||||
"spec/couchrest/core/couchrest_spec.rb",
|
||||
"spec/couchrest/core/database_spec.rb",
|
||||
"spec/couchrest/core/design_spec.rb",
|
||||
"spec/couchrest/core/document_spec.rb",
|
||||
"spec/couchrest/core/server_spec.rb",
|
||||
"spec/couchrest/helpers/pager_spec.rb",
|
||||
"spec/couchrest/helpers/streamer_spec.rb",
|
||||
"spec/couchrest/more/attribute_protection_spec.rb",
|
||||
"spec/couchrest/more/casted_extended_doc_spec.rb",
|
||||
"spec/couchrest/more/casted_model_spec.rb",
|
||||
"spec/couchrest/more/extended_doc_attachment_spec.rb",
|
||||
"spec/couchrest/more/extended_doc_inherited_spec.rb",
|
||||
"spec/couchrest/more/extended_doc_spec.rb",
|
||||
"spec/couchrest/more/extended_doc_subclass_spec.rb",
|
||||
"spec/couchrest/more/extended_doc_view_spec.rb",
|
||||
"spec/couchrest/more/property_spec.rb",
|
||||
"spec/fixtures/attachments/README",
|
||||
"spec/fixtures/attachments/couchdb.png",
|
||||
"spec/fixtures/attachments/test.html",
|
||||
"spec/fixtures/more/article.rb",
|
||||
"spec/fixtures/more/card.rb",
|
||||
"spec/fixtures/more/cat.rb",
|
||||
"spec/fixtures/more/course.rb",
|
||||
"spec/fixtures/more/event.rb",
|
||||
"spec/fixtures/more/invoice.rb",
|
||||
"spec/fixtures/more/person.rb",
|
||||
"spec/fixtures/more/question.rb",
|
||||
"spec/fixtures/more/service.rb",
|
||||
"spec/fixtures/more/user.rb",
|
||||
"spec/fixtures/views/lib.js",
|
||||
"spec/fixtures/views/test_view/lib.js",
|
||||
"spec/fixtures/views/test_view/only-map.js",
|
||||
"spec/fixtures/views/test_view/test-map.js",
|
||||
"spec/fixtures/views/test_view/test-reduce.js",
|
||||
"spec/spec.opts",
|
||||
"spec/spec_helper.rb",
|
||||
"utils/remap.rb",
|
||||
"utils/subset.rb"
|
||||
]
|
||||
s.homepage = %q{http://github.com/couchrest/couchrest}
|
||||
s.rdoc_options = ["--charset=UTF-8"]
|
||||
s.require_paths = ["lib"]
|
||||
s.rubygems_version = %q{1.3.6}
|
||||
s.summary = %q{Lean and RESTful interface to CouchDB.}
|
||||
s.test_files = [
|
||||
"spec/couchrest/core/couchrest_spec.rb",
|
||||
"spec/couchrest/core/database_spec.rb",
|
||||
"spec/couchrest/core/design_spec.rb",
|
||||
"spec/couchrest/core/document_spec.rb",
|
||||
"spec/couchrest/core/server_spec.rb",
|
||||
"spec/couchrest/helpers/pager_spec.rb",
|
||||
"spec/couchrest/helpers/streamer_spec.rb",
|
||||
"spec/couchrest/more/attribute_protection_spec.rb",
|
||||
"spec/couchrest/more/casted_extended_doc_spec.rb",
|
||||
"spec/couchrest/more/casted_model_spec.rb",
|
||||
"spec/couchrest/more/extended_doc_attachment_spec.rb",
|
||||
"spec/couchrest/more/extended_doc_inherited_spec.rb",
|
||||
"spec/couchrest/more/extended_doc_spec.rb",
|
||||
"spec/couchrest/more/extended_doc_subclass_spec.rb",
|
||||
"spec/couchrest/more/extended_doc_view_spec.rb",
|
||||
"spec/couchrest/more/property_spec.rb",
|
||||
"spec/fixtures/more/article.rb",
|
||||
"spec/fixtures/more/card.rb",
|
||||
"spec/fixtures/more/cat.rb",
|
||||
"spec/fixtures/more/course.rb",
|
||||
"spec/fixtures/more/event.rb",
|
||||
"spec/fixtures/more/invoice.rb",
|
||||
"spec/fixtures/more/person.rb",
|
||||
"spec/fixtures/more/question.rb",
|
||||
"spec/fixtures/more/service.rb",
|
||||
"spec/fixtures/more/user.rb",
|
||||
"spec/spec_helper.rb",
|
||||
"examples/model/example.rb",
|
||||
"examples/word_count/word_count.rb",
|
||||
"examples/word_count/word_count_query.rb",
|
||||
"examples/word_count/word_count_views.rb"
|
||||
]
|
||||
|
||||
if s.respond_to? :specification_version then
|
||||
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
|
||||
s.specification_version = 3
|
||||
|
||||
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
|
||||
s.add_runtime_dependency(%q<rest-client>, [">= 0.5"])
|
||||
s.add_runtime_dependency(%q<mime-types>, [">= 1.15"])
|
||||
else
|
||||
s.add_dependency(%q<rest-client>, [">= 0.5"])
|
||||
s.add_dependency(%q<mime-types>, [">= 1.15"])
|
||||
end
|
||||
else
|
||||
s.add_dependency(%q<rest-client>, [">= 0.5"])
|
||||
s.add_dependency(%q<mime-types>, [">= 1.15"])
|
||||
end
|
||||
end
|
||||
|
|
examples/word_count/markov (deleted)
@@ -1,38 +0,0 @@
#!/usr/bin/env ruby

require File.expand_path(File.dirname(__FILE__)) + '/../../couchrest'

cr = CouchRest.new("http://127.0.0.1:5984")
@db = cr.database('word-count-example')
@word_memoizer = {}

def probable_follower_for(word)
  @word_memoizer[word] ||= @db.view('markov/chain-reduce', :startkey => [word,nil], :endkey => [word,{}],:group_level => 2)

  # puts
  # puts "search #{word} #{wprobs[word]['rows'].length}"
  # @word_memoizer[word]['rows'].sort_by{|r|r['value']}.each{|r|puts [r['value'],r['key']].inspect}

  rows = @word_memoizer[word]['rows'].select{|r|(r['key'][1]!='')}.sort_by{|r|r['value']}
  row = rows[(-1*[rows.length,5].min)..-1].sort_by{rand}[0]
  row ? row['key'][1] : nil
end


word = ARGV[0]
words = [word]

while word
  $stdout.print ' ' if words.length > 1
  $stdout.print word
  $stdout.flush
  word = probable_follower_for(word)
  words << word
end

$stdout.print '.'
$stdout.flush
puts

# `say #{words.join(' ')}`
examples/word_count/views/books/chunked-map.js (deleted)
@@ -1,3 +0,0 @@
function(doc) {
  doc.title && doc.chunk && emit([doc.title, doc.chunk],null);
}
examples/word_count/views/books/united-map.js (deleted)
@@ -1 +0,0 @@
function(doc){if(doc.text && doc.text.match(/united/)) emit([doc.title, doc.chunk],null)}
examples/word_count/views/markov/chain-map.js (deleted)
@@ -1,6 +0,0 @@
function(doc){
  var words = doc.text.split(/\W/).filter(function(w) {return w.length > 0}).map(function(w){return w.toLowerCase()});
  for (var i = 0, l = words.length; i < l; i++) {
    emit(words.slice(i,4),doc.title);
  }
}
examples/word_count/views/markov/chain-reduce.js (deleted)
@@ -1,7 +0,0 @@
function(key,vs,c){
  if (c) {
    return sum(vs);
  } else {
    return vs.length;
  }
}
examples/word_count/views/word_count/count-map.js (deleted)
@@ -1,6 +0,0 @@
function(doc){
  var words = doc.text.split(/\W/).map(function(w){return w.toLowerCase()});
  words.forEach(function(word){
    if (word.length > 0) emit([word,doc.title],1);
  });
}
examples/word_count/views/word_count/count-reduce.js (deleted)
@@ -1,3 +0,0 @@
function(key,values){
  return sum(values);
}
examples/word_count/word_count.rb (deleted)
@@ -1,46 +0,0 @@
require 'rubygems'
require 'couchrest'

couch = CouchRest.new("http://127.0.0.1:5984")
db = couch.database('word-count-example')
db.delete! rescue nil
db = couch.create_db('word-count-example')

books = {
  'outline-of-science.txt' => 'http://www.gutenberg.org/files/20417/20417.txt',
  'ulysses.txt' => 'http://www.gutenberg.org/dirs/etext03/ulyss12.txt',
  'america.txt' => 'http://www.gutenberg.org/files/16960/16960.txt',
  'da-vinci.txt' => 'http://www.gutenberg.org/dirs/etext04/7ldv110.txt'
}

books.each do |file, url|
  pathfile = File.join(File.dirname(__FILE__),file)
  `curl #{url} > #{pathfile}` unless File.exists?(pathfile)
end


books.keys.each do |book|
  title = book.split('.')[0]
  puts title
  File.open(File.join(File.dirname(__FILE__),book),'r') do |file|
    lines = []
    chunk = 0
    while line = file.gets
      lines << line
      if lines.length > 10
        db.save({
          :title => title,
          :chunk => chunk,
          :text => lines.join('')
        })
        chunk += 1
        puts chunk
        lines = []
      end
    end
  end
end

# puts "The books have been stored in your CouchDB. To initiate the MapReduce process, visit http://127.0.0.1:5984/_utils/ in your browser and click 'word-count-example', then select view 'words' or 'count'. The process could take about 15 minutes on an average MacBook."
examples/word_count/word_count_query.rb (deleted)
@@ -1,40 +0,0 @@
require 'rubygems'
require 'couchrest'

couch = CouchRest.new("http://127.0.0.1:5984")
db = couch.database('word-count-example')

puts "Now that we've parsed all those books into CouchDB, the queries we can run are incredibly flexible."
puts "\nThe simplest query we can run is the total word count for all words in all documents:"
puts "this will take a few minutes the first time. if it times out, just rerun this script in a few few minutes."
puts db.view('word_count/words').inspect

puts "\nWe can also narrow the query down to just one word, across all documents. Here is the count for 'flight' in all three books:"

word = 'flight'
params = {
  :startkey => [word],
  :endkey => [word,{}]
}

puts db.view('word_count/words',params).inspect

puts "\nWe scope the query using startkey and endkey params to take advantage of CouchDB's collation ordering. Here are the params for the last query:"
puts params.inspect

puts "\nWe can also count words on a per-title basis."

title = 'da-vinci'
params = {
  :key => [word, title]
}

puts db.view('word_count/words',params).inspect


puts "\nHere are the params for 'flight' in the da-vinci book:"
puts params.inspect
puts
puts 'The url looks like this:'
puts 'http://127.0.0.1:5984/word-count-example/_view/word_count/count?key=["flight","da-vinci"]'
puts "\nTry dropping that in your browser..."
examples/word_count/word_count_views.rb (deleted)
@@ -1,26 +0,0 @@
require 'rubygems'
require 'couchrest'

couch = CouchRest.new("http://127.0.0.1:5984")
db = couch.database('word-count-example')

word_count = {
  :map => 'function(doc){
    var words = doc.text.split(/\W/);
    words.forEach(function(word){
      if (word.length > 0) emit([word,doc.title],1);
    });
  }',
  :reduce => 'function(key,combine){
    return sum(combine);
  }'
}

db.delete db.get("_design/word_count") rescue nil

db.save({
  "_id" => "_design/word_count",
  :views => {
    :words => word_count
  }
})
history.txt (5 additions)
@@ -4,6 +4,11 @@
 
 * Minor enhancements
 
+== 0.4.0
+
+* Major enhancements
+  * Separated ExtendedDocument from main CouchRest gem (Sam Lown)
+
 == 0.37
 
 * Minor enhancements
init.rb (2 changes)
@@ -1 +1 @@
-require File.join(File.dirname(__FILE__),'lib', 'couchrest.rb')
+require File.join(File.dirname(__FILE__),'lib', 'couchrest', 'extended_document')
lib/couchrest.rb (deleted, 162 lines)
@@ -1,162 +0,0 @@
|
|||
# Copyright 2008 J. Chris Anderson
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
require 'rubygems'
|
||||
begin
|
||||
require 'json'
|
||||
rescue LoadError
|
||||
raise "You need install and require your own json compatible library since couchrest rest couldn't load the json/json_pure gem" unless Kernel.const_defined?("JSON")
|
||||
end
|
||||
require 'rest_client'
|
||||
|
||||
$:.unshift File.dirname(__FILE__) unless
|
||||
$:.include?(File.dirname(__FILE__)) ||
|
||||
$:.include?(File.expand_path(File.dirname(__FILE__)))
|
||||
|
||||
require 'couchrest/monkeypatches'
|
||||
|
||||
# = CouchDB, close to the metal
|
||||
module CouchRest
|
||||
VERSION = '0.37.4' unless self.const_defined?("VERSION")
|
||||
|
||||
autoload :Server, 'couchrest/core/server'
|
||||
autoload :Database, 'couchrest/core/database'
|
||||
autoload :Response, 'couchrest/core/response'
|
||||
autoload :Document, 'couchrest/core/document'
|
||||
autoload :Design, 'couchrest/core/design'
|
||||
autoload :View, 'couchrest/core/view'
|
||||
autoload :Model, 'couchrest/core/model'
|
||||
autoload :Pager, 'couchrest/helper/pager'
|
||||
autoload :FileManager, 'couchrest/helper/file_manager'
|
||||
autoload :Streamer, 'couchrest/helper/streamer'
|
||||
autoload :Upgrade, 'couchrest/helper/upgrade'
|
||||
|
||||
autoload :ExtendedDocument, 'couchrest/more/extended_document'
|
||||
autoload :CastedModel, 'couchrest/more/casted_model'
|
||||
|
||||
require File.join(File.dirname(__FILE__), 'couchrest', 'core', 'rest_api')
|
||||
require File.join(File.dirname(__FILE__), 'couchrest', 'core', 'http_abstraction')
|
||||
require File.join(File.dirname(__FILE__), 'couchrest', 'mixins')
|
||||
require File.join(File.dirname(__FILE__), 'couchrest', 'support', 'rails') if defined?(Rails)
|
||||
|
||||
# we extend CouchRest with the RestAPI module which gives us acess to
|
||||
# the get, post, put, delete and copy
|
||||
CouchRest.extend(::RestAPI)
|
||||
|
||||
# The CouchRest module methods handle the basic JSON serialization
|
||||
# and deserialization, as well as query parameters. The module also includes
|
||||
# some helpers for tasks like instantiating a new Database or Server instance.
|
||||
class << self
|
||||
|
||||
# extracted from Extlib
|
||||
#
|
||||
# Constantize tries to find a declared constant with the name specified
|
||||
# in the string. It raises a NameError when the name is not in CamelCase
|
||||
# or is not initialized.
|
||||
#
|
||||
# @example
|
||||
# "Module".constantize #=> Module
|
||||
# "Class".constantize #=> Class
|
||||
def constantize(camel_cased_word)
|
||||
unless /\A(?:::)?([A-Z]\w*(?:::[A-Z]\w*)*)\z/ =~ camel_cased_word
|
||||
raise NameError, "#{camel_cased_word.inspect} is not a valid constant name!"
|
||||
end
|
||||
|
||||
Object.module_eval("::#{$1}", __FILE__, __LINE__)
|
||||
end
|
||||
|
||||
# extracted from Extlib
|
||||
#
|
||||
# Capitalizes the first word and turns underscores into spaces and strips _id.
|
||||
# Like titleize, this is meant for creating pretty output.
|
||||
#
|
||||
# @example
|
||||
# "employee_salary" #=> "Employee salary"
|
||||
# "author_id" #=> "Author"
|
||||
def humanize(lower_case_and_underscored_word)
|
||||
lower_case_and_underscored_word.to_s.gsub(/_id$/, "").gsub(/_/, " ").capitalize
|
||||
end
|
||||
|
||||
# todo, make this parse the url and instantiate a Server or Database instance
|
||||
# depending on the specificity.
|
||||
def new(*opts)
|
||||
Server.new(*opts)
|
||||
end
|
||||
|
||||
def parse url
|
||||
case url
|
||||
when /^(https?:\/\/)(.*)\/(.*)\/(.*)/
|
||||
scheme = $1
|
||||
host = $2
|
||||
db = $3
|
||||
docid = $4
|
||||
when /^(https?:\/\/)(.*)\/(.*)/
|
||||
scheme = $1
|
||||
host = $2
|
||||
db = $3
|
||||
when /^(https?:\/\/)(.*)/
|
||||
scheme = $1
|
||||
host = $2
|
||||
when /(.*)\/(.*)\/(.*)/
|
||||
host = $1
|
||||
db = $2
|
||||
docid = $3
|
||||
when /(.*)\/(.*)/
|
||||
host = $1
|
||||
db = $2
|
||||
else
|
||||
db = url
|
||||
end
|
||||
|
||||
db = nil if db && db.empty?
|
||||
|
||||
{
|
||||
:host => (scheme || "http://") + (host || "127.0.0.1:5984"),
|
||||
:database => db,
|
||||
:doc => docid
|
||||
}
|
||||
end
|
||||
|
||||
# set proxy to use
|
||||
def proxy url
|
||||
HttpAbstraction.proxy = url
|
||||
end
|
||||
|
||||
# ensure that a database exists
|
||||
# creates it if it isn't already there
|
||||
# returns it after it's been created
|
||||
def database! url
|
||||
parsed = parse url
|
||||
cr = CouchRest.new(parsed[:host])
|
||||
cr.database!(parsed[:database])
|
||||
end
|
||||
|
||||
def database url
|
||||
parsed = parse url
|
||||
cr = CouchRest.new(parsed[:host])
|
||||
cr.database(parsed[:database])
|
||||
end
|
||||
|
||||
def paramify_url url, params = {}
|
||||
if params && !params.empty?
|
||||
query = params.collect do |k,v|
|
||||
v = v.to_json if %w{key startkey endkey}.include?(k.to_s)
|
||||
"#{k}=#{CGI.escape(v.to_s)}"
|
||||
end.join("&")
|
||||
url = "#{url}?#{query}"
|
||||
end
|
||||
url
|
||||
end
|
||||
end # class << self
|
||||
end
|
|
lib/couchrest/casted_model.rb (modified)
@@ -1,11 +1,12 @@
-require File.expand_path('../../mixins/properties', __FILE__)
+require 'couchrest'
+require File.join(File.dirname(__FILE__), 'mixins/callbacks')
+require File.join(File.dirname(__FILE__), 'mixins/properties')
 
 module CouchRest
   module CastedModel
 
     def self.included(base)
-      base.send(:include, ::CouchRest::Callbacks)
+      base.send(:include, ::CouchRest::Mixins::Callbacks)
       base.send(:include, ::CouchRest::Mixins::Properties)
       base.send(:attr_accessor, :casted_by)
       base.send(:attr_accessor, :document_saved)
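To illustrate what the CastedModel mixin above is used for, here is a hedged sketch of embedding a casted hash inside an ExtendedDocument property. The Address and Person classes and the database name are invented for the example; :cast_as and casted_by come from the mixin shown above.

    class Address < Hash
      include CouchRest::CastedModel
      property :street
      property :city
    end

    class Person < CouchRest::ExtendedDocument
      use_database CouchRest.database!("http://127.0.0.1:5984/people-example")
      property :name
      property :address, :cast_as => 'Address'
    end

    p = Person.new(:name => "Sam",
                   :address => {'street' => 'Main St', 'city' => 'Valencia'})
    p.address.class       #=> Address (the hash is cast on assignment)
    p.address.casted_by   #=> p, via the attr_accessor added by the mixin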
lib/couchrest/commands/generate.rb (deleted)
@@ -1,71 +0,0 @@
|
|||
require 'fileutils'
|
||||
|
||||
module CouchRest
|
||||
module Commands
|
||||
module Generate
|
||||
|
||||
def self.run(options)
|
||||
directory = options[:directory]
|
||||
design_names = options[:trailing_args]
|
||||
|
||||
FileUtils.mkdir_p(directory)
|
||||
filename = File.join(directory, "lib.js")
|
||||
self.write(filename, <<-FUNC)
|
||||
// Put global functions here.
|
||||
// Include in your views with
|
||||
//
|
||||
// //include-lib
|
||||
FUNC
|
||||
|
||||
design_names.each do |design_name|
|
||||
subdirectory = File.join(directory, design_name)
|
||||
FileUtils.mkdir_p(subdirectory)
|
||||
filename = File.join(subdirectory, "sample-map.js")
|
||||
self.write(filename, <<-FUNC)
|
||||
function(doc) {
|
||||
// Keys is first letter of _id
|
||||
emit(doc._id[0], doc);
|
||||
}
|
||||
FUNC
|
||||
|
||||
filename = File.join(subdirectory, "sample-reduce.js")
|
||||
self.write(filename, <<-FUNC)
|
||||
function(keys, values) {
|
||||
// Count the number of keys starting with this letter
|
||||
return values.length;
|
||||
}
|
||||
FUNC
|
||||
|
||||
filename = File.join(subdirectory, "lib.js")
|
||||
self.write(filename, <<-FUNC)
|
||||
// Put functions specific to '#{design_name}' here.
|
||||
// Include in your views with
|
||||
//
|
||||
// //include-lib
|
||||
FUNC
|
||||
end
|
||||
end
|
||||
|
||||
def self.help
|
||||
helpstring = <<-GEN
|
||||
|
||||
Usage: couchview generate directory design1 design2 design3 ...
|
||||
|
||||
Couchview will create directories and example views for the design documents you specify.
|
||||
|
||||
GEN
|
||||
helpstring.gsub(/^ /, '')
|
||||
end
|
||||
|
||||
def self.write(filename, contents)
|
||||
puts "Writing #{filename}"
|
||||
File.open(filename, "w") do |f|
|
||||
# Remove leading spaces
|
||||
contents.gsub!(/^ ( )?/, '')
|
||||
f.write contents
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
end
|
|
lib/couchrest/commands/push.rb (deleted)
@@ -1,103 +0,0 @@
|
|||
module CouchRest
|
||||
|
||||
module Commands
|
||||
|
||||
module Push
|
||||
|
||||
def self.run(options)
|
||||
directory = options[:directory]
|
||||
database = options[:trailing_args].first
|
||||
|
||||
fm = CouchRest::FileManager.new(database)
|
||||
fm.loud = options[:loud]
|
||||
|
||||
if options[:loud]
|
||||
puts "Pushing views from directory #{directory} to database #{fm.db}"
|
||||
end
|
||||
|
||||
fm.push_views(directory)
|
||||
end
|
||||
|
||||
def self.help
|
||||
helpstring = <<-GEN
|
||||
|
||||
== Pushing views with Couchview ==
|
||||
|
||||
Usage: couchview push directory dbname
|
||||
|
||||
Couchview expects a specific filesystem layout for your CouchDB views (see
|
||||
example below). It also supports advanced features like inlining of library
|
||||
code (so you can keep DRY) as well as avoiding unnecessary document
|
||||
modification.
|
||||
|
||||
Couchview also solves a problem with CouchDB's view API, which only provides
|
||||
access to the final reduce side of any views which have both a map and a
|
||||
reduce function defined. The intermediate map results are often useful for
|
||||
development and production. CouchDB is smart enough to reuse map indexes for
|
||||
functions duplicated across views within the same design document.
|
||||
|
||||
For views with a reduce function defined, Couchview creates both a reduce view
|
||||
and a map-only view, so that you can browse and query the map side as well as
|
||||
the reduction, with no performance penalty.
|
||||
|
||||
== Example ==
|
||||
|
||||
couchview push foo-project/bar-views baz-database
|
||||
|
||||
This will push the views defined in foo-project/bar-views into a database
|
||||
called baz-database. Couchview expects the views to be defined in files with
|
||||
names like:
|
||||
|
||||
foo-project/bar-views/my-design/viewname-map.js
|
||||
foo-project/bar-views/my-design/viewname-reduce.js
|
||||
foo-project/bar-views/my-design/noreduce-map.js
|
||||
|
||||
Pushed to => http://127.0.0.1:5984/baz-database/_design/my-design
|
||||
|
||||
And the design document:
|
||||
{
|
||||
"views" : {
|
||||
"viewname-map" : {
|
||||
"map" : "### contents of view-name-map.js ###"
|
||||
},
|
||||
"viewname-reduce" : {
|
||||
"map" : "### contents of view-name-map.js ###",
|
||||
"reduce" : "### contents of view-name-reduce.js ###"
|
||||
},
|
||||
"noreduce-map" : {
|
||||
"map" : "### contents of noreduce-map.js ###"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Couchview will create a design document for each subdirectory of the views
|
||||
directory specified on the command line.
|
||||
|
||||
== Library Inlining ==
|
||||
|
||||
Couchview can optionally inline library code into your views so you only have
|
||||
to maintain it in one place. It looks for any files named lib.* in your
|
||||
design-doc directory (for doc specific libs) and in the parent views directory
|
||||
(for project global libs). These libraries are only inserted into views which
|
||||
include the text
|
||||
|
||||
// !include lib
|
||||
|
||||
or
|
||||
|
||||
# !include lib
|
||||
|
||||
Couchview is a result of scratching my own itch. I'd be happy to make it more
|
||||
general, so please contact me at jchris@grabb.it if you'd like to see anything
|
||||
added or changed.
|
||||
|
||||
GEN
|
||||
helpstring.gsub(/^ /, '')
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
|
||||
end
|
||||
|
||||
end
|
|
lib/couchrest/core/adapters/restclient.rb (deleted)
@@ -1,35 +0,0 @@
module RestClientAdapter

  module API
    def proxy=(url)
      RestClient.proxy = url
    end

    def proxy
      RestClient.proxy
    end

    def get(uri, headers={})
      RestClient.get(uri, headers).to_s
    end

    def post(uri, payload, headers={})
      RestClient.post(uri, payload, headers).to_s
    end

    def put(uri, payload, headers={})
      RestClient.put(uri, payload, headers).to_s
    end

    def delete(uri, headers={})
      RestClient.delete(uri, headers).to_s
    end

    def copy(uri, headers)
      RestClient::Request.execute( :method => :copy,
                                   :url => uri,
                                   :headers => headers).to_s
    end
  end

end
lib/couchrest/core/database.rb (deleted)
@@ -1,377 +0,0 @@
|
|||
require 'cgi'
|
||||
require "base64"
|
||||
|
||||
module CouchRest
|
||||
class Database
|
||||
attr_reader :server, :host, :name, :root, :uri
|
||||
attr_accessor :bulk_save_cache_limit
|
||||
|
||||
# Create a CouchRest::Database adapter for the supplied CouchRest::Server
|
||||
# and database name.
|
||||
#
|
||||
# ==== Parameters
|
||||
# server<CouchRest::Server>:: database host
|
||||
# name<String>:: database name
|
||||
#
|
||||
def initialize(server, name)
|
||||
@name = name
|
||||
@server = server
|
||||
@host = server.uri
|
||||
@uri = "/#{name.gsub('/','%2F')}"
|
||||
@root = host + uri
|
||||
@streamer = Streamer.new(self)
|
||||
@bulk_save_cache = []
|
||||
@bulk_save_cache_limit = 500 # must be smaller than the uuid count
|
||||
end
|
||||
|
||||
# returns the database's uri
|
||||
def to_s
|
||||
@root
|
||||
end
|
||||
|
||||
# GET the database info from CouchDB
|
||||
def info
|
||||
CouchRest.get @root
|
||||
end
|
||||
|
||||
# Query the <tt>_all_docs</tt> view. Accepts all the same arguments as view.
|
||||
def documents(params = {})
|
||||
keys = params.delete(:keys)
|
||||
url = CouchRest.paramify_url "#{@root}/_all_docs", params
|
||||
if keys
|
||||
CouchRest.post(url, {:keys => keys})
|
||||
else
|
||||
CouchRest.get url
|
||||
end
|
||||
end
|
||||
|
||||
# Query a CouchDB-Lucene search view
|
||||
def search(name, params={})
|
||||
# -> http://localhost:5984/yourdb/_fti/YourDesign/by_name?include_docs=true&q=plop*'
|
||||
url = CouchRest.paramify_url "#{root}/_fti/#{name}", params
|
||||
CouchRest.get url
|
||||
end
|
||||
|
||||
# load a set of documents by passing an array of ids
|
||||
def get_bulk(ids)
|
||||
documents(:keys => ids, :include_docs => true)
|
||||
end
|
||||
alias :bulk_load :get_bulk
|
||||
|
||||
# POST a temporary view function to CouchDB for querying. This is not
|
||||
# recommended, as you don't get any performance benefit from CouchDB's
|
||||
# materialized views. Can be quite slow on large databases.
|
||||
def slow_view(funcs, params = {})
|
||||
keys = params.delete(:keys)
|
||||
funcs = funcs.merge({:keys => keys}) if keys
|
||||
url = CouchRest.paramify_url "#{@root}/_temp_view", params
|
||||
JSON.parse(HttpAbstraction.post(url, funcs.to_json, {"Content-Type" => 'application/json'}))
|
||||
end
|
||||
|
||||
# backwards compatibility is a plus
|
||||
alias :temp_view :slow_view
|
||||
|
||||
# Query a CouchDB view as defined by a <tt>_design</tt> document. Accepts
|
||||
# paramaters as described in http://wiki.apache.org/couchdb/HttpViewApi
|
||||
def view(name, params = {}, &block)
|
||||
keys = params.delete(:keys)
|
||||
name = name.split('/') # I think this will always be length == 2, but maybe not...
|
||||
dname = name.shift
|
||||
vname = name.join('/')
|
||||
url = CouchRest.paramify_url "#{@root}/_design/#{dname}/_view/#{vname}", params
|
||||
if keys
|
||||
CouchRest.post(url, {:keys => keys})
|
||||
else
|
||||
if block_given?
|
||||
@streamer.view("_design/#{dname}/_view/#{vname}", params, &block)
|
||||
else
|
||||
CouchRest.get url
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# GET a document from CouchDB, by id. Returns a Ruby Hash.
|
||||
def get(id, params = {})
|
||||
slug = escape_docid(id)
|
||||
url = CouchRest.paramify_url("#{@root}/#{slug}", params)
|
||||
result = CouchRest.get(url)
|
||||
return result unless result.is_a?(Hash)
|
||||
doc = if /^_design/ =~ result["_id"]
|
||||
Design.new(result)
|
||||
else
|
||||
Document.new(result)
|
||||
end
|
||||
doc.database = self
|
||||
doc
|
||||
end
|
||||
|
||||
# GET an attachment directly from CouchDB
|
||||
def fetch_attachment(doc, name)
|
||||
uri = url_for_attachment(doc, name)
|
||||
HttpAbstraction.get uri
|
||||
end
|
||||
|
||||
# PUT an attachment directly to CouchDB
|
||||
def put_attachment(doc, name, file, options = {})
|
||||
docid = escape_docid(doc['_id'])
|
||||
uri = url_for_attachment(doc, name)
|
||||
JSON.parse(HttpAbstraction.put(uri, file, options))
|
||||
end
|
||||
|
||||
# DELETE an attachment directly from CouchDB
|
||||
def delete_attachment(doc, name, force=false)
|
||||
uri = url_for_attachment(doc, name)
|
||||
# this needs a rev
|
||||
begin
|
||||
JSON.parse(HttpAbstraction.delete(uri))
|
||||
rescue Exception => error
|
||||
if force
|
||||
# get over a 409
|
||||
doc = get(doc['_id'])
|
||||
uri = url_for_attachment(doc, name)
|
||||
JSON.parse(HttpAbstraction.delete(uri))
|
||||
else
|
||||
error
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Save a document to CouchDB. This will use the <tt>_id</tt> field from
|
||||
# the document as the id for PUT, or request a new UUID from CouchDB, if
|
||||
# no <tt>_id</tt> is present on the document. IDs are attached to
|
||||
# documents on the client side because POST has the curious property of
|
||||
# being automatically retried by proxies in the event of network
|
||||
# segmentation and lost responses.
|
||||
#
|
||||
# If <tt>bulk</tt> is true (false by default) the document is cached for bulk-saving later.
|
||||
# Bulk saving happens automatically when #bulk_save_cache limit is exceded, or on the next non bulk save.
|
||||
#
|
||||
# If <tt>batch</tt> is true (false by default) the document is saved in
|
||||
# batch mode, "used to achieve higher throughput at the cost of lower
|
||||
# guarantees. When [...] sent using this option, it is not immediately
|
||||
# written to disk. Instead it is stored in memory on a per-user basis for a
|
||||
# second or so (or the number of docs in memory reaches a certain point).
|
||||
# After the threshold has passed, the docs are committed to disk. Instead
|
||||
# of waiting for the doc to be written to disk before responding, CouchDB
|
||||
# sends an HTTP 202 Accepted response immediately. batch=ok is not suitable
|
||||
# for crucial data, but it ideal for applications like logging which can
|
||||
# accept the risk that a small proportion of updates could be lost due to a
|
||||
# crash."
|
||||
def save_doc(doc, bulk = false, batch = false)
|
||||
if doc['_attachments']
|
||||
doc['_attachments'] = encode_attachments(doc['_attachments'])
|
||||
end
|
||||
if bulk
|
||||
@bulk_save_cache << doc
|
||||
bulk_save if @bulk_save_cache.length >= @bulk_save_cache_limit
|
||||
return {"ok" => true} # Compatibility with Document#save
|
||||
elsif !bulk && @bulk_save_cache.length > 0
|
||||
bulk_save
|
||||
end
|
||||
result = if doc['_id']
|
||||
slug = escape_docid(doc['_id'])
|
||||
begin
|
||||
uri = "#{@root}/#{slug}"
|
||||
uri << "?batch=ok" if batch
|
||||
CouchRest.put uri, doc
|
||||
rescue HttpAbstraction::ResourceNotFound
|
||||
p "resource not found when saving even tho an id was passed"
|
||||
slug = doc['_id'] = @server.next_uuid
|
||||
CouchRest.put "#{@root}/#{slug}", doc
|
||||
end
|
||||
else
|
||||
begin
|
||||
slug = doc['_id'] = @server.next_uuid
|
||||
CouchRest.put "#{@root}/#{slug}", doc
|
||||
rescue #old version of couchdb
|
||||
CouchRest.post @root, doc
|
||||
end
|
||||
end
|
||||
if result['ok']
|
||||
doc['_id'] = result['id']
|
||||
doc['_rev'] = result['rev']
|
||||
doc.database = self if doc.respond_to?(:database=)
|
||||
end
|
||||
result
|
||||
end
|
||||
|
||||
# Save a document to CouchDB in bulk mode. See #save_doc's +bulk+ argument.
|
||||
def bulk_save_doc(doc)
|
||||
save_doc(doc, true)
|
||||
end
|
||||
|
||||
# Save a document to CouchDB in batch mode. See #save_doc's +batch+ argument.
|
||||
def batch_save_doc(doc)
|
||||
save_doc(doc, false, true)
|
||||
end
|
||||
|
||||
# POST an array of documents to CouchDB. If any of the documents are
|
||||
# missing ids, supply one from the uuid cache.
|
||||
#
|
||||
# If called with no arguments, bulk saves the cache of documents to be bulk saved.
|
||||
def bulk_save(docs = nil, use_uuids = true)
|
||||
if docs.nil?
|
||||
docs = @bulk_save_cache
|
||||
@bulk_save_cache = []
|
||||
end
|
||||
if (use_uuids)
|
||||
ids, noids = docs.partition{|d|d['_id']}
|
||||
uuid_count = [noids.length, @server.uuid_batch_count].max
|
||||
noids.each do |doc|
|
||||
nextid = @server.next_uuid(uuid_count) rescue nil
|
||||
doc['_id'] = nextid if nextid
|
||||
end
|
||||
end
|
||||
CouchRest.post "#{@root}/_bulk_docs", {:docs => docs}
|
||||
end
|
||||
alias :bulk_delete :bulk_save
|
||||
|
||||
# DELETE the document from CouchDB that has the given <tt>_id</tt> and
|
||||
# <tt>_rev</tt>.
|
||||
#
|
||||
# If <tt>bulk</tt> is true (false by default) the deletion is recorded for bulk-saving (bulk-deletion :) later.
|
||||
# Bulk saving happens automatically when #bulk_save_cache limit is exceded, or on the next non bulk save.
|
||||
def delete_doc(doc, bulk = false)
|
||||
raise ArgumentError, "_id and _rev required for deleting" unless doc['_id'] && doc['_rev']
|
||||
if bulk
|
||||
@bulk_save_cache << { '_id' => doc['_id'], '_rev' => doc['_rev'], '_deleted' => true }
|
||||
return bulk_save if @bulk_save_cache.length >= @bulk_save_cache_limit
|
||||
return { "ok" => true } # Mimic the non-deferred version
|
||||
end
|
||||
slug = escape_docid(doc['_id'])
|
||||
CouchRest.delete "#{@root}/#{slug}?rev=#{doc['_rev']}"
|
||||
end
|
||||
|
||||
# COPY an existing document to a new id. If the destination id currently exists, a rev must be provided.
|
||||
# <tt>dest</tt> can take one of two forms if overwriting: "id_to_overwrite?rev=revision" or the actual doc
|
||||
# hash with a '_rev' key
|
||||
def copy_doc(doc, dest)
|
||||
raise ArgumentError, "_id is required for copying" unless doc['_id']
|
||||
slug = escape_docid(doc['_id'])
|
||||
destination = if dest.respond_to?(:has_key?) && dest['_id'] && dest['_rev']
|
||||
"#{dest['_id']}?rev=#{dest['_rev']}"
|
||||
else
|
||||
dest
|
||||
end
|
||||
CouchRest.copy "#{@root}/#{slug}", destination
|
||||
end
|
||||
|
||||
# Updates the given doc by yielding the current state of the doc
|
||||
# and trying to update update_limit times. Returns the new doc
|
||||
# if the doc was successfully updated without hitting the limit
|
||||
def update_doc(doc_id, params = {}, update_limit=10)
|
||||
resp = {'ok' => false}
|
||||
new_doc = nil
|
||||
last_fail = nil
|
||||
|
||||
until resp['ok'] or update_limit <= 0
|
||||
doc = self.get(doc_id, params) # grab the doc
|
||||
new_doc = yield doc # give it to the caller to be updated
|
||||
begin
|
||||
resp = self.save_doc new_doc # try to PUT the updated doc into the db
|
||||
rescue RestClient::RequestFailed => e
|
||||
if e.http_code == 409 # Update collision
|
||||
update_limit -= 1
|
||||
last_fail = e
|
||||
else # some other error
|
||||
raise e
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
raise last_fail unless resp['ok']
|
||||
new_doc
|
||||
end
|
||||
|
||||
# Compact the database, removing old document revisions and optimizing space use.
|
||||
def compact!
|
||||
CouchRest.post "#{@root}/_compact"
|
||||
end
|
||||
|
||||
# Create the database
|
||||
def create!
|
||||
bool = server.create_db(@name) rescue false
|
||||
bool && true
|
||||
end
|
||||
|
||||
# Delete and re create the database
|
||||
def recreate!
|
||||
delete!
|
||||
create!
|
||||
rescue RestClient::ResourceNotFound
|
||||
ensure
|
||||
create!
|
||||
end
|
||||
|
||||
# Replicates via "pulling" from another database to this database. Makes no attempt to deal with conflicts.
|
||||
def replicate_from other_db, continuous=false
|
||||
replicate other_db, continuous, :target => name
|
||||
end
|
||||
|
||||
# Replicates via "pushing" to another database. Makes no attempt to deal with conflicts.
|
||||
def replicate_to other_db, continuous=false
|
||||
replicate other_db, continuous, :source => name
|
||||
end
|
||||
|
||||
# DELETE the database itself. This is not undoable and could be rather
|
||||
# catastrophic. Use with care!
|
||||
def delete!
|
||||
clear_extended_doc_fresh_cache
|
||||
CouchRest.delete @root
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def replicate other_db, continuous, options
|
||||
raise ArgumentError, "must provide a CouchReset::Database" unless other_db.kind_of?(CouchRest::Database)
|
||||
raise ArgumentError, "must provide a target or source option" unless (options.key?(:target) || options.key?(:source))
|
||||
payload = options
|
||||
if options.has_key?(:target)
|
||||
payload[:source] = other_db.root
|
||||
else
|
||||
payload[:target] = other_db.root
|
||||
end
|
||||
payload[:continuous] = continuous
|
||||
CouchRest.post "#{@host}/_replicate", payload
|
||||
end
|
||||
|
||||
def clear_extended_doc_fresh_cache
|
||||
::CouchRest::ExtendedDocument.subclasses.each{|klass| klass.req_design_doc_refresh if klass.respond_to?(:req_design_doc_refresh)}
|
||||
end
|
||||
|
||||
def uri_for_attachment(doc, name)
|
||||
if doc.is_a?(String)
|
||||
puts "CouchRest::Database#fetch_attachment will eventually require a doc as the first argument, not a doc.id"
|
||||
docid = doc
|
||||
rev = nil
|
||||
else
|
||||
docid = doc['_id']
|
||||
rev = doc['_rev']
|
||||
end
|
||||
docid = escape_docid(docid)
|
||||
name = CGI.escape(name)
|
||||
rev = "?rev=#{doc['_rev']}" if rev
|
||||
"/#{docid}/#{name}#{rev}"
|
||||
end
|
||||
|
||||
def url_for_attachment(doc, name)
|
||||
@root + uri_for_attachment(doc, name)
|
||||
end
|
||||
|
||||
def escape_docid id
|
||||
/^_design\/(.*)/ =~ id ? "_design/#{CGI.escape($1)}" : CGI.escape(id)
|
||||
end
|
||||
|
||||
def encode_attachments(attachments)
|
||||
attachments.each do |k,v|
|
||||
next if v['stub']
|
||||
v['data'] = base64(v['data'])
|
||||
end
|
||||
attachments
|
||||
end
|
||||
|
||||
def base64(data)
|
||||
Base64.encode64(data).gsub(/\s/,'')
|
||||
end
|
||||
end
|
||||
end
|
|
lib/couchrest/core/design.rb (deleted)
@@ -1,79 +0,0 @@
module CouchRest
  class Design < Document
    def view_by *keys
      opts = keys.pop if keys.last.is_a?(Hash)
      opts ||= {}
      self['views'] ||= {}
      method_name = "by_#{keys.join('_and_')}"

      if opts[:map]
        view = {}
        view['map'] = opts.delete(:map)
        if opts[:reduce]
          view['reduce'] = opts.delete(:reduce)
          opts[:reduce] = false
        end
        self['views'][method_name] = view
      else
        doc_keys = keys.collect{|k|"doc['#{k}']"} # this is where :require => 'doc.x == true' would show up
        key_emit = doc_keys.length == 1 ? "#{doc_keys.first}" : "[#{doc_keys.join(', ')}]"
        guards = opts.delete(:guards) || []
        guards.concat doc_keys
        map_function = <<-JAVASCRIPT
          function(doc) {
            if (#{guards.join(' && ')}) {
              emit(#{key_emit}, null);
            }
          }
        JAVASCRIPT
        self['views'][method_name] = {
          'map' => map_function
        }
      end
      self['views'][method_name]['couchrest-defaults'] = opts unless opts.empty?
      method_name
    end

    # Dispatches to any named view.
    # (using the database where this design doc was saved)
    def view view_name, query={}, &block
      view_on database, view_name, query, &block
    end

    # Dispatches to any named view in a specific database
    def view_on db, view_name, query={}, &block
      view_name = view_name.to_s
      view_slug = "#{name}/#{view_name}"
      defaults = (self['views'][view_name] && self['views'][view_name]["couchrest-defaults"]) || {}
      db.view(view_slug, defaults.merge(query), &block)
    end

    def name
      id.sub('_design/','') if id
    end

    def name= newname
      self['_id'] = "_design/#{newname}"
    end

    def save
      raise ArgumentError, "_design docs require a name" unless name && name.length > 0
      super
    end

    private

    # returns stored defaults if the there is a view named this in the design doc
    def has_view?(view)
      view = view.to_s
      self['views'][view] &&
        (self['views'][view]["couchrest-defaults"] || {})
    end

    def fetch_view view_name, opts, &block
      database.view(view_name, opts, &block)
    end

  end

end
lib/couchrest/core/document.rb (deleted)
@@ -1,84 +0,0 @@
require 'delegate'

module CouchRest
  class Document < Response
    include CouchRest::Mixins::Attachments

    extlib_inheritable_accessor :database
    attr_accessor :database

    # override the CouchRest::Model-wide default_database
    # This is not a thread safe operation, do not change the model
    # database at runtime.
    def self.use_database(db)
      self.database = db
    end

    def id
      self['_id']
    end

    def rev
      self['_rev']
    end

    # returns true if the document has never been saved
    def new?
      !rev
    end
    alias :new_document? :new?

    # Saves the document to the db using create or update. Also runs the :save
    # callbacks. Sets the <tt>_id</tt> and <tt>_rev</tt> fields based on
    # CouchDB's response.
    # If <tt>bulk</tt> is <tt>true</tt> (defaults to false) the document is cached for bulk save.
    def save(bulk = false)
      raise ArgumentError, "doc.database required for saving" unless database
      result = database.save_doc self, bulk
      result['ok']
    end

    # Deletes the document from the database. Runs the :delete callbacks.
    # Removes the <tt>_id</tt> and <tt>_rev</tt> fields, preparing the
    # document to be saved to a new <tt>_id</tt>.
    # If <tt>bulk</tt> is <tt>true</tt> (defaults to false) the document won't
    # actually be deleted from the db until bulk save.
    def destroy(bulk = false)
      raise ArgumentError, "doc.database required to destroy" unless database
      result = database.delete_doc(self, bulk)
      if result['ok']
        self['_rev'] = nil
        self['_id'] = nil
      end
      result['ok']
    end

    # copies the document to a new id. If the destination id currently exists, a rev must be provided.
    # <tt>dest</tt> can take one of two forms if overwriting: "id_to_overwrite?rev=revision" or the actual doc
    # hash with a '_rev' key
    def copy(dest)
      raise ArgumentError, "doc.database required to copy" unless database
      result = database.copy_doc(self, dest)
      result['ok']
    end

    # Returns the CouchDB uri for the document
    def uri(append_rev = false)
      return nil if new?
      couch_uri = "#{database.root}/#{CGI.escape(id)}"
      if append_rev == true
        couch_uri << "?rev=#{rev}"
      elsif append_rev.kind_of?(Integer)
        couch_uri << "?rev=#{append_rev}"
      end
      couch_uri
    end

    # Returns the document's database
    def database
      @database || self.class.database
    end

  end

end
lib/couchrest/core/http_abstraction.rb (deleted)
@@ -1,48 +0,0 @@
require 'couchrest/core/adapters/restclient'

# Abstraction layet for HTTP communications.
#
# By defining a basic API that CouchRest is relying on,
# it allows for easy experimentations and implementations of various libraries.
#
# Most of the API is based on the RestClient API that was used in the early version of CouchRest.
#
module HttpAbstraction

  # here is the list of exception expected by CouchRest
  # please convert the underlying errors in this set of known
  # exceptions.
  class ResourceNotFound < StandardError; end
  class RequestFailed < StandardError; end
  class RequestTimeout < StandardError; end
  class ServerBrokeConnection < StandardError; end
  class Conflict < StandardError; end


  # # Here is the API you need to implement if you want to write a new adapter
  # # See adapters/restclient.rb for more information.
  #
  # def self.proxy=(url)
  # end
  #
  # def self.proxy
  # end
  #
  # def self.get(uri, headers=nil)
  # end
  #
  # def self.post(uri, payload, headers=nil)
  # end
  #
  # def self.put(uri, payload, headers=nil)
  # end
  #
  # def self.delete(uri, headers=nil)
  # end
  #
  # def self.copy(uri, headers)
  # end

end

HttpAbstraction.extend(RestClientAdapter::API)
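As a sketch of how a different HTTP client could be plugged in behind this abstraction, the module below follows the commented contract above. It is hypothetical: only GET is implemented and headers are ignored, but the wiring at the end mirrors the RestClientAdapter line in the file.

    require 'net/http'
    require 'uri'

    module NetHTTPAdapter
      module API
        # Minimal GET only; a real adapter would also cover post/put/delete/copy
        # and the proxy accessors listed in the commented API above.
        def get(uri, headers = {})
          Net::HTTP.get_response(URI.parse(uri)).body
        end
      end
    end

    HttpAbstraction.extend(NetHTTPAdapter::API)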
lib/couchrest/core/response.rb (deleted)
@@ -1,16 +0,0 @@
module CouchRest
  class Response < Hash
    def initialize(pkeys = {})
      pkeys ||= {}
      pkeys.each do |k,v|
        self[k.to_s] = v
      end
    end
    def []=(key, value)
      super(key.to_s, value)
    end
    def [](key)
      super(key.to_s)
    end
  end
end
lib/couchrest/core/rest_api.rb (deleted)
@@ -1,49 +0,0 @@
module RestAPI

  def put(uri, doc = nil)
    payload = doc.to_json if doc
    begin
      JSON.parse(HttpAbstraction.put(uri, payload))
    rescue Exception => e
      if $DEBUG
        raise "Error while sending a PUT request #{uri}\npayload: #{payload.inspect}\n#{e}"
      else
        raise e
      end
    end
  end

  def get(uri)
    begin
      JSON.parse(HttpAbstraction.get(uri), :max_nesting => false)
    rescue => e
      if $DEBUG
        raise "Error while sending a GET request #{uri}\n: #{e}"
      else
        raise e
      end
    end
  end

  def post(uri, doc = nil)
    payload = doc.to_json if doc
    begin
      JSON.parse(HttpAbstraction.post(uri, payload))
    rescue Exception => e
      if $DEBUG
        raise "Error while sending a POST request #{uri}\npayload: #{payload.inspect}\n#{e}"
      else
        raise e
      end
    end
  end

  def delete(uri)
    JSON.parse(HttpAbstraction.delete(uri))
  end

  def copy(uri, destination)
    JSON.parse(HttpAbstraction.copy(uri, {'Destination' => destination}))
  end

end
lib/couchrest/core/server.rb (deleted)
@@ -1,88 +0,0 @@
module CouchRest
  class Server
    attr_accessor :uri, :uuid_batch_count, :available_databases
    def initialize(server = 'http://127.0.0.1:5984', uuid_batch_count = 1000)
      @uri = server
      @uuid_batch_count = uuid_batch_count
    end

    # Lists all "available" databases.
    # An available database, is a database that was specified
    # as avaiable by your code.
    # It allows to define common databases to use and reuse in your code
    def available_databases
      @available_databases ||= {}
    end

    # Adds a new available database and create it unless it already exists
    #
    # Example:
    #
    # @couch = CouchRest::Server.new
    # @couch.define_available_database(:default, "tech-blog")
    #
    def define_available_database(reference, db_name, create_unless_exists = true)
      available_databases[reference.to_sym] = create_unless_exists ? database!(db_name) : database(db_name)
    end

    # Checks that a database is set as available
    #
    # Example:
    #
    # @couch.available_database?(:default)
    #
    def available_database?(ref_or_name)
      ref_or_name.is_a?(Symbol) ? available_databases.keys.include?(ref_or_name) : available_databases.values.map{|db| db.name}.include?(ref_or_name)
    end

    def default_database=(name, create_unless_exists = true)
      define_available_database(:default, name, create_unless_exists = true)
    end

    def default_database
      available_databases[:default]
    end

    # Lists all databases on the server
    def databases
      CouchRest.get "#{@uri}/_all_dbs"
    end

    # Returns a CouchRest::Database for the given name
    def database(name)
      CouchRest::Database.new(self, name)
    end

    # Creates the database if it doesn't exist
    def database!(name)
      create_db(name) rescue nil
      database(name)
    end

    # GET the welcome message
    def info
      CouchRest.get "#{@uri}/"
    end

    # Create a database
    def create_db(name)
      CouchRest.put "#{@uri}/#{name}"
      database(name)
    end

    # Restart the CouchDB instance
    def restart!
      CouchRest.post "#{@uri}/_restart"
    end

    # Retrive an unused UUID from CouchDB. Server instances manage caching a list of unused UUIDs.
    def next_uuid(count = @uuid_batch_count)
      @uuids ||= []
      if @uuids.empty?
        @uuids = CouchRest.get("#{@uri}/_uuids?count=#{count}")["uuids"]
      end
      @uuids.pop
    end

  end
end
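A short usage sketch for the Server class above; the "tech-blog" name is taken from the comment in define_available_database, and a local CouchDB at the default address is assumed.

    couch = CouchRest::Server.new("http://127.0.0.1:5984")
    couch.define_available_database(:default, "tech-blog")  # creates it if missing
    db = couch.default_database
    puts couch.info.inspect     # CouchDB welcome message
    puts couch.next_uuid        # served from the cached batch of UUIDs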
lib/couchrest/core/view.rb (deleted)
@@ -1,4 +0,0 @@
module CouchRest
  class View
  end
end
@@ -1,13 +1,21 @@
gem 'samlown-couchrest'
require 'couchrest'
require 'active_support'
require 'mime/types'
require File.join(File.dirname(__FILE__), "property")
require File.join(File.dirname(__FILE__), '..', 'mixins', 'extended_document_mixins')
require "enumerator"
require File.join(File.dirname(__FILE__), "monkeypatches")
require File.join(File.dirname(__FILE__), "property")
require File.join(File.dirname(__FILE__), 'mixins')
require File.join(File.dirname(__FILE__), 'casted_model')

module CouchRest

  # Same as CouchRest::Document but with properties and validations
  class ExtendedDocument < Document
    include CouchRest::Callbacks

    VERSION = "1.0.0"

    include CouchRest::Mixins::Callbacks
    include CouchRest::Mixins::DocumentQueries
    include CouchRest::Mixins::Views
    include CouchRest::Mixins::DesignDoc

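The hunk above is the core of the gem: ExtendedDocument layers the property, callback, view and design-doc mixins onto the plain Document. A minimal model sketch, assuming the usual mixin API (use_database, property, timestamps!, view_by) behaves as in released CouchRest versions; the Article class and database URL are illustrative only:

    require 'couchrest'
    require 'couchrest/extended_document'

    class Article < CouchRest::ExtendedDocument
      use_database CouchRest.database!("http://127.0.0.1:5984/articles")

      property :title
      property :published, :default => false
      timestamps!

      view_by :title
    end

    article = Article.new(:title => "Relax")
    article.save
    Article.by_title(:key => "Relax").first
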
@@ -1,103 +0,0 @@
module CouchRest
  class Pager
    attr_accessor :db
    def initialize db
      @db = db
    end

    def all_docs(limit=100, &block)
      startkey = nil
      oldend = nil

      while docrows = request_all_docs(limit+1, startkey)
        startkey = docrows.last['key']
        docrows.pop if docrows.length > limit
        if oldend == startkey
          break
        end
        yield(docrows)
        oldend = startkey
      end
    end

    def key_reduce(view, limit=2000, firstkey = nil, lastkey = nil, &block)
      # start with no keys
      startkey = firstkey
      # lastprocessedkey = nil
      keepgoing = true

      while keepgoing && viewrows = request_view(view, limit, startkey)
        startkey = viewrows.first['key']
        endkey = viewrows.last['key']

        if (startkey == endkey)
          # we need to rerequest to get a bigger page
          # so we know we have all the rows for that key
          viewrows = @db.view(view, :key => startkey)['rows']
          # we need to do an offset thing to find the next startkey
          # otherwise we just get stuck
          lastdocid = viewrows.last['id']
          fornextloop = @db.view(view, :startkey => startkey, :startkey_docid => lastdocid, :limit => 2)['rows']

          newendkey = fornextloop.last['key']
          if (newendkey == endkey)
            keepgoing = false
          else
            startkey = newendkey
          end
          rows = viewrows
        else
          rows = []
          for r in viewrows
            if (lastkey && r['key'] == lastkey)
              keepgoing = false
              break
            end
            break if (r['key'] == endkey)
            rows << r
          end
          startkey = endkey
        end

        key = :begin
        values = []

        rows.each do |r|
          if key != r['key']
            # we're on a new key, yield the old first and then reset
            yield(key, values) if key != :begin
            key = r['key']
            values = []
          end
          # keep accumulating
          values << r['value']
        end
        yield(key, values)

      end
    end

    private

    def request_all_docs limit, startkey = nil
      opts = {}
      opts[:limit] = limit if limit
      opts[:startkey] = startkey if startkey
      results = @db.documents(opts)
      rows = results['rows']
      rows unless rows.length == 0
    end

    def request_view view, limit = nil, startkey = nil, endkey = nil
      opts = {}
      opts[:limit] = limit if limit
      opts[:startkey] = startkey if startkey
      opts[:endkey] = endkey if endkey

      results = @db.view(view, opts)
      rows = results['rows']
      rows unless rows.length == 0
    end

  end
end

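Pager walks a database in fixed-size pages by re-querying from the last key it saw; key_reduce additionally groups every value emitted under one key before yielding. A usage sketch with placeholder database and view names:

    db    = CouchRest.database!("http://127.0.0.1:5984/example-db")
    pager = CouchRest::Pager.new(db)

    # process _all_docs 100 rows at a time
    pager.all_docs(100) do |rows|
      rows.each { |row| puts row['id'] }
    end

    # yield all values emitted under each key of a map view
    pager.key_reduce('articles/by_tag', 2000) do |key, values|
      puts "#{key}: #{values.length} rows"
    end
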
@@ -1,51 +0,0 @@
module CouchRest
  class Streamer
    attr_accessor :db
    def initialize db
      @db = db
    end

    # Stream a view, yielding one row at a time. Shells out to <tt>curl</tt> to keep RAM usage low when you have millions of rows.
    def view name, params = nil, &block
      urlst = if /^_/.match(name) then
        "#{@db.root}/#{name}"
      else
        name = name.split('/')
        dname = name.shift
        vname = name.join('/')
        "#{@db.root}/_design/#{dname}/_view/#{vname}"
      end
      url = CouchRest.paramify_url urlst, params
      # puts "stream #{url}"
      first = nil
      IO.popen("curl --silent \"#{url}\"") do |view|
        first = view.gets # discard header
        while line = view.gets
          row = parse_line(line)
          block.call row unless row.nil? # last line "}]" discarded
        end
      end
      parse_first(first)
    end

    private

    def parse_line line
      return nil unless line
      if /(\{.*\}),?/.match(line.chomp)
        JSON.parse($1)
      end
    end

    def parse_first first
      return nil unless first
      parts = first.split(',')
      parts.pop
      line = parts.join(',')
      JSON.parse("#{line}}")
    rescue
      nil
    end

  end
end

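Streamer keeps memory flat by shelling out to curl and parsing one JSON row per line instead of loading the whole view response. A sketch, assuming curl is on the PATH; the database and view names are placeholders:

    db       = CouchRest.database!("http://127.0.0.1:5984/example-db")
    streamer = CouchRest::Streamer.new(db)

    # built-in views are passed by raw name, design-doc views as "design/view"
    streamer.view("_all_docs") do |row|
      puts row['id']            # each row arrives as an already-parsed Hash
    end
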
@@ -1,51 +0,0 @@
module CouchRest
  class Upgrade
    attr_accessor :olddb, :newdb, :dbname
    def initialize dbname, old_couch, new_couch
      @dbname = dbname
      @olddb = old_couch.database dbname
      @newdb = new_couch.database!(dbname)
      @bulk_docs = []
    end
    def clone!
      puts "#{dbname} - #{olddb.info['doc_count']} docs"
      streamer = CouchRest::Streamer.new(olddb)
      streamer.view("_all_docs_by_seq") do |row|
        load_row_docs(row) if row
        maybe_flush_bulks
      end
      flush_bulks!
    end

    private

    def maybe_flush_bulks
      flush_bulks! if (@bulk_docs.length > 99)
    end

    def flush_bulks!
      url = CouchRest.paramify_url "#{@newdb.uri}/_bulk_docs", {:all_or_nothing => true}
      puts "posting #{@bulk_docs.length} bulk docs to #{url}"
      begin
        CouchRest.post url, {:docs => @bulk_docs}
        @bulk_docs = []
      rescue Exception => e
        puts e.response
        raise e
      end
    end

    def load_row_docs(row)
      results = @olddb.get(row["id"], {:open_revs => "all", :attachments => true})
      results.select{|r|r["ok"]}.each do |r|
        doc = r["ok"]
        if /^_/.match(doc["_id"]) && !/^_design/.match(doc["_id"])
          puts "invalid docid #{doc["_id"]} -- trimming"
          doc["_id"] = doc["_id"].sub('_','')
        end
        doc.delete('_rev')
        @bulk_docs << doc
      end
    end
  end
end

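Upgrade streams every document out of the old database via _all_docs_by_seq and bulk-posts batches of up to 100 docs into a freshly created database on the target server. A sketch of driving it; both server URLs and the database name are placeholders:

    old_couch = CouchRest.new("http://127.0.0.1:5984")   # source CouchDB
    new_couch = CouchRest.new("http://127.0.0.1:5985")   # target CouchDB

    CouchRest::Upgrade.new('example-db', old_couch, new_couch).clone!
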
@@ -1,263 +0,0 @@
####################################
# USAGE
#
# in your rack.rb file
# require this file and then:
#
# couch = CouchRest.new
# LOG_DB = couch.database!('couchrest-logger')
# use CouchRest::Logger, LOG_DB
#
# Note:
# to require just this middleware, if you have the gem installed do:
# require 'couchrest/middlewares/logger'
#
# For log processing examples, see examples at the bottom of this file

module CouchRest
  class Logger

    def self.log
      Thread.current["couchrest.logger"] ||= {:queries => []}
    end

    def initialize(app, db=nil)
      @app = app
      @db = db
    end

    def self.record(log_info)
      log[:queries] << log_info
    end

    def log
      Thread.current["couchrest.logger"] ||= {:queries => []}
    end

    def reset_log
      Thread.current["couchrest.logger"] = nil
    end

    def call(env)
      reset_log
      log['started_at'] = Time.now
      log['env'] = env
      log['url'] = 'http://' + env['HTTP_HOST'] + env['REQUEST_URI']
      response = @app.call(env)
      log['ended_at'] = Time.now
      log['duration'] = log['ended_at'] - log['started_at']
      # let's report the log in a different thread so we don't slow down the app
      @db ? Thread.new(@db, log){|db, rlog| db.save_doc(rlog);} : p(log.inspect)
      response
    end
  end
end

# inject our logger into CouchRest HTTP abstraction layer
module HttpAbstraction

  def self.get(uri, headers=nil)
    start_query = Time.now
    log = {:method => :get, :uri => uri, :headers => headers}
    response = super(uri, headers=nil)
    end_query = Time.now
    log[:duration] = (end_query - start_query)
    CouchRest::Logger.record(log)
    response
  end

  def self.post(uri, payload, headers=nil)
    start_query = Time.now
    log = {:method => :post, :uri => uri, :payload => (payload ? (JSON.load(payload) rescue 'parsing error') : nil), :headers => headers}
    response = super(uri, payload, headers=nil)
    end_query = Time.now
    log[:duration] = (end_query - start_query)
    CouchRest::Logger.record(log)
    response
  end

  def self.put(uri, payload, headers=nil)
    start_query = Time.now
    log = {:method => :put, :uri => uri, :payload => (payload ? (JSON.load(payload) rescue 'parsing error') : nil), :headers => headers}
    response = super(uri, payload, headers=nil)
    end_query = Time.now
    log[:duration] = (end_query - start_query)
    CouchRest::Logger.record(log)
    response
  end

  def self.delete(uri, headers=nil)
    start_query = Time.now
    log = {:method => :delete, :uri => uri, :headers => headers}
    response = super(uri, headers=nil)
    end_query = Time.now
    log[:duration] = (end_query - start_query)
    CouchRest::Logger.record(log)
    response
  end

end

|
||||
#
|
||||
#
|
||||
# # DB VIEWS
|
||||
# by_url = {
|
||||
# :map =>
|
||||
# "function(doc) {
|
||||
# if(doc['url']){ emit(doc['url'], 1) };
|
||||
# }",
|
||||
# :reduce =>
|
||||
# 'function (key, values, rereduce) {
|
||||
# return(sum(values));
|
||||
# };'
|
||||
# }
|
||||
# req_duration = {
|
||||
# :map =>
|
||||
# "function(doc) {
|
||||
# if(doc['duration']){ emit(doc['url'], doc['duration']) };
|
||||
# }",
|
||||
# :reduce =>
|
||||
# 'function (key, values, rereduce) {
|
||||
# return(sum(values)/values.length);
|
||||
# };'
|
||||
# }
|
||||
#
|
||||
# query_duration = {
|
||||
# :map =>
|
||||
# "function(doc) {
|
||||
# if(doc['queries']){
|
||||
# doc.queries.forEach(function(query){
|
||||
# if(query['duration'] && query['method']){
|
||||
# emit(query['method'], query['duration'])
|
||||
# }
|
||||
# });
|
||||
# };
|
||||
# }" ,
|
||||
# :reduce =>
|
||||
# 'function (key, values, rereduce) {
|
||||
# return(sum(values)/values.length);
|
||||
# };'
|
||||
# }
|
||||
#
|
||||
# action_queries = {
|
||||
# :map =>
|
||||
# "function(doc) {
|
||||
# if(doc['queries']){
|
||||
# emit(doc['url'], doc['queries'].length)
|
||||
# };
|
||||
# }",
|
||||
# :reduce =>
|
||||
# 'function (key, values, rereduce) {
|
||||
# return(sum(values)/values.length);
|
||||
# };'
|
||||
# }
|
||||
#
|
||||
# action_time_spent_in_db = {
|
||||
# :map =>
|
||||
# "function(doc) {
|
||||
# if(doc['queries']){
|
||||
# var totalDuration = 0;
|
||||
# doc.queries.forEach(function(query){
|
||||
# totalDuration += query['duration']
|
||||
# })
|
||||
# emit(doc['url'], totalDuration)
|
||||
# };
|
||||
# }",
|
||||
# :reduce =>
|
||||
# 'function (key, values, rereduce) {
|
||||
# return(sum(values)/values.length);
|
||||
# };'
|
||||
# }
|
||||
#
|
||||
# show_queries = %Q~function(doc, req) {
|
||||
# var body = ""
|
||||
# body += "<h1>" + doc['url'] + "</h1>"
|
||||
# body += "<h2>Request duration in seconds: " + doc['duration'] + "</h2>"
|
||||
# body += "<h3>" + doc['queries'].length + " queries</h3><ul>"
|
||||
# if (doc.queries){
|
||||
# doc.queries.forEach(function(query){
|
||||
# body += "<li>"+ query['uri'] +"</li>"
|
||||
# });
|
||||
# };
|
||||
# body += "</ul>"
|
||||
# if(doc){ return { body: body} }
|
||||
# }~
|
||||
#
|
||||
#
|
||||
# couch = CouchRest.new
|
||||
# LOG_DB = couch.database!('couchrest-logger')
|
||||
# design_doc = LOG_DB.get("_design/stats") rescue nil
|
||||
# LOG_DB.delete_doc design_doc rescue nil
|
||||
# LOG_DB.save_doc({
|
||||
# "_id" => "_design/stats",
|
||||
# :views => {
|
||||
# :by_url => by_url,
|
||||
# :request_duration => req_duration,
|
||||
# :query_duration => query_duration,
|
||||
# :action_queries => action_queries,
|
||||
# :action_time_spent_in_db => action_time_spent_in_db
|
||||
# },
|
||||
# :shows => {
|
||||
# :queries => show_queries
|
||||
# }
|
||||
# })
|
||||
#
|
||||
# module CouchRest
|
||||
# class Logger
|
||||
#
|
||||
# def self.roundup(value)
|
||||
# begin
|
||||
# value = Float(value)
|
||||
# (value * 100).round.to_f / 100
|
||||
# rescue
|
||||
# value
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# # Usage example:
|
||||
# # CouchRest::Logger.average_request_duration(LOG_DB)['rows'].first['value']
|
||||
# def self.average_request_duration(db)
|
||||
# raw = db.view('stats/request_duration', :reduce => true)
|
||||
# (raw.has_key?('rows') && !raw['rows'].empty?) ? roundup(raw['rows'].first['value']) : 'not available yet'
|
||||
# end
|
||||
#
|
||||
# def self.average_query_duration(db)
|
||||
# raw = db.view('stats/query_duration', :reduce => true)
|
||||
# (raw.has_key?('rows') && !raw['rows'].empty?) ? roundup(raw['rows'].first['value']) : 'not available yet'
|
||||
# end
|
||||
#
|
||||
# def self.average_get_query_duration(db)
|
||||
# raw = db.view('stats/query_duration', :key => 'get', :reduce => true)
|
||||
# (raw.has_key?('rows') && !raw['rows'].empty?) ? roundup(raw['rows'].first['value']) : 'not available yet'
|
||||
# end
|
||||
#
|
||||
# def self.average_post_query_duration(db)
|
||||
# raw = db.view('stats/query_duration', :key => 'post', :reduce => true)
|
||||
# (raw.has_key?('rows') && !raw['rows'].empty?) ? roundup(raw['rows'].first['value']) : 'not available yet'
|
||||
# end
|
||||
#
|
||||
# def self.average_queries_per_action(db)
|
||||
# raw = db.view('stats/action_queries', :reduce => true)
|
||||
# (raw.has_key?('rows') && !raw['rows'].empty?) ? roundup(raw['rows'].first['value']) : 'not available yet'
|
||||
# end
|
||||
#
|
||||
# def self.average_db_time_per_action(db)
|
||||
# raw = db.view('stats/action_time_spent_in_db', :reduce => true)
|
||||
# (raw.has_key?('rows') && !raw['rows'].empty?) ? roundup(raw['rows'].first['value']) : 'not available yet'
|
||||
# end
|
||||
#
|
||||
# def self.stats(db)
|
||||
# Thread.new(db){|db|
|
||||
# puts "=== STATS ===\n"
|
||||
# puts "average request duration: #{average_request_duration(db)}\n"
|
||||
# puts "average query duration: #{average_query_duration(db)}\n"
|
||||
# puts "average queries per action : #{average_queries_per_action(db)}\n"
|
||||
# puts "average time spent in DB (per action): #{average_db_time_per_action(db)}\n"
|
||||
# puts "===============\n"
|
||||
# }
|
||||
# end
|
||||
#
|
||||
# end
|
||||
# end
|
|
@@ -1,4 +1,12 @@
mixins_dir = File.join(File.dirname(__FILE__), 'mixins')

require File.join(mixins_dir, 'attachments')
require File.join(mixins_dir, 'callbacks')
require File.join(mixins_dir, 'callbacks')
require File.join(mixins_dir, 'properties')
require File.join(mixins_dir, 'document_queries')
require File.join(mixins_dir, 'views')
require File.join(mixins_dir, 'design_doc')
require File.join(mixins_dir, 'validation')
require File.join(mixins_dir, 'extended_attachments')
require File.join(mixins_dir, 'class_proxy')
require File.join(mixins_dir, 'collection')
require File.join(mixins_dir, 'attribute_protection')

@@ -1,31 +0,0 @@
module CouchRest
  module Mixins
    module Attachments

      # saves an attachment directly to couchdb
      def put_attachment(name, file, options={})
        raise ArgumentError, "doc must be saved" unless self.rev
        raise ArgumentError, "doc.database required to put_attachment" unless database
        result = database.put_attachment(self, name, file, options)
        self['_rev'] = result['rev']
        result['ok']
      end

      # returns an attachment's data
      def fetch_attachment(name)
        raise ArgumentError, "doc must be saved" unless self.rev
        raise ArgumentError, "doc.database required to put_attachment" unless database
        database.fetch_attachment(self, name)
      end

      # deletes an attachment directly from couchdb
      def delete_attachment(name, force=false)
        raise ArgumentError, "doc.database required to delete_attachment" unless database
        result = database.delete_attachment(self, name, force)
        self['_rev'] = result['rev']
        result['ok']
      end

    end
  end
end

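These helpers assume a document that has already been saved and has its database set. A quick sketch; the database, doc id, and file name are placeholders:

    db  = CouchRest.database!("http://127.0.0.1:5984/example-db")
    doc = CouchRest::Document.new("_id" => "cat-1")
    db.save_doc(doc)                      # the helpers need a rev and a database

    File.open("portrait.png", "rb") do |file|
      doc.put_attachment("portrait.png", file, :content_type => "image/png")
    end
    data = doc.fetch_attachment("portrait.png")
    doc.delete_attachment("portrait.png")
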
@@ -23,508 +23,508 @@
|
|||
# http://github.com/rails/rails/raw/d6e4113c83a9d55be6f2af247da2cecaa855f43b/activesupport/lib/active_support/new_callbacks.rb
|
||||
# http://github.com/rails/rails/commit/1126a85aed576402d978e6f76eb393b6baaa9541
|
||||
|
||||
require File.join(File.dirname(__FILE__), '..', 'support', 'class')
|
||||
|
||||
module CouchRest
|
||||
# Callbacks are hooks into the lifecycle of an object that allow you to trigger logic
|
||||
# before or after an alteration of the object state.
|
||||
#
|
||||
# Mixing in this module allows you to define callbacks in your class.
|
||||
#
|
||||
# Example:
|
||||
# class Storage
|
||||
# include ActiveSupport::Callbacks
|
||||
#
|
||||
# define_callbacks :save
|
||||
# end
|
||||
#
|
||||
# class ConfigStorage < Storage
|
||||
# save_callback :before, :saving_message
|
||||
# def saving_message
|
||||
# puts "saving..."
|
||||
# end
|
||||
#
|
||||
# save_callback :after do |object|
|
||||
# puts "saved"
|
||||
# end
|
||||
#
|
||||
# def save
|
||||
# _run_save_callbacks do
|
||||
# puts "- save"
|
||||
# end
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# config = ConfigStorage.new
|
||||
# config.save
|
||||
#
|
||||
# Output:
|
||||
# saving...
|
||||
# - save
|
||||
# saved
|
||||
#
|
||||
# Callbacks from parent classes are inherited.
|
||||
#
|
||||
# Example:
|
||||
# class Storage
|
||||
# include ActiveSupport::Callbacks
|
||||
#
|
||||
# define_callbacks :save
|
||||
#
|
||||
# save_callback :before, :prepare
|
||||
# def prepare
|
||||
# puts "preparing save"
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# class ConfigStorage < Storage
|
||||
# save_callback :before, :saving_message
|
||||
# def saving_message
|
||||
# puts "saving..."
|
||||
# end
|
||||
#
|
||||
# save_callback :after do |object|
|
||||
# puts "saved"
|
||||
# end
|
||||
#
|
||||
# def save
|
||||
# _run_save_callbacks do
|
||||
# puts "- save"
|
||||
# end
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# config = ConfigStorage.new
|
||||
# config.save
|
||||
#
|
||||
# Output:
|
||||
# preparing save
|
||||
# saving...
|
||||
# - save
|
||||
# saved
|
||||
module Callbacks
|
||||
def self.included(klass)
|
||||
klass.extend ClassMethods
|
||||
end
|
||||
|
||||
def run_callbacks(kind, options = {}, &blk)
|
||||
send("_run_#{kind}_callbacks", &blk)
|
||||
end
|
||||
|
||||
class Callback
|
||||
@@_callback_sequence = 0
|
||||
|
||||
attr_accessor :filter, :kind, :name, :options, :per_key, :klass
|
||||
def initialize(filter, kind, options, klass)
|
||||
@kind, @klass = kind, klass
|
||||
|
||||
normalize_options!(options)
|
||||
|
||||
@per_key = options.delete(:per_key)
|
||||
@raw_filter, @options = filter, options
|
||||
@filter = _compile_filter(filter)
|
||||
@compiled_options = _compile_options(options)
|
||||
@callback_id = next_id
|
||||
|
||||
_compile_per_key_options
|
||||
module Mixins
|
||||
# Callbacks are hooks into the lifecycle of an object that allow you to trigger logic
|
||||
# before or after an alteration of the object state.
|
||||
#
|
||||
# Mixing in this module allows you to define callbacks in your class.
|
||||
#
|
||||
# Example:
|
||||
# class Storage
|
||||
# include ActiveSupport::Callbacks
|
||||
#
|
||||
# define_callbacks :save
|
||||
# end
|
||||
#
|
||||
# class ConfigStorage < Storage
|
||||
# save_callback :before, :saving_message
|
||||
# def saving_message
|
||||
# puts "saving..."
|
||||
# end
|
||||
#
|
||||
# save_callback :after do |object|
|
||||
# puts "saved"
|
||||
# end
|
||||
#
|
||||
# def save
|
||||
# _run_save_callbacks do
|
||||
# puts "- save"
|
||||
# end
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# config = ConfigStorage.new
|
||||
# config.save
|
||||
#
|
||||
# Output:
|
||||
# saving...
|
||||
# - save
|
||||
# saved
|
||||
#
|
||||
# Callbacks from parent classes are inherited.
|
||||
#
|
||||
# Example:
|
||||
# class Storage
|
||||
# include ActiveSupport::Callbacks
|
||||
#
|
||||
# define_callbacks :save
|
||||
#
|
||||
# save_callback :before, :prepare
|
||||
# def prepare
|
||||
# puts "preparing save"
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# class ConfigStorage < Storage
|
||||
# save_callback :before, :saving_message
|
||||
# def saving_message
|
||||
# puts "saving..."
|
||||
# end
|
||||
#
|
||||
# save_callback :after do |object|
|
||||
# puts "saved"
|
||||
# end
|
||||
#
|
||||
# def save
|
||||
# _run_save_callbacks do
|
||||
# puts "- save"
|
||||
# end
|
||||
# end
|
||||
# end
|
||||
#
|
||||
# config = ConfigStorage.new
|
||||
# config.save
|
||||
#
|
||||
# Output:
|
||||
# preparing save
|
||||
# saving...
|
||||
# - save
|
||||
# saved
|
||||
module Callbacks
|
||||
def self.included(klass)
|
||||
klass.extend ClassMethods
|
||||
end
|
||||
|
||||
def clone(klass)
|
||||
obj = super()
|
||||
obj.klass = klass
|
||||
obj.per_key = @per_key.dup
|
||||
obj.options = @options.dup
|
||||
obj.per_key[:if] = @per_key[:if].dup
|
||||
obj.per_key[:unless] = @per_key[:unless].dup
|
||||
obj.options[:if] = @options[:if].dup
|
||||
obj.options[:unless] = @options[:unless].dup
|
||||
obj
|
||||
def run_callbacks(kind, options = {}, &blk)
|
||||
send("_run_#{kind}_callbacks", &blk)
|
||||
end
|
||||
|
||||
def normalize_options!(options)
|
||||
options[:if] = Array.wrap(options[:if])
|
||||
options[:unless] = Array.wrap(options[:unless])
|
||||
class Callback
|
||||
@@_callback_sequence = 0
|
||||
|
||||
options[:per_key] ||= {}
|
||||
options[:per_key][:if] = Array.wrap(options[:per_key][:if])
|
||||
options[:per_key][:unless] = Array.wrap(options[:per_key][:unless])
|
||||
end
|
||||
attr_accessor :filter, :kind, :name, :options, :per_key, :klass
|
||||
def initialize(filter, kind, options, klass)
|
||||
@kind, @klass = kind, klass
|
||||
|
||||
def next_id
|
||||
@@_callback_sequence += 1
|
||||
end
|
||||
normalize_options!(options)
|
||||
|
||||
def matches?(_kind, _filter)
|
||||
@kind == _kind &&
|
||||
@filter == _filter
|
||||
end
|
||||
@per_key = options.delete(:per_key)
|
||||
@raw_filter, @options = filter, options
|
||||
@filter = _compile_filter(filter)
|
||||
@compiled_options = _compile_options(options)
|
||||
@callback_id = next_id
|
||||
|
||||
def _update_filter(filter_options, new_options)
|
||||
filter_options[:if].push(new_options[:unless]) if new_options.key?(:unless)
|
||||
filter_options[:unless].push(new_options[:if]) if new_options.key?(:if)
|
||||
end
|
||||
|
||||
def recompile!(_options, _per_key)
|
||||
_update_filter(self.options, _options)
|
||||
_update_filter(self.per_key, _per_key)
|
||||
|
||||
@callback_id = next_id
|
||||
@filter = _compile_filter(@raw_filter)
|
||||
@compiled_options = _compile_options(@options)
|
||||
_compile_per_key_options
|
||||
end
|
||||
|
||||
def _compile_per_key_options
|
||||
key_options = _compile_options(@per_key)
|
||||
|
||||
@klass.class_eval <<-RUBY_EVAL, __FILE__, __LINE__ + 1
|
||||
def _one_time_conditions_valid_#{@callback_id}?
|
||||
true #{key_options[0]}
|
||||
end
|
||||
RUBY_EVAL
|
||||
end
|
||||
|
||||
# This will supply contents for before and around filters, and no
|
||||
# contents for after filters (for the forward pass).
|
||||
def start(key = nil, options = {})
|
||||
object, terminator = (options || {}).values_at(:object, :terminator)
|
||||
|
||||
return if key && !object.send("_one_time_conditions_valid_#{@callback_id}?")
|
||||
|
||||
terminator ||= false
|
||||
|
||||
# options[0] is the compiled form of supplied conditions
|
||||
# options[1] is the "end" for the conditional
|
||||
|
||||
if @kind == :before || @kind == :around
|
||||
if @kind == :before
|
||||
# if condition # before_save :filter_name, :if => :condition
|
||||
# filter_name
|
||||
# end
|
||||
filter = <<-RUBY_EVAL
|
||||
unless halted
|
||||
result = #{@filter}
|
||||
halted = (#{terminator})
|
||||
end
|
||||
RUBY_EVAL
|
||||
|
||||
[@compiled_options[0], filter, @compiled_options[1]].compact.join("\n")
|
||||
else
|
||||
# Compile around filters with conditions into proxy methods
|
||||
# that contain the conditions.
|
||||
#
|
||||
# For `around_save :filter_name, :if => :condition':
|
||||
#
|
||||
# def _conditional_callback_save_17
|
||||
# if condition
|
||||
# filter_name do
|
||||
# yield self
|
||||
# end
|
||||
# else
|
||||
# yield self
|
||||
# end
|
||||
# end
|
||||
|
||||
name = "_conditional_callback_#{@kind}_#{next_id}"
|
||||
txt, line = <<-RUBY_EVAL, __LINE__ + 1
|
||||
def #{name}(halted)
|
||||
#{@compiled_options[0] || "if true"} && !halted
|
||||
#{@filter} do
|
||||
yield self
|
||||
end
|
||||
else
|
||||
yield self
|
||||
end
|
||||
end
|
||||
RUBY_EVAL
|
||||
@klass.class_eval(txt, __FILE__, line)
|
||||
"#{name}(halted) do"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# This will supply contents for around and after filters, but not
|
||||
# before filters (for the backward pass).
|
||||
def end(key = nil, options = {})
|
||||
object = (options || {})[:object]
|
||||
|
||||
return if key && !object.send("_one_time_conditions_valid_#{@callback_id}?")
|
||||
|
||||
if @kind == :around || @kind == :after
|
||||
# if condition # after_save :filter_name, :if => :condition
|
||||
# filter_name
|
||||
# end
|
||||
if @kind == :after
|
||||
[@compiled_options[0], @filter, @compiled_options[1]].compact.join("\n")
|
||||
else
|
||||
"end"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
# Options support the same options as filters themselves (and support
|
||||
# symbols, string, procs, and objects), so compile a conditional
|
||||
# expression based on the options
|
||||
def _compile_options(options)
|
||||
return [] if options[:if].empty? && options[:unless].empty?
|
||||
|
||||
conditions = []
|
||||
|
||||
unless options[:if].empty?
|
||||
conditions << Array.wrap(_compile_filter(options[:if]))
|
||||
_compile_per_key_options
|
||||
end
|
||||
|
||||
unless options[:unless].empty?
|
||||
conditions << Array.wrap(_compile_filter(options[:unless])).map {|f| "!#{f}"}
|
||||
def clone(klass)
|
||||
obj = super()
|
||||
obj.klass = klass
|
||||
obj.per_key = @per_key.dup
|
||||
obj.options = @options.dup
|
||||
obj.per_key[:if] = @per_key[:if].dup
|
||||
obj.per_key[:unless] = @per_key[:unless].dup
|
||||
obj.options[:if] = @options[:if].dup
|
||||
obj.options[:unless] = @options[:unless].dup
|
||||
obj
|
||||
end
|
||||
|
||||
["if #{conditions.flatten.join(" && ")}", "end"]
|
||||
end
|
||||
def normalize_options!(options)
|
||||
options[:if] = Array.wrap(options[:if])
|
||||
options[:unless] = Array.wrap(options[:unless])
|
||||
|
||||
# Filters support:
|
||||
# Arrays:: Used in conditions. This is used to specify
|
||||
# multiple conditions. Used internally to
|
||||
# merge conditions from skip_* filters
|
||||
# Symbols:: A method to call
|
||||
# Strings:: Some content to evaluate
|
||||
# Procs:: A proc to call with the object
|
||||
# Objects:: An object with a before_foo method on it to call
|
||||
#
|
||||
# All of these objects are compiled into methods and handled
|
||||
# the same after this point:
|
||||
# Arrays:: Merged together into a single filter
|
||||
# Symbols:: Already methods
|
||||
# Strings:: class_eval'ed into methods
|
||||
# Procs:: define_method'ed into methods
|
||||
# Objects::
|
||||
# a method is created that calls the before_foo method
|
||||
# on the object.
|
||||
def _compile_filter(filter)
|
||||
method_name = "_callback_#{@kind}_#{next_id}"
|
||||
case filter
|
||||
when Array
|
||||
filter.map {|f| _compile_filter(f)}
|
||||
when Symbol
|
||||
filter
|
||||
when String
|
||||
"(#{filter})"
|
||||
when Proc
|
||||
@klass.send(:define_method, method_name, &filter)
|
||||
return method_name if filter.arity == 0
|
||||
options[:per_key] ||= {}
|
||||
options[:per_key][:if] = Array.wrap(options[:per_key][:if])
|
||||
options[:per_key][:unless] = Array.wrap(options[:per_key][:unless])
|
||||
end
|
||||
|
||||
method_name << (filter.arity == 1 ? "(self)" : " self, Proc.new ")
|
||||
else
|
||||
@klass.send(:define_method, "#{method_name}_object") { filter }
|
||||
def next_id
|
||||
@@_callback_sequence += 1
|
||||
end
|
||||
|
||||
_normalize_legacy_filter(kind, filter)
|
||||
def matches?(_kind, _filter)
|
||||
@kind == _kind &&
|
||||
@filter == _filter
|
||||
end
|
||||
|
||||
def _update_filter(filter_options, new_options)
|
||||
filter_options[:if].push(new_options[:unless]) if new_options.key?(:unless)
|
||||
filter_options[:unless].push(new_options[:if]) if new_options.key?(:if)
|
||||
end
|
||||
|
||||
def recompile!(_options, _per_key)
|
||||
_update_filter(self.options, _options)
|
||||
_update_filter(self.per_key, _per_key)
|
||||
|
||||
@callback_id = next_id
|
||||
@filter = _compile_filter(@raw_filter)
|
||||
@compiled_options = _compile_options(@options)
|
||||
_compile_per_key_options
|
||||
end
|
||||
|
||||
def _compile_per_key_options
|
||||
key_options = _compile_options(@per_key)
|
||||
|
||||
@klass.class_eval <<-RUBY_EVAL, __FILE__, __LINE__ + 1
|
||||
def #{method_name}(&blk)
|
||||
#{method_name}_object.send(:#{kind}, self, &blk)
|
||||
def _one_time_conditions_valid_#{@callback_id}?
|
||||
true #{key_options[0]}
|
||||
end
|
||||
RUBY_EVAL
|
||||
end
|
||||
|
||||
# This will supply contents for before and around filters, and no
|
||||
# contents for after filters (for the forward pass).
|
||||
def start(key = nil, options = {})
|
||||
object, terminator = (options || {}).values_at(:object, :terminator)
|
||||
|
||||
return if key && !object.send("_one_time_conditions_valid_#{@callback_id}?")
|
||||
|
||||
terminator ||= false
|
||||
|
||||
# options[0] is the compiled form of supplied conditions
|
||||
# options[1] is the "end" for the conditional
|
||||
|
||||
if @kind == :before || @kind == :around
|
||||
if @kind == :before
|
||||
# if condition # before_save :filter_name, :if => :condition
|
||||
# filter_name
|
||||
# end
|
||||
filter = <<-RUBY_EVAL
|
||||
unless halted
|
||||
result = #{@filter}
|
||||
halted = (#{terminator})
|
||||
end
|
||||
RUBY_EVAL
|
||||
|
||||
[@compiled_options[0], filter, @compiled_options[1]].compact.join("\n")
|
||||
else
|
||||
# Compile around filters with conditions into proxy methods
|
||||
# that contain the conditions.
|
||||
#
|
||||
# For `around_save :filter_name, :if => :condition':
|
||||
#
|
||||
# def _conditional_callback_save_17
|
||||
# if condition
|
||||
# filter_name do
|
||||
# yield self
|
||||
# end
|
||||
# else
|
||||
# yield self
|
||||
# end
|
||||
# end
|
||||
|
||||
name = "_conditional_callback_#{@kind}_#{next_id}"
|
||||
txt, line = <<-RUBY_EVAL, __LINE__ + 1
|
||||
def #{name}(halted)
|
||||
#{@compiled_options[0] || "if true"} && !halted
|
||||
#{@filter} do
|
||||
yield self
|
||||
end
|
||||
else
|
||||
yield self
|
||||
end
|
||||
end
|
||||
RUBY_EVAL
|
||||
@klass.class_eval(txt, __FILE__, line)
|
||||
"#{name}(halted) do"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# This will supply contents for around and after filters, but not
|
||||
# before filters (for the backward pass).
|
||||
def end(key = nil, options = {})
|
||||
object = (options || {})[:object]
|
||||
|
||||
return if key && !object.send("_one_time_conditions_valid_#{@callback_id}?")
|
||||
|
||||
if @kind == :around || @kind == :after
|
||||
# if condition # after_save :filter_name, :if => :condition
|
||||
# filter_name
|
||||
# end
|
||||
if @kind == :after
|
||||
[@compiled_options[0], @filter, @compiled_options[1]].compact.join("\n")
|
||||
else
|
||||
"end"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
# Options support the same options as filters themselves (and support
|
||||
# symbols, string, procs, and objects), so compile a conditional
|
||||
# expression based on the options
|
||||
def _compile_options(options)
|
||||
return [] if options[:if].empty? && options[:unless].empty?
|
||||
|
||||
conditions = []
|
||||
|
||||
unless options[:if].empty?
|
||||
conditions << Array.wrap(_compile_filter(options[:if]))
|
||||
end
|
||||
|
||||
unless options[:unless].empty?
|
||||
conditions << Array.wrap(_compile_filter(options[:unless])).map {|f| "!#{f}"}
|
||||
end
|
||||
|
||||
["if #{conditions.flatten.join(" && ")}", "end"]
|
||||
end
|
||||
|
||||
# Filters support:
|
||||
# Arrays:: Used in conditions. This is used to specify
|
||||
# multiple conditions. Used internally to
|
||||
# merge conditions from skip_* filters
|
||||
# Symbols:: A method to call
|
||||
# Strings:: Some content to evaluate
|
||||
# Procs:: A proc to call with the object
|
||||
# Objects:: An object with a before_foo method on it to call
|
||||
#
|
||||
# All of these objects are compiled into methods and handled
|
||||
# the same after this point:
|
||||
# Arrays:: Merged together into a single filter
|
||||
# Symbols:: Already methods
|
||||
# Strings:: class_eval'ed into methods
|
||||
# Procs:: define_method'ed into methods
|
||||
# Objects::
|
||||
# a method is created that calls the before_foo method
|
||||
# on the object.
|
||||
def _compile_filter(filter)
|
||||
method_name = "_callback_#{@kind}_#{next_id}"
|
||||
case filter
|
||||
when Array
|
||||
filter.map {|f| _compile_filter(f)}
|
||||
when Symbol
|
||||
filter
|
||||
when String
|
||||
"(#{filter})"
|
||||
when Proc
|
||||
@klass.send(:define_method, method_name, &filter)
|
||||
return method_name if filter.arity == 0
|
||||
|
||||
method_name << (filter.arity == 1 ? "(self)" : " self, Proc.new ")
|
||||
else
|
||||
@klass.send(:define_method, "#{method_name}_object") { filter }
|
||||
|
||||
_normalize_legacy_filter(kind, filter)
|
||||
|
||||
@klass.class_eval <<-RUBY_EVAL, __FILE__, __LINE__ + 1
|
||||
def #{method_name}(&blk)
|
||||
#{method_name}_object.send(:#{kind}, self, &blk)
|
||||
end
|
||||
RUBY_EVAL
|
||||
|
||||
method_name
|
||||
end
|
||||
end
|
||||
|
||||
def _normalize_legacy_filter(kind, filter)
|
||||
if !filter.respond_to?(kind) && filter.respond_to?(:filter)
|
||||
filter.class_eval(
|
||||
"def #{kind}(context, &block) filter(context, &block) end",
|
||||
__FILE__, __LINE__ - 1)
|
||||
elsif filter.respond_to?(:before) && filter.respond_to?(:after) && kind == :around
|
||||
def filter.around(context)
|
||||
should_continue = before(context)
|
||||
yield if should_continue
|
||||
after(context)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
# An Array with a compile method
|
||||
class CallbackChain < Array
|
||||
def initialize(symbol)
|
||||
@symbol = symbol
|
||||
end
|
||||
|
||||
def compile(key = nil, options = {})
|
||||
method = []
|
||||
method << "halted = false"
|
||||
each do |callback|
|
||||
method << callback.start(key, options)
|
||||
end
|
||||
method << "yield self if block_given? && !halted"
|
||||
reverse_each do |callback|
|
||||
method << callback.end(key, options)
|
||||
end
|
||||
method.compact.join("\n")
|
||||
end
|
||||
|
||||
def clone(klass)
|
||||
chain = CallbackChain.new(@symbol)
|
||||
chain.push(*map {|c| c.clone(klass)})
|
||||
end
|
||||
end
|
||||
|
||||
module ClassMethods
|
||||
#CHAINS = {:before => :before, :around => :before, :after => :after}
|
||||
|
||||
# Make the _run_save_callbacks method. The generated method takes
|
||||
# a block that it'll yield to. It'll call the before and around filters
|
||||
# in order, yield the block, and then run the after filters.
|
||||
#
|
||||
# _run_save_callbacks do
|
||||
# save
|
||||
# end
|
||||
#
|
||||
# The _run_save_callbacks method can optionally take a key, which
|
||||
# will be used to compile an optimized callback method for each
|
||||
# key. See #define_callbacks for more information.
|
||||
def _define_runner(symbol)
|
||||
body = send("_#{symbol}_callback").
|
||||
compile(nil, :terminator => send("_#{symbol}_terminator"))
|
||||
|
||||
body, line = <<-RUBY_EVAL, __LINE__ + 1
|
||||
def _run_#{symbol}_callbacks(key = nil, &blk)
|
||||
if key
|
||||
name = "_run__\#{self.class.name.hash.abs}__#{symbol}__\#{key.hash.abs}__callbacks"
|
||||
|
||||
unless respond_to?(name)
|
||||
self.class._create_keyed_callback(name, :#{symbol}, self, &blk)
|
||||
end
|
||||
|
||||
send(name, &blk)
|
||||
else
|
||||
#{body}
|
||||
end
|
||||
end
|
||||
RUBY_EVAL
|
||||
|
||||
method_name
|
||||
undef_method "_run_#{symbol}_callbacks" if method_defined?("_run_#{symbol}_callbacks")
|
||||
class_eval body, __FILE__, line
|
||||
end
|
||||
end
|
||||
|
||||
def _normalize_legacy_filter(kind, filter)
|
||||
if !filter.respond_to?(kind) && filter.respond_to?(:filter)
|
||||
filter.class_eval(
|
||||
"def #{kind}(context, &block) filter(context, &block) end",
|
||||
__FILE__, __LINE__ - 1)
|
||||
elsif filter.respond_to?(:before) && filter.respond_to?(:after) && kind == :around
|
||||
def filter.around(context)
|
||||
should_continue = before(context)
|
||||
yield if should_continue
|
||||
after(context)
|
||||
# This is called the first time a callback is called with a particular
|
||||
# key. It creates a new callback method for the key, calculating
|
||||
# which callbacks can be omitted because of per_key conditions.
|
||||
def _create_keyed_callback(name, kind, obj, &blk)
|
||||
@_keyed_callbacks ||= {}
|
||||
@_keyed_callbacks[name] ||= begin
|
||||
str = send("_#{kind}_callback").
|
||||
compile(name, :object => obj, :terminator => send("_#{kind}_terminator"))
|
||||
|
||||
class_eval "def #{name}() #{str} end", __FILE__, __LINE__
|
||||
|
||||
true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
# Define callbacks.
|
||||
#
|
||||
# Creates a <name>_callback method that you can use to add callbacks.
|
||||
#
|
||||
# Syntax:
|
||||
# save_callback :before, :before_meth
|
||||
# save_callback :after, :after_meth, :if => :condition
|
||||
# save_callback :around {|r| stuff; yield; stuff }
|
||||
#
|
||||
# The <name>_callback method also updates the _run_<name>_callbacks
|
||||
# method, which is the public API to run the callbacks.
|
||||
#
|
||||
# Also creates a skip_<name>_callback method that you can use to skip
|
||||
# callbacks.
|
||||
#
|
||||
# When creating or skipping callbacks, you can specify conditions that
|
||||
# are always the same for a given key. For instance, in ActionPack,
|
||||
# we convert :only and :except conditions into per-key conditions.
|
||||
#
|
||||
# before_filter :authenticate, :except => "index"
|
||||
# becomes
|
||||
# dispatch_callback :before, :authenticate, :per_key => {:unless => proc {|c| c.action_name == "index"}}
|
||||
#
|
||||
# Per-Key conditions are evaluated only once per use of a given key.
|
||||
# In the case of the above example, you would do:
|
||||
#
|
||||
# run_dispatch_callbacks(action_name) { ... dispatch stuff ... }
|
||||
#
|
||||
# In that case, each action_name would get its own compiled callback
|
||||
# method that took into consideration the per_key conditions. This
|
||||
# is a speed improvement for ActionPack.
|
||||
def _update_callbacks(name, filters = CallbackChain.new(name), block = nil)
|
||||
type = [:before, :after, :around].include?(filters.first) ? filters.shift : :before
|
||||
options = filters.last.is_a?(Hash) ? filters.pop : {}
|
||||
filters.unshift(block) if block
|
||||
|
||||
# An Array with a compile method
|
||||
class CallbackChain < Array
|
||||
def initialize(symbol)
|
||||
@symbol = symbol
|
||||
end
|
||||
callbacks = send("_#{name}_callback")
|
||||
yield callbacks, type, filters, options if block_given?
|
||||
|
||||
def compile(key = nil, options = {})
|
||||
method = []
|
||||
method << "halted = false"
|
||||
each do |callback|
|
||||
method << callback.start(key, options)
|
||||
_define_runner(name)
|
||||
end
|
||||
method << "yield self if block_given? && !halted"
|
||||
reverse_each do |callback|
|
||||
method << callback.end(key, options)
|
||||
|
||||
alias_method :_reset_callbacks, :_update_callbacks
|
||||
|
||||
def set_callback(name, *filters, &block)
|
||||
_update_callbacks(name, filters, block) do |callbacks, type, filters, options|
|
||||
filters.map! do |filter|
|
||||
# overrides parent class
|
||||
callbacks.delete_if {|c| c.matches?(type, filter) }
|
||||
Callback.new(filter, type, options.dup, self)
|
||||
end
|
||||
|
||||
options[:prepend] ? callbacks.unshift(*filters) : callbacks.push(*filters)
|
||||
end
|
||||
end
|
||||
method.compact.join("\n")
|
||||
end
|
||||
|
||||
def clone(klass)
|
||||
chain = CallbackChain.new(@symbol)
|
||||
chain.push(*map {|c| c.clone(klass)})
|
||||
end
|
||||
end
|
||||
def skip_callback(name, *filters, &block)
|
||||
_update_callbacks(name, filters, block) do |callbacks, type, filters, options|
|
||||
filters.each do |filter|
|
||||
callbacks = send("_#{name}_callback=", callbacks.clone(self))
|
||||
|
||||
module ClassMethods
|
||||
#CHAINS = {:before => :before, :around => :before, :after => :after}
|
||||
filter = callbacks.find {|c| c.matches?(type, filter) }
|
||||
|
||||
# Make the _run_save_callbacks method. The generated method takes
|
||||
# a block that it'll yield to. It'll call the before and around filters
|
||||
# in order, yield the block, and then run the after filters.
|
||||
#
|
||||
# _run_save_callbacks do
|
||||
# save
|
||||
# end
|
||||
#
|
||||
# The _run_save_callbacks method can optionally take a key, which
|
||||
# will be used to compile an optimized callback method for each
|
||||
# key. See #define_callbacks for more information.
|
||||
def _define_runner(symbol)
|
||||
body = send("_#{symbol}_callback").
|
||||
compile(nil, :terminator => send("_#{symbol}_terminator"))
|
||||
|
||||
body, line = <<-RUBY_EVAL, __LINE__ + 1
|
||||
def _run_#{symbol}_callbacks(key = nil, &blk)
|
||||
if key
|
||||
name = "_run__\#{self.class.name.hash.abs}__#{symbol}__\#{key.hash.abs}__callbacks"
|
||||
|
||||
unless respond_to?(name)
|
||||
self.class._create_keyed_callback(name, :#{symbol}, self, &blk)
|
||||
if filter && options.any?
|
||||
filter.recompile!(options, options[:per_key] || {})
|
||||
else
|
||||
callbacks.delete(filter)
|
||||
end
|
||||
|
||||
send(name, &blk)
|
||||
else
|
||||
#{body}
|
||||
end
|
||||
end
|
||||
RUBY_EVAL
|
||||
|
||||
undef_method "_run_#{symbol}_callbacks" if method_defined?("_run_#{symbol}_callbacks")
|
||||
class_eval body, __FILE__, line
|
||||
end
|
||||
|
||||
# This is called the first time a callback is called with a particular
|
||||
# key. It creates a new callback method for the key, calculating
|
||||
# which callbacks can be omitted because of per_key conditions.
|
||||
def _create_keyed_callback(name, kind, obj, &blk)
|
||||
@_keyed_callbacks ||= {}
|
||||
@_keyed_callbacks[name] ||= begin
|
||||
str = send("_#{kind}_callback").
|
||||
compile(name, :object => obj, :terminator => send("_#{kind}_terminator"))
|
||||
|
||||
class_eval "def #{name}() #{str} end", __FILE__, __LINE__
|
||||
|
||||
true
|
||||
end
|
||||
end
|
||||
|
||||
# Define callbacks.
|
||||
#
|
||||
# Creates a <name>_callback method that you can use to add callbacks.
|
||||
#
|
||||
# Syntax:
|
||||
# save_callback :before, :before_meth
|
||||
# save_callback :after, :after_meth, :if => :condition
|
||||
# save_callback :around {|r| stuff; yield; stuff }
|
||||
#
|
||||
# The <name>_callback method also updates the _run_<name>_callbacks
|
||||
# method, which is the public API to run the callbacks.
|
||||
#
|
||||
# Also creates a skip_<name>_callback method that you can use to skip
|
||||
# callbacks.
|
||||
#
|
||||
# When creating or skipping callbacks, you can specify conditions that
|
||||
# are always the same for a given key. For instance, in ActionPack,
|
||||
# we convert :only and :except conditions into per-key conditions.
|
||||
#
|
||||
# before_filter :authenticate, :except => "index"
|
||||
# becomes
|
||||
# dispatch_callback :before, :authenticate, :per_key => {:unless => proc {|c| c.action_name == "index"}}
|
||||
#
|
||||
# Per-Key conditions are evaluated only once per use of a given key.
|
||||
# In the case of the above example, you would do:
|
||||
#
|
||||
# run_dispatch_callbacks(action_name) { ... dispatch stuff ... }
|
||||
#
|
||||
# In that case, each action_name would get its own compiled callback
|
||||
# method that took into consideration the per_key conditions. This
|
||||
# is a speed improvement for ActionPack.
|
||||
def _update_callbacks(name, filters = CallbackChain.new(name), block = nil)
|
||||
type = [:before, :after, :around].include?(filters.first) ? filters.shift : :before
|
||||
options = filters.last.is_a?(Hash) ? filters.pop : {}
|
||||
filters.unshift(block) if block
|
||||
|
||||
callbacks = send("_#{name}_callback")
|
||||
yield callbacks, type, filters, options if block_given?
|
||||
|
||||
_define_runner(name)
|
||||
end
|
||||
|
||||
alias_method :_reset_callbacks, :_update_callbacks
|
||||
|
||||
def set_callback(name, *filters, &block)
|
||||
_update_callbacks(name, filters, block) do |callbacks, type, filters, options|
|
||||
filters.map! do |filter|
|
||||
# overrides parent class
|
||||
callbacks.delete_if {|c| c.matches?(type, filter) }
|
||||
Callback.new(filter, type, options.dup, self)
|
||||
end
|
||||
|
||||
options[:prepend] ? callbacks.unshift(*filters) : callbacks.push(*filters)
|
||||
end
|
||||
end
|
||||
|
||||
def skip_callback(name, *filters, &block)
|
||||
_update_callbacks(name, filters, block) do |callbacks, type, filters, options|
|
||||
filters.each do |filter|
|
||||
callbacks = send("_#{name}_callback=", callbacks.clone(self))
|
||||
|
||||
filter = callbacks.find {|c| c.matches?(type, filter) }
|
||||
|
||||
if filter && options.any?
|
||||
filter.recompile!(options, options[:per_key] || {})
|
||||
else
|
||||
callbacks.delete(filter)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def define_callbacks(*symbols)
|
||||
terminator = symbols.pop if symbols.last.is_a?(String)
|
||||
symbols.each do |symbol|
|
||||
extlib_inheritable_accessor("_#{symbol}_terminator") { terminator }
|
||||
def define_callbacks(*symbols)
|
||||
terminator = symbols.pop if symbols.last.is_a?(String)
|
||||
symbols.each do |symbol|
|
||||
extlib_inheritable_accessor("_#{symbol}_terminator") { terminator }
|
||||
|
||||
extlib_inheritable_accessor("_#{symbol}_callback") do
|
||||
CallbackChain.new(symbol)
|
||||
end
|
||||
extlib_inheritable_accessor("_#{symbol}_callback") do
|
||||
CallbackChain.new(symbol)
|
||||
end
|
||||
|
||||
_define_runner(symbol)
|
||||
|
||||
# Define more convenient callback methods
|
||||
# set_callback(:save, :before) becomes before_save
|
||||
[:before, :after, :around].each do |filter|
|
||||
self.class_eval <<-RUBY_EVAL, __FILE__, __LINE__ + 1
|
||||
def self.#{filter}_#{symbol}(*symbols, &blk)
|
||||
_alias_callbacks(symbols, blk) do |callback, options|
|
||||
set_callback(:#{symbol}, :#{filter}, callback, options)
|
||||
_define_runner(symbol)
|
||||
|
||||
# Define more convenient callback methods
|
||||
# set_callback(:save, :before) becomes before_save
|
||||
[:before, :after, :around].each do |filter|
|
||||
self.class_eval <<-RUBY_EVAL, __FILE__, __LINE__ + 1
|
||||
def self.#{filter}_#{symbol}(*symbols, &blk)
|
||||
_alias_callbacks(symbols, blk) do |callback, options|
|
||||
set_callback(:#{symbol}, :#{filter}, callback, options)
|
||||
end
|
||||
end
|
||||
end
|
||||
RUBY_EVAL
|
||||
RUBY_EVAL
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def _alias_callbacks(callbacks, block)
|
||||
options = callbacks.last.is_a?(Hash) ? callbacks.pop : {}
|
||||
callbacks.push(block) if block
|
||||
callbacks.each do |callback|
|
||||
yield callback, options
|
||||
|
||||
def _alias_callbacks(callbacks, block)
|
||||
options = callbacks.last.is_a?(Hash) ? callbacks.pop : {}
|
||||
callbacks.push(block) if block
|
||||
callbacks.each do |callback|
|
||||
yield callback, options
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@@ -1,9 +0,0 @@
require File.join(File.dirname(__FILE__), 'properties')
require File.join(File.dirname(__FILE__), 'document_queries')
require File.join(File.dirname(__FILE__), 'views')
require File.join(File.dirname(__FILE__), 'design_doc')
require File.join(File.dirname(__FILE__), 'validation')
require File.join(File.dirname(__FILE__), 'extended_attachments')
require File.join(File.dirname(__FILE__), 'class_proxy')
require File.join(File.dirname(__FILE__), 'collection')
require File.join(File.dirname(__FILE__), 'attribute_protection')

@@ -1,7 +1,7 @@
require 'time'
require File.join(File.dirname(__FILE__), '..', 'more', 'property')
require File.join(File.dirname(__FILE__), '..', 'more', 'casted_array')
require File.join(File.dirname(__FILE__), '..', 'more', 'typecast')
require File.join(File.dirname(__FILE__), '..', 'property')
require File.join(File.dirname(__FILE__), '..', 'casted_array')
require File.join(File.dirname(__FILE__), '..', 'typecast')

module CouchRest
  module Mixins

@@ -28,7 +28,6 @@ class Object
end

require 'pathname'
require File.join(File.dirname(__FILE__), '..', 'support', 'class')

dir = File.join(Pathname(__FILE__).dirname.expand_path, '..', 'validation')

@@ -1,113 +1,5 @@
|
|||
require File.join(File.dirname(__FILE__), 'support', 'class')
|
||||
require File.join(File.dirname(__FILE__), 'support', 'blank')
|
||||
require 'timeout'
|
||||
|
||||
# This file must be loaded after the JSON gem and any other library that beats up the Time class.
|
||||
class Time
|
||||
# This date format sorts lexicographically
|
||||
# and is compatible with Javascript's <tt>new Date(time_string)</tt> constructor.
|
||||
# Note this this format stores all dates in UTC so that collation
|
||||
# order is preserved. (There's no longer a need to set <tt>ENV['TZ'] = 'UTC'</tt>
|
||||
# in your application.)
|
||||
# CouchRest already includes the class extlib patches.
|
||||
|
||||
def to_json(options = nil)
|
||||
u = self.getutc
|
||||
%("#{u.strftime("%Y/%m/%d %H:%M:%S +0000")}")
|
||||
end
|
||||
|
||||
# Decodes the JSON time format to a UTC time.
|
||||
# Based on Time.parse from ActiveSupport. ActiveSupport's version
|
||||
# is more complete, returning a time in your current timezone,
|
||||
# rather than keeping the time in UTC. YMMV.
|
||||
# def self.parse string, fallback=nil
|
||||
# d = DateTime.parse(string).new_offset
|
||||
# self.utc(d.year, d.month, d.day, d.hour, d.min, d.sec)
|
||||
# rescue
|
||||
# fallback
|
||||
# end
|
||||
end
|
||||
|
||||
# Monkey patch for faster net/http io
|
||||
if RUBY_VERSION.to_f < 1.9
|
||||
class Net::BufferedIO #:nodoc:
|
||||
alias :old_rbuf_fill :rbuf_fill
|
||||
def rbuf_fill
|
||||
if @io.respond_to?(:read_nonblock)
|
||||
begin
|
||||
@rbuf << @io.read_nonblock(65536)
|
||||
rescue Errno::EWOULDBLOCK
|
||||
if IO.select([@io], nil, nil, @read_timeout)
|
||||
retry
|
||||
else
|
||||
raise Timeout::Error, "IO timeout"
|
||||
end
|
||||
end
|
||||
else
|
||||
timeout(@read_timeout) do
|
||||
@rbuf << @io.sysread(65536)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# module RestClient
|
||||
# # def self.copy(url, headers={})
|
||||
# # Request.execute(:method => :copy,
|
||||
# # :url => url,
|
||||
# # :headers => headers)
|
||||
# # end
|
||||
#
|
||||
# # class Request
|
||||
# #
|
||||
# # def establish_connection(uri)
|
||||
# # Thread.current[:connection].finish if (Thread.current[:connection] && Thread.current[:connection].started?)
|
||||
# # p net_http_class
|
||||
# # net = net_http_class.new(uri.host, uri.port)
|
||||
# # net.use_ssl = uri.is_a?(URI::HTTPS)
|
||||
# # net.verify_mode = OpenSSL::SSL::VERIFY_NONE
|
||||
# # Thread.current[:connection] = net
|
||||
# # Thread.current[:connection].start
|
||||
# # Thread.current[:connection]
|
||||
# # end
|
||||
# #
|
||||
# # def transmit(uri, req, payload)
|
||||
# # setup_credentials(req)
|
||||
# #
|
||||
# # Thread.current[:host] ||= uri.host
|
||||
# # Thread.current[:port] ||= uri.port
|
||||
# #
|
||||
# # if (Thread.current[:connection].nil? || (Thread.current[:host] != uri.host))
|
||||
# # p "establishing a connection"
|
||||
# # establish_connection(uri)
|
||||
# # end
|
||||
# #
|
||||
# # display_log request_log
|
||||
# # http = Thread.current[:connection]
|
||||
# # http.read_timeout = @timeout if @timeout
|
||||
# #
|
||||
# # begin
|
||||
# # res = http.request(req, payload)
|
||||
# # rescue
|
||||
# # p "Net::HTTP connection failed, reconnecting"
|
||||
# # establish_connection(uri)
|
||||
# # http = Thread.current[:connection]
|
||||
# # require 'ruby-debug'
|
||||
# # req.body_stream = nil
|
||||
# #
|
||||
# # res = http.request(req, payload)
|
||||
# # display_log response_log(res)
|
||||
# # result res
|
||||
# # else
|
||||
# # display_log response_log(res)
|
||||
# # process_result res
|
||||
# # end
|
||||
# #
|
||||
# # rescue EOFError
|
||||
# # raise RestClient::ServerBrokeConnection
|
||||
# # rescue Timeout::Error
|
||||
# # raise RestClient::RequestTimeout
|
||||
# # end
|
||||
# # end
|
||||
#
|
||||
# end
|
||||
require File.join(File.dirname(__FILE__), 'support', 'couchrest')
|
||||
require File.join(File.dirname(__FILE__), 'support', 'rails') if defined?(Rails)
|
||||
|
|
|
@@ -1,42 +0,0 @@
|
|||
# blank? methods for several different class types
|
||||
class Object
|
||||
# Returns true if the object is nil or empty (if applicable)
|
||||
def blank?
|
||||
nil? || (respond_to?(:empty?) && empty?)
|
||||
end
|
||||
end # class Object
|
||||
|
||||
class Numeric
|
||||
# Numerics can't be blank
|
||||
def blank?
|
||||
false
|
||||
end
|
||||
end # class Numeric
|
||||
|
||||
class NilClass
|
||||
# Nils are always blank
|
||||
def blank?
|
||||
true
|
||||
end
|
||||
end # class NilClass
|
||||
|
||||
class TrueClass
|
||||
# True is not blank.
|
||||
def blank?
|
||||
false
|
||||
end
|
||||
end # class TrueClass
|
||||
|
||||
class FalseClass
|
||||
# False is always blank.
|
||||
def blank?
|
||||
true
|
||||
end
|
||||
end # class FalseClass
|
||||
|
||||
class String
|
||||
# Strips out whitespace then tests if the string is empty.
|
||||
def blank?
|
||||
strip.empty?
|
||||
end
|
||||
end # class String
|
|
@@ -1,190 +0,0 @@
|
|||
# Copyright (c) 2006-2009 David Heinemeier Hansson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
#
|
||||
# Extracted From
|
||||
# http://github.com/rails/rails/commit/971e2438d98326c994ec6d3ef8e37b7e868ed6e2
|
||||
|
||||
# Extends the class object with class and instance accessors for class attributes,
|
||||
# just like the native attr* accessors for instance attributes.
|
||||
#
|
||||
# class Person
|
||||
# cattr_accessor :hair_colors
|
||||
# end
|
||||
#
|
||||
# Person.hair_colors = [:brown, :black, :blonde, :red]
|
||||
class Class
|
||||
def cattr_reader(*syms)
|
||||
syms.flatten.each do |sym|
|
||||
next if sym.is_a?(Hash)
|
||||
class_eval(<<-EOS, __FILE__, __LINE__ + 1)
|
||||
unless defined? @@#{sym} # unless defined? @@hair_colors
|
||||
@@#{sym} = nil # @@hair_colors = nil
|
||||
end # end
|
||||
#
|
||||
def self.#{sym} # def self.hair_colors
|
||||
@@#{sym} # @@hair_colors
|
||||
end # end
|
||||
#
|
||||
def #{sym} # def hair_colors
|
||||
@@#{sym} # @@hair_colors
|
||||
end # end
|
||||
EOS
|
||||
end
|
||||
end unless Class.respond_to?(:cattr_reader)
|
||||
|
||||
def cattr_writer(*syms)
|
||||
options = syms.extract_options!
|
||||
syms.flatten.each do |sym|
|
||||
class_eval(<<-EOS, __FILE__, __LINE__ + 1)
|
||||
unless defined? @@#{sym} # unless defined? @@hair_colors
|
||||
@@#{sym} = nil # @@hair_colors = nil
|
||||
end # end
|
||||
#
|
||||
def self.#{sym}=(obj) # def self.hair_colors=(obj)
|
||||
@@#{sym} = obj # @@hair_colors = obj
|
||||
end # end
|
||||
#
|
||||
#{" #
|
||||
def #{sym}=(obj) # def hair_colors=(obj)
|
||||
@@#{sym} = obj # @@hair_colors = obj
|
||||
end # end
|
||||
" unless options[:instance_writer] == false } # # instance writer above is generated unless options[:instance_writer] == false
|
||||
EOS
|
||||
end
|
||||
end unless Class.respond_to?(:cattr_writer)
|
||||
|
||||
def cattr_accessor(*syms)
|
||||
cattr_reader(*syms)
|
||||
cattr_writer(*syms)
|
||||
end unless Class.respond_to?(:cattr_accessor)
|
||||
|
||||
# Defines class-level inheritable attribute reader. Attributes are available to subclasses,
|
||||
# each subclass has a copy of parent's attribute.
|
||||
#
|
||||
# @param *syms<Array[#to_s]> Array of attributes to define inheritable reader for.
|
||||
# @return <Array[#to_s]> Array of attributes converted into inheritable_readers.
|
||||
#
|
||||
# @api public
|
||||
#
|
||||
# @todo Do we want to block instance_reader via :instance_reader => false
|
||||
# @todo It would be preferable that we do something with a Hash passed in
|
||||
# (error out or do the same as other methods above) instead of silently
|
||||
# moving on. In particular, this makes the return value of this function
|
||||
# less useful.
|
||||
def extlib_inheritable_reader(*ivars)
|
||||
instance_reader = ivars.pop[:reader] if ivars.last.is_a?(Hash)
|
||||
|
||||
ivars.each do |ivar|
|
||||
self.class_eval <<-RUBY, __FILE__, __LINE__ + 1
|
||||
def self.#{ivar}
|
||||
return @#{ivar} if self.object_id == #{self.object_id} || defined?(@#{ivar})
|
||||
ivar = superclass.#{ivar}
|
||||
return nil if ivar.nil? && !#{self}.instance_variable_defined?("@#{ivar}")
|
||||
@#{ivar} = ivar && !ivar.is_a?(Module) && !ivar.is_a?(Numeric) && !ivar.is_a?(TrueClass) && !ivar.is_a?(FalseClass) ? ivar.dup : ivar
|
||||
end
|
||||
RUBY
|
||||
unless instance_reader == false
|
||||
self.class_eval <<-RUBY, __FILE__, __LINE__ + 1
|
||||
def #{ivar}
|
||||
self.class.#{ivar}
|
||||
end
|
||||
RUBY
|
||||
end
|
||||
end
|
||||
end unless Class.respond_to?(:extlib_inheritable_reader)
|
||||
|
||||
# Defines class-level inheritable attribute writer. Attributes are available to subclasses,
|
||||
# each subclass has a copy of parent's attribute.
|
||||
#
|
||||
# @param *syms<Array[*#to_s, Hash{:instance_writer => Boolean}]> Array of attributes to
|
||||
# define inheritable writer for.
|
||||
# @option syms :instance_writer<Boolean> if true, instance-level inheritable attribute writer is defined.
|
||||
# @return <Array[#to_s]> An Array of the attributes that were made into inheritable writers.
|
||||
#
|
||||
# @api public
|
||||
#
|
||||
# @todo We need a style for class_eval <<-HEREDOC. I'd like to make it
|
||||
# class_eval(<<-RUBY, __FILE__, __LINE__), but we should codify it somewhere.
|
||||
def extlib_inheritable_writer(*ivars)
|
||||
instance_writer = ivars.pop[:writer] if ivars.last.is_a?(Hash)
|
||||
ivars.each do |ivar|
|
||||
self.class_eval <<-RUBY, __FILE__, __LINE__ + 1
|
||||
def self.#{ivar}=(obj)
|
||||
@#{ivar} = obj
|
||||
end
|
||||
RUBY
|
||||
unless instance_writer == false
|
||||
self.class_eval <<-RUBY, __FILE__, __LINE__ + 1
|
||||
def #{ivar}=(obj) self.class.#{ivar} = obj end
|
||||
RUBY
|
||||
end
|
||||
|
||||
self.send("#{ivar}=", yield) if block_given?
|
||||
end
|
||||
end unless Class.respond_to?(:extlib_inheritable_writer)
|
||||
|
||||
# Defines class-level inheritable attribute accessor. Attributes are available to subclasses,
|
||||
# each subclass has a copy of parent's attribute.
|
||||
#
|
||||
# @param *syms<Array[*#to_s, Hash{:instance_writer => Boolean}]> Array of attributes to
|
||||
# define inheritable accessor for.
|
||||
# @option syms :instance_writer<Boolean> if true, instance-level inheritable attribute writer is defined.
|
||||
# @return <Array[#to_s]> An Array of attributes turned into inheritable accessors.
|
||||
#
|
||||
# @api public
|
||||
def extlib_inheritable_accessor(*syms, &block)
|
||||
extlib_inheritable_reader(*syms)
|
||||
extlib_inheritable_writer(*syms, &block)
|
||||
end unless Class.respond_to?(:extlib_inheritable_accessor)
|
||||
end
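# Illustrative sketch of how the inheritable accessors above behave; the
# Engine class here is hypothetical and used only as an example.
#
#   class Engine
#     extlib_inheritable_accessor :adapters
#     self.adapters = [:rest_client]
#   end
#
#   class CustomEngine < Engine; end
#
#   CustomEngine.adapters           #=> [:rest_client]  (a dup of the parent's value)
#   CustomEngine.adapters << :curb
#   Engine.adapters                 #=> [:rest_client]  (the parent's copy is untouched)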
|
||||
|
||||
class Array
|
||||
# Extracts options from a set of arguments. Removes and returns the last
|
||||
# element in the array if it's a hash, otherwise returns a blank hash.
|
||||
#
|
||||
# def options(*args)
|
||||
# args.extract_options!
|
||||
# end
|
||||
#
|
||||
# options(1, 2) # => {}
|
||||
# options(1, 2, :a => :b) # => {:a=>:b}
|
||||
def extract_options!
|
||||
last.is_a?(::Hash) ? pop : {}
|
||||
end unless Array.new.respond_to?(:extract_options!)
|
||||
|
||||
# Wraps the object in an Array unless it's an Array. Converts the
|
||||
# object to an Array using #to_ary if it implements that.
|
||||
def self.wrap(object)
|
||||
case object
|
||||
when nil
|
||||
[]
|
||||
when self
|
||||
object
|
||||
else
|
||||
if object.respond_to?(:to_ary)
|
||||
object.to_ary
|
||||
else
|
||||
[object]
|
||||
end
|
||||
end
|
||||
end unless Array.respond_to?(:wrap)
|
||||
end
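# Illustrative examples (results assumed from the definitions above):
#
#   [1, 2, {:db => 'test'}].extract_options!   #=> {:db => 'test'}
#   [1, 2].extract_options!                    #=> {}
#   Array.wrap(nil)                            #=> []
#   Array.wrap([1, 2])                         #=> [1, 2]
#   Array.wrap("couchrest")                    #=> ["couchrest"]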
|
||||
|
56
lib/couchrest/support/couchrest.rb
Normal file
@@ -0,0 +1,56 @@
|
|||
|
||||
module CouchRest
|
||||
|
||||
|
||||
# The CouchRest module methods handle the basic JSON serialization
|
||||
# and deserialization, as well as query parameters. The module also includes
|
||||
# some helpers for tasks like instantiating a new Database or Server instance.
|
||||
class << self
|
||||
|
||||
# extracted from Extlib
|
||||
#
|
||||
# Constantize tries to find a declared constant with the name specified
|
||||
# in the string. It raises a NameError when the name is not in CamelCase
|
||||
# or is not initialized.
|
||||
#
|
||||
# @example
|
||||
# "Module".constantize #=> Module
|
||||
# "Class".constantize #=> Class
|
||||
def constantize(camel_cased_word)
|
||||
unless /\A(?:::)?([A-Z]\w*(?:::[A-Z]\w*)*)\z/ =~ camel_cased_word
|
||||
raise NameError, "#{camel_cased_word.inspect} is not a valid constant name!"
|
||||
end
|
||||
|
||||
Object.module_eval("::#{$1}", __FILE__, __LINE__)
|
||||
end
|
||||
|
||||
# extracted from Extlib
|
||||
#
|
||||
# Capitalizes the first word and turns underscores into spaces and strips _id.
|
||||
# Like titleize, this is meant for creating pretty output.
|
||||
#
|
||||
# @example
|
||||
# "employee_salary" #=> "Employee salary"
|
||||
# "author_id" #=> "Author"
|
||||
def humanize(lower_case_and_underscored_word)
|
||||
lower_case_and_underscored_word.to_s.gsub(/_id$/, "").gsub(/_/, " ").capitalize
|
||||
end
|
||||
|
||||
end
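# Example usage of the helpers above (results assumed from the definitions):
#
#   CouchRest.constantize("CouchRest::Document")  #=> CouchRest::Document
#   CouchRest.humanize("author_id")               #=> "Author"
#   CouchRest.humanize("employee_salary")         #=> "Employee salary"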
|
||||
|
||||
class Database
|
||||
|
||||
alias :delete_old! :delete!
|
||||
def delete!
|
||||
clear_extended_doc_fresh_cache
|
||||
delete_old!
|
||||
end
|
||||
|
||||
# If the database is deleted, ensure that the design docs will be refreshed.
|
||||
def clear_extended_doc_fresh_cache
|
||||
::CouchRest::ExtendedDocument.subclasses.each{|klass| klass.req_design_doc_refresh if klass.respond_to?(:req_design_doc_refresh)}
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
end
|
|
@@ -1,7 +1,7 @@
|
|||
require 'time'
|
||||
require 'bigdecimal'
|
||||
require 'bigdecimal/util'
|
||||
require File.join(File.dirname(__FILE__), '..', 'more', 'property')
|
||||
require File.join(File.dirname(__FILE__), 'property')
|
||||
|
||||
class Time
|
||||
# returns a local time value much faster than Time.parse
|
|
@@ -1,5 +1,4 @@
|
|||
# Ported from dm-migrations
|
||||
require File.join(File.dirname(__FILE__), '..', 'support', 'class')
|
||||
|
||||
module CouchRest
|
||||
|
||||
|
|
|
@@ -1,4 +1,4 @@
|
|||
require File.expand_path("../../../spec_helper", __FILE__)
|
||||
require File.expand_path("../../spec_helper", __FILE__)
|
||||
|
||||
describe "ExtendedDocument", "no declarations" do
|
||||
class NoProtection < CouchRest::ExtendedDocument
|
|
@@ -1,4 +1,4 @@
|
|||
require File.expand_path('../../../spec_helper', __FILE__)
|
||||
require File.expand_path('../../spec_helper', __FILE__)
|
||||
require File.join(FIXTURE_PATH, 'more', 'cat')
|
||||
require File.join(FIXTURE_PATH, 'more', 'person')
|
||||
require File.join(FIXTURE_PATH, 'more', 'card')
|
|
@@ -1,6 +1,6 @@
|
|||
# encoding: utf-8
|
||||
|
||||
require File.expand_path('../../../spec_helper', __FILE__)
|
||||
require File.expand_path('../../spec_helper', __FILE__)
|
||||
require File.join(FIXTURE_PATH, 'more', 'person')
|
||||
require File.join(FIXTURE_PATH, 'more', 'card')
|
||||
require File.join(FIXTURE_PATH, 'more', 'cat')
|
||||
|
@@ -201,7 +201,7 @@ describe CouchRest::CastedModel do
|
|||
|
||||
describe "saving document with array of casted models and validation" do
|
||||
before :each do
|
||||
@cat = Cat.new
|
||||
@cat = Cat.new :name => "felix"
|
||||
@cat.save
|
||||
end
|
||||
|
|
@@ -1,184 +0,0 @@
|
|||
require File.expand_path("../../../spec_helper", __FILE__)
|
||||
|
||||
describe CouchRest do
|
||||
|
||||
before(:each) do
|
||||
@cr = CouchRest.new(COUCHHOST)
|
||||
begin
|
||||
@db = @cr.database(TESTDB)
|
||||
@db.delete! rescue nil
|
||||
end
|
||||
end
|
||||
|
||||
after(:each) do
|
||||
begin
|
||||
@db.delete! rescue nil
|
||||
end
|
||||
end
|
||||
|
||||
describe "getting info" do
|
||||
it "should list databases" do
|
||||
@cr.databases.should be_an_instance_of(Array)
|
||||
end
|
||||
it "should get info" do
|
||||
@cr.info["couchdb"].should == "Welcome"
|
||||
@cr.info.class.should == Hash
|
||||
end
|
||||
end
|
||||
|
||||
it "should restart" do
|
||||
@cr.restart!
|
||||
end
|
||||
|
||||
it "should provide one-time access to uuids" do
|
||||
@cr.next_uuid.should_not be_nil
|
||||
end
|
||||
|
||||
describe "initializing a database" do
|
||||
it "should return a db" do
|
||||
db = @cr.database(TESTDB)
|
||||
db.should be_an_instance_of(CouchRest::Database)
|
||||
db.host.should == @cr.uri
|
||||
end
|
||||
end
|
||||
|
||||
describe "parsing urls" do
|
||||
it "should parse just a dbname" do
|
||||
db = CouchRest.parse "my-db"
|
||||
db[:database].should == "my-db"
|
||||
db[:host].should == "http://127.0.0.1:5984"
|
||||
end
|
||||
it "should parse a host and db" do
|
||||
db = CouchRest.parse "127.0.0.1/my-db"
|
||||
db[:database].should == "my-db"
|
||||
db[:host].should == "http://127.0.0.1"
|
||||
end
|
||||
it "should parse a host and db with http" do
|
||||
db = CouchRest.parse "http://127.0.0.1/my-db"
|
||||
db[:database].should == "my-db"
|
||||
db[:host].should == "http://127.0.0.1"
|
||||
end
|
||||
it "should parse a host and db with https" do
|
||||
db = CouchRest.parse "https://127.0.0.1/my-db"
|
||||
db[:database].should == "my-db"
|
||||
db[:host].should == "https://127.0.0.1"
|
||||
end
|
||||
it "should parse a host with a port and db" do
|
||||
db = CouchRest.parse "127.0.0.1:5555/my-db"
|
||||
db[:database].should == "my-db"
|
||||
db[:host].should == "http://127.0.0.1:5555"
|
||||
end
|
||||
it "should parse a host with a port and db with http" do
|
||||
db = CouchRest.parse "http://127.0.0.1:5555/my-db"
|
||||
db[:database].should == "my-db"
|
||||
db[:host].should == "http://127.0.0.1:5555"
|
||||
end
|
||||
it "should parse a host with a port and db with https" do
|
||||
db = CouchRest.parse "https://127.0.0.1:5555/my-db"
|
||||
db[:database].should == "my-db"
|
||||
db[:host].should == "https://127.0.0.1:5555"
|
||||
end
|
||||
it "should parse just a host" do
|
||||
db = CouchRest.parse "http://127.0.0.1:5555/"
|
||||
db[:database].should be_nil
|
||||
db[:host].should == "http://127.0.0.1:5555"
|
||||
end
|
||||
it "should parse just a host with https" do
|
||||
db = CouchRest.parse "https://127.0.0.1:5555/"
|
||||
db[:database].should be_nil
|
||||
db[:host].should == "https://127.0.0.1:5555"
|
||||
end
|
||||
it "should parse just a host no slash" do
|
||||
db = CouchRest.parse "http://127.0.0.1:5555"
|
||||
db[:host].should == "http://127.0.0.1:5555"
|
||||
db[:database].should be_nil
|
||||
end
|
||||
it "should parse just a host no slash and https" do
|
||||
db = CouchRest.parse "https://127.0.0.1:5555"
|
||||
db[:host].should == "https://127.0.0.1:5555"
|
||||
db[:database].should be_nil
|
||||
end
|
||||
it "should get docid" do
|
||||
db = CouchRest.parse "127.0.0.1:5555/my-db/my-doc"
|
||||
db[:database].should == "my-db"
|
||||
db[:host].should == "http://127.0.0.1:5555"
|
||||
db[:doc].should == "my-doc"
|
||||
end
|
||||
it "should get docid with http" do
|
||||
db = CouchRest.parse "http://127.0.0.1:5555/my-db/my-doc"
|
||||
db[:database].should == "my-db"
|
||||
db[:host].should == "http://127.0.0.1:5555"
|
||||
db[:doc].should == "my-doc"
|
||||
end
|
||||
it "should get docid with https" do
|
||||
db = CouchRest.parse "https://127.0.0.1:5555/my-db/my-doc"
|
||||
db[:database].should == "my-db"
|
||||
db[:host].should == "https://127.0.0.1:5555"
|
||||
db[:doc].should == "my-doc"
|
||||
end
|
||||
end
|
||||
|
||||
describe "easy initializing a database adapter" do
|
||||
it "should be possible without an explicit CouchRest instantiation" do
|
||||
db = CouchRest.database "http://127.0.0.1:5984/couchrest-test"
|
||||
db.should be_an_instance_of(CouchRest::Database)
|
||||
db.host.should == "http://127.0.0.1:5984"
|
||||
end
|
||||
# TODO add https support (need test environment...)
|
||||
# it "should work with https" # do
|
||||
# db = CouchRest.database "https://127.0.0.1:5984/couchrest-test"
|
||||
# db.host.should == "https://127.0.0.1:5984"
|
||||
# end
|
||||
it "should not create the database automatically" do
|
||||
db = CouchRest.database "http://127.0.0.1:5984/couchrest-test"
|
||||
lambda{db.info}.should raise_error(RestClient::ResourceNotFound)
|
||||
end
|
||||
end
|
||||
|
||||
describe "ensuring the db exists" do
|
||||
it "should be super easy" do
|
||||
db = CouchRest.database! "http://127.0.0.1:5984/couchrest-test-2"
|
||||
db.name.should == 'couchrest-test-2'
|
||||
db.info["db_name"].should == 'couchrest-test-2'
|
||||
end
|
||||
end
|
||||
|
||||
describe "successfully creating a database" do
|
||||
it "should start without a database" do
|
||||
@cr.databases.should_not include(TESTDB)
|
||||
end
|
||||
it "should return the created database" do
|
||||
db = @cr.create_db(TESTDB)
|
||||
db.should be_an_instance_of(CouchRest::Database)
|
||||
end
|
||||
it "should create the database" do
|
||||
db = @cr.create_db(TESTDB)
|
||||
@cr.databases.should include(TESTDB)
|
||||
end
|
||||
end
|
||||
|
||||
describe "failing to create a database because the name is taken" do
|
||||
before(:each) do
|
||||
db = @cr.create_db(TESTDB)
|
||||
end
|
||||
it "should start with the test database" do
|
||||
@cr.databases.should include(TESTDB)
|
||||
end
|
||||
it "should PUT the database and raise an error" do
|
||||
lambda{
|
||||
@cr.create_db(TESTDB)
|
||||
}.should raise_error(RestClient::Request::RequestFailed)
|
||||
end
|
||||
end
|
||||
|
||||
describe "using a proxy for RestClient connections" do
|
||||
it "should set proxy url for RestClient" do
|
||||
CouchRest.proxy 'http://localhost:8888/'
|
||||
proxy_uri = URI.parse(HttpAbstraction.proxy)
|
||||
proxy_uri.host.should eql( 'localhost' )
|
||||
proxy_uri.port.should eql( 8888 )
|
||||
CouchRest.proxy nil
|
||||
end
|
||||
end
|
||||
|
||||
end
|
|
@@ -1,840 +0,0 @@
|
|||
require File.expand_path("../../../spec_helper", __FILE__)
|
||||
|
||||
describe CouchRest::Database do
|
||||
before(:each) do
|
||||
@cr = CouchRest.new(COUCHHOST)
|
||||
@db = @cr.database(TESTDB)
|
||||
@db.delete! rescue nil
|
||||
@db = @cr.create_db(TESTDB) rescue nil
|
||||
end
|
||||
|
||||
describe "database name including slash" do
|
||||
it "should escape the name in the URI" do
|
||||
db = @cr.database("foo/bar")
|
||||
db.name.should == "foo/bar"
|
||||
db.root.should == "#{COUCHHOST}/foo%2Fbar"
|
||||
db.uri.should == "/foo%2Fbar"
|
||||
end
|
||||
end
|
||||
|
||||
describe "map query with _temp_view in Javascript" do
|
||||
before(:each) do
|
||||
@db.bulk_save([
|
||||
{"wild" => "and random"},
|
||||
{"mild" => "yet local"},
|
||||
{"another" => ["set","of","keys"]}
|
||||
])
|
||||
@temp_view = {:map => "function(doc){for(var w in doc){ if(!w.match(/^_/))emit(w,doc[w])}}"}
|
||||
end
|
||||
it "should return the result of the temporary function" do
|
||||
rs = @db.temp_view(@temp_view)
|
||||
rs['rows'].select{|r|r['key'] == 'wild' && r['value'] == 'and random'}.length.should == 1
|
||||
end
|
||||
it "should work with a range" do
|
||||
rs = @db.temp_view(@temp_view, :startkey => "b", :endkey => "z")
|
||||
rs['rows'].length.should == 2
|
||||
end
|
||||
it "should work with a key" do
|
||||
rs = @db.temp_view(@temp_view, :key => "wild")
|
||||
rs['rows'].length.should == 1
|
||||
end
|
||||
it "should work with a limit" do
|
||||
rs = @db.temp_view(@temp_view, :limit => 1)
|
||||
rs['rows'].length.should == 1
|
||||
end
|
||||
it "should work with multi-keys" do
|
||||
rs = @db.temp_view(@temp_view, :keys => ["another", "wild"])
|
||||
rs['rows'].length.should == 2
|
||||
end
|
||||
end
|
||||
|
||||
describe "map/reduce query with _temp_view in Javascript" do
|
||||
before(:each) do
|
||||
@db.bulk_save([
|
||||
{"beverage" => "beer", :count => 4},
|
||||
{"beverage" => "beer", :count => 2},
|
||||
{"beverage" => "tea", :count => 3}
|
||||
])
|
||||
end
|
||||
it "should return the result of the temporary function" do
|
||||
rs = @db.temp_view(:map => "function(doc){emit(doc.beverage, doc.count)}", :reduce => "function(beverage,counts){return sum(counts)}")
|
||||
# rs.should == 'x'
|
||||
rs['rows'][0]['value'].should == 9
|
||||
end
|
||||
end
|
||||
|
||||
describe "saving a view" do
|
||||
before(:each) do
|
||||
@view = {'test' => {'map' => <<-JS
|
||||
function(doc) {
|
||||
var reg = new RegExp("\\\\W");
|
||||
if (doc.word && !reg.test(doc.word)) {
|
||||
emit(doc.word,null);
|
||||
}
|
||||
}
|
||||
JS
|
||||
}}
|
||||
@db.save_doc({
|
||||
"_id" => "_design/test",
|
||||
:views => @view
|
||||
})
|
||||
end
|
||||
it "should work properly" do
|
||||
r = @db.bulk_save([
|
||||
{"word" => "once"},
|
||||
{"word" => "and again"}
|
||||
])
|
||||
r = @db.view('test/test')
|
||||
r['total_rows'].should == 1
|
||||
end
|
||||
it "should round trip" do
|
||||
@db.get("_design/test")['views'].should == @view
|
||||
end
|
||||
end
|
||||
|
||||
describe "select from an existing view" do
|
||||
before(:each) do
|
||||
r = @db.save_doc({
|
||||
"_id" => "_design/first",
|
||||
:views => {
|
||||
:test => {
|
||||
:map => "function(doc){for(var w in doc){ if(!w.match(/^_/))emit(w,doc[w])}}"
|
||||
}
|
||||
}
|
||||
})
|
||||
@db.bulk_save([
|
||||
{"wild" => "and random"},
|
||||
{"mild" => "yet local"},
|
||||
{"another" => ["set","of","keys"]}
|
||||
])
|
||||
end
|
||||
it "should have the view" do
|
||||
@db.get('_design/first')['views']['test']['map'].should include("for(var w in doc)")
|
||||
end
|
||||
it "should list from the view" do
|
||||
rs = @db.view('first/test')
|
||||
rs['rows'].select{|r|r['key'] == 'wild' && r['value'] == 'and random'}.length.should == 1
|
||||
end
|
||||
it "should work with a range" do
|
||||
rs = @db.view('first/test', :startkey => "b", :endkey => "z")
|
||||
rs['rows'].length.should == 2
|
||||
end
|
||||
it "should work with a key" do
|
||||
rs = @db.view('first/test', :key => "wild")
|
||||
rs['rows'].length.should == 1
|
||||
end
|
||||
it "should work with a limit" do
|
||||
rs = @db.view('first/test', :limit => 1)
|
||||
rs['rows'].length.should == 1
|
||||
end
|
||||
it "should work with multi-keys" do
|
||||
rs = @db.view('first/test', :keys => ["another", "wild"])
|
||||
rs['rows'].length.should == 2
|
||||
end
|
||||
it "should accept a block" do
|
||||
rows = []
|
||||
rs = @db.view('first/test', :include_docs => true) do |row|
|
||||
rows << row
|
||||
end
|
||||
rows.length.should == 3
|
||||
rs["total_rows"].should == 3
|
||||
end
|
||||
it "should accept a block with several params" do
|
||||
rows = []
|
||||
rs = @db.view('first/test', :include_docs => true, :limit => 2) do |row|
|
||||
rows << row
|
||||
end
|
||||
rows.length.should == 2
|
||||
end
|
||||
end
|
||||
|
||||
describe "GET (document by id) when the doc exists" do
|
||||
before(:each) do
|
||||
@r = @db.save_doc({'lemons' => 'from texas', 'and' => 'spain'})
|
||||
@docid = "http://example.com/stuff.cgi?things=and%20stuff"
|
||||
@db.save_doc({'_id' => @docid, 'will-exist' => 'here'})
|
||||
end
|
||||
it "should get the document" do
|
||||
doc = @db.get(@r['id'])
|
||||
doc['lemons'].should == 'from texas'
|
||||
end
|
||||
it "should work with a funky id" do
|
||||
@db.get(@docid)['will-exist'].should == 'here'
|
||||
end
|
||||
end
|
||||
|
||||
describe "POST (adding bulk documents)" do
|
||||
it "should add them without ids" do
|
||||
rs = @db.bulk_save([
|
||||
{"wild" => "and random"},
|
||||
{"mild" => "yet local"},
|
||||
{"another" => ["set","of","keys"]}
|
||||
])
|
||||
rs.each do |r|
|
||||
@db.get(r['id']).rev.should == r["rev"]
|
||||
end
|
||||
end
|
||||
|
||||
it "should use uuids when ids aren't provided" do
|
||||
@db.server.stub!(:next_uuid).and_return('asdf6sgadkfhgsdfusdf')
|
||||
|
||||
docs = [{'key' => 'value'}, {'_id' => 'totally-uniq'}]
|
||||
id_docs = [{'key' => 'value', '_id' => 'asdf6sgadkfhgsdfusdf'}, {'_id' => 'totally-uniq'}]
|
||||
CouchRest.should_receive(:post).with("http://127.0.0.1:5984/couchrest-test/_bulk_docs", {:docs => id_docs})
|
||||
|
||||
@db.bulk_save(docs)
|
||||
end
|
||||
|
||||
it "should add them with uniq ids" do
|
||||
rs = @db.bulk_save([
|
||||
{"_id" => "oneB", "wild" => "and random"},
|
||||
{"_id" => "twoB", "mild" => "yet local"},
|
||||
{"another" => ["set","of","keys"]}
|
||||
])
|
||||
rs.each do |r|
|
||||
@db.get(r['id']).rev.should == r["rev"]
|
||||
end
|
||||
end
|
||||
|
||||
it "should empty the bulk save cache if no documents are given" do
|
||||
@db.save_doc({"_id" => "bulk_cache_1", "val" => "test"}, true)
|
||||
lambda do
|
||||
@db.get('bulk_cache_1')
|
||||
end.should raise_error(RestClient::ResourceNotFound)
|
||||
@db.bulk_save
|
||||
@db.get("bulk_cache_1")["val"].should == "test"
|
||||
end
|
||||
|
||||
it "should raise an error that is useful for recovery" do
|
||||
@r = @db.save_doc({"_id" => "taken", "field" => "stuff"})
|
||||
begin
|
||||
rs = @db.bulk_save([
|
||||
{"_id" => "taken", "wild" => "and random"},
|
||||
{"_id" => "free", "mild" => "yet local"},
|
||||
{"another" => ["set","of","keys"]}
|
||||
])
|
||||
rescue RestClient::RequestFailed => e
|
||||
# soon CouchDB will provide _which_ docs conflicted
|
||||
JSON.parse(e.response.body)['error'].should == 'conflict'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "new document without an id" do
|
||||
it "should start empty" do
|
||||
@db.documents["total_rows"].should == 0
|
||||
end
|
||||
it "should create the document and return the id" do
|
||||
r = @db.save_doc({'lemons' => 'from texas', 'and' => 'spain'})
|
||||
r2 = @db.get(r['id'])
|
||||
r2["lemons"].should == "from texas"
|
||||
end
|
||||
it "should use PUT with UUIDs" do
|
||||
CouchRest.should_receive(:put).and_return({"ok" => true, "id" => "100", "rev" => "55"})
|
||||
r = @db.save_doc({'just' => ['another document']})
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
describe "fetch_attachment" do
|
||||
before do
|
||||
@attach = "<html><head><title>My Doc</title></head><body><p>Has words.</p></body></html>"
|
||||
@doc = {
|
||||
"_id" => "mydocwithattachment",
|
||||
"field" => ["some value"],
|
||||
"_attachments" => {
|
||||
"test.html" => {
|
||||
"type" => "text/html",
|
||||
"data" => @attach
|
||||
}
|
||||
}
|
||||
}
|
||||
@db.save_doc(@doc)
|
||||
end
|
||||
|
||||
# Deprecated
|
||||
# it "should get the attachment with the doc's _id" do
|
||||
# @db.fetch_attachment("mydocwithattachment", "test.html").should == @attach
|
||||
# end
|
||||
|
||||
it "should get the attachment with the doc itself" do
|
||||
@db.fetch_attachment(@db.get('mydocwithattachment'), 'test.html').should == @attach
|
||||
end
|
||||
end
|
||||
|
||||
describe "PUT attachment from file" do
|
||||
before(:each) do
|
||||
filename = FIXTURE_PATH + '/attachments/couchdb.png'
|
||||
@file = File.open(filename, "rb")
|
||||
end
|
||||
after(:each) do
|
||||
@file.close
|
||||
end
|
||||
it "should save the attachment to a new doc" do
|
||||
r = @db.put_attachment({'_id' => 'attach-this'}, 'couchdb.png', image = @file.read, {:content_type => 'image/png'})
|
||||
r['ok'].should == true
|
||||
doc = @db.get("attach-this")
|
||||
attachment = @db.fetch_attachment(doc,"couchdb.png")
|
||||
if attachment.respond_to?(:net_http_res)
|
||||
attachment.net_http_res.body.should == image
|
||||
else
|
||||
attachment.should == image
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "PUT document with attachment" do
|
||||
before(:each) do
|
||||
@attach = "<html><head><title>My Doc</title></head><body><p>Has words.</p></body></html>"
|
||||
doc = {
|
||||
"_id" => "mydocwithattachment",
|
||||
"field" => ["some value"],
|
||||
"_attachments" => {
|
||||
"test.html" => {
|
||||
"type" => "text/html",
|
||||
"data" => @attach
|
||||
}
|
||||
}
|
||||
}
|
||||
@db.save_doc(doc)
|
||||
@doc = @db.get("mydocwithattachment")
|
||||
end
|
||||
it "should save and be indicated" do
|
||||
@doc['_attachments']['test.html']['length'].should == @attach.length
|
||||
end
|
||||
it "should be there" do
|
||||
attachment = @db.fetch_attachment(@doc,"test.html")
|
||||
attachment.should == @attach
|
||||
end
|
||||
end
|
||||
|
||||
describe "PUT document with attachment stub" do
|
||||
before(:each) do
|
||||
@attach = "<html><head><title>My Doc</title></head><body><p>Has words.</p></body></html>"
|
||||
doc = {
|
||||
'_id' => 'mydocwithattachment',
|
||||
'field' => ['some_value'],
|
||||
'_attachments' => {
|
||||
'test.html' => {
|
||||
'type' => 'text/html', 'data' => @attach
|
||||
}
|
||||
}
|
||||
}
|
||||
@db.save_doc(doc)
|
||||
doc['_rev'].should_not be_nil
|
||||
doc['field'] << 'another value'
|
||||
@db.save_doc(doc)["ok"].should be_true
|
||||
end
|
||||
|
||||
it 'should be there' do
|
||||
doc = @db.get('mydocwithattachment')
|
||||
attachment = @db.fetch_attachment(doc, 'test.html')
|
||||
Base64.decode64(attachment).should == @attach
|
||||
end
|
||||
end
|
||||
|
||||
describe "PUT document with multiple attachments" do
|
||||
before(:each) do
|
||||
@attach = "<html><head><title>My Doc</title></head><body><p>Has words.</p></body></html>"
|
||||
@attach2 = "<html><head><title>Other Doc</title></head><body><p>Has more words.</p></body></html>"
|
||||
@doc = {
|
||||
"_id" => "mydocwithattachment",
|
||||
"field" => ["some value"],
|
||||
"_attachments" => {
|
||||
"test.html" => {
|
||||
"type" => "text/html",
|
||||
"data" => @attach
|
||||
},
|
||||
"other.html" => {
|
||||
"type" => "text/html",
|
||||
"data" => @attach2
|
||||
}
|
||||
}
|
||||
}
|
||||
@db.save_doc(@doc)
|
||||
@doc = @db.get("mydocwithattachment")
|
||||
end
|
||||
it "should save and be indicated" do
|
||||
@doc['_attachments']['test.html']['length'].should == @attach.length
|
||||
@doc['_attachments']['other.html']['length'].should == @attach2.length
|
||||
end
|
||||
it "should be there" do
|
||||
attachment = @db.fetch_attachment(@doc,"test.html")
|
||||
attachment.should == @attach
|
||||
end
|
||||
it "should be there" do
|
||||
attachment = @db.fetch_attachment(@doc,"other.html")
|
||||
attachment.should == @attach2
|
||||
end
|
||||
end
|
||||
|
||||
describe "DELETE an attachment directly from the database" do
|
||||
before(:each) do
|
||||
doc = {
|
||||
'_id' => 'mydocwithattachment',
|
||||
'_attachments' => {
|
||||
'test.html' => {
|
||||
'type' => 'text/html',
|
||||
'data' => "<html><head><title>My Doc</title></head><body><p>Has words.</p></body></html>"
|
||||
}
|
||||
}
|
||||
}
|
||||
@db.save_doc(doc)
|
||||
@doc = @db.get('mydocwithattachment')
|
||||
end
|
||||
it "should delete the attachment" do
|
||||
lambda { @db.fetch_attachment(@doc,'test.html') }.should_not raise_error
|
||||
@db.delete_attachment(@doc, "test.html")
|
||||
@doc = @db.get('mydocwithattachment') # avoid getting a 409
|
||||
lambda{ @db.fetch_attachment(@doc,'test.html')}.should raise_error
|
||||
end
|
||||
|
||||
it "should force a delete even if we get a 409" do
|
||||
@doc['new_attribute'] = 'something new'
|
||||
@db.put_attachment(@doc, 'test', File.open(File.join(File.dirname(__FILE__), '..', '..', 'fixtures', 'attachments', 'test.html')).read)
|
||||
# at this point the revision number has changed; if we try to save the doc one more time
|
||||
# we would get a 409.
|
||||
lambda{ @db.save_doc(@doc) }.should raise_error
|
||||
lambda{ @db.delete_attachment(@doc, "test", true) }.should_not raise_error
|
||||
end
|
||||
end
|
||||
|
||||
describe "POST document with attachment (with funky name)" do
|
||||
before(:each) do
|
||||
@attach = "<html><head><title>My Funky Doc</title></head><body><p>Has words.</p></body></html>"
|
||||
@doc = {
|
||||
"field" => ["some other value"],
|
||||
"_attachments" => {
|
||||
"http://example.com/stuff.cgi?things=and%20stuff" => {
|
||||
"type" => "text/html",
|
||||
"data" => @attach
|
||||
}
|
||||
}
|
||||
}
|
||||
@docid = @db.save_doc(@doc)['id']
|
||||
end
|
||||
it "should save and be indicated" do
|
||||
doc = @db.get(@docid)
|
||||
doc['_attachments']['http://example.com/stuff.cgi?things=and%20stuff']['length'].should == @attach.length
|
||||
end
|
||||
it "should be there" do
|
||||
doc = @db.get(@docid)
|
||||
attachment = @db.fetch_attachment(doc,"http://example.com/stuff.cgi?things=and%20stuff")
|
||||
attachment.should == @attach
|
||||
end
|
||||
end
|
||||
|
||||
describe "PUT (new document with url id)" do
|
||||
it "should create the document" do
|
||||
@docid = "http://example.com/stuff.cgi?things=and%20stuff"
|
||||
@db.save_doc({'_id' => @docid, 'will-exist' => 'here'})
|
||||
lambda{@db.save_doc({'_id' => @docid})}.should raise_error(RestClient::Request::RequestFailed)
|
||||
@db.get(@docid)['will-exist'].should == 'here'
|
||||
end
|
||||
end
|
||||
|
||||
describe "PUT (new document with id)" do
|
||||
it "should start without the document" do
|
||||
# r = @db.save_doc({'lemons' => 'from texas', 'and' => 'spain'})
|
||||
@db.documents['rows'].each do |doc|
|
||||
doc['id'].should_not == 'my-doc'
|
||||
end
|
||||
# should_not include({'_id' => 'my-doc'})
|
||||
# this needs to be a loop over docs on content with the post
|
||||
# or instead make it return something with a fancy <=> method
|
||||
end
|
||||
it "should create the document" do
|
||||
@db.save_doc({'_id' => 'my-doc', 'will-exist' => 'here'})
|
||||
lambda{@db.save_doc({'_id' => 'my-doc'})}.should raise_error(RestClient::Request::RequestFailed)
|
||||
end
|
||||
end
|
||||
|
||||
describe "PUT (existing document with rev)" do
|
||||
before(:each) do
|
||||
@db.save_doc({'_id' => 'my-doc', 'will-exist' => 'here'})
|
||||
@doc = @db.get('my-doc')
|
||||
@docid = "http://example.com/stuff.cgi?things=and%20stuff"
|
||||
@db.save_doc({'_id' => @docid, 'now' => 'save'})
|
||||
end
|
||||
it "should start with the document" do
|
||||
@doc['will-exist'].should == 'here'
|
||||
@db.get(@docid)['now'].should == 'save'
|
||||
end
|
||||
it "should save with url id" do
|
||||
doc = @db.get(@docid)
|
||||
doc['yaml'] = ['json', 'word.']
|
||||
@db.save_doc doc
|
||||
@db.get(@docid)['yaml'].should == ['json', 'word.']
|
||||
end
|
||||
it "should fail to resave without the rev" do
|
||||
@doc['them-keys'] = 'huge'
|
||||
@doc['_rev'] = 'wrong'
|
||||
# @db.save_doc(@doc)
|
||||
lambda {@db.save_doc(@doc)}.should raise_error
|
||||
end
|
||||
it "should update the document" do
|
||||
@doc['them-keys'] = 'huge'
|
||||
@db.save_doc(@doc)
|
||||
now = @db.get('my-doc')
|
||||
now['them-keys'].should == 'huge'
|
||||
end
|
||||
end
|
||||
|
||||
describe "cached bulk save" do
|
||||
it "stores documents in a database-specific cache" do
|
||||
td = {"_id" => "btd1", "val" => "test"}
|
||||
@db.save_doc(td, true)
|
||||
@db.instance_variable_get("@bulk_save_cache").should == [td]
|
||||
|
||||
end
|
||||
|
||||
it "doesn't save to the database until the configured cache size is exceded" do
|
||||
@db.bulk_save_cache_limit = 3
|
||||
td1 = {"_id" => "td1", "val" => true}
|
||||
td2 = {"_id" => "td2", "val" => 4}
|
||||
@db.save_doc(td1, true)
|
||||
@db.save_doc(td2, true)
|
||||
lambda do
|
||||
@db.get(td1["_id"])
|
||||
end.should raise_error(RestClient::ResourceNotFound)
|
||||
lambda do
|
||||
@db.get(td2["_id"])
|
||||
end.should raise_error(RestClient::ResourceNotFound)
|
||||
td3 = {"_id" => "td3", "val" => "foo"}
|
||||
@db.save_doc(td3, true)
|
||||
@db.get(td1["_id"])["val"].should == td1["val"]
|
||||
@db.get(td2["_id"])["val"].should == td2["val"]
|
||||
@db.get(td3["_id"])["val"].should == td3["val"]
|
||||
end
|
||||
|
||||
it "clears the bulk save cache the first time a non bulk save is requested" do
|
||||
td1 = {"_id" => "blah", "val" => true}
|
||||
td2 = {"_id" => "steve", "val" => 3}
|
||||
@db.bulk_save_cache_limit = 50
|
||||
@db.save_doc(td1, true)
|
||||
lambda do
|
||||
@db.get(td1["_id"])
|
||||
end.should raise_error(RestClient::ResourceNotFound)
|
||||
@db.save_doc(td2)
|
||||
@db.get(td1["_id"])["val"].should == td1["val"]
|
||||
@db.get(td2["_id"])["val"].should == td2["val"]
|
||||
end
|
||||
end
|
||||
|
||||
describe "DELETE existing document" do
|
||||
before(:each) do
|
||||
@r = @db.save_doc({'lemons' => 'from texas', 'and' => 'spain'})
|
||||
@docid = "http://example.com/stuff.cgi?things=and%20stuff"
|
||||
@db.save_doc({'_id' => @docid, 'will-exist' => 'here'})
|
||||
end
|
||||
it "should work" do
|
||||
doc = @db.get(@r['id'])
|
||||
doc['and'].should == 'spain'
|
||||
@db.delete_doc doc
|
||||
lambda{@db.get @r['id']}.should raise_error
|
||||
end
|
||||
it "should work with uri id" do
|
||||
doc = @db.get(@docid)
|
||||
@db.delete_doc doc
|
||||
lambda{@db.get @docid}.should raise_error
|
||||
end
|
||||
it "should fail without an _id" do
|
||||
lambda{@db.delete_doc({"not"=>"a real doc"})}.should raise_error(ArgumentError)
|
||||
end
|
||||
it "should defer actual deletion when using bulk save" do
|
||||
doc = @db.get(@docid)
|
||||
@db.delete_doc doc, true
|
||||
lambda{@db.get @docid}.should_not raise_error
|
||||
@db.bulk_save
|
||||
lambda{@db.get @docid}.should raise_error
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
describe "UPDATE existing document" do
|
||||
before :each do
|
||||
@id = @db.save_doc({
|
||||
'article' => 'Pete Doherty Kicked Out For Nazi Anthem',
|
||||
'upvotes' => 10,
|
||||
'link' => 'http://beatcrave.com/2009-11-30/pete-doherty-kicked-out-for-nazi-anthem/'})['id']
|
||||
end
|
||||
it "should work under normal conditions" do
|
||||
@db.update_doc @id do |doc|
|
||||
doc['upvotes'] += 1
|
||||
doc
|
||||
end
|
||||
@db.get(@id)['upvotes'].should == 11
|
||||
end
|
||||
it "should fail if update_limit is reached" do
|
||||
lambda do
|
||||
@db.update_doc @id do |doc|
|
||||
# modify and save the doc so that a collision happens
|
||||
conflicting_doc = @db.get @id
|
||||
conflicting_doc['upvotes'] += 1
|
||||
@db.save_doc conflicting_doc
|
||||
|
||||
# then try saving it through the update
|
||||
doc['upvotes'] += 1
|
||||
doc
|
||||
end
|
||||
end.should raise_error(RestClient::RequestFailed)
|
||||
end
|
||||
it "should not fail if update_limit is not reached" do
|
||||
limit = 5
|
||||
lambda do
|
||||
@db.update_doc @id do |doc|
|
||||
# same as the last spec except we're only forcing 5 conflicts
|
||||
if limit > 0
|
||||
conflicting_doc = @db.get @id
|
||||
conflicting_doc['upvotes'] += 1
|
||||
@db.save_doc conflicting_doc
|
||||
limit -= 1
|
||||
end
|
||||
doc['upvotes'] += 1
|
||||
doc
|
||||
end
|
||||
end.should_not raise_error
|
||||
@db.get(@id)['upvotes'].should == 16
|
||||
end
|
||||
end
|
||||
|
||||
describe "COPY existing document" do
|
||||
before :each do
|
||||
@r = @db.save_doc({'artist' => 'Zappa', 'title' => 'Muffin Man'})
|
||||
@docid = 'tracks/zappa/muffin-man'
|
||||
@doc = @db.get(@r['id'])
|
||||
end
|
||||
describe "to a new location" do
|
||||
it "should work" do
|
||||
@db.copy_doc @doc, @docid
|
||||
newdoc = @db.get(@docid)
|
||||
newdoc['artist'].should == 'Zappa'
|
||||
end
|
||||
it "should fail without an _id" do
|
||||
lambda{@db.copy_doc({"not"=>"a real doc"})}.should raise_error(ArgumentError)
|
||||
end
|
||||
end
|
||||
describe "to an existing location" do
|
||||
before :each do
|
||||
@db.save_doc({'_id' => @docid, 'will-exist' => 'here'})
|
||||
end
|
||||
it "should fail without a rev" do
|
||||
lambda{@db.copy_doc @doc, @docid}.should raise_error(RestClient::RequestFailed)
|
||||
end
|
||||
it "should succeed with a rev" do
|
||||
@to_be_overwritten = @db.get(@docid)
|
||||
@db.copy_doc @doc, "#{@docid}?rev=#{@to_be_overwritten['_rev']}"
|
||||
newdoc = @db.get(@docid)
|
||||
newdoc['artist'].should == 'Zappa'
|
||||
end
|
||||
it "should succeed given the doc to overwrite" do
|
||||
@to_be_overwritten = @db.get(@docid)
|
||||
@db.copy_doc @doc, @to_be_overwritten
|
||||
newdoc = @db.get(@docid)
|
||||
newdoc['artist'].should == 'Zappa'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
it "should list documents" do
|
||||
5.times do
|
||||
@db.save_doc({'another' => 'doc', 'will-exist' => 'anywhere'})
|
||||
end
|
||||
ds = @db.documents
|
||||
ds['rows'].should be_an_instance_of(Array)
|
||||
ds['rows'][0]['id'].should_not be_nil
|
||||
ds['total_rows'].should == 5
|
||||
end
|
||||
|
||||
describe "documents / _all_docs" do
|
||||
before(:each) do
|
||||
9.times do |i|
|
||||
@db.save_doc({'_id' => "doc#{i}",'another' => 'doc', 'will-exist' => 'here'})
|
||||
end
|
||||
end
|
||||
it "should list documents with keys and such" do
|
||||
ds = @db.documents
|
||||
ds['rows'].should be_an_instance_of(Array)
|
||||
ds['rows'][0]['id'].should == "doc0"
|
||||
ds['total_rows'].should == 9
|
||||
end
|
||||
it "should take query params" do
|
||||
ds = @db.documents(:startkey => 'doc0', :endkey => 'doc3')
|
||||
ds['rows'].length.should == 4
|
||||
ds = @db.documents(:key => 'doc0')
|
||||
ds['rows'].length.should == 1
|
||||
end
|
||||
it "should work with multi-key" do
|
||||
rs = @db.documents :keys => ["doc0", "doc7"]
|
||||
rs['rows'].length.should == 2
|
||||
end
|
||||
it "should work with include_docs" do
|
||||
ds = @db.documents(:startkey => 'doc0', :endkey => 'doc3', :include_docs => true)
|
||||
ds['rows'][0]['doc']['another'].should == "doc"
|
||||
end
|
||||
it "should have the bulk_load macro" do
|
||||
rs = @db.bulk_load ["doc0", "doc7"]
|
||||
rs['rows'].length.should == 2
|
||||
rs['rows'][0]['doc']['another'].should == "doc"
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
describe "compacting a database" do
|
||||
it "should compact the database" do
|
||||
db = @cr.database('couchrest-test')
|
||||
# r =
|
||||
db.compact!
|
||||
# r['ok'].should == true
|
||||
end
|
||||
end
|
||||
|
||||
describe "deleting a database" do
|
||||
it "should start with the test database" do
|
||||
@cr.databases.should include('couchrest-test')
|
||||
end
|
||||
it "should delete the database" do
|
||||
db = @cr.database('couchrest-test')
|
||||
# r =
|
||||
db.delete!
|
||||
# r['ok'].should == true
|
||||
@cr.databases.should_not include('couchrest-test')
|
||||
end
|
||||
end
|
||||
|
||||
describe "simply replicating a database" do
|
||||
before do
|
||||
@db.save_doc({'_id' => 'test_doc', 'some-value' => 'foo'})
|
||||
@other_db = @cr.database REPLICATIONDB
|
||||
@other_db.delete! rescue nil
|
||||
@other_db = @cr.create_db REPLICATIONDB
|
||||
end
|
||||
|
||||
describe "via pulling" do
|
||||
before do
|
||||
@other_db.replicate_from @db
|
||||
end
|
||||
|
||||
it "contains the document from the original database" do
|
||||
doc = @other_db.get('test_doc')
|
||||
doc['some-value'].should == 'foo'
|
||||
end
|
||||
end
|
||||
|
||||
describe "via pushing" do
|
||||
before do
|
||||
@db.replicate_to @other_db
|
||||
end
|
||||
|
||||
it "copies the document to the other database" do
|
||||
doc = @other_db.get('test_doc')
|
||||
doc['some-value'].should == 'foo'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "continuously replicating a database" do
|
||||
before do
|
||||
@db.save_doc({'_id' => 'test_doc', 'some-value' => 'foo'})
|
||||
@other_db = @cr.database REPLICATIONDB
|
||||
@other_db.delete! rescue nil
|
||||
@other_db = @cr.create_db REPLICATIONDB
|
||||
end
|
||||
|
||||
describe "via pulling" do
|
||||
before do
|
||||
@other_db.replicate_from @db, true
|
||||
end
|
||||
|
||||
it "contains the document from the original database" do
|
||||
sleep(1) # Allow some time to replicate
|
||||
doc = @other_db.get('test_doc')
|
||||
doc['some-value'].should == 'foo'
|
||||
end
|
||||
|
||||
it "contains documents saved after replication initiated" do
|
||||
@db.save_doc({'_id' => 'test_doc_after', 'some-value' => 'bar'})
|
||||
sleep(1) # Allow some time to replicate
|
||||
doc = @other_db.get('test_doc_after')
|
||||
doc['some-value'].should == 'bar'
|
||||
end
|
||||
end
|
||||
|
||||
describe "via pushing" do
|
||||
before do
|
||||
@db.replicate_to @other_db, true
|
||||
end
|
||||
|
||||
it "copies the document to the other database" do
|
||||
sleep(1) # Allow some time to replicate
|
||||
doc = @other_db.get('test_doc')
|
||||
doc['some-value'].should == 'foo'
|
||||
end
|
||||
|
||||
it "copies documents saved after replication initiated" do
|
||||
@db.save_doc({'_id' => 'test_doc_after', 'some-value' => 'bar'})
|
||||
sleep(1) # Allow some time to replicate
|
||||
doc = @other_db.get('test_doc_after')
|
||||
doc['some-value'].should == 'bar'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "creating a database" do
|
||||
before(:each) do
|
||||
@db = @cr.database('couchrest-test-db_to_create')
|
||||
@db.delete! if @cr.databases.include?('couchrest-test-db_to_create')
|
||||
end
|
||||
|
||||
it "should just work fine" do
|
||||
@cr.databases.should_not include('couchrest-test-db_to_create')
|
||||
@db.create!
|
||||
@cr.databases.should include('couchrest-test-db_to_create')
|
||||
end
|
||||
end
|
||||
|
||||
describe "recreating a database" do
|
||||
before(:each) do
|
||||
@db = @cr.database('couchrest-test-db_to_create')
|
||||
@db2 = @cr.database('couchrest-test-db_to_recreate')
|
||||
@cr.databases.include?(@db.name) ? nil : @db.create!
|
||||
@cr.databases.include?(@db2.name) ? @db2.delete! : nil
|
||||
end
|
||||
|
||||
it "should drop and recreate a database" do
|
||||
@cr.databases.should include(@db.name)
|
||||
@db.recreate!
|
||||
@cr.databases.should include(@db.name)
|
||||
end
|
||||
|
||||
it "should recreate a db even tho it doesn't exist" do
|
||||
@cr.databases.should_not include(@db2.name)
|
||||
@db2.recreate!
|
||||
@cr.databases.should include(@db2.name)
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
describe "searching a database" do
|
||||
before(:each) do
|
||||
search_function = { 'defaults' => {'store' => 'no', 'index' => 'analyzed_no_norms'},
|
||||
'index' => "function(doc) { ret = new Document(); ret.add(doc['name'], {'field':'name'}); ret.add(doc['age'], {'field':'age'}); return ret; }" }
|
||||
@db.save_doc({'_id' => '_design/search', 'fulltext' => {'people' => search_function}})
|
||||
@db.save_doc({'_id' => 'john', 'name' => 'John', 'age' => '31'})
|
||||
@db.save_doc({'_id' => 'jack', 'name' => 'Jack', 'age' => '32'})
|
||||
@db.save_doc({'_id' => 'dave', 'name' => 'Dave', 'age' => '33'})
|
||||
end
|
||||
|
||||
it "should be able to search a database using couchdb-lucene" do
|
||||
if couchdb_lucene_available?
|
||||
result = @db.search('search/people', :q => 'name:J*')
|
||||
doc_ids = result['rows'].collect{ |row| row['id'] }
|
||||
doc_ids.size.should == 2
|
||||
doc_ids.should include('john')
|
||||
doc_ids.should include('jack')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
end
|
|
@@ -1,138 +0,0 @@
|
|||
require File.expand_path("../../../spec_helper", __FILE__)
|
||||
|
||||
describe CouchRest::Design do
|
||||
|
||||
describe "defining a view" do
|
||||
it "should add a view to the design doc" do
|
||||
@des = CouchRest::Design.new
|
||||
method = @des.view_by :name
|
||||
method.should == "by_name"
|
||||
@des["views"]["by_name"].should_not be_nil
|
||||
end
|
||||
end
|
||||
|
||||
describe "with an unsaved view" do
|
||||
before(:each) do
|
||||
@des = CouchRest::Design.new
|
||||
@des.view_by :name
|
||||
end
|
||||
it "should accept a name" do
|
||||
@des.name = "mytest"
|
||||
@des.name.should == "mytest"
|
||||
end
|
||||
it "should not save on view definition" do
|
||||
@des.rev.should be_nil
|
||||
end
|
||||
it "should freak out on view access" do
|
||||
lambda{@des.view :by_name}.should raise_error
|
||||
end
|
||||
end
|
||||
|
||||
describe "saving" do
|
||||
before(:each) do
|
||||
@des = CouchRest::Design.new
|
||||
@des.view_by :name
|
||||
@des.database = reset_test_db!
|
||||
end
|
||||
it "should fail without a name" do
|
||||
lambda{@des.save}.should raise_error(ArgumentError)
|
||||
end
|
||||
it "should work with a name" do
|
||||
@des.name = "myview"
|
||||
@des.save
|
||||
end
|
||||
end
|
||||
|
||||
describe "when it's saved" do
|
||||
before(:each) do
|
||||
@db = reset_test_db!
|
||||
@db.bulk_save([{"name" => "x"},{"name" => "y"}])
|
||||
@des = CouchRest::Design.new
|
||||
@des.database = @db
|
||||
@des.view_by :name
|
||||
end
|
||||
it "should by queryable when it's saved" do
|
||||
@des.name = "mydesign"
|
||||
@des.save
|
||||
res = @des.view :by_name
|
||||
res["rows"][0]["key"].should == "x"
|
||||
end
|
||||
it "should be queryable on specified database" do
|
||||
@des.name = "mydesign"
|
||||
@des.save
|
||||
@des.database = nil
|
||||
res = @des.view_on @db, :by_name
|
||||
res["rows"][0]["key"].should == "x"
|
||||
end
|
||||
end
|
||||
|
||||
describe "from a saved document" do
|
||||
before(:each) do
|
||||
@db = reset_test_db!
|
||||
@db.save_doc({
|
||||
"_id" => "_design/test",
|
||||
"views" => {
|
||||
"by_name" => {
|
||||
"map" => "function(doc){if (doc.name) emit(doc.name, null)}"
|
||||
}
|
||||
}
|
||||
})
|
||||
@db.bulk_save([{"name" => "a"},{"name" => "b"}])
|
||||
@des = @db.get "_design/test"
|
||||
end
|
||||
it "should be a Design" do
|
||||
@des.should be_an_instance_of(CouchRest::Design)
|
||||
end
|
||||
it "should have a modifiable name" do
|
||||
@des.name.should == "test"
|
||||
@des.name = "supertest"
|
||||
@des.id.should == "_design/supertest"
|
||||
end
|
||||
it "should by queryable" do
|
||||
res = @des.view :by_name
|
||||
res["rows"][0]["key"].should == "a"
|
||||
end
|
||||
end
|
||||
|
||||
describe "a view with default options" do
|
||||
before(:all) do
|
||||
@db = reset_test_db!
|
||||
@des = CouchRest::Design.new
|
||||
@des.name = "test"
|
||||
@des.view_by :name, :descending => true
|
||||
@des.database = @db
|
||||
@des.save
|
||||
@db.bulk_save([{"name" => "a"},{"name" => "z"}])
|
||||
end
|
||||
it "should save them" do
|
||||
@d2 = @db.get(@des.id)
|
||||
@d2["views"]["by_name"]["couchrest-defaults"].should == {"descending"=>true}
|
||||
end
|
||||
it "should use them" do
|
||||
res = @des.view :by_name
|
||||
res["rows"].first["key"].should == "z"
|
||||
end
|
||||
it "should override them" do
|
||||
res = @des.view :by_name, :descending => false
|
||||
res["rows"].first["key"].should == "a"
|
||||
end
|
||||
end
|
||||
|
||||
describe "a view with multiple keys" do
|
||||
before(:all) do
|
||||
@db = reset_test_db!
|
||||
@des = CouchRest::Design.new
|
||||
@des.name = "test"
|
||||
@des.view_by :name, :age
|
||||
@des.database = @db
|
||||
@des.save
|
||||
@db.bulk_save([{"name" => "a", "age" => 2},
|
||||
{"name" => "a", "age" => 4},{"name" => "z", "age" => 9}])
|
||||
end
|
||||
it "should work" do
|
||||
res = @des.view :by_name_and_age
|
||||
res["rows"].first["key"].should == ["a",2]
|
||||
end
|
||||
end
|
||||
|
||||
end
|
|
@@ -1,275 +0,0 @@
|
|||
require File.expand_path("../../../spec_helper", __FILE__)
|
||||
|
||||
class Video < CouchRest::Document; end
|
||||
|
||||
describe CouchRest::Document do
|
||||
|
||||
before(:all) do
|
||||
@couch = CouchRest.new
|
||||
@db = @couch.database!(TESTDB)
|
||||
end
|
||||
|
||||
describe "[]=" do
|
||||
before(:each) do
|
||||
@doc = CouchRest::Document.new
|
||||
end
|
||||
it "should work" do
|
||||
@doc["enamel"].should == nil
|
||||
@doc["enamel"] = "Strong"
|
||||
@doc["enamel"].should == "Strong"
|
||||
end
|
||||
it "[]= should convert to string" do
|
||||
@doc["enamel"].should == nil
|
||||
@doc[:enamel] = "Strong"
|
||||
@doc["enamel"].should == "Strong"
|
||||
end
|
||||
it "should read as a string" do
|
||||
@doc[:enamel] = "Strong"
|
||||
@doc[:enamel].should == "Strong"
|
||||
end
|
||||
end
|
||||
|
||||
describe "default database" do
|
||||
before(:each) do
|
||||
Video.use_database nil
|
||||
end
|
||||
it "should be set using use_database on the model" do
|
||||
Video.new.database.should be_nil
|
||||
Video.use_database @db
|
||||
Video.new.database.should == @db
|
||||
Video.use_database nil
|
||||
end
|
||||
|
||||
it "should be overwritten by instance" do
|
||||
db = @couch.database('test')
|
||||
article = Video.new
|
||||
article.database.should be_nil
|
||||
article.database = db
|
||||
article.database.should_not be_nil
|
||||
article.database.should == db
|
||||
end
|
||||
end
|
||||
|
||||
describe "new" do
|
||||
before(:each) do
|
||||
@doc = CouchRest::Document.new("key" => [1,2,3], :more => "values")
|
||||
end
|
||||
it "should create itself from a Hash" do
|
||||
@doc["key"].should == [1,2,3]
|
||||
@doc["more"].should == "values"
|
||||
end
|
||||
it "should not have rev and id" do
|
||||
@doc.rev.should be_nil
|
||||
@doc.id.should be_nil
|
||||
end
|
||||
|
||||
it "should freak out when saving without a database" do
|
||||
lambda{@doc.save}.should raise_error(ArgumentError)
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
# move to database spec
|
||||
describe "saving using a database" do
|
||||
before(:all) do
|
||||
@doc = CouchRest::Document.new("key" => [1,2,3], :more => "values")
|
||||
@db = reset_test_db!
|
||||
@resp = @db.save_doc(@doc)
|
||||
end
|
||||
it "should apply the database" do
|
||||
@doc.database.should == @db
|
||||
end
|
||||
it "should get id and rev" do
|
||||
@doc.id.should == @resp["id"]
|
||||
@doc.rev.should == @resp["rev"]
|
||||
end
|
||||
it "should generate a correct URI" do
|
||||
@doc.uri.should == "#{@db.root}/#{@doc.id}"
|
||||
URI.parse(@doc.uri).to_s.should == @doc.uri
|
||||
end
|
||||
it "should generate a correct URI with revision" do
|
||||
@doc.uri(true).should == "#{@db.root}/#{@doc.id}?rev=#{@doc.rev}"
|
||||
URI.parse(@doc.uri(true)).to_s.should == @doc.uri(true)
|
||||
end
|
||||
end
|
||||
|
||||
describe "bulk saving" do
|
||||
before :all do
|
||||
@db = reset_test_db!
|
||||
end
|
||||
|
||||
it "should use the document bulk save cache" do
|
||||
doc = CouchRest::Document.new({"_id" => "bulkdoc", "val" => 3})
|
||||
doc.database = @db
|
||||
doc.save(true)
|
||||
lambda { doc.database.get(doc["_id"]) }.should raise_error(RestClient::ResourceNotFound)
|
||||
doc.database.bulk_save
|
||||
doc.database.get(doc["_id"])["val"].should == doc["val"]
|
||||
end
|
||||
end
|
||||
|
||||
describe "getting from a database" do
|
||||
before(:all) do
|
||||
@db = reset_test_db!
|
||||
@resp = @db.save_doc({
|
||||
"key" => "value"
|
||||
})
|
||||
@doc = @db.get @resp['id']
|
||||
end
|
||||
it "should return a document" do
|
||||
@doc.should be_an_instance_of(CouchRest::Document)
|
||||
end
|
||||
it "should have a database" do
|
||||
@doc.database.should == @db
|
||||
end
|
||||
it "should be saveable and resavable" do
|
||||
@doc["more"] = "keys"
|
||||
@doc.save
|
||||
@db.get(@resp['id'])["more"].should == "keys"
|
||||
@doc["more"] = "these keys"
|
||||
@doc.save
|
||||
@db.get(@resp['id'])["more"].should == "these keys"
|
||||
end
|
||||
end
|
||||
|
||||
describe "destroying a document from a db" do
|
||||
before(:all) do
|
||||
@db = reset_test_db!
|
||||
@resp = @db.save_doc({
|
||||
"key" => "value"
|
||||
})
|
||||
@doc = @db.get @resp['id']
|
||||
end
|
||||
it "should make it disappear" do
|
||||
@doc.destroy
|
||||
lambda{@db.get @resp['id']}.should raise_error
|
||||
end
|
||||
it "should error when there's no db" do
|
||||
@doc = CouchRest::Document.new("key" => [1,2,3], :more => "values")
|
||||
lambda{@doc.destroy}.should raise_error(ArgumentError)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
describe "destroying a document from a db using bulk save" do
|
||||
before(:all) do
|
||||
@db = reset_test_db!
|
||||
@resp = @db.save_doc({
|
||||
"key" => "value"
|
||||
})
|
||||
@doc = @db.get @resp['id']
|
||||
end
|
||||
it "should defer actual deletion" do
|
||||
@doc.destroy(true)
|
||||
@doc['_id'].should == nil
|
||||
@doc['_rev'].should == nil
|
||||
lambda{@db.get @resp['id']}.should_not raise_error
|
||||
@db.bulk_save
|
||||
lambda{@db.get @resp['id']}.should raise_error
|
||||
end
|
||||
end
|
||||
|
||||
describe "copying a document" do
|
||||
before :each do
|
||||
@db = reset_test_db!
|
||||
@resp = @db.save_doc({'key' => 'value'})
|
||||
@docid = 'new-location'
|
||||
@doc = @db.get(@resp['id'])
|
||||
end
|
||||
describe "to a new location" do
|
||||
it "should work" do
|
||||
@doc.copy @docid
|
||||
newdoc = @db.get(@docid)
|
||||
newdoc['key'].should == 'value'
|
||||
end
|
||||
it "should fail without a database" do
|
||||
lambda{CouchRest::Document.new({"not"=>"a real doc"}).copy}.should raise_error(ArgumentError)
|
||||
end
|
||||
end
|
||||
describe "to an existing location" do
|
||||
before :each do
|
||||
@db.save_doc({'_id' => @docid, 'will-exist' => 'here'})
|
||||
end
|
||||
it "should fail without a rev" do
|
||||
lambda{@doc.copy @docid}.should raise_error(RestClient::RequestFailed)
|
||||
end
|
||||
it "should succeed with a rev" do
|
||||
@to_be_overwritten = @db.get(@docid)
|
||||
@doc.copy "#{@docid}?rev=#{@to_be_overwritten['_rev']}"
|
||||
newdoc = @db.get(@docid)
|
||||
newdoc['key'].should == 'value'
|
||||
end
|
||||
it "should succeed given the doc to overwrite" do
|
||||
@to_be_overwritten = @db.get(@docid)
|
||||
@doc.copy @to_be_overwritten
|
||||
newdoc = @db.get(@docid)
|
||||
newdoc['key'].should == 'value'
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "dealing with attachments" do
|
||||
before do
|
||||
@db = reset_test_db!
|
||||
@attach = "<html><head><title>My Doc</title></head><body><p>Has words.</p></body></html>"
|
||||
response = @db.save_doc({'key' => 'value'})
|
||||
@doc = @db.get(response['id'])
|
||||
end
|
||||
|
||||
def append_attachment(name='test.html', attach=@attach)
|
||||
@doc['_attachments'] ||= {}
|
||||
@doc['_attachments'][name] = {
|
||||
'type' => 'text/html',
|
||||
'data' => attach
|
||||
}
|
||||
@doc.save
|
||||
@rev = @doc['_rev']
|
||||
end
|
||||
|
||||
describe "PUTing an attachment directly to the doc" do
|
||||
before do
|
||||
@doc.put_attachment('test.html', @attach)
|
||||
end
|
||||
|
||||
it "is there" do
|
||||
@db.fetch_attachment(@doc, 'test.html').should == @attach
|
||||
end
|
||||
|
||||
it "updates the revision" do
|
||||
@doc['_rev'].should_not == @rev
|
||||
end
|
||||
|
||||
it "updates attachments" do
|
||||
@attach2 = "<html><head><title>My Doc</title></head><body><p>Is Different.</p></body></html>"
|
||||
@doc.put_attachment('test.html', @attach2)
|
||||
@db.fetch_attachment(@doc, 'test.html').should == @attach2
|
||||
end
|
||||
end
|
||||
|
||||
describe "fetching an attachment from a doc directly" do
|
||||
before do
|
||||
append_attachment
|
||||
end
|
||||
|
||||
it "pulls the attachment" do
|
||||
@doc.fetch_attachment('test.html').should == @attach
|
||||
end
|
||||
end
|
||||
|
||||
describe "deleting an attachment from a doc directly" do
|
||||
before do
|
||||
append_attachment
|
||||
@doc.delete_attachment('test.html')
|
||||
end
|
||||
|
||||
it "removes it" do
|
||||
lambda { @db.fetch_attachment(@doc, 'test.html') }.should raise_error(RestClient::ResourceNotFound)
|
||||
end
|
||||
|
||||
it "updates the revision" do
|
||||
@doc['_rev'].should_not == @rev
|
||||
end
|
||||
end
|
||||
|
||||
end
|
|
@@ -1,35 +0,0 @@
|
|||
require File.expand_path("../../../spec_helper", __FILE__)
|
||||
|
||||
describe CouchRest::Server do
|
||||
|
||||
describe "available databases" do
|
||||
before(:each) do
|
||||
@couch = CouchRest::Server.new
|
||||
end
|
||||
|
||||
after(:each) do
|
||||
@couch.available_databases.each do |ref, db|
|
||||
db.delete!
|
||||
end
|
||||
end
|
||||
|
||||
it "should let you add more databases" do
|
||||
@couch.available_databases.should be_empty
|
||||
@couch.define_available_database(:default, "cr-server-test-db")
|
||||
@couch.available_databases.keys.should include(:default)
|
||||
end
|
||||
|
||||
it "should verify that a database is available" do
|
||||
@couch.define_available_database(:default, "cr-server-test-db")
|
||||
@couch.available_database?(:default).should be_true
|
||||
@couch.available_database?("cr-server-test-db").should be_true
|
||||
@couch.available_database?(:matt).should be_false
|
||||
end
|
||||
|
||||
it "should let you set a default database" do
|
||||
@couch.default_database = 'cr-server-test-default-db'
|
||||
@couch.available_database?(:default).should be_true
|
||||
end
|
||||
end
|
||||
|
||||
end
|
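The CouchRest::Server database registry removed in this hunk was driven roughly like this (a sketch; the database names are arbitrary):

    couch = CouchRest::Server.new                      # defaults to the local CouchDB
    couch.define_available_database(:default, "cr-server-test-db")
    couch.available_database?(:default)                # => true
    couch.default_database = "cr-server-test-db"       # also registers it as :default
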
@@ -1,4 +1,4 @@
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../../spec_helper', __FILE__)

describe "ExtendedDocument attachments" do

@@ -1,4 +1,4 @@
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../../spec_helper', __FILE__)

begin
  require 'rubygems' unless ENV['SKIP_RUBYGEMS']

@@ -1,6 +1,6 @@
# encoding: utf-8

require File.expand_path("../../../spec_helper", __FILE__)
require File.expand_path("../../spec_helper", __FILE__)
require File.join(FIXTURE_PATH, 'more', 'article')
require File.join(FIXTURE_PATH, 'more', 'course')
require File.join(FIXTURE_PATH, 'more', 'card')

@@ -1,4 +1,4 @@
require File.expand_path("../../../spec_helper", __FILE__)
require File.expand_path("../../spec_helper", __FILE__)
require File.join(FIXTURE_PATH, 'more', 'person')
require File.join(FIXTURE_PATH, 'more', 'card')
require File.join(FIXTURE_PATH, 'more', 'course')

@@ -1,4 +1,4 @@
require File.expand_path("../../../spec_helper", __FILE__)
require File.expand_path("../../spec_helper", __FILE__)
require File.join(FIXTURE_PATH, 'more', 'article')
require File.join(FIXTURE_PATH, 'more', 'course')

@@ -1,122 +0,0 @@
require File.expand_path("../../../spec_helper", __FILE__)

describe CouchRest::Pager do
  before(:all) do
    @cr = CouchRest.new(COUCHHOST)
    @db = @cr.database(TESTDB)
    @db.delete! rescue nil
    @db = @cr.create_db(TESTDB) rescue nil
    @pager = CouchRest::Pager.new(@db)
  end

  after(:all) do
    begin
      @db.delete!
    rescue RestClient::Request::RequestFailed
    end
  end

  it "should store the db" do
    @pager.db.should == @db
  end

  describe "paging all docs" do
    before(:all) do
      @docs = []
      100.times do |i|
        @docs << ({:number => (i % 10)})
      end
      @db.bulk_save(@docs)
    end
    it "should yield total_docs / limit times" do
      n = 0
      @pager.all_docs(10) do |doc|
        n += 1
      end
      n.should == 10
    end
    it "should yield each docrow group without duplicate docs" do
      docids = {}
      @pager.all_docs(10) do |docrows|
        docrows.each do |row|
          docids[row['id']].should be_nil
          docids[row['id']] = true
        end
      end
      docids.keys.length.should == 100
    end
    it "should yield each docrow group" do
      @pager.all_docs(10) do |docrows|
        doc = @db.get(docrows[0]['id'])
        doc['number'].class.should == Fixnum
      end
    end
  end

  describe "Pager with a view and docs" do
    before(:all) do
      @docs = []
      100.times do |i|
        @docs << ({:number => (i % 10)})
      end
      @db.bulk_save(@docs)
      @db.save_doc({
        '_id' => '_design/magic',
        'views' => {
          'number' => {
            'map' => 'function(doc){emit(doc.number,null)}'
          }
        }
      })
    end

    it "should have docs" do
      @docs.length.should == 100
      @db.documents['rows'].length.should == 101
    end

    it "should have a view" do
      @db.view('magic/number', :limit => 10)['rows'][0]['key'].should == 0
    end

    it "should yield once per key" do
      results = {}
      @pager.key_reduce('magic/number', 20) do |k,vs|
        results[k] = vs.length
      end
      results[0].should == 10
      results[3].should == 10
    end

    it "with a small step size should yield once per key" do
      results = {}
      @pager.key_reduce('magic/number', 7) do |k,vs|
        results[k] = vs.length
      end
      results[0].should == 10
      results[3].should == 10
      results[9].should == 10
    end
    it "with a large step size should yield once per key" do
      results = {}
      @pager.key_reduce('magic/number', 1000) do |k,vs|
        results[k] = vs.length
      end
      results[0].should == 10
      results[3].should == 10
      results[9].should == 10
    end
    it "with a begin and end should only yield in the range (and leave out the lastkey)" do
      results = {}
      @pager.key_reduce('magic/number', 1000, 4, 7) do |k,vs|
        results[k] = vs.length
      end
      results[0].should be_nil
      results[4].should == 10
      results[6].should == 10
      results[7].should be_nil
      results[8].should be_nil
      results[9].should be_nil
    end
  end
end

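For context, the Pager removed in this commit was used along these lines (a sketch; `db` is an open CouchRest database handle and the view name follows the spec above):

    pager = CouchRest::Pager.new(db)

    # walk _all_docs in batches of 10 rows
    pager.all_docs(10) do |rows|
      rows.each { |row| puts row['id'] }
    end

    # group view rows by key, paging through the view 1000 rows at a time
    pager.key_reduce('magic/number', 1000) do |key, values|
      puts "#{key}: #{values.length} rows"
    end
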
@@ -1,52 +0,0 @@
require File.expand_path("../../../spec_helper", __FILE__)

describe CouchRest::Streamer do
  before(:all) do
    @cr = CouchRest.new(COUCHHOST)
    @db = @cr.database(TESTDB)
    @db.delete! rescue nil
    @db = @cr.create_db(TESTDB) rescue nil
    @streamer = CouchRest::Streamer.new(@db)
    @docs = (1..1000).collect{|i| {:integer => i, :string => i.to_s}}
    @db.bulk_save(@docs)
    @db.save_doc({
      "_id" => "_design/first",
      :views => {
        :test => {
          :map => "function(doc){for(var w in doc){ if(!w.match(/^_/))emit(w,doc[w])}}"
        }
      }
    })
  end

  it "should yield each row in a view" do
    count = 0
    sum = 0
    @streamer.view("_all_docs") do |row|
      count += 1
    end
    count.should == 1001
  end

  it "should accept several params" do
    count = 0
    @streamer.view("_design/first/_view/test", :include_docs => true, :limit => 5) do |row|
      count += 1
    end
    count.should == 5
  end

  it "should accept both view formats" do
    count = 0
    @streamer.view("_design/first/_view/test") do |row|
      count += 1
    end
    count.should == 2000
    count = 0
    @streamer.view("first/test") do |row|
      count += 1
    end
    count.should == 2000
  end

end

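The Streamer, also removed here, yields view rows one at a time rather than loading the whole result set; a minimal sketch based on the spec above (`db` again being an open database handle):

    streamer = CouchRest::Streamer.new(db)

    streamer.view("_all_docs") do |row|
      puts row['id']      # rows are yielded as they stream in
    end

    streamer.view("first/test", :limit => 5) { |row| puts row['key'] }
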
@@ -1,5 +1,5 @@
# encoding: utf-8
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../../spec_helper', __FILE__)
require File.join(FIXTURE_PATH, 'more', 'person')
require File.join(FIXTURE_PATH, 'more', 'card')
require File.join(FIXTURE_PATH, 'more', 'invoice')

4
spec/fixtures/more/cat.rb
vendored
@@ -15,8 +15,8 @@ class Cat < CouchRest::ExtendedDocument
  use_database DB

  property :name, :accessible => true
  property :toys, :cast_as => ['CatToy'], :default => [], :accessible => true
  property :favorite_toy, :cast_as => 'CatToy', :accessible => true
  property :toys, :cast_as => [CatToy], :default => [], :accessible => true
  property :favorite_toy, :cast_as => CatToy, :accessible => true
  property :number
end

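The fixture change above swaps :cast_as from class-name strings to the class constants themselves. Either way, casting means embedded hashes come back as CatToy instances when the document is built; a rough sketch (attribute values are invented):

    cat = Cat.new(:name => 'Felix', :toys => [{:name => 'mouse'}])
    cat.toys.first          # expected to be a CatToy, cast from the raw hash
    cat.favorite_toy        # => nil until assigned
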
@@ -1,7 +1,7 @@
require "rubygems"
require "spec" # Satisfies Autotest and anyone else not using the Rake tasks

require File.join(File.dirname(__FILE__), '..','lib','couchrest')
require File.join(File.dirname(__FILE__), '..','lib','couchrest','extended_document')
# check the following file to see how to use the spec'd features.

unless defined?(FIXTURE_PATH)