move files around a lot today
This commit is contained in: parent e411207b79 · commit ac07c15c28
@@ -1,5 +0,0 @@
require File.join(File.dirname(__FILE__), "..", "couchrest")

%w(push generate).each do |filename|
  require File.join(File.dirname(__FILE__), "commands", filename)
end
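For context, a minimal sketch of how a loader like the one above might be driven. The actual couchview executable is not shown in this commit, so the ARGV handling below is an assumption; the option keys (:directory, :trailing_args, :loud) are the ones consumed by the Generate and Push modules in the hunks that follow.

#!/usr/bin/env ruby
# Hypothetical dispatcher sketch, not part of this commit.
require File.join(File.dirname(__FILE__), "..", "couchrest")

command = ARGV.shift              # expected to be "push" or "generate"
options = {
  :directory     => ARGV.shift,   # directory holding the view files
  :trailing_args => ARGV.dup,     # design names (generate) or a db name (push)
  :loud          => true
}

case command
when "generate" then CouchRest::Commands::Generate.run(options)
when "push"     then CouchRest::Commands::Push.run(options)
else puts CouchRest::Commands::Push.help
end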
@@ -1,71 +0,0 @@
require 'fileutils'

class CouchRest
  module Commands
    module Generate

      def self.run(options)
        directory = options[:directory]
        design_names = options[:trailing_args]

        FileUtils.mkdir_p(directory)
        filename = File.join(directory, "lib.js")
        self.write(filename, <<-FUNC)
          // Put global functions here.
          // Include in your views with
          //
          // //include-lib
        FUNC

        design_names.each do |design_name|
          subdirectory = File.join(directory, design_name)
          FileUtils.mkdir_p(subdirectory)
          filename = File.join(subdirectory, "sample-map.js")
          self.write(filename, <<-FUNC)
            function(doc) {
              // Keys is first letter of _id
              emit(doc._id[0], doc);
            }
          FUNC

          filename = File.join(subdirectory, "sample-reduce.js")
          self.write(filename, <<-FUNC)
            function(keys, values) {
              // Count the number of keys starting with this letter
              return values.length;
            }
          FUNC

          filename = File.join(subdirectory, "lib.js")
          self.write(filename, <<-FUNC)
            // Put functions specific to '#{design_name}' here.
            // Include in your views with
            //
            // //include-lib
          FUNC
        end
      end

      def self.help
        helpstring = <<-GEN

        Usage: couchview generate directory design1 design2 design3 ...

        Couchview will create directories and example views for the design documents you specify.

        GEN
        helpstring.gsub(/^ /, '')
      end

      def self.write(filename, contents)
        puts "Writing #{filename}"
        File.open(filename, "w") do |f|
          # Remove leading spaces
          contents.gsub!(/^ ( )?/, '')
          f.write contents
        end
      end

    end
  end
end
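A usage sketch, not from this commit: it assumes the Generate module above has been loaded and uses a hypothetical "views" output directory with two example design names.

# Generate skeleton view files for two design documents.
CouchRest::Commands::Generate.run(
  :directory     => "views",
  :trailing_args => ["posts", "comments"]
)
# Per the code above, this writes:
#   views/lib.js
#   views/posts/sample-map.js, views/posts/sample-reduce.js, views/posts/lib.js
#   views/comments/sample-map.js, views/comments/sample-reduce.js, views/comments/lib.js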
@@ -1,99 +0,0 @@
class CouchRest

  module Commands

    module Push

      def self.run(options)
        directory = options[:directory]
        database = options[:trailing_args].first

        fm = CouchRest::FileManager.new(database)
        fm.loud = options[:loud]
        puts "Pushing views from directory #{directory} to database #{fm.db}"
        fm.push_views(directory)
      end

      def self.help
        helpstring = <<-GEN

        == Pushing views with Couchview ==

        Usage: couchview push directory dbname

        Couchview expects a specific filesystem layout for your CouchDB views (see
        example below). It also supports advanced features like inlining of library
        code (so you can keep DRY) as well as avoiding unnecessary document
        modification.

        Couchview also solves a problem with CouchDB's view API, which only provides
        access to the final reduce side of any views which have both a map and a
        reduce function defined. The intermediate map results are often useful for
        development and production. CouchDB is smart enough to reuse map indexes for
        functions duplicated across views within the same design document.

        For views with a reduce function defined, Couchview creates both a reduce view
        and a map-only view, so that you can browse and query the map side as well as
        the reduction, with no performance penalty.

        == Example ==

        couchview push foo-project/bar-views baz-database

        This will push the views defined in foo-project/bar-views into a database
        called baz-database. Couchview expects the views to be defined in files with
        names like:

        foo-project/bar-views/my-design/viewname-map.js
        foo-project/bar-views/my-design/viewname-reduce.js
        foo-project/bar-views/my-design/noreduce-map.js

        Pushed to => http://localhost:5984/baz-database/_design/my-design

        And the design document:
        {
          "views" : {
            "viewname-map" : {
              "map" : "### contents of view-name-map.js ###"
            },
            "viewname-reduce" : {
              "map" : "### contents of view-name-map.js ###",
              "reduce" : "### contents of view-name-reduce.js ###"
            },
            "noreduce-map" : {
              "map" : "### contents of noreduce-map.js ###"
            }
          }
        }

        Couchview will create a design document for each subdirectory of the views
        directory specified on the command line.

        == Library Inlining ==

        Couchview can optionally inline library code into your views so you only have
        to maintain it in one place. It looks for any files named lib.* in your
        design-doc directory (for doc specific libs) and in the parent views directory
        (for project global libs). These libraries are only inserted into views which
        include the text

        //include-lib

        or

        #include-lib

        Couchview is a result of scratching my own itch. I'd be happy to make it more
        general, so please contact me at jchris@grabb.it if you'd like to see anything
        added or changed.

        GEN
        helpstring.gsub(/^ /, '')
      end

    end


  end

end
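A sketch of the push flow described in the help text above, assuming the Push and FileManager classes are loaded; the directory and database names are the hypothetical ones from the example.

# Programmatic equivalent of: couchview push foo-project/bar-views baz-database
CouchRest::Commands::Push.run(
  :directory     => "foo-project/bar-views",
  :trailing_args => ["baz-database"],
  :loud          => true
)
# Each subdirectory of bar-views becomes one _design/<subdirectory> document,
# with <view>-map.js and <view>-reduce.js files mapped to view functions.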
@@ -6,8 +6,8 @@ $:.unshift File.expand_path(File.dirname(__FILE__))

 require 'monkeypatches'
-require 'lib/server'
-require 'lib/database'
+require 'couchrest/server'
+require 'couchrest/database'

 module CouchRest
@@ -1,223 +0,0 @@
require 'digest/md5'

class CouchRest
  class FileManager
    attr_reader :db
    attr_accessor :loud

    LANGS = {"rb" => "ruby", "js" => "javascript"}
    MIMES = {
      "html" => "text/html",
      "htm"  => "text/html",
      "png"  => "image/png",
      "css"  => "text/css",
      "js"   => "test/javascript"
    }
    def initialize(dbname, host="http://localhost:5984")
      @db = CouchRest.new(host).database(dbname)
    end

    def push_directory(push_dir, docid=nil)
      docid ||= push_dir.split('/').reverse.find{|part|!part.empty?}

      pushfiles = Dir["#{push_dir}/**/*.*"].collect do |f|
        {f.split("#{push_dir}/").last => open(f).read}
      end

      return if pushfiles.empty?

      @attachments = {}
      @signatures = {}
      pushfiles.each do |file|
        name = file.keys.first
        value = file.values.first
        @signatures[name] = md5(value)

        @attachments[name] = {
          "data" => value,
          "content_type" => MIMES[name.split('.').last]
        }
      end

      doc = @db.get(docid) rescue nil

      unless doc
        say "creating #{docid}"
        @db.save({"_id" => docid, "_attachments" => @attachments, "signatures" => @signatures})
        return
      end

      # remove deleted docs
      to_be_removed = doc["signatures"].keys.select do |d|
        !pushfiles.collect{|p| p.keys.first}.include?(d)
      end

      to_be_removed.each do |p|
        say "deleting #{p}"
        doc["signatures"].delete(p)
        doc["_attachments"].delete(p)
      end

      # update existing docs:
      doc["signatures"].each do |path, sig|
        if (@signatures[path] == sig)
          say "no change to #{path}. skipping..."
        else
          say "replacing #{path}"
          doc["signatures"][path] = md5(@attachments[path]["data"])
          doc["_attachments"][path].delete("stub")
          doc["_attachments"][path].delete("length")
          doc["_attachments"][path]["data"] = @attachments[path]["data"]
          doc["_attachments"][path].merge!({"data" => @attachments[path]["data"]} )

        end
      end

      # add in new files
      new_files = pushfiles.select{|d| !doc["signatures"].keys.include?( d.keys.first) }

      new_files.each do |f|
        say "creating #{f}"
        path = f.keys.first
        content = f.values.first
        doc["signatures"][path] = md5(content)

        doc["_attachments"][path] = {
          "data" => content,
          "content_type" => MIMES[path.split('.').last]
        }
      end

      begin
        @db.save(doc)
      rescue Exception => e
        say e.message
      end
    end

    def push_views(view_dir)
      designs = {}

      Dir["#{view_dir}/**/*.*"].each do |design_doc|
        design_doc_parts = design_doc.split('/')
        next if /^lib\..*$/.match design_doc_parts.last
        pre_normalized_view_name = design_doc_parts.last.split("-")
        view_name = pre_normalized_view_name[0..pre_normalized_view_name.length-2].join("-")

        folder = design_doc_parts[-2]

        designs[folder] ||= {}
        designs[folder]["views"] ||= {}
        design_lang = design_doc_parts.last.split(".").last
        designs[folder]["language"] ||= LANGS[design_lang]

        libs = ""
        Dir["#{view_dir}/lib.#{design_lang}"].collect do |global_lib|
          libs << open(global_lib).read
          libs << "\n"
        end
        Dir["#{view_dir}/#{folder}/lib.#{design_lang}"].collect do |global_lib|
          libs << open(global_lib).read
          libs << "\n"
        end
        if design_doc_parts.last =~ /-map/
          designs[folder]["views"]["#{view_name}-map"] ||= {}

          designs[folder]["views"]["#{view_name}-map"]["map"] = read(design_doc, libs)

          designs[folder]["views"]["#{view_name}-reduce"] ||= {}
          designs[folder]["views"]["#{view_name}-reduce"]["map"] = read(design_doc, libs)
        end

        if design_doc_parts.last =~ /-reduce/
          designs[folder]["views"]["#{view_name}-reduce"] ||= {}

          designs[folder]["views"]["#{view_name}-reduce"]["reduce"] = read(design_doc, libs)
        end
      end

      # cleanup empty maps and reduces
      designs.each do |name, props|
        props["views"].each do |view, funcs|
          next unless view.include?("reduce")
          props["views"].delete(view) unless funcs.keys.include?("reduce")
        end
      end

      designs.each do |k,v|
        create_or_update("_design/#{k}", v)
      end

      designs
    end

    def pull_views(view_dir)
      prefix = "_design"
      ds = db.documents(:startkey => '#{prefix}/', :endkey => '#{prefix}/ZZZZZZZZZ')
      ds['rows'].collect{|r|r['id']}.each do |id|
        puts directory = id.split('/').last
        FileUtils.mkdir_p(File.join(view_dir,directory))
        views = db.get(id)['views']

        vgroups = views.keys.group_by{|k|k.sub(/\-(map|reduce)$/,'')}
        vgroups.each do |g,vs|
          mapname = vs.find {|v|views[v]["map"]}
          if mapname
            # save map
            mapfunc = views[mapname]["map"]
            mapfile = File.join(view_dir, directory, "#{g}-map.js") # todo support non-js views
            File.open(mapfile,'w') do |f|
              f.write mapfunc
            end
          end

          reducename = vs.find {|v|views[v]["reduce"]}
          if reducename
            # save reduce
            reducefunc = views[reducename]["reduce"]
            reducefile = File.join(view_dir, directory, "#{g}-reduce.js") # todo support non-js views
            File.open(reducefile,'w') do |f|
              f.write reducefunc
            end
          end
        end
      end

    end


    private

    def say words
      puts words if @loud
    end

    def md5 string
      Digest::MD5.hexdigest(string)
    end

    def read(file, libs=nil)
      st = open(file).read
      st.sub!(/(\/\/|#)include-lib/,libs) if libs
      st
    end

    def create_or_update(id, fields)
      existing = @db.get(id) rescue nil

      if existing
        updated = fields.merge({"_id" => id, "_rev" => existing["_rev"]})
        if existing != updated
          say "replacing #{id}"
          db.save(updated)
        else
          say "skipping #{id}"
        end
      else
        say "creating #{id}"
        db.save(fields.merge({"_id" => id}))
      end

    end
  end
end
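A direct-use sketch for FileManager, assuming a local CouchDB and a hypothetical "my-db" database; push_directory attaches every file under the given directory and records an MD5 signature per attachment, so unchanged files are skipped on the next push.

fm = CouchRest::FileManager.new("my-db")
fm.loud = true
fm.push_directory("public/images")      # doc id defaults to the last path part, "images"
fm.push_views("foo-project/bar-views")  # builds one _design doc per subdirectory
fm.pull_views("backup-of-views")        # writes <design>/<view>-map.js and -reduce.js files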
@@ -1,103 +0,0 @@
class CouchRest
  class Pager
    attr_accessor :db
    def initialize db
      @db = db
    end

    def all_docs(count=100, &block)
      startkey = nil
      oldend = nil

      while docrows = request_all_docs(count+1, startkey)
        startkey = docrows.last['key']
        docrows.pop if docrows.length > count
        if oldend == startkey
          break
        end
        yield(docrows)
        oldend = startkey
      end
    end

    def key_reduce(view, count, firstkey = nil, lastkey = nil, &block)
      # start with no keys
      startkey = firstkey
      # lastprocessedkey = nil
      keepgoing = true

      while keepgoing && viewrows = request_view(view, count, startkey)
        startkey = viewrows.first['key']
        endkey = viewrows.last['key']

        if (startkey == endkey)
          # we need to rerequest to get a bigger page
          # so we know we have all the rows for that key
          viewrows = @db.view(view, :key => startkey)['rows']
          # we need to do an offset thing to find the next startkey
          # otherwise we just get stuck
          lastdocid = viewrows.last['id']
          fornextloop = @db.view(view, :startkey => startkey, :startkey_docid => lastdocid, :count => 2)['rows']

          newendkey = fornextloop.last['key']
          if (newendkey == endkey)
            keepgoing = false
          else
            startkey = newendkey
          end
          rows = viewrows
        else
          rows = []
          for r in viewrows
            if (lastkey && r['key'] == lastkey)
              keepgoing = false
              break
            end
            break if (r['key'] == endkey)
            rows << r
          end
          startkey = endkey
        end

        key = :begin
        values = []

        rows.each do |r|
          if key != r['key']
            # we're on a new key, yield the old first and then reset
            yield(key, values) if key != :begin
            key = r['key']
            values = []
          end
          # keep accumulating
          values << r['value']
        end
        yield(key, values)

      end
    end

    private

    def request_all_docs count, startkey = nil
      opts = {}
      opts[:count] = count if count
      opts[:startkey] = startkey if startkey
      results = @db.documents(opts)
      rows = results['rows']
      rows unless rows.length == 0
    end

    def request_view view, count = nil, startkey = nil, endkey = nil
      opts = {}
      opts[:count] = count if count
      opts[:startkey] = startkey if startkey
      opts[:endkey] = endkey if endkey

      results = @db.view(view, opts)
      rows = results['rows']
      rows unless rows.length == 0
    end

  end
end
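A usage sketch for Pager (the database and view names here are hypothetical): key_reduce pages through a map view in chunks, yielding each distinct key together with all of its values so a reduction can be computed on the client.

db    = CouchRest.new("http://localhost:5984").database("my-db")
pager = CouchRest::Pager.new(db)

# Walk the view 1000 rows at a time, grouped by key.
pager.key_reduce('word_index/words', 1000) do |key, values|
  puts "#{key}: #{values.length} occurrences"
end

# all_docs pages through every document id in the database.
pager.all_docs(100) do |rows|
  rows.each { |r| puts r['id'] }
end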
@@ -1,29 +0,0 @@
class CouchRest
  class Streamer
    attr_accessor :db
    def initialize db
      @db = db
    end

    def view name, params = nil
      urlst = /^_/.match(name) ? "#{@db.root}/#{name}" : "#{@db.root}/_view/#{name}"
      url = CouchRest.paramify_url urlst, params
      IO.popen("curl --silent #{url}") do |view|
        view.gets # discard header
        while row = parse_line(view.gets)
          yield row
        end
      end
    end

    private

    def parse_line line
      return nil unless line
      if /(\{.*\}),?/.match(line.chomp)
        JSON.parse($1)
      end
    end

  end
end
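A usage sketch for Streamer, assuming the CouchRest classes are loaded and curl is on the PATH (the implementation shells out to it): rows are parsed one line at a time instead of loading the whole view response into memory.

db       = CouchRest.new("http://localhost:5984").database("my-db")
streamer = CouchRest::Streamer.new(db)

# Stream every row of _all_docs; each yielded row is a parsed JSON hash.
streamer.view("_all_docs") do |row|
  puts row["id"]
end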
@@ -1,22 +0,0 @@

# this file must be loaded after the JSON gem

class Time
  # this date format sorts lexicographically
  # and is compatible with Javascript's new Date(time_string) constructor
  # note that sorting will break if you store times from multiple timezones
  # I like to add a ENV['TZ'] = 'UTC' to my apps
  def to_json(options = nil)
    %("#{strftime("%Y/%m/%d %H:%M:%S %z")}")
  end

  # this works to decode the outputted time format
  # copied from ActiveSupport
  # def self.parse string, fallback=nil
  #   d = DateTime.parse(string).new_offset
  #   self.utc(d.year, d.month, d.day, d.hour, d.min, d.sec)
  # rescue
  #   fallback
  # end
end
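An illustration of the format produced by the Time#to_json patch above, assuming the patched file has been loaded (couchrest requires 'monkeypatches', as shown in the hunk further up); the sample time is hypothetical.

ENV['TZ'] = 'UTC'                 # as the comment above suggests
t = Time.utc(2008, 6, 14, 12, 30, 5)
puts t.to_json                    # prints "2008/06/14 12:30:05 +0000", quotes included
# The string sorts lexicographically in time order, and JavaScript can read it
# back with new Date("2008/06/14 12:30:05 +0000").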