2008-09-12 06:09:39 +02:00
|
|
|
require 'digest/md5'
|
|
|
|
|
2008-09-12 06:14:34 +02:00
|
|
|
module CouchRest
# FileManager syncs a directory tree of CouchApp-style files
# (views, forms, _attachments) with design documents in a CouchDB
# database reachable through CouchRest.
class FileManager
# The CouchRest database handle every push operates on.
attr_reader :db
# When truthy, progress messages are printed to stdout (see #say).
attr_accessor :loud

# Maps source-file extensions to CouchDB view-language names.
LANGS = {"rb" => "ruby", "js" => "javascript"}
# Maps attachment file extensions to the MIME content type stored on
# the attachment. Extensions missing from this table yield a nil
# content_type.
MIMES = {
  "html" => "text/html",
  "htm"  => "text/html",
  "png"  => "image/png",
  "gif"  => "image/gif",
  "css"  => "text/css",
  # Bug fix: was "test/javascript" — a typo for the "text/javascript"
  # media type, which gave every pushed .js attachment a bogus type.
  "js"   => "text/javascript",
  "txt"  => "text/plain"
}
|
|
|
|
|
2008-12-14 12:05:02 +01:00
|
|
|
# Connect to +dbname+ on +host+ (defaults to a local CouchDB) and
# keep the resulting database handle in @db.
def initialize(dbname, host="http://127.0.0.1:5984")
  server = CouchRest.new(host)
  @db = server.database(dbname)
end
|
|
|
|
|
2009-01-02 06:22:14 +01:00
|
|
|
# Assemble the application under +appdir+ into the design document
# "_design/<appname>" and save it, then push everything under
# appdir/_attachments as attachments on that document.
#
# Merges over any existing design doc so its _rev is preserved.
def push_app(appdir, appname)
  # Removed unused locals `libs` and `viewdir` — neither was read.
  attachdir = File.join(appdir, "_attachments")

  @doc = dir_to_fields(appdir)
  # Expand !require / !include directives inside form and view code.
  package_forms(@doc["forms"]) if @doc['forms']
  package_views(@doc["views"]) if @doc['views']

  docid = "_design/#{appname}"
  design = @db.get(docid) rescue {}
  design.merge!(@doc)
  design['_id'] = docid
  # design['language'] = lang if lang
  @db.save(design)
  push_directory(attachdir, docid)
end
|
2009-01-02 08:11:01 +01:00
|
|
|
|
|
|
|
# Recursively read +dir+ into a nested Hash: each subdirectory
# becomes a nested hash, and each file becomes a key named after the
# file minus its extension. *.json files are parsed; everything else
# is stored as the raw file body. Files under _attachments/ are
# skipped (they are pushed separately as attachments).
#
# Note: only files whose names contain a dot are picked up, and a
# name like "a.b.json" is keyed as "a" with extension "b".
def dir_to_fields(dir)
  fields = {}
  (Dir["#{dir}/**/*.*"] -
    Dir["#{dir}/_attachments/**/*.*"]).each do |file|
    farray = file.sub(dir, '').sub(/^\//,'').split('/')
    # Walk/create the nested hash for each path segment but the last.
    myfield = fields
    while farray.length > 1
      front = farray.shift
      myfield[front] ||= {}
      myfield = myfield[front]
    end
    fname, fext = farray.shift.split('.')
    # File.read instead of File.open(file).read: the latter leaked a
    # file handle (the File object was never closed).
    fguts = File.read(file)
    if fext == 'json'
      myfield[fname] = JSON.parse(fguts)
    else
      myfield[fname] = fguts
    end
  end
  fields
end
|
|
|
|
|
|
|
|
|
|
|
|
# Generate an application skeleton in the given directory by copying
# the bundled template tree. This is a class method because it does
# not depend on specifying a database.
def self.generate_app(app_dir)
  template = File.join(File.expand_path(File.dirname(__FILE__)), 'template-app')
  FileUtils.cp_r(template, app_dir)
end
|
|
|
|
|
2008-09-12 06:09:39 +02:00
|
|
|
# Push every file beneath +push_dir+ into the CouchDB document
# +docid+ (default: the last non-empty path segment of +push_dir+)
# as inline attachments. An MD5 "signatures" map is stored alongside
# so that unchanged files are skipped on subsequent pushes, deleted
# files are removed, and new files are added.
def push_directory(push_dir, docid=nil)
  docid ||= push_dir.split('/').reverse.find{|part|!part.empty?}

  # Relative path => file body, for every dotted file in the tree.
  # File.read instead of open(f).read: the latter leaked file handles.
  pushfiles = Dir["#{push_dir}/**/*.*"].collect do |f|
    {f.split("#{push_dir}/").last => File.read(f)}
  end

  return if pushfiles.empty?

  @attachments = {}
  @signatures = {}
  pushfiles.each do |file|
    name = file.keys.first
    value = file.values.first
    @signatures[name] = md5(value)

    @attachments[name] = {
      "data" => value,
      "content_type" => MIMES[name.split('.').last]
    }
  end

  doc = @db.get(docid) rescue nil

  # First push: create the document wholesale.
  unless doc
    say "creating #{docid}"
    @db.save({"_id" => docid, "_attachments" => @attachments, "signatures" => @signatures})
    return
  end

  doc["signatures"] ||= {}
  doc["_attachments"] ||= {}

  # Remove attachments whose source files have been deleted.
  # Precompute the pushed names once instead of rebuilding the list
  # for every existing signature (was accidentally O(n^2)).
  pushed_names = pushfiles.collect{|p| p.keys.first}
  to_be_removed = doc["signatures"].keys.select do |d|
    !pushed_names.include?(d)
  end

  to_be_removed.each do |p|
    say "deleting #{p}"
    doc["signatures"].delete(p)
    doc["_attachments"].delete(p)
  end

  # Update existing attachments whose content hash changed.
  doc["signatures"].each do |path, sig|
    if (@signatures[path] == sig)
      say "no change to #{path}. skipping..."
    else
      say "replacing #{path}"
      doc["signatures"][path] = md5(@attachments[path]["data"])
      # Drop the stub metadata CouchDB returns for stored attachments
      # and attach the fresh body. (The old code assigned "data"
      # twice — once directly and once via an identical merge!; a
      # single assignment is equivalent.)
      doc["_attachments"][path].delete("stub")
      doc["_attachments"][path].delete("length")
      doc["_attachments"][path]["data"] = @attachments[path]["data"]
    end
  end

  # Add files that have no signature yet.
  new_files = pushfiles.select{|d| !doc["signatures"].keys.include?( d.keys.first) }

  new_files.each do |f|
    path = f.keys.first
    content = f.values.first
    # Was "creating #{f}", which printed the whole {path => body}
    # hash instead of just the attachment name.
    say "creating #{path}"
    doc["signatures"][path] = md5(content)

    doc["_attachments"][path] = {
      "data" => content,
      "content_type" => MIMES[path.split('.').last]
    }
  end

  begin
    @db.save(doc)
  # Was `rescue Exception`, which also swallowed SignalException,
  # SystemExit and NoMemoryError; StandardError covers save failures.
  rescue StandardError => e
    say e.message
  end
end
|
|
|
|
|
|
|
|
private

# Run the shared-library !require / !include expansion over a hash
# of form/show functions.
def package_forms(forms)
  apply_lib(forms)
end
|
|
|
|
|
2009-01-02 12:22:28 +01:00
|
|
|
# Run the shared-library expansion over every view's function hash
# (each view maps e.g. "map"/"reduce" names to function source).
def package_views(views)
  views.each_value do |view_funcs|
    apply_lib(view_funcs)
  end
end
|
|
|
|
|
2009-01-02 12:22:28 +01:00
|
|
|
# Expand !require then !include directives in every String-valued
# entry of +funcs+, replacing the entries in place. Non-String
# values (e.g. nested hashes) are left untouched.
def apply_lib(funcs)
  funcs.each_pair do |name, body|
    if body.is_a?(String)
      funcs[name] = process_include(process_require(body))
    end
  end
end
|
|
|
|
|
2009-01-02 12:46:26 +01:00
|
|
|
# Replace each "// !require a.b" (or "# !require a.b") directive in
# +f_string+ with the string stored at @doc["a"]["b"]. A path that
# cannot be fully resolved substitutes an empty string.
def process_require(f_string)
  f_string.gsub /(\/\/|#)\ ?!require (.*)/ do
    node = @doc
    $2.split('.').each do |field|
      node = node[field]
      break unless node
    end
    node
  end
end
|
|
|
|
|
|
|
|
|
|
|
|
# Expand "!include a.b" directives in +f_string+.
#
# Pass 1: scan every "// !include x.y" / "# !include x.y" line (the
# gsub return value is deliberately discarded — the block is run only
# for its side effects) and copy the referenced subtree of @doc into
# +included+, preserving the field nesting ("a.b.c" lands at
# included["a"]["b"]["c"]). Paths that do not resolve are skipped.
#
# Pass 2: if anything was collected, String#sub replaces only the
# FIRST include directive with one "var <key> = <json>;" line per
# top-level collected key; any later !include lines remain in the
# output as comments. NOTE(review): relying on sub to hit just the
# first directive looks intentional, but worth confirming.
def process_include(f_string)
  # process includes
  included = {}
  f_string.gsub /(\/\/|#)\ ?!include (.*)/ do
    fields = $2.split('.')
    library = @doc
    include_to = included
    count = fields.length
    fields.each_with_index do |field, i|
      # Stop silently if the path does not exist in @doc.
      break unless library[field]
      library = library[field]
      # normal case
      if i+1 < count
        include_to[field] = include_to[field] || {}
        include_to = include_to[field]
      else
        # last one
        include_to[field] = library
      end
    end
  end
  # puts included.inspect
  rval = if included == {}
    f_string
  else
    varstrings = included.collect do |k, v|
      "var #{k} = #{v.to_json};"
    end
    f_string.sub /(\/\/|#)\ ?!include (.*)/, varstrings.join("\n")
  end
  rval
end
|
|
|
|
|
|
|
|
|
2008-09-12 06:09:39 +02:00
|
|
|
# Print +words+ to stdout, but only when @loud is set.
def say(words)
  puts(words) if @loud
end
|
|
|
|
|
|
|
|
# Hex MD5 digest of +string+; used to fingerprint attachment bodies
# so unchanged files can be skipped on push.
def md5(string)
  Digest::MD5.hexdigest(string)
end
|
|
|
|
end # class FileManager
end # module CouchRest
|