Fixed includes; started working on caching strategy
parent 70fa15e3f3
commit bfecd09b56
@@ -5,6 +5,9 @@ require 'zip/zip'
 
 class WikiController < ApplicationController
 
+  # TODO implement cache sweeping
+  caches_page :show
+
   layout 'default', :except => [:rss_feed, :rss_with_content, :rss_with_headlines, :tex, :export_tex, :export_html]
 
   def index
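The hunk above turns on Rails page caching for the show action, and the TODO records that nothing expires those cached pages yet. A minimal sketch of what the missing cache sweeping might eventually look like, assuming a Revision model and standard Rails sweepers (the WikiSweeper class and its wiring are hypothetical, not part of this commit):

# Hypothetical sketch, not part of this commit: expire the cached copy
# of a page whenever a new revision of it is saved.
class WikiSweeper < ActionController::Caching::Sweeper
  observe Revision

  def after_save(revision)
    expire_page :controller => 'wiki', :action => 'show',
                :id => revision.page.name
  end
end

The controller would then register it on the mutating actions with something like cache_sweeper :wiki_sweeper.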
@@ -1,4 +1,3 @@
 Dependencies.mechanism = :require
 ActionController::Base.consider_all_requests_local = false
-ActionController::Base.perform_caching = false
-
+ActionController::Base.perform_caching = true
@@ -22,16 +22,17 @@ class Include < WikiChunk::WikiReference
   private
 
   def get_unmask_text_avoiding_recursion_loops
-    if refpage then
-      refpage.clear_display_cache
-      if refpage.wiki_includes.include?(@content.page_name)
+    if refpage
+      # TODO This way of instantiating a renderer is ugly.
+      renderer = PageRenderer.new(refpage.current_revision)
+      if renderer.wiki_includes.include?(@content.page_name)
         # this will break the recursion
         @content.delete_chunk(self)
         return "<em>Recursive include detected; #{@page_name} --> #{@content.page_name} " +
                "--> #{@page_name}</em>\n"
       else
-        @content.merge_chunks(refpage.display_content)
-        return refpage.display_content.pre_rendered
+        @content.merge_chunks(renderer.display_content)
+        return renderer.display_content.pre_rendered
       end
     else
       return "<em>Could not include #{@page_name}</em>\n"
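The rewrite routes the include through a fresh PageRenderer instead of the page's display cache, but the recursion guard works the same way as before. As a standalone illustration of the idea, with a plain hash standing in for the wiki and hypothetical page names (none of this is Instiki's API):

# Hypothetical data: each page maps to the pages it includes.
INCLUDES = {
  'HomePage' => ['SideBar'],
  'SideBar'  => ['HomePage']   # cycle: HomePage --> SideBar --> HomePage
}

# Refuse to expand a page that is already on the current expansion path.
def expand(page, path = [])
  if path.include?(page)
    return "<em>Recursive include detected; #{path.join(' --> ')} --> #{page}</em>"
  end
  children = INCLUDES.fetch(page, [])
  (["[#{page}]"] + children.map { |c| expand(c, path + [page]) }).join("\n")
end

puts expand('HomePage')
# [HomePage]
# [SideBar]
# <em>Recursive include detected; HomePage --> SideBar --> HomePage</em>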
@@ -16,7 +16,7 @@ module WikiChunk
 
     # the referenced page
     def refpage
-      @content.web.pages[@page_name]
+      @content.web.page(@page_name)
     end
 
   end
@@ -45,11 +45,6 @@ module WikiChunk
       end
     end
-
-    # the referenced page
-    def refpage
-      @content.web.pages[@page_name]
-    end
 
     def textile_url?
       not @textile_link_suffix.nil?
     end
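Both refpage hunks replace the raw hash lookup @content.web.pages[@page_name] with a call to @content.web.page(@page_name); the second, duplicated definition of refpage is simply deleted. The diff does not show Web#page itself, but in its simplest form it would just encapsulate the lookup so callers stop depending on pages being a Hash keyed by name (hypothetical sketch):

class Web
  # Hypothetical sketch of the finder the chunks now call; how pages
  # are stored becomes an implementation detail of Web.
  def page(name)
    pages[name]
  end
end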
@@ -5,6 +5,7 @@ require 'optparse'
 OPTIONS = {
   :instiki_root => nil,
   :storage => nil,
+  :database => 'mysql'
 }
 
 ARGV.options do |opts|
@@ -38,6 +39,12 @@ ARGV.options do |opts|
     OPTIONS[:outfile] = outfile
   end
 
+  opts.on("-d", "--database {mysql|sqlite|postgres}", String,
+          "Target database (they have slightly different syntax)",
+          "default: mysql") do |database|
+    OPTIONS[:database] = database
+  end
+
   opts.separator ""
 
   opts.on_tail("-h", "--help",
@@ -95,15 +102,29 @@ class Revision
   end
 end
 
+class Time
+  def ansi
+    strftime('%Y-%m-%d %H:%M:%S')
+  end
+end
+
 def sql_insert(table, hash)
   output = "INSERT INTO #{table} ("
   output << hash.keys.join(", ")
 
   output << ") VALUES ('"
-  output << hash.values.map{|v| v.to_s.gsub("'", "\\\\'")}.join("', '")
-
+  output << hash.values.map do |v|
+    case OPTIONS[:database]
+    when 'mysql', 'postgres'
+      v.to_s.gsub("'", "\\\\'")
+    when 'sqlite'
+      v.to_s.gsub("'", "''")
+    else
+      raise "Unsupported database option #{OPTIONS[:database]}"
+    end
+  end.join("', '")
   output << "');"
-  return output
+  output
 end
 
 WikiService.storage_path = OPTIONS[:storage]
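For illustration, here is what the rewritten sql_insert emits for a value containing a single quote (hypothetical call; the table and column are made up, and the escaping depends on OPTIONS[:database]):

OPTIONS[:database] = 'sqlite'
puts sql_insert(:pages, :name => "O'Reilly")
# INSERT INTO pages (name) VALUES ('O''Reilly');

OPTIONS[:database] = 'mysql'
puts sql_insert(:pages, :name => "O'Reilly")
# INSERT INTO pages (name) VALUES ('O\'Reilly');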
@@ -125,6 +146,8 @@ File.open(OPTIONS[:outfile], 'w') { |outfile|
     :max_upload_size => web.max_upload_size,
     :safe_mode => web.safe_mode,
     :brackets_only => web.brackets_only,
+    :created_at => web.pages.values.map { |p| p.revisions.first.created_at }.min.ansi,
+    :updated_at => web.pages.values.map { |p| p.revisions.last.created_at }.max.ansi
   })
 
   puts "Web #{web_name} has #{web.pages.keys.size} pages"
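The .ansi calls in this hunk and the next rely on the Time#ansi helper added above; it renders timestamps in the ANSI SQL format the generated INSERT statements expect. A quick example with an arbitrary date:

Time.local(2005, 8, 17, 14, 30, 5).ansi  # => "2005-08-17 14:30:05"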
@@ -133,26 +156,26 @@ File.open(OPTIONS[:outfile], 'w') { |outfile|
       :id => page.object_id,
       :web_id => web.object_id,
       :locked_by => page.locked_by,
-      :name => page.name
+      :name => page.name,
+      :created_at => page.revisions.first.created_at.ansi,
+      :updated_at => page.revisions.last.created_at.ansi
     })
 
     puts "  Page #{page_name} has #{page.revisions.size} revisions"
     page.revisions.each_with_index do |rev, i|
+
       outfile.puts sql_insert(:revisions, {
         :id => rev.object_id,
         :page_id => page.object_id,
         :content => rev.content,
-        :author => rev.author,
-        :ip => '0.0.0.0',
+        :author => rev.author.to_s,
+        :ip => (rev.author.is_a?(Author) ? rev.author.ip : 'N/A'),
+        :created_at => rev.created_at.ansi,
+        :updated_at => rev.created_at.ansi,
+        :revised_at => rev.created_at.ansi
       })
-      puts "    Revision #{i} created at #{rev.created_at}"
+      puts "    Revision #{i} created at #{rev.created_at.ansi}"
     end
   end
 end
-
-['webs', 'pages', 'revisions'].each do |table|
-  outfile.puts "UPDATE #{table} SET created_at = NOW();"
-  outfile.puts "UPDATE #{table} SET updated_at = NOW();"
-end
-outfile.puts "UPDATE revisions SET revised_at = NOW();"
 }
@@ -6,6 +6,11 @@ require 'rexml/document'
 INSTIKI_ROOT = File.expand_path(File.dirname(__FILE__) + "/../..")
 require(File.expand_path(File.dirname(__FILE__) + "/../../config/environment"))
 
+# TODO Create tests for:
+# * exporting HTML
+# * exporting markup
+# * include tag
+
 # Use instiki/../watir, if such a directory exists; This can be a CVS HEAD version of Watir.
 # Otherwise Watir has to be installed in ruby/lib.
 $:.unshift INSTIKI_ROOT + '/../watir' if File.exists?(INSTIKI_ROOT + '/../watir/watir.rb')