require 'fileutils'
require 'redcloth_for_tex'
require 'parsedate'
require 'zip/zip'

class WikiController < ApplicationController

  before_filter :load_page
  caches_action :show, :published, :authors, :recently_revised, :list
  cache_sweeper :revision_sweeper

  layout 'default', :except => [:rss_feed, :rss_with_content, :rss_with_headlines, :tex, :export_tex, :export_html]

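  # Dispatches the root URL: to the current web's home page, to the admin setup when the wiki is not configured, straight to the only web if there is just one, or to the web list otherwise.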
  def index
    if @web_name
      redirect_home
    elsif not @wiki.setup?
      redirect_to :controller => 'admin', :action => 'create_system'
    elsif @wiki.webs.length == 1
      redirect_home @wiki.webs.values.first.address
    else
      redirect_to :action => 'web_list'
    end
  end

  # Outside a single web --------------------------------------------------------

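  # Checks the submitted password for this web; success goes to the home page, failure back to the login form.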
  def authenticate
    if password_check(params['password'])
      redirect_home
    else
      flash[:info] = password_error(params['password'])
      redirect_to :action => 'login', :web => @web_name
    end
  end

  def login
    # to template
  end

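  # Lists all webs hosted by this wiki, sorted by name.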
  def web_list
    @webs = wiki.webs.values.sort_by { |web| web.name }
  end

  # Within a single web ---------------------------------------------------------

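  # Builds the author index: page names grouped by author, with authors sorted alphabetically.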
  def authors
    @page_names_by_author = @web.page_names_by_author
    @authors = @page_names_by_author.keys.sort
  end

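  # Exports every page of the web as a zip of standalone XHTML files, with the web's stylesheet inlined.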
  def export_html
    stylesheet = File.read(File.join(RAILS_ROOT, 'public', 'stylesheets', 'instiki.css'))
    export_pages_as_zip('html') do |page|

      renderer = PageRenderer.new(page.revisions.last)
      rendered_page = <<-EOL
        <!DOCTYPE html
            PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
            "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
        <html xmlns="http://www.w3.org/1999/xhtml">
        <head>
          <title>#{page.plain_name} in #{@web.name}</title>
          <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />

          <style type="text/css">
            h1#pageName, .newWikiWord a, a.existingWikiWord, .newWikiWord a:hover {
              color: ##{@web ? @web.color : "393" };
            }
            .newWikiWord { background-color: white; font-style: italic; }
            #{stylesheet}
          </style>
          <style type="text/css">
            #{@web.additional_style}
          </style>
        </head>
        <body>
          #{renderer.display_content_for_export}
          <div class="byline">
            #{page.revisions? ? "Revised" : "Created" } on #{ page.revised_at.strftime('%B %d, %Y %H:%M:%S') }
            by
            #{ UrlGenerator.new(self).make_link(page.author.name, @web, nil, { :mode => :export }) }
          </div>
        </body>
        </html>
      EOL
      rendered_page
    end
  end

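  # Exports the raw markup source of every page as a zip archive.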
  def export_markup
    export_pages_as_zip(@web.markup) { |page| page.content }
  end

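  # Exports the whole web to TeX, converts it to PDF with pdflatex and sends the result.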
  def export_pdf
    file_name = "#{@web.address}-tex-#{@web.revised_at.strftime('%Y-%m-%d-%H-%M-%S')}"
    file_path = File.join(@wiki.storage_path, file_name)

    export_web_to_tex "#{file_path}.tex" unless FileTest.exists? "#{file_path}.tex"
    convert_tex_to_pdf "#{file_path}.tex"
    send_file "#{file_path}.pdf"
  end

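  # Exports the whole web as a single TeX file.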
  def export_tex
    file_name = "#{@web.address}-tex-#{@web.revised_at.strftime('%Y-%m-%d-%H-%M-%S')}.tex"
    file_path = File.join(@wiki.storage_path, file_name)
    export_web_to_tex(file_path) unless FileTest.exists?(file_path)
    send_file file_path
  end

  def feeds
    @rss_with_content_allowed = rss_with_content_allowed?
    # show the template
  end

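  # Lists the pages of the selected category, plus wanted (missing) and orphaned pages.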
  def list
    parse_category
    @page_names_that_are_wanted = @pages_in_category.wanted_pages
    @pages_that_are_orphaned = @pages_in_category.orphaned_pages
  end

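  # Groups the pages of the selected category by the day they were last revised.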
  def recently_revised
    parse_category
    @pages_by_revision = @pages_in_category.by_revision
    @pages_by_day = Hash.new { |h, day| h[day] = [] }
    @pages_by_revision.each do |page|
      day = Date.new(page.revised_at.year, page.revised_at.month, page.revised_at.day)
      @pages_by_day[day] << page
    end
  end

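  # RSS feed including full page content; refused when the web is password-protected and not published.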
  def rss_with_content
    if rss_with_content_allowed?
      render_rss(hide_description = false, *parse_rss_params)
    else
      render_text 'RSS feed with content for this web is blocked for security reasons. ' +
          'The web is password-protected and not published', '403 Forbidden'
    end
  end

  def rss_with_headlines
    render_rss(hide_description = true, *parse_rss_params)
  end

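  # Searches page titles and contents for the query; redirects directly to the page when there is exactly one match.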
  def search
    @query = params['query']
    @title_results = @web.select { |page| page.name =~ /#{@query}/i }.sort
    @results = @web.select { |page| page.content =~ /#{@query}/i }.sort
    all_pages_found = (@results + @title_results).uniq
    if all_pages_found.size == 1
      redirect_to_page(all_pages_found.first.name)
    end
  end

  # Within a single page --------------------------------------------------------

  def cancel_edit
    @page.unlock
    redirect_to_page(@page_name)
  end

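  # Locks the page for the current author before editing; an existing lock redirects to the 'locked' notice unless the lock is broken explicitly.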
  def edit
    if @page.nil?
      redirect_home
    elsif @page.locked?(Time.now) and not params['break_lock']
      redirect_to :web => @web_name, :action => 'locked', :id => @page_name
    else
      @page.lock(Time.now, @author)
    end
  end

  def locked
    # to template
  end

  def new
    # to template
  end

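  # Renders a single page to TeX, converts it to PDF with pdflatex and sends the file.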
  def pdf
    page = wiki.read_page(@web_name, @page_name)
    safe_page_name = @page.name.gsub(/\W/, '')
    file_name = "#{safe_page_name}-#{@web.address}-#{@page.revised_at.strftime('%Y-%m-%d-%H-%M-%S')}"
    file_path = File.join(@wiki.storage_path, file_name)

    export_page_to_tex("#{file_path}.tex") unless FileTest.exists?("#{file_path}.tex")
    # NB: this is _very_ slow
    convert_tex_to_pdf("#{file_path}.tex")
    send_file "#{file_path}.pdf"
  end

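  # Printer-friendly view of the page's latest revision.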
  def print
    if @page.nil?
      redirect_home
    end
    @link_mode ||= :show
    @renderer = PageRenderer.new(@page.revisions.last)
    # to template
  end

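  # Read-only view for published webs; defaults to HomePage when no page name is given.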
  def published
    if not @web.published?
      render(:text => "Published version of web '#{@web_name}' is not available", :status => 404)
      return
    end

    @page_name ||= 'HomePage'
    @page ||= wiki.read_page(@web_name, @page_name)
    render(:text => "Page '#{@page_name}' not found", :status => 404) and return unless @page

    @renderer = PageRenderer.new(@page.revisions.last)
  end

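  # Shows a specific revision of the page, optionally as a diff.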
  def revision
    get_page_and_revision
    @show_diff = (params[:mode] == 'diff')
    @renderer = PageRenderer.new(@revision)
  end

  def rollback
    get_page_and_revision
  end

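  # Creates or revises the page from the submitted content: POST-only, spam-filtered, and redirects back to the edit/new form when saving fails.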
  def save
    render(:status => 404, :text => 'Undefined page name') and return if @page_name.nil?
    unless (request.post? || ENV["RAILS_ENV"] == "test")
      headers['Allow'] = 'POST'
      render(:status => 405, :text => 'You must use an HTTP POST')
      return
    end

    author_name = params['author']
    author_name = 'AnonymousCoward' if author_name =~ /^\s*$/
    cookies['author'] = { :value => author_name, :expires => Time.utc(2030) }

    begin
      filter_spam(params['content'])
      if @page
        wiki.revise_page(@web_name, @page_name, params['content'], Time.now,
            Author.new(author_name, remote_ip), PageRenderer.new)
        @page.unlock
      else
        wiki.write_page(@web_name, @page_name, params['content'], Time.now,
            Author.new(author_name, remote_ip), PageRenderer.new)
      end
      redirect_to_page @page_name
    rescue => e
      flash[:error] = e
      logger.error e
      flash[:content] = params['content']
      if @page
        @page.unlock
        redirect_to :action => 'edit', :web => @web_name, :id => @page_name
      else
        redirect_to :action => 'new', :web => @web_name, :id => @page_name
      end
    end
  end

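  # Default page view: renders the latest revision (or a diff); missing pages redirect to 'new', rendering errors fall back to the edit form.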
  def show
    if @page
      begin
        @renderer = PageRenderer.new(@page.revisions.last)
        @show_diff = (params[:mode] == 'diff')
        render_action 'page'
      # TODO this rescue should differentiate between errors due to rendering and errors in
      # the application itself (for application errors, it's better not to rescue the error at all)
      rescue => e
        logger.error e
        flash[:error] = e.message
        if in_a_web?
          redirect_to :action => 'edit', :web => @web_name, :id => @page_name
        else
          raise e
        end
      end
    else
      if not @page_name.nil? and not @page_name.empty?
        redirect_to :web => @web_name, :action => 'new', :id => @page_name
      else
        render_text 'Page name is not specified', '404 Not Found'
      end
    end
  end

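  # Renders the page content as TeX.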
  def tex
    @tex_content = RedClothForTex.new(@page.content).to_tex
  end

  protected

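  # before_filter: resolves params['id'] into @page_name and @page for every action.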
  def load_page
    @page_name = params['id']
    @page = @wiki.read_page(@web_name, @page_name) if @page_name
  end

  private

  def convert_tex_to_pdf(tex_path)
    # TODO remove earlier PDF files with the same prefix
    # TODO handle gracefully situation where pdflatex is not available
    begin
      wd = Dir.getwd
      Dir.chdir(File.dirname(tex_path))
      logger.info `pdflatex --interaction=nonstopmode #{File.basename(tex_path)}`
    ensure
      Dir.chdir(wd)
    end
  end

  def export_page_to_tex(file_path)
    tex
    File.open(file_path, 'w') { |f| f.write(render_to_string(:template => 'wiki/tex', :layout => false)) }
  end

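  # Writes each page (as produced by the block) into a zip archive, adds an index.html redirect for HTML exports, removes stale archives and sends the file.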
  def export_pages_as_zip(file_type, &block)
    file_prefix = "#{@web.address}-#{file_type}-"
    timestamp = @web.revised_at.strftime('%Y-%m-%d-%H-%M-%S')
    file_path = File.join(@wiki.storage_path, file_prefix + timestamp + '.zip')
    tmp_path = "#{file_path}.tmp"

    Zip::ZipOutputStream.open(tmp_path) do |zip_out|
      @web.select.by_name.each do |page|
        zip_out.put_next_entry("#{CGI.escape(page.name)}.#{file_type}")
        zip_out.puts(block.call(page))
      end
      # add an index file, if exporting to HTML
      if file_type.to_s.downcase == 'html'
        zip_out.put_next_entry 'index.html'
        zip_out.puts "<html><head>" +
            "<META HTTP-EQUIV=\"Refresh\" CONTENT=\"0;URL=HomePage.#{file_type}\"></head></html>"
      end
    end
    FileUtils.rm_rf(Dir[File.join(@wiki.storage_path, file_prefix + '*.zip')])
    FileUtils.mv(tmp_path, file_path)
    send_file file_path
  end

  def export_web_to_tex(file_path)
    @tex_content = table_of_contents(@web.page('HomePage').content, render_tex_web)
    File.open(file_path, 'w') { |f| f.write(render_to_string(:template => 'wiki/tex_web', :layout => nil)) }
  end

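  # Resolves params['rev'] (defaulting to the latest revision) into @revision_number and @revision.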
  def get_page_and_revision
    if params['rev']
      @revision_number = params['rev'].to_i
    else
      @revision_number = @page.revisions.length
    end
    @revision = @page.revisions[@revision_number - 1]
  end

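  # Sets @categories, @category, @set_name and @pages_in_category from the optional 'category' parameter.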
  def parse_category
    @categories = WikiReference.list_categories(@web).sort
    @category = params['category']
    if @category
      @set_name = "category '#{@category}'"
      pages = WikiReference.pages_in_category(@web, @category).sort.map { |page_name| @web.page(page_name) }
      @pages_in_category = PageSet.new(@web, pages)
    else
      # no category specified, return all pages of the web
      @pages_in_category = @web.select_all.by_name
      @set_name = 'the web'
    end
  end

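  # Extracts the feed item limit (default 15) and optional start/end dates from the request parameters.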
  def parse_rss_params
    if params.include? 'limit'
      limit = params['limit'].to_i rescue nil
      limit = nil if limit == 0
    else
      limit = 15
    end
    start_date = Time.local(*ParseDate::parsedate(params['start'])) rescue nil
    end_date = Time.local(*ParseDate::parsedate(params['end'])) rescue nil
    [ limit, start_date, end_date ]
  end

  def remote_ip
    ip = request.remote_ip
    logger.info(ip)
    ip
  end

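  # Selects the pages for the feed, either the most recent ones or those within the given date window, and renders the rss_feed template.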
  def render_rss(hide_description = false, limit = 15, start_date = nil, end_date = nil)
    if limit && !start_date && !end_date
      @pages_by_revision = @web.select.by_revision.first(limit)
    else
      @pages_by_revision = @web.select.by_revision
      @pages_by_revision.reject! { |page| page.revised_at < start_date } if start_date
      @pages_by_revision.reject! { |page| page.revised_at > end_date } if end_date
    end

    @hide_description = hide_description
    @link_action = @web.password ? 'published' : 'show'

    render :action => 'rss_feed'
  end

  def render_tex_web
    @web.select.by_name.inject({}) do |tex_web, page|
      tex_web[page.name] = RedClothForTex.new(page.content).to_tex
      tex_web
    end
  end

  def rss_with_content_allowed?
    @web.password.nil? or @web.published?
  end

  def truncate(text, length = 30, truncate_string = '...')
    if text.length > length then text[0..(length - 3)] + truncate_string else text end
  end

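  # Raises if the submitted content matches any of the configured spam patterns.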
  def filter_spam(content)
    @@spam_patterns ||= load_spam_patterns
    @@spam_patterns.each do |pattern|
      raise "Your edit was blocked by spam filtering" if content =~ pattern
    end
  end

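  # Loads case-insensitive spam regexps from config/spam_patterns.txt, returning an empty list when the file is absent.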
  def load_spam_patterns
    spam_patterns_file = "#{RAILS_ROOT}/config/spam_patterns.txt"
    if File.exists?(spam_patterns_file)
      File.readlines(spam_patterns_file).inject([]) { |patterns, line| patterns << Regexp.new(line.chomp, Regexp::IGNORECASE) }
    else
      []
    end
  end

end