move to AR

parent c4b7b2d9f2
commit 26c046cdfa

51 changed files with 2345 additions and 516 deletions
lib/author.rb (new file, 18 lines)

@@ -0,0 +1,18 @@
class Author < String
  attr_accessor :ip
  attr_reader :name
  def initialize(name, ip = nil)
    @ip = ip
    super(name)
  end

  def name=(value)
    self.gsub!(/.+/, value)
  end

  alias_method :name, :to_s

  def <=>(other)
    name <=> other.to_s
  end
end
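A minimal usage sketch of the new Author value object (illustrative only; the name and IP below are invented, and it assumes lib/ is on the load path):

  # Author is a String subclass that also carries the author's IP address.
  require 'author'

  author = Author.new('AnonymousCoward', '127.0.0.1')
  author.ip                      # => "127.0.0.1"
  author.name                    # => "AnonymousCoward" (name is aliased to to_s)
  author <=> Author.new('Zoe')   # => -1; authors compare by name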
@@ -4,7 +4,7 @@ class ChunkTest < Test::Unit::TestCase

  # Asserts a number of tests for the given type and text.
  def match(type, test_text, expected)
    pattern = type.pattern
    assert_match(pattern, test_text)
    pattern =~ test_text # Previous assertion guarantees match
    chunk = type.new($~)
@@ -6,6 +6,17 @@ def create_options
  end
end

def db_quote(column)
  case @db
  when 'postgresql'
    return "\"#{column}\""
  when 'sqlite', 'sqlite3'
    return "'#{column}'"
  when 'mysql'
    return "`#{column}`"
  end
end

def db_structure(db)
  db.downcase!
  @db = db

@@ -13,12 +24,15 @@ def db_structure(db)
  when 'postgresql'
    @pk = 'SERIAL PRIMARY KEY'
    @datetime = 'TIMESTAMP'
    @boolean = "BOOLEAN"
  when 'sqlite', 'sqlite3'
    @pk = 'INTEGER PRIMARY KEY'
    @datetime = 'DATETIME'
    @boolean = "INTEGER"
  when 'mysql'
    @pk = 'INTEGER UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY'
    @datetime = 'DATETIME'
    @boolean = "TINYINT"
    @mysql_engine = 'InnoDB'
  else
    raise "Unknown db type #{db}"
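A hedged sketch of what these helpers produce once db_structure has been called on whatever setup object defines them (the column name is invented for illustration):

  db_structure('postgresql')
  db_quote('created_at')   # => "\"created_at\""  (double quotes; @pk is 'SERIAL PRIMARY KEY')

  db_structure('mysql')
  db_quote('created_at')   # => "`created_at`"    (backticks; @mysql_engine is 'InnoDB')

  db_structure('sqlite3')
  db_quote('created_at')   # => "'created_at'"    (single quotes)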
lib/file_yard.rb (new file, 59 lines)

@@ -0,0 +1,59 @@
require 'fileutils'
require 'instiki_errors'

class FileYard
  cattr_accessor :restrict_upload_access
  restrict_upload_access = true
  attr_reader :files_path

  def initialize(files_path, max_upload_size)
    @files_path, @max_upload_size = files_path, max_upload_size
    FileUtils.mkdir_p(@files_path) unless File.exist?(@files_path)
    @files = Dir["#{@files_path}/*"].collect{|path| File.basename(path) if File.file?(path) }.compact
  end

  def upload_file(name, io)
    sanitize_file_name(name)
    if io.kind_of?(Tempfile)
      io.close
      check_upload_size(io.size)
      File.chmod(600, file_path(name)) if File.exists? file_path(name)
      FileUtils.mv(io.path, file_path(name))
    else
      content = io.read
      check_upload_size(content.length)
      File.open(file_path(name), 'wb') { |f| f.write(content) }
    end
    # just in case, restrict read access and prohibit write access to the uploaded file
    FileUtils.chmod(0440, file_path(name)) if restrict_upload_access
  end

  def files
    Dir["#{files_path}/*"].collect{|path| File.basename(path) if File.file?(path)}.compact
  end

  def has_file?(name)
    files.include?(name)
  end

  def file_path(name)
    "#{files_path}/#{name}"
  end

  SANE_FILE_NAME = /[a-zA-Z0-9\-_\. ]{1,255}/

  def sanitize_file_name(name)
    unless name =~ SANE_FILE_NAME or name == '.' or name == '..'
      raise Instiki::ValidationError.new("Invalid file name: '#{name}'.\n" +
            "Only latin characters, digits, dots, underscores, dashes and spaces are accepted.")
    end
  end

  def check_upload_size(actual_upload_size)
    if actual_upload_size > @max_upload_size.kilobytes
      raise Instiki::ValidationError.new("Uploaded file size (#{actual_upload_size / 1024} " +
            "kbytes) exceeds the maximum (#{@max_upload_size} kbytes) set for this wiki")
    end
  end

end
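A minimal usage sketch, assuming the Rails environment the class expects (cattr_accessor, Integer#kilobytes and Instiki::ValidationError come from the app; the path and file name below are made up):

  require 'file_yard'
  require 'stringio'

  yard = FileYard.new('/tmp/wiki_uploads', 100)          # 100 KB limit; directory is created if missing
  yard.upload_file('notes.txt', StringIO.new('hello'))   # non-Tempfile branch: content is written via File.open
  yard.has_file?('notes.txt')                            # => true
  yard.file_path('notes.txt')                            # => "/tmp/wiki_uploads/notes.txt"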
lib/page_set.rb (new file, 89 lines)

@@ -0,0 +1,89 @@
# Container for a set of pages with methods for manipulation.

class PageSet < Array
  attr_reader :web

  def initialize(web, pages = nil, condition = nil)
    @web = web
    # if pages is not specified, make a list of all pages in the web
    if pages.nil?
      super(web.pages)
    # otherwise use specified pages and condition to produce a set of pages
    elsif condition.nil?
      super(pages)
    else
      super(pages.select { |page| condition[page] })
    end
  end

  def most_recent_revision
    self.map { |page| page.created_at }.max || Time.at(0)
  end


  def by_name
    PageSet.new(@web, sort_by { |page| page.name })
  end

  alias :sort :by_name

  def by_revision
    PageSet.new(@web, sort_by { |page| page.created_at }).reverse
  end

  def pages_that_reference(page_name)
    self.select { |page| page.wiki_references.include?(page_name) }
  end

  def pages_that_link_to(page_name)
    self.select { |page| page.wiki_words.include?(page_name) }
  end

  def pages_that_include(page_name)
    self.select { |page| page.wiki_includes.include?(page_name) }
  end

  def pages_authored_by(author)
    self.select { |page| page.authors.include?(author) }
  end

  def characters
    self.inject(0) { |chars, page| chars += page.content.size }
  end

  # Returns all the orphaned pages in this page set. That is,
  # pages in this set for which there is no reference in the web.
  # The HomePage and author pages are always assumed to have
  # references and so cannot be orphans.
  # Pages that refer to themselves and have no links from outside are orphans.
  def orphaned_pages
    never_orphans = web.select.authors + ['HomePage']
    self.select { |page|
      if never_orphans.include? page.name
        false
      else
        references = pages_that_reference(page.name)
        references.empty? or references == [page]
      end
    }
  end

  # Returns all the wiki words in this page set for which
  # there are no pages in this page set's web
  def wanted_pages
    wiki_words - web.select.names
  end

  def names
    self.map { |page| page.name }
  end

  def wiki_words
    self.inject([]) { |wiki_words, page| wiki_words << page.wiki_words }.flatten.uniq
  end

  def authors
    self.inject([]) { |authors, page| authors << page.authors }.flatten.uniq.sort
  end

end
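The queries compose naturally once a Web hands over its page set (web.select, as used in lib/wiki.rb below, returns a PageSet of all pages in the web); a hedged sketch:

  all = web.select                        # PageSet of every page in the web
  all.by_revision.first                   # most recently revised page (sorted by created_at, then reversed)
  all.pages_that_reference('HomePage')    # pages whose wiki_references include 'HomePage'
  all.orphaned_pages.names                # names of pages nothing (except themselves) points to
  all.wanted_pages                        # wiki words used in this set that have no page in the web yet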
lib/wiki.rb (new file, 96 lines)

@@ -0,0 +1,96 @@
class Wiki
  cattr_accessor :storage_path, :logger
  self.storage_path = "#{RAILS_ROOT}/storage/"
  self.logger = RAILS_DEFAULT_LOGGER

  def authenticate(password)
    password == (system.password || 'instiki')
  end

  def create_web(name, address, password = nil)
    @webs = nil
    Web.create(:name => name, :address => address, :password => password)
  end

  def delete_web(address)
    web = Web.find_by_address(address)
    unless web.nil?
      web.destroy
      @webs = nil
    end
  end

  def file_yard(web)
    web.file_yard
  end

  def edit_web(old_address, new_address, name, markup, color, additional_style, safe_mode = false,
      password = nil, published = false, brackets_only = false, count_pages = false,
      allow_uploads = true, max_upload_size = nil)

    if not (web = Web.find_by_address(old_address))
      raise Instiki::ValidationError.new("Web with address '#{old_address}' does not exist")
    end

    web.refresh_revisions if web.settings_changed?(markup, safe_mode, brackets_only)
    web.update_attributes(:address => new_address, :name => name, :markup => markup, :color => color,
        :additional_style => additional_style, :safe_mode => safe_mode, :password => password, :published => published,
        :brackets_only => brackets_only, :count_pages => count_pages, :allow_uploads => allow_uploads, :max_upload_size => max_upload_size)
    @webs = nil
    raise Instiki::ValidationError.new("There is already a web with address '#{new_address}'") unless web.errors.on(:address).nil?
    web
  end

  def read_page(web_address, page_name)
    self.class.logger.debug "Reading page '#{page_name}' from web '#{web_address}'"
    web = Web.find_by_address(web_address)
    if web.nil?
      self.class.logger.debug "Web '#{web_address}' not found"
      return nil
    else
      page = web.pages.find(:first, :conditions => ['name = ?', page_name])
      self.class.logger.debug "Page '#{page_name}' #{page.nil? ? 'not' : ''} found"
      return page
    end
  end

  def remove_orphaned_pages(web_address)
    web = Web.find_by_address(web_address)
    web.remove_pages(web.select.orphaned_pages)
  end

  def revise_page(web_address, page_name, content, revised_on, author)
    page = read_page(web_address, page_name)
    page.revise(content, revised_on, author)
  end

  def rollback_page(web_address, page_name, revision_number, created_at, author_id = nil)
    page = read_page(web_address, page_name)
    page.rollback(revision_number, created_at, author_id)
  end

  def setup(password, web_name, web_address)
    system.update_attribute(:password, password)
    create_web(web_name, web_address)
  end

  def system
    @system ||= (System.find(:first) || System.create)
  end

  def setup?
    Web.count > 0
  end

  def webs
    @webs ||= Web.find(:all).inject({}) { |webs, web| webs.merge(web.address => web) }
  end

  def storage_path
    self.class.storage_path
  end

  def write_page(web_address, page_name, content, written_on, author)
    Web.find_by_address(web_address).add_page(page_name, content, written_on, author)
  end
end
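A rough end-to-end sketch against a booted Rails environment (the password, web name, page names and author are invented for illustration):

  wiki = Wiki.new
  wiki.setup('s3cret', 'My Wiki', 'wiki') unless wiki.setup?   # store the password and create the first web
  wiki.write_page('wiki', 'HomePage', 'Welcome to the wiki.',
                  Time.now, Author.new('FirstAuthor', '127.0.0.1'))
  page = wiki.read_page('wiki', 'HomePage')                    # logs the lookup, returns the Page or nil
  wiki.revise_page('wiki', 'HomePage', 'Welcome back.', Time.now, Author.new('FirstAuthor'))
  wiki.webs.keys                                               # => ["wiki"]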
lib/wiki_content.rb (new file, 206 lines)

@@ -0,0 +1,206 @@
require 'cgi'
require 'chunks/engines'
require 'chunks/category'
require 'chunks/include'
require 'chunks/wiki'
require 'chunks/literal'
require 'chunks/uri'
require 'chunks/nowiki'

# Wiki content is just a string that can process itself with a chain of
# actions. The actions can modify wiki content so that certain parts of
# it are protected from being rendered by later actions.
#
# When wiki content is rendered, it can be interrogated to find out
# which chunks were rendered. This means things like categories and wiki
# links can be determined.
#
# Exactly how wiki content is rendered is determined by a number of
# settings that are optionally passed in to a constructor. The current
# options are:
# * :engine
#   => The structural markup engine to use (Textile, Markdown, RDoc)
# * :engine_opts
#   => A list of options to pass to the markup engines (safe modes, etc)
# * :pre_engine_actions
#   => A list of render actions or chunks to be processed before the
#      markup engine is applied. By default this is:
#      Category, Include, URIChunk, WikiChunk::Link, WikiChunk::Word
# * :post_engine_actions
#   => A list of render actions or chunks to apply after the markup
#      engine. By default these are:
#      Literal::Pre, Literal::Tags
# * :mode
#   => How should the content be rendered? For normal display (:show),
#      publishing (:publish) or export (:export)?

module ChunkManager
  attr_reader :chunks_by_type, :chunks_by_id, :chunks, :chunk_id

  ACTIVE_CHUNKS = [ NoWiki, Category, WikiChunk::Link, URIChunk, LocalURIChunk,
                    WikiChunk::Word ]

  HIDE_CHUNKS = [ Literal::Pre, Literal::Tags ]

  MASK_RE = {
    ACTIVE_CHUNKS => Chunk::Abstract.mask_re(ACTIVE_CHUNKS),
    HIDE_CHUNKS => Chunk::Abstract.mask_re(HIDE_CHUNKS)
  }

  def init_chunk_manager
    @chunks_by_type = Hash.new
    Chunk::Abstract::derivatives.each{|chunk_type|
      @chunks_by_type[chunk_type] = Array.new
    }
    @chunks_by_id = Hash.new
    @chunks = []
    @chunk_id = 0
  end

  def add_chunk(c)
    @chunks_by_type[c.class] << c
    @chunks_by_id[c.id] = c
    @chunks << c
    @chunk_id += 1
  end

  def delete_chunk(c)
    @chunks_by_type[c.class].delete(c)
    @chunks_by_id.delete(c.id)
    @chunks.delete(c)
  end

  def merge_chunks(other)
    other.chunks.each{|c| add_chunk(c)}
  end

  def scan_chunkid(text)
    text.scan(MASK_RE[ACTIVE_CHUNKS]){|a| yield a[0] }
  end

  def find_chunks(chunk_type)
    @chunks.select { |chunk| chunk.kind_of?(chunk_type) and chunk.rendered? }
  end

  # for testing and WikiContentStub; we need a page_id even if we have no page
  def page_id
    0
  end

end

# A simplified version of WikiContent. Useful to avoid recursion problems in
# WikiContent.new
class WikiContentStub < String
  attr_reader :options
  include ChunkManager
  def initialize(content, options)
    super(content)
    @options = options
    init_chunk_manager
  end

  # Detects the mask strings contained in the text of chunks of type chunk_types
  # and yields the corresponding chunk ids.
  # Example: content = "chunk123categorychunk <pre>chunk456categorychunk</pre>"
  #          inside_chunks(Literal::Pre) ==> yield 456
  def inside_chunks(chunk_types)
    chunk_types.each{|chunk_type| chunk_type.apply_to(self) }

    chunk_types.each{|chunk_type| @chunks_by_type[chunk_type].each{|hide_chunk|
        scan_chunkid(hide_chunk.text){|id| yield id }
      }
    }
  end
end

class WikiContent < String

  include ChunkManager

  DEFAULT_OPTS = {
    :active_chunks => ACTIVE_CHUNKS,
    :engine => Engines::Textile,
    :engine_opts => [],
    :mode => :show
  }.freeze

  attr_reader :web, :options, :revision, :not_rendered, :pre_rendered

  # Create a new wiki content string from the given one.
  # The options are explained at the top of this file.
  def initialize(revision, options = {})
    @revision = revision
    @web = @revision.page.web

    @options = DEFAULT_OPTS.dup.merge(options)
    @options[:engine] = Engines::MAP[@web.markup]
    @options[:engine_opts] = [:filter_html, :filter_styles] if @web.safe_mode?
    @options[:active_chunks] = (ACTIVE_CHUNKS - [WikiChunk::Word]) if @web.brackets_only?

    @not_rendered = @pre_rendered = nil

    super(@revision.content)
    init_chunk_manager
    build_chunks
    @not_rendered = String.new(self)
  end

  # Call @web.page_link using current options.
  def page_link(name, text, link_type)
    @options[:link_type] = (link_type || :show)
    @web.make_link(name, text, @options)
  end

  def build_chunks
    # create and mask Includes and "active_chunks" chunks
    Include.apply_to(self)
    @options[:active_chunks].each{|chunk_type| chunk_type.apply_to(self)}

    # Handle hiding contexts like "pre" and "code" etc.
    # The markup (Textile, RDoc etc.) can produce such contexts with its own syntax.
    # To reveal them, we work on a copy of the content.
    # The copy is rendered and used to detect the chunks that are inside protecting contexts.
    # These chunks are reverted on the original content string.

    copy = WikiContentStub.new(self, @options)
    @options[:engine].apply_to(copy)

    copy.inside_chunks(HIDE_CHUNKS) do |id|
      @chunks_by_id[id].revert
    end
  end

  def pre_render!
    unless @pre_rendered
      @chunks_by_type[Include].each{|chunk| chunk.unmask }
      @pre_rendered = String.new(self)
    end
    @pre_rendered
  end

  def render!
    pre_render!
    @options[:engine].apply_to(self)
    # unmask in one go. $~[1] is the chunk id
    gsub!(MASK_RE[ACTIVE_CHUNKS]){
      if chunk = @chunks_by_id[$~[1]]
        chunk.unmask_text
      # if we match a chunk mask that existed in the original content string,
      # just keep it as it is
      else
        $~[0]
      end}
    self
  end

  def page_name
    @revision.page.name
  end

  def page_id
    @revision.page.id
  end

end
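How the pieces are meant to be driven, as a sketch only (revision stands for a saved Revision with a page and web, exactly as the constructor above expects):

  content = WikiContent.new(revision)     # masks chunks and keeps an unrendered copy in not_rendered
  html = content.render!                  # pre-renders includes, runs the markup engine, unmasks chunks in place
  content.find_chunks(WikiChunk::Word)    # rendered wiki-word chunks, e.g. to collect link targets
  content.page_name                       # delegates to revision.page.name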
lib/wiki_words.rb (new file, 23 lines)

@@ -0,0 +1,23 @@
# Contains all the methods for finding and replacing wiki words
module WikiWords
  # In order of appearance: Latin, Greek, Cyrillic, Armenian
  I18N_HIGHER_CASE_LETTERS =
    "À<EFBFBD>?ÂÃÄÅĀĄĂÆÇĆČĈĊĎ<C48A>?ÈÉÊËĒĘĚĔĖĜĞĠĢĤĦÌ<C4A6>?Î<>?ĪĨĬĮİIJĴĶ<C4B4>?ĽĹĻĿÑŃŇŅŊÒÓÔÕÖØŌ<C398>?ŎŒŔŘŖŚŠŞŜȘŤŢŦȚÙÚÛÜŪŮŰŬŨŲŴ<C5B2>?ŶŸŹŽŻ" +
    "ΑΒΓΔΕΖΗΘΙΚΛΜ<EFBFBD>?ΞΟΠΡΣΤΥΦΧΨΩ" +
    "ΆΈΉΊΌΎ<EFBFBD>?ѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎ<D28C>?ҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾ<D2BC>?ӃӅӇӉӋ<D389>?<3F>?ӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӸЖ" +
    "ԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀ<EFBFBD>?ՂՃՄՅՆՇՈՉՊՋՌ<D58B>?<3F>?<3F>?ՑՒՓՔՕՖ"

  I18N_LOWER_CASE_LETTERS =
    "àáâãäå<EFBFBD>?ąăæçć<C3A7>?ĉċ<C489>?đèéêëēęěĕėƒ<C497>?ğġģĥħìíîïīĩĭįıijĵķĸłľĺļŀñńňņʼnŋòóôõöø<C3B6>?ő<>?œŕřŗśšş<C5A1>?șťţŧțùúûüūůűŭũųŵýÿŷžżźÞþßſ<C39F>?ð" +
    "άέήίΰαβγδεζηθικλμνξοπ<EFBFBD>?ςστυφχψωϊϋό<CF8B>?ώ<>?" +
    "абвгдежзийклмнопр<EFBFBD>?туфхцчшщъыь<D18B>?ю<>?<3F>?ёђѓєѕіїјљћќ<D19B>?ўџѡѣѥѧѩѫѭѯѱѳѵѷѹѻѽѿ<D1BD>?ҋ<>?<3F>?ґғҕҗҙқ<D299>?ҟҡңҥҧҩҫҭүұҳҵҷҹһҽҿӀӂӄӆӈӊӌӎӑӓӕӗәӛ<D399>?ӟӡӣӥӧөӫӭӯӱӳӵӹ" +
    "աբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտր<EFBFBD>?ւփքօֆև"

  WIKI_WORD_PATTERN = '[A-Z' + I18N_HIGHER_CASE_LETTERS + '][a-z' + I18N_LOWER_CASE_LETTERS + ']+[A-Z' + I18N_HIGHER_CASE_LETTERS + ']\w+'
  CAMEL_CASED_WORD_BORDER = /([a-z#{I18N_LOWER_CASE_LETTERS}])([A-Z#{I18N_HIGHER_CASE_LETTERS}])/u

  def self.separate(wiki_word)
    wiki_word.gsub(CAMEL_CASED_WORD_BORDER, '\1 \2')
  end

end
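WikiWords.separate splits a camel-cased page name at each lower-case/upper-case boundary; for example (assuming the file loads despite the mangled character constants above):

  WikiWords.separate('WikiWord')     # => "Wiki Word"
  WikiWords.separate('MyHomePage')   # => "My Home Page"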