Use an internal sequence instead of object_id for an ID of a chunk [Denis Mertz]
parent 4850e59d18
commit 0dabfca465
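The change in a nutshell: chunk masks embedded in the rendered text used to carry the chunk's object_id; they now carry a stable id of the form <hex page id>n<per-content sequence>, so masks survive a wiki restart. A minimal standalone Ruby sketch of the new scheme (the 'nowiki' mask string below is an illustrative placeholder, not a value taken from this commit):

    # Build a mask the way the new Chunk::Abstract#mask does, from a stable id
    # instead of object_id (simplified, standalone sketch).
    page_id  = '486f6d6550616765'            # page name hex-encoded (see Page#id below)
    chunk_id = "#{page_id}n0"                 # 'n' joins page id and per-content sequence
    mask     = "chunk#{chunk_id}nowikichunk"  # 'nowiki' stands in for a chunk class mask_string

    # The updated Chunk::Abstract.mask_re pattern, specialised to that one mask string:
    mask_re = /chunk([0-9a-f]+n\d+)(nowiki)chunk/

    mask =~ mask_re
    $1  # => "486f6d6550616765n0" -- the chunk id, recovered as a string key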
@@ -1,4 +1,3 @@
-require 'digest/md5'
 require 'uri/common'
 
 # A chunk is a pattern of text that can be protected
@@ -29,12 +28,12 @@ module Chunk
   # a regexp that matches all chunk_types masks
   def Abstract::mask_re(chunk_types)
     tmp = chunk_types.map{|klass| klass.mask_string}.join("|")
-    Regexp.new("chunk(\\d+)(#{tmp})chunk")
+    Regexp.new("chunk([0-9a-f]+n\\d+)(#{tmp})chunk")
   end
 
   attr_reader :text, :unmask_text, :unmask_mode
 
   def initialize(match_data, content)
     @text = match_data[0]
     @content = content
     @unmask_mode = :normal
@@ -52,9 +51,17 @@ module Chunk
     end
   end
 
   # should contain only [a-z0-9]
   def mask
-    "chunk#{self.object_id}#{self.class.mask_string}chunk"
+    @mask ||= "chunk#{@id}#{self.class.mask_string}chunk"
   end
+
+  # We should not use object_id because object_id is not guaranteed
+  # to be unique when we restart the wiki (new object ids can equal old ones
+  # that were restored from madeleine storage)
+  def id
+    @id ||= "#{@content.page_id}n#{@content.chunk_id}"
+  end
 
   def unmask
     @content.sub!(mask, @unmask_text)
@@ -85,6 +85,11 @@ class Page
     @web.brackets_only ? name : WikiWords.separate(name)
   end
 
+  # used to build chunk ids.
+  def id
+    @id ||= name.unpack('H*').first
+  end
+
   def link(options = {})
     @web.make_link(name, nil, options)
   end
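For reference, Page#id above hex-encodes the page name with unpack('H*'), so the resulting id contains only [0-9a-f] characters and therefore fits the new mask regexp. For example:

    'HomePage'.unpack('H*').first  # => "486f6d6550616765"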
@@ -88,11 +88,15 @@ class Revision
   end
 
   def clear_display_cache
-    @wiki_references_cache = @wiki_includes = @display_cache = nil
+    @wiki_references_cache = @published_cache = @display_cache = nil
+    @wiki_includes_cache = nil
   end
 
   def display_published
-    @published_cache = WikiContent.new(self, {:mode => :publish}) if @published_cache.nil?
+    unless @published_cache && @published_cache.respond_to?(:chunks_by_type)
+      @published_cache = WikiContent.new(self, {:mode => :publish})
+      @published_cache.render!
+    end
     @published_cache
   end
 
@@ -39,10 +39,17 @@ require 'chunks/nowiki'
 # UPDATED: 22nd May 2004
 
 module ChunkManager
-  attr_reader :chunks_by_type, :chunks_by_id
+  attr_reader :chunks_by_type, :chunks_by_id, :chunks, :chunk_id
 
-  # regexp that match all chunk type masks
-  CHUNK_MASK_RE = Chunk::Abstract.mask_re(Chunk::Abstract::derivatives)
+  ACTIVE_CHUNKS = [ NoWiki, Category, WikiChunk::Link, URIChunk, LocalURIChunk,
+                    WikiChunk::Word ]
+
+  HIDE_CHUNKS = [ Literal::Pre, Literal::Tags ]
+
+  MASK_RE = {
+    ACTIVE_CHUNKS => Chunk::Abstract.mask_re(ACTIVE_CHUNKS),
+    HIDE_CHUNKS => Chunk::Abstract.mask_re(HIDE_CHUNKS)
+  }
 
   def init_chunk_manager
     @chunks_by_type = Hash.new
@@ -50,33 +57,40 @@ module ChunkManager
       @chunks_by_type[chunk_type] = Array.new
     }
     @chunks_by_id = Hash.new
+    @chunks = []
+    @chunk_id = 0
   end
 
   def add_chunk(c)
-    @chunks_by_type[c.class] << c
-    @chunks_by_id[c.object_id] = c
+    @chunks_by_type[c.class] << c
+    @chunks_by_id[c.id] = c
+    @chunks << c
+    @chunk_id += 1
   end
 
   def delete_chunk(c)
     @chunks_by_type[c.class].delete(c)
-    @chunks_by_id.delete(c.object_id)
-  end
-
-  def chunks
-    @chunks_by_id.values
+    @chunks_by_id.delete(c.id)
+    @chunks.delete(c)
   end
 
   def merge_chunks(other)
-    other.chunks_by_id.each_value{|c| add_chunk(c)}
+    other.chunks.each{|c| add_chunk(c)}
   end
 
   def scan_chunkid(text)
-    text.scan(CHUNK_MASK_RE){|a| yield a[0].to_i }
+    text.scan(MASK_RE[ACTIVE_CHUNKS]){|a| yield a[0] }
   end
 
   def find_chunks(chunk_type)
-    @chunks_by_id.values.select { |chunk| chunk.kind_of?(chunk_type) and chunk.rendered? }
+    @chunks.select { |chunk| chunk.kind_of?(chunk_type) and chunk.rendered? }
   end
+
+  # for testing and WikiContentStub; we need a page_id even if we have no page
+  def page_id
+    0
+  end
 
 end
 
 # A simplified version of WikiContent. Useful to avoid recursion problems in
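Since chunk ids are now strings rather than integers, scan_chunkid yields the captured id directly instead of calling to_i on it. A small standalone illustration (again with 'nowiki' as a stand-in for a real mask string, and the single-type regexp standing in for MASK_RE[ACTIVE_CHUNKS]):

    mask_re = /chunk([0-9a-f]+n\d+)(nowiki)chunk/
    text = 'a chunk486f6d6550616765n0nowikichunk b chunk486f6d6550616765n1nowikichunk c'
    ids = []
    text.scan(mask_re) { |a| ids << a[0] }
    ids  # => ["486f6d6550616765n0", "486f6d6550616765n1"]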
@@ -106,14 +120,7 @@ end
 
 class WikiContent < String
 
-  ACTIVE_CHUNKS = [ NoWiki, Category, WikiChunk::Link, URIChunk, LocalURIChunk,
-                    WikiChunk::Word ]
-  HIDE_CHUNKS = [ Literal::Pre, Literal::Tags ]
-
-  MASK_RE = {
-    ACTIVE_CHUNKS => Chunk::Abstract.mask_re(ACTIVE_CHUNKS),
-    HIDE_CHUNKS => Chunk::Abstract.mask_re(HIDE_CHUNKS)
-  }
+  include ChunkManager
 
   DEFAULT_OPTS = {
     :active_chunks => ACTIVE_CHUNKS,
@@ -124,8 +131,6 @@ class WikiContent < String
 
   attr_reader :web, :options, :revision, :not_rendered, :pre_rendered
 
-  include ChunkManager
-
   # Create a new wiki content string from the given one.
   # The options are explained at the top of this file.
   def initialize(revision, options = {})
@@ -179,9 +184,9 @@ class WikiContent < String
   def render!
     pre_render!
     @options[:engine].apply_to(self)
-    # unmask in one go. $~[1].to_i is the chunk id
+    # unmask in one go. $~[1] is the chunk id
     gsub!(MASK_RE[ACTIVE_CHUNKS]){
-      if chunk = @chunks_by_id[$~[1].to_i]
+      if chunk = @chunks_by_id[$~[1]]
        chunk.unmask_text
      # if we match a chunkmask that existed in the original content string
      # just keep it as it is
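The unmask step works the same way: the regexp's first capture is now used as a string key into @chunks_by_id, where before it was converted with to_i to look up an object_id. A standalone sketch of that lookup (illustrative names and mask string, not the commit's code):

    chunks_by_id = { '486f6d6550616765n0' => '<nowiki>raw text</nowiki>' }
    text = 'before chunk486f6d6550616765n0nowikichunk after'
    mask_re = /chunk([0-9a-f]+n\d+)(nowiki)chunk/
    text.gsub!(mask_re) { chunks_by_id[$~[1]] || $~[0] }  # keep unknown masks as they are
    text  # => "before <nowiki>raw text</nowiki> after"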
@@ -195,4 +200,8 @@ class WikiContent < String
     @revision.page.name
   end
 
+  def page_id
+    @revision.page.id
+  end
+
 end