diff --git a/Gemfile b/Gemfile index d9b07165..5f41b820 100755 --- a/Gemfile +++ b/Gemfile @@ -1,9 +1,8 @@ source "http://gemcutter.org" bin_path "vendor/bin" -disable_rubygems -disable_system_gems +# disable_rubygems +# disable_system_gems -gem "thin" gem "shotgun" gem "templater" gem "sprockets" @@ -14,7 +13,11 @@ gem "yui-compressor" gem "haml" gem "compass" gem "smusher" +gem "compass-slickmap" +gem "maruku" +gem "markaby" +gem "jeweler" gem "rspec" gem "rdoc" gem "sdoc" diff --git a/Rakefile b/Rakefile index b8f992dd..229439bc 100644 --- a/Rakefile +++ b/Rakefile @@ -13,20 +13,20 @@ begin gem.rubyforge_project = "middleman" gem.executables = %w(mm-init mm-build mm-server) gem.add_dependency("thin") - gem.add_dependency("shotgun") - gem.add_dependency("templater") - gem.add_dependency("sprockets") - gem.add_dependency("sinatra") - gem.add_dependency("sinatra-content-for") - gem.add_dependency("rack-test") - gem.add_dependency("yui-compressor") - gem.add_dependency("smusher") - gem.add_dependency("haml", ">=2.1.0") - gem.add_dependency("compass") - gem.add_development_dependency("rdoc") - gem.add_development_dependency("rspec") - gem.add_development_dependency("sdoc") - gem.add_development_dependency("cucumber") + # gem.add_dependency("shotgun") + # gem.add_dependency("templater") + # gem.add_dependency("sprockets") + # gem.add_dependency("sinatra") + # gem.add_dependency("sinatra-content-for") + # gem.add_dependency("rack-test") + # gem.add_dependency("yui-compressor") + # gem.add_dependency("smusher") + # gem.add_dependency("haml", ">=2.1.0") + # gem.add_dependency("compass") + # gem.add_development_dependency("rdoc") + # gem.add_development_dependency("rspec") + # gem.add_development_dependency("sdoc") + # gem.add_development_dependency("cucumber") # Ignore vendored files gem.files = gem.files.exclude("vendor/*") diff --git a/bin/mm-server b/bin/mm-server index c1fb7134..e8c236f7 100755 --- a/bin/mm-server +++ b/bin/mm-server @@ -52,6 +52,7 @@ require 'shotgun' config = File.join(File.dirname(__FILE__), '..', 'lib', 'middleman', 'config.ru') app = Shotgun.new(config, lambda { |inner_app| Middleman::Base }) +require 'rubygems' require 'thin' Thin::Logging.silent = true diff --git a/vendor/bin/jeweler b/vendor/bin/jeweler new file mode 100755 index 00000000..ad06100f --- /dev/null +++ b/vendor/bin/jeweler @@ -0,0 +1,3 @@ +#!/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/bin/ruby +require File.join(File.dirname(__FILE__), "../gems/environment") +load File.join(File.dirname(__FILE__), "../gems/gems/jeweler-1.3.0/bin/jeweler") \ No newline at end of file diff --git a/vendor/bin/thin b/vendor/bin/maruku similarity index 61% rename from vendor/bin/thin rename to vendor/bin/maruku index 30cb28b6..2a46dff6 100755 --- a/vendor/bin/thin +++ b/vendor/bin/maruku @@ -1,3 +1,3 @@ #!/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/bin/ruby require File.join(File.dirname(__FILE__), "../gems/environment") -load File.join(File.dirname(__FILE__), "../gems/gems/thin-1.2.5/bin/thin") \ No newline at end of file +load File.join(File.dirname(__FILE__), "../gems/gems/maruku-0.6.0/bin/maruku") \ No newline at end of file diff --git a/vendor/bin/marutex b/vendor/bin/marutex new file mode 100755 index 00000000..7e135eb4 --- /dev/null +++ b/vendor/bin/marutex @@ -0,0 +1,3 @@ +#!/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/bin/ruby +require File.join(File.dirname(__FILE__), "../gems/environment") +load File.join(File.dirname(__FILE__), "../gems/gems/maruku-0.6.0/bin/marutex") \ No newline at end of file diff 
--git a/vendor/bin/rubyforge b/vendor/bin/rubyforge new file mode 100755 index 00000000..ed066f3f --- /dev/null +++ b/vendor/bin/rubyforge @@ -0,0 +1,3 @@ +#!/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/bin/ruby +require File.join(File.dirname(__FILE__), "../gems/environment") +load File.join(File.dirname(__FILE__), "../gems/gems/rubyforge-2.0.3/bin/rubyforge") \ No newline at end of file diff --git a/vendor/gems/cache/compass-slickmap-0.2.1.gem b/vendor/gems/cache/compass-slickmap-0.2.1.gem new file mode 100644 index 00000000..0ea43fac Binary files /dev/null and b/vendor/gems/cache/compass-slickmap-0.2.1.gem differ diff --git a/vendor/gems/cache/gemcutter-0.1.7.gem b/vendor/gems/cache/gemcutter-0.1.7.gem new file mode 100644 index 00000000..2edc3c02 Binary files /dev/null and b/vendor/gems/cache/gemcutter-0.1.7.gem differ diff --git a/vendor/gems/cache/git-1.2.5.gem b/vendor/gems/cache/git-1.2.5.gem new file mode 100644 index 00000000..d9cda559 Binary files /dev/null and b/vendor/gems/cache/git-1.2.5.gem differ diff --git a/vendor/gems/cache/jeweler-1.3.0.gem b/vendor/gems/cache/jeweler-1.3.0.gem new file mode 100644 index 00000000..e3129950 Binary files /dev/null and b/vendor/gems/cache/jeweler-1.3.0.gem differ diff --git a/vendor/gems/cache/json_pure-1.2.0.gem b/vendor/gems/cache/json_pure-1.2.0.gem new file mode 100644 index 00000000..f9e2535e Binary files /dev/null and b/vendor/gems/cache/json_pure-1.2.0.gem differ diff --git a/vendor/gems/cache/markaby-0.5.gem b/vendor/gems/cache/markaby-0.5.gem new file mode 100644 index 00000000..16dea9e8 Binary files /dev/null and b/vendor/gems/cache/markaby-0.5.gem differ diff --git a/vendor/gems/cache/maruku-0.6.0.gem b/vendor/gems/cache/maruku-0.6.0.gem new file mode 100644 index 00000000..d048b507 Binary files /dev/null and b/vendor/gems/cache/maruku-0.6.0.gem differ diff --git a/vendor/gems/cache/net-scp-1.0.2.gem b/vendor/gems/cache/net-scp-1.0.2.gem new file mode 100644 index 00000000..b30a5310 Binary files /dev/null and b/vendor/gems/cache/net-scp-1.0.2.gem differ diff --git a/vendor/gems/cache/net-ssh-2.0.15.gem b/vendor/gems/cache/net-ssh-2.0.15.gem new file mode 100644 index 00000000..a513edb1 Binary files /dev/null and b/vendor/gems/cache/net-ssh-2.0.15.gem differ diff --git a/vendor/gems/cache/rubyforge-2.0.3.gem b/vendor/gems/cache/rubyforge-2.0.3.gem new file mode 100644 index 00000000..0a516c5a Binary files /dev/null and b/vendor/gems/cache/rubyforge-2.0.3.gem differ diff --git a/vendor/gems/cache/syntax-1.0.0.gem b/vendor/gems/cache/syntax-1.0.0.gem new file mode 100644 index 00000000..d193067a Binary files /dev/null and b/vendor/gems/cache/syntax-1.0.0.gem differ diff --git a/vendor/gems/environment.rb b/vendor/gems/environment.rb index 00ae43a9..627dffd8 100644 --- a/vendor/gems/environment.rb +++ b/vendor/gems/environment.rb @@ -3,37 +3,43 @@ module Bundler file = File.expand_path(__FILE__) dir = File.dirname(file) - ENV["GEM_HOME"] = dir - ENV["GEM_PATH"] = dir ENV["PATH"] = "#{dir}/../bin:#{ENV["PATH"]}" ENV["RUBYOPT"] = "-r#{file} #{ENV["RUBYOPT"]}" - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/builder-2.1.2/bin") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/builder-2.1.2/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/polyglot-0.2.9/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/polyglot-0.2.9/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/treetop-1.4.2/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/treetop-1.4.2/lib") $LOAD_PATH.unshift 
File.expand_path("#{dir}/gems/term-ansicolor-1.0.4/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/term-ansicolor-1.0.4/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/syntax-1.0.0/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/syntax-1.0.0/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/maruku-0.6.0/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/maruku-0.6.0/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/extlib-0.9.13/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/extlib-0.9.13/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/highline-1.5.1/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/highline-1.5.1/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/diff-lcs-1.1.2/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/diff-lcs-1.1.2/lib") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/cucumber-0.4.4/bin") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/cucumber-0.4.4/lib") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/rspec-1.2.9/bin") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/rspec-1.2.9/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/templater-1.0.0/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/templater-1.0.0/lib") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/eventmachine-0.12.10/bin") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/eventmachine-0.12.10/lib") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/daemons-1.0.10/bin") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/daemons-1.0.10/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/net-ssh-2.0.15/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/net-ssh-2.0.15/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/haml-2.2.13/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/haml-2.2.13/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/net-scp-1.0.2/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/net-scp-1.0.2/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/json_pure-1.2.0/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/json_pure-1.2.0/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/gemcutter-0.1.7/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/gemcutter-0.1.7/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/rubyforge-2.0.3/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/rubyforge-2.0.3/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/git-1.2.5/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/git-1.2.5/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/jeweler-1.3.0/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/jeweler-1.3.0/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/json-1.2.0/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/json-1.2.0/ext/json/ext") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/json-1.2.0/ext") @@ -44,12 +50,18 @@ module Bundler $LOAD_PATH.unshift File.expand_path("#{dir}/gems/rake-0.8.7/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/sprockets-1.0.2/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/sprockets-1.0.2/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/builder-2.1.2/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/builder-2.1.2/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/cucumber-0.4.4/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/cucumber-0.4.4/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/rspec-1.2.9/bin") + $LOAD_PATH.unshift 
File.expand_path("#{dir}/gems/rspec-1.2.9/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/markaby-0.5/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/markaby-0.5/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/smusher-0.4.2/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/smusher-0.4.2/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/rack-1.0.1/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/rack-1.0.1/lib") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/thin-1.2.5/bin") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/thin-1.2.5/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/rack-test-0.5.2/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/rack-test-0.5.2/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/configuration-1.1.0/bin") @@ -64,15 +76,109 @@ module Bundler $LOAD_PATH.unshift File.expand_path("#{dir}/gems/rdoc-2.4.3/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/sdoc-0.2.14.1/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/sdoc-0.2.14.1/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/yui-compressor-0.9.1/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/yui-compressor-0.9.1/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/sinatra-content-for-0.2/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/sinatra-content-for-0.2/lib") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/compass-0.8.17/bin") $LOAD_PATH.unshift File.expand_path("#{dir}/gems/compass-0.8.17/lib") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/yui-compressor-0.9.1/bin") - $LOAD_PATH.unshift File.expand_path("#{dir}/gems/yui-compressor-0.9.1/lib") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/compass-slickmap-0.2.1/bin") + $LOAD_PATH.unshift File.expand_path("#{dir}/gems/compass-slickmap-0.2.1/lib") @gemfile = "#{dir}/../../Gemfile" + require "rubygems" + + @bundled_specs = {} + @bundled_specs["polyglot"] = eval(File.read("#{dir}/specifications/polyglot-0.2.9.gemspec")) + @bundled_specs["polyglot"].loaded_from = "#{dir}/specifications/polyglot-0.2.9.gemspec" + @bundled_specs["treetop"] = eval(File.read("#{dir}/specifications/treetop-1.4.2.gemspec")) + @bundled_specs["treetop"].loaded_from = "#{dir}/specifications/treetop-1.4.2.gemspec" + @bundled_specs["term-ansicolor"] = eval(File.read("#{dir}/specifications/term-ansicolor-1.0.4.gemspec")) + @bundled_specs["term-ansicolor"].loaded_from = "#{dir}/specifications/term-ansicolor-1.0.4.gemspec" + @bundled_specs["syntax"] = eval(File.read("#{dir}/specifications/syntax-1.0.0.gemspec")) + @bundled_specs["syntax"].loaded_from = "#{dir}/specifications/syntax-1.0.0.gemspec" + @bundled_specs["maruku"] = eval(File.read("#{dir}/specifications/maruku-0.6.0.gemspec")) + @bundled_specs["maruku"].loaded_from = "#{dir}/specifications/maruku-0.6.0.gemspec" + @bundled_specs["extlib"] = eval(File.read("#{dir}/specifications/extlib-0.9.13.gemspec")) + @bundled_specs["extlib"].loaded_from = "#{dir}/specifications/extlib-0.9.13.gemspec" + @bundled_specs["highline"] = eval(File.read("#{dir}/specifications/highline-1.5.1.gemspec")) + @bundled_specs["highline"].loaded_from = "#{dir}/specifications/highline-1.5.1.gemspec" + @bundled_specs["diff-lcs"] = eval(File.read("#{dir}/specifications/diff-lcs-1.1.2.gemspec")) + @bundled_specs["diff-lcs"].loaded_from = "#{dir}/specifications/diff-lcs-1.1.2.gemspec" + @bundled_specs["templater"] = eval(File.read("#{dir}/specifications/templater-1.0.0.gemspec")) + @bundled_specs["templater"].loaded_from = 
"#{dir}/specifications/templater-1.0.0.gemspec" + @bundled_specs["net-ssh"] = eval(File.read("#{dir}/specifications/net-ssh-2.0.15.gemspec")) + @bundled_specs["net-ssh"].loaded_from = "#{dir}/specifications/net-ssh-2.0.15.gemspec" + @bundled_specs["haml"] = eval(File.read("#{dir}/specifications/haml-2.2.13.gemspec")) + @bundled_specs["haml"].loaded_from = "#{dir}/specifications/haml-2.2.13.gemspec" + @bundled_specs["net-scp"] = eval(File.read("#{dir}/specifications/net-scp-1.0.2.gemspec")) + @bundled_specs["net-scp"].loaded_from = "#{dir}/specifications/net-scp-1.0.2.gemspec" + @bundled_specs["json_pure"] = eval(File.read("#{dir}/specifications/json_pure-1.2.0.gemspec")) + @bundled_specs["json_pure"].loaded_from = "#{dir}/specifications/json_pure-1.2.0.gemspec" + @bundled_specs["gemcutter"] = eval(File.read("#{dir}/specifications/gemcutter-0.1.7.gemspec")) + @bundled_specs["gemcutter"].loaded_from = "#{dir}/specifications/gemcutter-0.1.7.gemspec" + @bundled_specs["rubyforge"] = eval(File.read("#{dir}/specifications/rubyforge-2.0.3.gemspec")) + @bundled_specs["rubyforge"].loaded_from = "#{dir}/specifications/rubyforge-2.0.3.gemspec" + @bundled_specs["git"] = eval(File.read("#{dir}/specifications/git-1.2.5.gemspec")) + @bundled_specs["git"].loaded_from = "#{dir}/specifications/git-1.2.5.gemspec" + @bundled_specs["jeweler"] = eval(File.read("#{dir}/specifications/jeweler-1.3.0.gemspec")) + @bundled_specs["jeweler"].loaded_from = "#{dir}/specifications/jeweler-1.3.0.gemspec" + @bundled_specs["json"] = eval(File.read("#{dir}/specifications/json-1.2.0.gemspec")) + @bundled_specs["json"].loaded_from = "#{dir}/specifications/json-1.2.0.gemspec" + @bundled_specs["httpclient"] = eval(File.read("#{dir}/specifications/httpclient-2.1.5.2.gemspec")) + @bundled_specs["httpclient"].loaded_from = "#{dir}/specifications/httpclient-2.1.5.2.gemspec" + @bundled_specs["rake"] = eval(File.read("#{dir}/specifications/rake-0.8.7.gemspec")) + @bundled_specs["rake"].loaded_from = "#{dir}/specifications/rake-0.8.7.gemspec" + @bundled_specs["sprockets"] = eval(File.read("#{dir}/specifications/sprockets-1.0.2.gemspec")) + @bundled_specs["sprockets"].loaded_from = "#{dir}/specifications/sprockets-1.0.2.gemspec" + @bundled_specs["builder"] = eval(File.read("#{dir}/specifications/builder-2.1.2.gemspec")) + @bundled_specs["builder"].loaded_from = "#{dir}/specifications/builder-2.1.2.gemspec" + @bundled_specs["cucumber"] = eval(File.read("#{dir}/specifications/cucumber-0.4.4.gemspec")) + @bundled_specs["cucumber"].loaded_from = "#{dir}/specifications/cucumber-0.4.4.gemspec" + @bundled_specs["rspec"] = eval(File.read("#{dir}/specifications/rspec-1.2.9.gemspec")) + @bundled_specs["rspec"].loaded_from = "#{dir}/specifications/rspec-1.2.9.gemspec" + @bundled_specs["markaby"] = eval(File.read("#{dir}/specifications/markaby-0.5.gemspec")) + @bundled_specs["markaby"].loaded_from = "#{dir}/specifications/markaby-0.5.gemspec" + @bundled_specs["smusher"] = eval(File.read("#{dir}/specifications/smusher-0.4.2.gemspec")) + @bundled_specs["smusher"].loaded_from = "#{dir}/specifications/smusher-0.4.2.gemspec" + @bundled_specs["rack"] = eval(File.read("#{dir}/specifications/rack-1.0.1.gemspec")) + @bundled_specs["rack"].loaded_from = "#{dir}/specifications/rack-1.0.1.gemspec" + @bundled_specs["rack-test"] = eval(File.read("#{dir}/specifications/rack-test-0.5.2.gemspec")) + @bundled_specs["rack-test"].loaded_from = "#{dir}/specifications/rack-test-0.5.2.gemspec" + @bundled_specs["configuration"] = 
eval(File.read("#{dir}/specifications/configuration-1.1.0.gemspec")) + @bundled_specs["configuration"].loaded_from = "#{dir}/specifications/configuration-1.1.0.gemspec" + @bundled_specs["launchy"] = eval(File.read("#{dir}/specifications/launchy-0.3.3.gemspec")) + @bundled_specs["launchy"].loaded_from = "#{dir}/specifications/launchy-0.3.3.gemspec" + @bundled_specs["shotgun"] = eval(File.read("#{dir}/specifications/shotgun-0.4.gemspec")) + @bundled_specs["shotgun"].loaded_from = "#{dir}/specifications/shotgun-0.4.gemspec" + @bundled_specs["sinatra"] = eval(File.read("#{dir}/specifications/sinatra-0.9.4.gemspec")) + @bundled_specs["sinatra"].loaded_from = "#{dir}/specifications/sinatra-0.9.4.gemspec" + @bundled_specs["rdoc"] = eval(File.read("#{dir}/specifications/rdoc-2.4.3.gemspec")) + @bundled_specs["rdoc"].loaded_from = "#{dir}/specifications/rdoc-2.4.3.gemspec" + @bundled_specs["sdoc"] = eval(File.read("#{dir}/specifications/sdoc-0.2.14.1.gemspec")) + @bundled_specs["sdoc"].loaded_from = "#{dir}/specifications/sdoc-0.2.14.1.gemspec" + @bundled_specs["yui-compressor"] = eval(File.read("#{dir}/specifications/yui-compressor-0.9.1.gemspec")) + @bundled_specs["yui-compressor"].loaded_from = "#{dir}/specifications/yui-compressor-0.9.1.gemspec" + @bundled_specs["sinatra-content-for"] = eval(File.read("#{dir}/specifications/sinatra-content-for-0.2.gemspec")) + @bundled_specs["sinatra-content-for"].loaded_from = "#{dir}/specifications/sinatra-content-for-0.2.gemspec" + @bundled_specs["compass"] = eval(File.read("#{dir}/specifications/compass-0.8.17.gemspec")) + @bundled_specs["compass"].loaded_from = "#{dir}/specifications/compass-0.8.17.gemspec" + @bundled_specs["compass-slickmap"] = eval(File.read("#{dir}/specifications/compass-slickmap-0.2.1.gemspec")) + @bundled_specs["compass-slickmap"].loaded_from = "#{dir}/specifications/compass-slickmap-0.2.1.gemspec" + + def self.add_specs_to_loaded_specs + Gem.loaded_specs.merge! @bundled_specs + end + + def self.add_specs_to_index + @bundled_specs.each do |name, spec| + Gem.source_index.add_spec spec + end + end + + add_specs_to_loaded_specs + add_specs_to_index def self.require_env(env = nil) context = Class.new do @@ -129,36 +235,11 @@ module Bundler end end -$" << "rubygems.rb" +module Gem + @loaded_stacks = Hash.new { |h,k| h[k] = [] } -module Kernel - def gem(*) - # Silently ignore calls to gem, since, in theory, everything - # is activated correctly already. + def source_index.refresh! + super + Bundler.add_specs_to_index end end - -# Define all the Gem errors for gems that reference them. 
-module Gem - def self.ruby ; "/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/bin/ruby" ; end - class LoadError < ::LoadError; end - class Exception < RuntimeError; end - class CommandLineError < Exception; end - class DependencyError < Exception; end - class DependencyRemovalException < Exception; end - class GemNotInHomeException < Exception ; end - class DocumentError < Exception; end - class EndOfYAMLException < Exception; end - class FilePermissionError < Exception; end - class FormatException < Exception; end - class GemNotFoundException < Exception; end - class InstallError < Exception; end - class InvalidSpecificationException < Exception; end - class OperationNotSupportedError < Exception; end - class RemoteError < Exception; end - class RemoteInstallationCancelled < Exception; end - class RemoteInstallationSkipped < Exception; end - class RemoteSourceException < Exception; end - class VerificationError < Exception; end - class SystemExitException < SystemExit; end -end diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/.gitignore b/vendor/gems/gems/compass-slickmap-0.2.1/.gitignore new file mode 100644 index 00000000..cac65c5d --- /dev/null +++ b/vendor/gems/gems/compass-slickmap-0.2.1/.gitignore @@ -0,0 +1,3 @@ +.DS_Store +pkg +.sass-cache \ No newline at end of file diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/Rakefile b/vendor/gems/gems/compass-slickmap-0.2.1/Rakefile new file mode 100644 index 00000000..76b6c364 --- /dev/null +++ b/vendor/gems/gems/compass-slickmap-0.2.1/Rakefile @@ -0,0 +1,20 @@ +require 'rubygems' +require 'rake' + +begin + require 'jeweler' + Jeweler::Tasks.new do |gem| + gem.name = "compass-slickmap" + gem.summary = %Q{An implementation of SlickmapCSS sitemap in Sass} + gem.email = "tdreyno@gmail.com" + gem.homepage = "http://github.com/tdreyno/compass-slickmap" + gem.authors = ["Thomas Reynolds"] + gem.rubyforge_project = "compassslickmap" + # gem is a Gem::Specification... see http://www.rubygems.org/read/chapter/20 for additional settings + gem.add_dependency("compass") + end + + Jeweler::RubyforgeTasks.new +rescue LoadError + puts "Jeweler (or a dependency) not available. Install it with: sudo gem install jeweler" +end \ No newline at end of file diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/VERSION b/vendor/gems/gems/compass-slickmap-0.2.1/VERSION new file mode 100644 index 00000000..0c62199f --- /dev/null +++ b/vendor/gems/gems/compass-slickmap-0.2.1/VERSION @@ -0,0 +1 @@ +0.2.1 diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/compass-slickmap.gemspec b/vendor/gems/gems/compass-slickmap-0.2.1/compass-slickmap.gemspec new file mode 100644 index 00000000..879030a8 --- /dev/null +++ b/vendor/gems/gems/compass-slickmap-0.2.1/compass-slickmap.gemspec @@ -0,0 +1,53 @@ +# Generated by jeweler +# DO NOT EDIT THIS FILE +# Instead, edit Jeweler::Tasks in Rakefile, and run `rake gemspec` +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{compass-slickmap} + s.version = "0.2.1" + + s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? 
:required_rubygems_version= + s.authors = ["Thomas Reynolds"] + s.date = %q{2009-10-09} + s.email = %q{tdreyno@gmail.com} + s.files = [ + ".gitignore", + "Rakefile", + "VERSION", + "compass-slickmap.gemspec", + "lib/slickmap.rb", + "lib/slickmap/compass_plugin.rb", + "sass/_slickmap.sass", + "templates/project/images/L1-center.png", + "templates/project/images/L1-left.png", + "templates/project/images/L1-right.png", + "templates/project/images/L3-bottom.png", + "templates/project/images/L3-center.png", + "templates/project/images/L3-li-top.png", + "templates/project/images/L3-ul-top.png", + "templates/project/images/vertical-line.png", + "templates/project/images/white-highlight.png", + "templates/project/manifest.rb", + "templates/project/sitemap.sass" + ] + s.homepage = %q{http://github.com/tdreyno/compass-slickmap} + s.rdoc_options = ["--charset=UTF-8"] + s.require_paths = ["lib"] + s.rubyforge_project = %q{compassslickmap} + s.rubygems_version = %q{1.3.5} + s.summary = %q{An implementation of SlickmapCSS sitemap in Sass} + + if s.respond_to? :specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 3 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + s.add_runtime_dependency(%q<compass>, [">= 0"]) + else + s.add_dependency(%q<compass>, [">= 0"]) + end + else + s.add_dependency(%q<compass>, [">= 0"]) + end +end diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/lib/slickmap.rb b/vendor/gems/gems/compass-slickmap-0.2.1/lib/slickmap.rb new file mode 100755 index 00000000..79a43799 --- /dev/null +++ b/vendor/gems/gems/compass-slickmap-0.2.1/lib/slickmap.rb @@ -0,0 +1,45 @@ +require 'base64' +require File.join(File.dirname(__FILE__), 'slickmap', 'compass_plugin') + +module Compass::SlickmapImage + def slickmap_image(path, mime_type = nil) + path = path.value + real_path = File.join(File.dirname(__FILE__), "..", "templates", "project", "images", path) + url = "url('data:#{compute_mime_type(path,mime_type)};base64,#{data(real_path)}')" + Sass::Script::String.new(url) + end + +private + def compute_mime_type(path, mime_type) + return mime_type if mime_type + case path + when /\.png$/i + 'image/png' + when /\.jpe?g$/i + 'image/jpeg' + when /\.gif$/i + 'image/gif' + when /\.([a-zA-Z]+)$/ + "image/#{Regexp.last_match(1).downcase}" + else + raise Compass::Error, "A mime type could not be determined for #{path}, please specify one explicitly." 
+ end + end + + def data(real_path) + if File.readable?(real_path) + Base64.encode64(File.read(real_path)).gsub("\n","") + else + raise Compass::Error, "File not found or cannot be read: #{real_path}" + end + end +end + + +module ::Sass::Script::Functions + include Compass::SlickmapImage +end + +class ::Sass::Script::Functions::EvaluationContext + include ::Sass::Script::Functions +end \ No newline at end of file diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/lib/slickmap/compass_plugin.rb b/vendor/gems/gems/compass-slickmap-0.2.1/lib/slickmap/compass_plugin.rb new file mode 100755 index 00000000..d7086b2d --- /dev/null +++ b/vendor/gems/gems/compass-slickmap-0.2.1/lib/slickmap/compass_plugin.rb @@ -0,0 +1,5 @@ +options = Hash.new +options[:stylesheets_directory] = File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'sass')) +options[:templates_directory] = File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'templates')) + +Compass::Frameworks.register('slickmap', options) \ No newline at end of file diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/sass/_slickmap.sass b/vendor/gems/gems/compass-slickmap-0.2.1/sass/_slickmap.sass new file mode 100644 index 00000000..9526b7a1 --- /dev/null +++ b/vendor/gems/gems/compass-slickmap-0.2.1/sass/_slickmap.sass @@ -0,0 +1,206 @@ +@import compass/utilities/general/reset.sass + +=slickmap(!body_selector = "body") + +global-reset + #{!body_selector} + +slickmap-body + @if !body_selector != "body" + +slickmap-defaults + @if !body_selector == "body" + +slickmap-defaults + +=slickmap-primary-nav + margin: 0 + float: left + width: 100% + li + width: 25% + &.col1 li + width: 99.9% + &.col2 li + width: 50.0% + &.col3 li + width: 33.3% + &.col4 li + width: 25.0% + &.col5 li + width: 20.0% + &.col6 li + width: 16.6% + &.col7 li + width: 14.2% + &.col8 li + width: 12.5% + &.col9 li + width: 11.1% + &.col10 li + width: 10.0% + li + ul li + width: 100% !important + + a:link:before, a:visited:before + color: #78a9c0 + + // Second Level + li + width: 100% + clear: left + margin-top: 0 + padding: 10px 0 0 0 + background= slickmap_image("vertical-line.png") "center" "bottom" "repeat-y" + a + background-color: #cee3ac + border-color: #b8da83 + &:hover + border-color: #94b75f + background-color: #e7f1d7 + &:first-child + padding-top: 30px + &:last-child + background= slickmap_image("vertical-line.png") "center" "bottom" "repeat-y" + + a:link:before, a:visited:before + color: #8faf5c + + // Third Level + ul + margin: 10px 0 0 0 + width: 100% + float: right + padding: 9px 0 10px 0 + background= #fff slickmap_image("L3-ul-top.png") "center" "top" "no-repeat" + + li + background= slickmap_image("L3-center.png") "left" "center" "no-repeat" + padding: 5px 0 + a + background-color: #fff7aa + border-color: #e3ca4b + font-size: 12px + padding: 5px 0 + width: 80% + float: right + &:hover + background-color: #fffce5 + border-color: #d1b62c + &:first-child + padding: 15px 0 5px 0 + background= slickmap_image("L3-li-top.png") "left" "center" "no-repeat" + &:last-child + background= slickmap_image("L3-bottom.png") "left" "center" "no-repeat" + a:link:before, a:visited:before + color: #ccae14 + font-size: 9px + + li + float: left + background= slickmap_image("L1-center.png") "center" "top" "no-repeat" + padding: 30px 0 + margin-top: -30px + &:last-child + background= slickmap_image("L1-right.png") "center" "top" "no-repeat" + a + margin: 0 20px 0 0 + padding: 10px 0 + display: block + font-size: 14px + font-weight: bold + text-align: center + color: 
black + background= #c3eafb slickmap_image("white-highlight.png") "top" "left" "repeat-x" + border: 2px solid #b5d9ea + -moz-border-radius: 5px + -webkit-border-radius: 5px + -webkit-box-shadow: rgba(0,0,0,0.5) 2px 2px 2px + -moz-box-shadow: rgba(0,0,0,0.5) 2px 2px 2px + &:hover + background-color: #e2f4fd + border-color: #97bdcf + +=slickmap-primary-nav-home + display: block + float: none + background= #fff slickmap_image("L1-left.png") "center" "bottom" "no-repeat" + position: relative + z-index: 2 + padding: 0 0 30px 0 + +=slickmap-utility-nav + float: right + max-width: 50% + margin-right: 10px + li + float: left + margin-bottom: 10px + a + margin: 0 10px 0 0 + padding: 5px 10px + display: block + border: 2px solid #e3ca4b + font-size: 12px + font-weight: bold + text-align: center + color: black + background= #fff7aa slickmap_image("white-highlight.png") "top" "left" "repeat-x" + -moz-border-radius: 5px + -webkit-border-radius: 5px + -webkit-box-shadow: rgba(0,0,0,0.5) 2px 2px 2px + -moz-box-shadow: rgba(0,0,0,0.5) 2px 2px 2px + &:hover + background-color: #fffce5 + border-color: #d1b62c + &:link:before, &:visited:before + color: #ccae14 + font-size: 9px + margin-bottom: 3px + +// General Styles +=slickmap-body + background: white + color: black + padding: 40px + font-family: Gotham, Helvetica, Arial, sans-serif + font-size: 12px + line-height: 1 + +=slickmap-defaults + .sitemap + margin: 0 0 40px 0 + float: left + width: 100% + h1 + font-weight: bold + text-transform: uppercase + font-size: 20px + margin: 0 0 5px 0 + h2 + font-family: "Lucida Grande", Verdana, sans-serif + font-size: 10px + color: #777777 + margin: 0 0 20px 0 + a + text-decoration: none + &:link:before, &:visited:before + content: " "attr(href)" " + display: block + text-transform: uppercase + font-size: 10px + margin-bottom: 5px + word-wrap: break-word + ol, ul + list-style: none + + // + NUMBER OF COLUMNS: Adjust #primaryNav li to set the number + of columns required in your site map. The default is + 4 columns (25%). 5 columns would be 20%, 6 columns would + be 16.6%, etc. 
+ #primaryNav + +slickmap-primary-nav + li#home + +slickmap-primary-nav-home + + // Utility Navigation + #utilityNav + +slickmap-utility-nav \ No newline at end of file diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L1-center.png b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L1-center.png new file mode 100644 index 00000000..8023abc5 Binary files /dev/null and b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L1-center.png differ diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L1-left.png b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L1-left.png new file mode 100644 index 00000000..320fd43d Binary files /dev/null and b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L1-left.png differ diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L1-right.png b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L1-right.png new file mode 100644 index 00000000..154441a3 Binary files /dev/null and b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L1-right.png differ diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-bottom.png b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-bottom.png new file mode 100644 index 00000000..572b282e Binary files /dev/null and b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-bottom.png differ diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-center.png b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-center.png new file mode 100644 index 00000000..bf04ec17 Binary files /dev/null and b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-center.png differ diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-li-top.png b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-li-top.png new file mode 100644 index 00000000..7799e40e Binary files /dev/null and b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-li-top.png differ diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-ul-top.png b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-ul-top.png new file mode 100644 index 00000000..dfa0b4b3 Binary files /dev/null and b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/L3-ul-top.png differ diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/vertical-line.png b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/vertical-line.png new file mode 100644 index 00000000..af2d0372 Binary files /dev/null and b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/vertical-line.png differ diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/white-highlight.png b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/white-highlight.png new file mode 100644 index 00000000..ed0176d7 Binary files /dev/null and b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/images/white-highlight.png differ diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/manifest.rb b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/manifest.rb new file mode 100755 index 00000000..8204fe58 --- /dev/null +++ b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/manifest.rb @@ -0,0 +1,10 @@ +image 'images/L1-center.png' +image 
'images/L1-left.png' +image 'images/L1-right.png' +image 'images/L3-bottom.png' +image 'images/L3-center.png' +image 'images/L3-li-top.png' +image 'images/L3-ul-top.png' +image 'images/vertical-line.png' +image 'images/white-highlight.png' +stylesheet 'sitemap.sass', :media => "screen, projection" \ No newline at end of file diff --git a/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/sitemap.sass b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/sitemap.sass new file mode 100755 index 00000000..c11b2095 --- /dev/null +++ b/vendor/gems/gems/compass-slickmap-0.2.1/templates/project/sitemap.sass @@ -0,0 +1,3 @@ +@import slickmap.sass + ++slickmap diff --git a/vendor/gems/gems/daemons-1.0.10/LICENSE b/vendor/gems/gems/daemons-1.0.10/LICENSE deleted file mode 100644 index 44887f32..00000000 --- a/vendor/gems/gems/daemons-1.0.10/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -Copyright (c) 2005-2007 Thomas Uehlinger - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. - -This license does not apply to daemonize.rb, which is was written by -Travis Whitton und published under the following license: - -The Daemonize extension module is copywrited free software by Travis Whitton -. You can redistribute it under the terms specified in -the COPYING file of the Ruby distribution. \ No newline at end of file diff --git a/vendor/gems/gems/daemons-1.0.10/README b/vendor/gems/gems/daemons-1.0.10/README deleted file mode 100644 index 27297c8d..00000000 --- a/vendor/gems/gems/daemons-1.0.10/README +++ /dev/null @@ -1,223 +0,0 @@ -= Daemons Version 1.0.10 - -(See Releases for release-specific information) - -== What is Daemons? - -Daemons provides an easy way to wrap existing ruby scripts (for example a self-written server) -to be run as a daemon and to be controlled by simple start/stop/restart commands. - -If you want, you can also use daemons to run blocks of ruby code in a daemon process and to control -these processes from the main application. - -Besides this basic functionality, daemons offers many advanced features like exception backtracing -and logging (in case your ruby script crashes) and monitoring and automatic restarting of your processes -if they crash. - -Daemons includes the daemonize.rb script written by Travis Whitton to do the daemonization -process. - -== Basic Usage - -You can use Daemons in four differet ways: - -=== 1. 
Create wrapper scripts for your server scripts or applications - -Layout: suppose you have your self-written server myserver.rb: - - # this is myserver.rb - # it does nothing really useful at the moment - - loop do - sleep(5) - end - -To use myserver.rb in a production environment, you need to be able to -run myserver.rb in the _background_ (this means detach it from the console, fork it -in the background, release all directories and file descriptors). - -Just create myserver_control.rb like this: - - # this is myserver_control.rb - - require 'rubygems' # if you use RubyGems - require 'daemons' - - Daemons.run('myserver.rb') - -And use it like this from the console: - - $ ruby myserver_control.rb start - (myserver.rb is now running in the background) - $ ruby myserver_control.rb restart - (...) - $ ruby myserver_control.rb stop - -For testing purposes you can even run myserver.rb without forking in the background: - - $ ruby myserver_control.rb run - -An additional nice feature of Daemons is that you can pass additional arguments to the script that -should be daemonized by seperating them by two _hyphens_: - - $ ruby myserver_control.rb start -- --file=anyfile --a_switch another_argument - - -=== 2. Create wrapper scripts that include your server procs - -Layout: suppose you have some code you want to run in the background and control that background process -from a script: - - # this is your code - # it does nothing really useful at the moment - - loop do - sleep(5) - end - -To run this code as a daemon create myproc_control.rb like this and include your code: - - # this is myproc_control.rb - - require 'rubygems' # if you use RubyGems - require 'daemons' - - Daemons.run_proc('myproc.rb') do - loop do - sleep(5) - end - end - -And use it like this from the console: - - $ ruby myproc_control.rb start - (myproc.rb is now running in the background) - $ ruby myproc_control.rb restart - (...) - $ ruby myproc_control.rb stop - -For testing purposes you can even run myproc.rb without forking in the background: - - $ ruby myproc_control.rb run - -=== 3. Control a bunch of daemons from another application - -Layout: you have an application my_app.rb that wants to run a bunch of -server tasks as daemon processes. - - # this is my_app.rb - - require 'rubygems' # if you use RubyGems - require 'daemons' - - task1 = Daemons.call(:multiple => true) do - # first server task - - loop { - conn = accept_conn() - serve(conn) - } - end - - task2 = Daemons.call do - # second server task - - loop { - something_different() - } - end - - # the parent process continues to run - - # we can even control our tasks, for example stop them - task1.stop - task2.stop - - exit - -=== 4. Daemonize the currently running process - -Layout: you have an application my_daemon.rb that wants to run as a daemon -(but without the ability to be controlled by daemons via start/stop commands) - - # this is my_daemons.rb - - require 'rubygems' # if you use RubyGems - require 'daemons' - - # Initialize the app while we're not a daemon - init() - - # Become a daemon - Daemons.daemonize - - # The server loop - loop { - conn = accept_conn() - serve(conn) - } - - -For further documentation, refer to the module documentation of Daemons. 
- - -== Download and Installation - -*Download*: just go to http://rubyforge.org/projects/daemons/ - -Installation *with* RubyGems: - $ su - # gem install daemons - -Installation *without* RubyGems: - $ tar xfz daemons-x.x.x.tar.gz - $ cd daemons-x.x.x - $ su - # ruby setup.rb - -== Documentation - -For further documentation, refer to the module documentation of Daemons (click on Daemons). - -The RDoc documentation is also online at http://daemons.rubyforge.org - - -== Author - -Written in 2005-2008 by Thomas Uehlinger . - -== License - -Copyright (c) 2005-2008 Thomas Uehlinger - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. - -This license does not apply to daemonize.rb, which is was written by -Travis Whitton und published under the following license: - -The Daemonize extension module is copywrited free software by Travis Whitton -. You can redistribute it under the terms specified in -the COPYING file of the Ruby distribution. - -== Feedback and other resources - -At http://rubyforge.org/projects/daemons. diff --git a/vendor/gems/gems/daemons-1.0.10/Rakefile b/vendor/gems/gems/daemons-1.0.10/Rakefile deleted file mode 100644 index 15234f87..00000000 --- a/vendor/gems/gems/daemons-1.0.10/Rakefile +++ /dev/null @@ -1,84 +0,0 @@ -require 'rubygems' -Gem::manage_gems - -require 'rake/gempackagetask' -#require 'rake/testtask' -require 'rake/packagetask' -require 'rake/rdoctask' - -$LOAD_PATH << './lib' -require 'daemons' - - -PKG_NAME = "daemons" - -PKG_FILES = FileList[ - "Rakefile", "Releases", "TODO", "README", "LICENSE", - "setup.rb", - "lib/**/*.rb", - #"test/**/*", - "examples/**/*" -] -#PKG_FILES.exclude(%r(^test/tmp/.+)) -PKG_FILES.exclude(%r(\.pid$)) -PKG_FILES.exclude(%r(\.log$)) - -spec = Gem::Specification.new do |s| - s.name = PKG_NAME - s.version = Daemons::VERSION - s.author = "Thomas Uehlinger" - s.email = "th.uehlinger@gmx.ch" - s.rubyforge_project = "daemons" - s.homepage = "http://daemons.rubyforge.org" - s.platform = Gem::Platform::RUBY - s.summary = "A toolkit to create and control daemons in different ways" - s.description = <<-EOF - Daemons provides an easy way to wrap existing ruby scripts (for example a self-written server) - to be run as a daemon and to be controlled by simple start/stop/restart commands. - - You can also call blocks as daemons and control them from the parent or just daemonize the current - process. 
- - Besides this basic functionality, daemons offers many advanced features like exception - backtracing and logging (in case your ruby script crashes) and monitoring and automatic - restarting of your processes if they crash. - EOF - - #s.files = FileList["{test,lib}/**/*"].exclude("rdoc").to_a - s.files = PKG_FILES - s.require_path = "lib" - s.autorequire = "daemons" - s.has_rdoc = true - s.extra_rdoc_files = ["README", "Releases", "TODO"] -end - -Rake::GemPackageTask.new(spec) do |pkg| - pkg.need_tar = true -end - - -#Rake::PackageTask.new("package") do |p| -# p.name = PKG_NAME -# p.version = Daemons::VERSION -# p.need_tar = true -# p.need_zip = true -# p.package_files = PKG_FILES -#end - - -task :default => [:package] - - -task :upload do - sh "scp -r html/* uehli@rubyforge.org:/var/www/gforge-projects/daemons" -end - - -desc "Create the RDOC html files" -rd = Rake::RDocTask.new("rdoc") { |rdoc| - rdoc.rdoc_dir = 'html' - rdoc.title = "Daemons" - rdoc.options << '--line-numbers' << '--inline-source' << '--main' << 'README' - rdoc.rdoc_files.include('README', 'TODO', 'Releases') - rdoc.rdoc_files.include('lib/**/*.rb') -} \ No newline at end of file diff --git a/vendor/gems/gems/daemons-1.0.10/Releases b/vendor/gems/gems/daemons-1.0.10/Releases deleted file mode 100644 index 2926c845..00000000 --- a/vendor/gems/gems/daemons-1.0.10/Releases +++ /dev/null @@ -1,126 +0,0 @@ -= Daemons Release History - -== Release 1.0.10: November 16, 2007 - -* By default, we now delete stray pid-files (i.e. pid-files which result for - example from a killed daemon) automatically. This function can be deactivated by - passing :keep_pid_files => true as an option. -* All pid files of :multiple daemons new get deleted correctly upon exit of the daemons (reported by Han Holl). -* Use the signal 'KILL' instead of 'TERM' on Windows platforms. -* Use exit! in trap('TERM') instead of exit when option :hard_exit is given (thanks to Han Holl). -* Did some clarification on the exception log. - -== Release 1.0.9: October 29, 2007 - -* fixed a severe bug in the new Pid.running? function: function returned true if the process did not exist (thanks to Jeremy Lawler). - -== Release 1.0.8: September 24, 2007 - -* new Pid.running? function. Checking whether a process exists by sending signal '0' (thanks to Dru Nelson). - -== Release 1.0.7: July 7, 2007 - -* Patch to fix wrong ARGV when using :exec (in def start_exec: Kernel.exec(script(), *(@app_argv || []))) (thanks to Alex McGuire). - -== Release 1.0.6: Mai 8, 2007 - -* New option to pass an ARGV-style array to run and run_proc (thanks to Marc Evans). -* Additional patches for '/var/log' (thanks to Marc Evans). - -== Release 1.0.5: February 24, 2007 - -* Applied patch that makes daemons to use '/var/log' as logfile - directory if you use :dir_mode = :system (thanks to Han Holl). -* Daemons should now work with Ruby 1.9 (at least the basic features). - -== Release 1.0.4: January 17, 2007 - -* Document the :log_output option (thanks to Andrew Kuklewicz). -* Set STDOUT.sync = true when redirecting to a logfile (thanks to Andrew Kuklewicz). -* Should now run also correctly when there is no working 'ps ax' on the system (thanks to Daniel Kehoe). - -== Release 1.0.3: November 1, 2006 - -* Set the app_name correctly also for the monitor process (thanks to Ilya Novoselov). - -== Release 1.0.2: September 26, 2006 - -* Changed the 'ps -ax' call back to 'ps ax'. -* Fixed the documentation for the :normal :dir_mode. 
-* As a default for Daemons.run_proc, the pid file is now saved in the current directory. -* In :ontop mode for running a proc (this is equal to calling something like 'ruby ctrl_proc.rb run'), - the proc now runs directly in the calling script, not in a forked process anymore (thanks to Paul Butcher). -* Set $0 to app_name in the daemons (thanks to Ilya Novoselov). - -== Release 1.0.1: August 30, 2006 - -* Fixed a regex for parsing the 'ps ax' system call. (thanks to Garance Alistair Drosehn) - -== Release 1.0.0: August 29, 2006 - -* Fix the parsing of the 'ps ax' system call. (thanks to Garance Alistair Drosehn) - -== Release 0.4.4: February 14, 2006 - -* Several fixes that allow us to use the Daemons::Controller - with a proc instead of wrapping a script file. This gives us all the - PID file management, monitoring, command line options, etc. without having - to specify a path to our script which can be tricky, especially when using - RubyGems. (thanks to John-Mason Shackelford) - -== Release 0.4.3: November 29, 2005 - -* New Option: You can specify the name of the application with :app_name - on calling Daemons.run. This will be used to contruct the name of the pid files - and log files. Defaults to the basename of the script. (thanks to Stephen R. Veit) - -* Bugfix: Handle the case where no controller options are given when calling Daemons, - just options after "--". (thanks to Stephen R. Veit) - - -== Release 0.4.2: November 15, 2005 - -* Bugfix for problem with :normal pid-file directory mode (pid.rb), fixed (thanks to Stephen R. Veit) - - -== Release 0.4.1: September 11, 2005 - -* Bugfix for 'run' command line mode: didn't work anymore in 0.4.0, fixed - - -== Release 0.4.0: July 30, 2005 - -* Two completely new operation modes: - 1. Call a block as a daemon (Daemons.call { my_daemon_code }) - and control it from the parent process. - 2. 
Daemonize the currently running process (Daemons.daemonize) - plus the already existing mode to control your scripts (Daemons.run("script.rb")) -* Improved documentation (for example "How does the daemonization process work?") -* Improved "simulation mode" (:ontop option) -* Some minor bugfixes - - -== Release 0.3.0: April 21, 2005 - -* New monitor functionality: automatic restarting of your applications if they crash -* 'restart' command fixed -* '--force' command modifier (please refer to the documentation) -* Some more bugfixes and improvements - - -== Release 0.2.1: Mar 21, 2005 - -* Bugfix for a problem with the 'status' command - - -== Release 0.2.0: Mar 21, 2005 - -* Exception backtrace functionality added -* Exec functionality added -* More examples added -* New commands: status, zap - - -== Release 0.0.1: Feb 8, 2005 - -* Initial release diff --git a/vendor/gems/gems/daemons-1.0.10/TODO b/vendor/gems/gems/daemons-1.0.10/TODO deleted file mode 100644 index 81e4d2e4..00000000 --- a/vendor/gems/gems/daemons-1.0.10/TODO +++ /dev/null @@ -1,6 +0,0 @@ -* write the README (2005-02-07) *DONE* -* write some real tests (2005-02-08) -* document the new options (2005-03-14) *DONE* -* start/stop with --force options (2005-04-05) -* option to give some console output on start/stop commands (2005-04-05) - diff --git a/vendor/gems/gems/daemons-1.0.10/examples/call/call.rb b/vendor/gems/gems/daemons-1.0.10/examples/call/call.rb deleted file mode 100644 index 0738cf50..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/call/call.rb +++ /dev/null @@ -1,56 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - - -require 'daemons' - -testfile = File.expand_path(__FILE__) + '.log' - - -# On the first call to , an application group (accessible by Daemons.group) -# will be created an the options will be kept within, so you only have to specify -# :multiple once. -# - -options = { -# :ontop => true, - :multiple => true -} - - -Daemons.call(options) do - File.open(testfile, 'w') {|f| - f.puts "test" - } - - loop { puts "1"; sleep 5 } -end -puts "first task started" - -Daemons.call do - loop { puts "2"; sleep 4 } -end -puts "second task started" - -# NOTE: this process will exit after 5 seconds -Daemons.call do - puts "3" - sleep 5 -end -puts "third task started" - -puts "waiting 20 seconds..." -sleep(20) - -# This call would result in an exception as it will try to kill the third process -# which has already terminated by that time; but using the 'true' parameter forces the -# stop_all procedure. -puts "trying to stop all tasks..." -Daemons.group.stop_all(true) - -puts "done" diff --git a/vendor/gems/gems/daemons-1.0.10/examples/call/call_monitor.rb b/vendor/gems/gems/daemons-1.0.10/examples/call/call_monitor.rb deleted file mode 100644 index f58430fb..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/call/call_monitor.rb +++ /dev/null @@ -1,55 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - - -require 'daemons' - -testfile = File.expand_path(__FILE__) + '.log' - - -# On the first call to , an application group (accessible by Daemons.group) -# will be created an the options will be kept within, so you only have to specify -# :multiple once. 
-# - -options = { -# :ontop => true, - :multiple => true, - :monitor => true -} - - -Daemons.call(options) do - loop { puts "1"; sleep 20 } -end -puts "first task started" - - -# NOTE: this process will exit after 5 seconds -Daemons.call do - File.open(testfile, 'a') {|f| - f.puts "started..." - puts "2" - - sleep 5 - - f.puts "...exit" - } -end -puts "second task started" - -puts "waiting 100 seconds..." -sleep(100) - -# This call would result in an exception as it will try to kill the third process -# which has already terminated by that time; but using the 'true' parameter forces the -# stop_all procedure. -puts "trying to stop all tasks..." -Daemons.group.stop_all(true) - -puts "done" diff --git a/vendor/gems/gems/daemons-1.0.10/examples/daemonize/daemonize.rb b/vendor/gems/gems/daemons-1.0.10/examples/daemonize/daemonize.rb deleted file mode 100644 index 749ae80c..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/daemonize/daemonize.rb +++ /dev/null @@ -1,20 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - - - -require 'daemons' - - -testfile = File.expand_path(__FILE__) + '.log' - -Daemons.daemonize - -File.open(testfile, 'w') {|f| - f.write("test") -} \ No newline at end of file diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_crash.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_crash.rb deleted file mode 100644 index 97d024a0..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_crash.rb +++ /dev/null @@ -1,17 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' - - -options = { - :log_output => true, - :backtrace => true -} - -Daemons.run(File.join(File.dirname(__FILE__), 'myserver_crashing.rb'), options) diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_exec.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_exec.rb deleted file mode 100644 index ccc8f725..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_exec.rb +++ /dev/null @@ -1,16 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' - - -options = { - :mode => :exec -} - -Daemons.run(File.join(File.dirname(__FILE__), 'myserver.rb'), options) diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_exit.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_exit.rb deleted file mode 100644 index 9983a033..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_exit.rb +++ /dev/null @@ -1,15 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' - - -options = { -} - -Daemons.run(File.join(File.dirname(__FILE__), 'myserver_exiting.rb'), options) diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_keep_pid_files.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_keep_pid_files.rb deleted file mode 100644 index fd3c9c52..00000000 --- 
a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_keep_pid_files.rb +++ /dev/null @@ -1,17 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' - - -options = { - :keep_pid_files => true -} - - -Daemons.run(File.join(File.dirname(__FILE__), 'myserver.rb'), options) \ No newline at end of file diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_monitor.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_monitor.rb deleted file mode 100644 index 48c2827d..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_monitor.rb +++ /dev/null @@ -1,16 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' - - -options = { - :monitor => true -} - -Daemons.run(File.join(File.dirname(__FILE__), 'myserver_crashing.rb'), options) diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_multiple.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_multiple.rb deleted file mode 100644 index fa8e776e..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_multiple.rb +++ /dev/null @@ -1,16 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' - - -options = { - :multiple => true -} - -Daemons.run(File.join(File.dirname(__FILE__), 'myserver.rb'), options) diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_normal.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_normal.rb deleted file mode 100644 index 2b6517da..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_normal.rb +++ /dev/null @@ -1,12 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' - - -Daemons.run(File.join(File.dirname(__FILE__), 'myserver.rb')) diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_ontop.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_ontop.rb deleted file mode 100644 index ca36222a..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_ontop.rb +++ /dev/null @@ -1,16 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' - - -options = { - :ontop => true -} - -Daemons.run(File.join(File.dirname(__FILE__), 'myserver.rb'), options) diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_optionparser.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_optionparser.rb deleted file mode 100644 index a84c69b3..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_optionparser.rb +++ /dev/null @@ -1,43 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' 
-require 'optparse' -require 'logger' -require 'ostruct' - - -class MyApp < Logger::Application - def initialize(args) - super(self.class) - @options = OpenStruct.new(:daemonize => true) - opts = OptionParser.new do |opts| - opts.banner = 'Usage: myapp [options]' - opts.separator '' - opts.on('-N','--no-daemonize',"Don't run as a daemon") do - @options.daemonize = false - end - end - @args = opts.parse!(args) - end - - def run - Daemons.run_proc('myapp',{:ARGV => @args, :ontop => !@options.daemonize}) do - puts "@options.daemonize: #{@options.daemonize}" - STDOUT.sync = true - loop do - print '.' - sleep(2) - end - end - end -end - - -myapp = MyApp.new(ARGV) -myapp.run \ No newline at end of file diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc.rb deleted file mode 100644 index 7bd1fac1..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc.rb +++ /dev/null @@ -1,25 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' - - -options = { - :multiple => false, - :ontop => false, - :backtrace => true, - :log_output => true, - :monitor => true - } - -Daemons.run_proc('ctrl_proc.rb', options) do - loop do - puts 'ping from proc!' - sleep(3) - end -end \ No newline at end of file diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc.rb.output b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc.rb.output deleted file mode 100644 index f9aeae8b..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc.rb.output +++ /dev/null @@ -1,101 +0,0 @@ -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! -ping from proc! 
diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc_multiple.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc_multiple.rb deleted file mode 100644 index 8c3b0a2a..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc_multiple.rb +++ /dev/null @@ -1,22 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' - - -options = { - :log_output => true, - :multiple => true, -} - - -Daemons.run_proc('ctrl_proc_multiple.rb', options) do - puts "hello" - sleep(5) - puts "done" -end \ No newline at end of file diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc_multiple.rb.output b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc_multiple.rb.output deleted file mode 100644 index 3e23ae48..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc_multiple.rb.output +++ /dev/null @@ -1,2 +0,0 @@ - -hello diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc_simple.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc_simple.rb deleted file mode 100644 index a26fe141..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/ctrl_proc_simple.rb +++ /dev/null @@ -1,17 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), '../../lib')) - -if File.exist?(File.join(lib_dir, 'daemons.rb')) - $LOAD_PATH.unshift lib_dir -else - begin; require 'rubygems'; rescue ::Exception; end -end - -require 'daemons' - - -Daemons.run_proc('ctrl_proc_simple.rb') do - loop do - puts 'ping from proc!' - sleep(3) - end -end diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/myserver.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/myserver.rb deleted file mode 100644 index 12430b2d..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/myserver.rb +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env ruby - - -# This is myserver.rb, an example server that is to be controlled by daemons -# and that does nothing really useful at the moment. -# -# Don't run this script by yourself, it can be controlled by the ctrl*.rb scripts. - -loop do - puts 'ping from myserver.rb!' - sleep(3) -end diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/myserver_crashing.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/myserver_crashing.rb deleted file mode 100644 index f158e7b8..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/myserver_crashing.rb +++ /dev/null @@ -1,14 +0,0 @@ -# This is myserver.rb, an example server that is to be controlled by daemons -# and that does nothing really useful at the moment. -# -# Don't run this script by yourself, it can be controlled by the ctrl*.rb scripts. - -loop do - puts 'ping from myserver.rb!' - puts 'this example server will crash in 3 seconds...' - - sleep(3) - - puts 'CRASH!' - raise 'CRASH!' -end diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/myserver_crashing.rb.output b/vendor/gems/gems/daemons-1.0.10/examples/run/myserver_crashing.rb.output deleted file mode 100644 index cd5ca5d5..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/myserver_crashing.rb.output +++ /dev/null @@ -1,30 +0,0 @@ -/home/uehli/Desktop/daemons-current/examples/myserver_crashing.rb:13: CRASH! 
(RuntimeError) - from /home/uehli/Desktop/daemons-current/examples/myserver_crashing.rb:6:in `loop' - from /home/uehli/Desktop/daemons-current/examples/myserver_crashing.rb:6 - from /home/uehli/Desktop/daemons-current/lib/daemons.rb:116:in `load' - from /home/uehli/Desktop/daemons-current/lib/daemons.rb:116:in `run_via_load' - from /home/uehli/Desktop/daemons-current/lib/daemons.rb:90:in `start' - from /home/uehli/Desktop/daemons-current/lib/daemons.rb:359:in `run' - from /home/uehli/Desktop/daemons-current/lib/daemons.rb:469:in `run' - from /home/uehli/Desktop/daemons-current/lib/daemons.rb:468:in `call' - from /home/uehli/Desktop/daemons-current/lib/daemons/cmdline.rb:94:in `catch_exceptions' - from /home/uehli/Desktop/daemons-current/lib/daemons.rb:468:in `run' - from ctrl_crash.rb:17 -ping from myserver.rb! -this example server will crash in 3 seconds... -CRASH! -ping from myserver.rb! -this example server will crash in 3 seconds... -CRASH! -/Users/uehli/Projects/daemons-proj/examples/run/myserver_crashing.rb:13: CRASH! (RuntimeError) - from /Users/uehli/Projects/daemons-proj/examples/run/myserver_crashing.rb:6:in `loop' - from /Users/uehli/Projects/daemons-proj/examples/run/myserver_crashing.rb:6 - from /Users/uehli/Projects/daemons-proj/lib/daemons/application.rb:176:in `load' - from /Users/uehli/Projects/daemons-proj/lib/daemons/application.rb:176:in `start_load' - from /Users/uehli/Projects/daemons-proj/lib/daemons/application.rb:257:in `start' - from /Users/uehli/Projects/daemons-proj/lib/daemons/controller.rb:69:in `run' - from /Users/uehli/Projects/daemons-proj/lib/daemons.rb:139:in `run' - from /Users/uehli/Projects/daemons-proj/lib/daemons/cmdline.rb:105:in `call' - from /Users/uehli/Projects/daemons-proj/lib/daemons/cmdline.rb:105:in `catch_exceptions' - from /Users/uehli/Projects/daemons-proj/lib/daemons.rb:138:in `run' - from ctrl_crash.rb:17 diff --git a/vendor/gems/gems/daemons-1.0.10/examples/run/myserver_exiting.rb b/vendor/gems/gems/daemons-1.0.10/examples/run/myserver_exiting.rb deleted file mode 100644 index e5c6fe78..00000000 --- a/vendor/gems/gems/daemons-1.0.10/examples/run/myserver_exiting.rb +++ /dev/null @@ -1,8 +0,0 @@ -loop do - puts 'ping from myserver.rb!' - puts 'this example server will exit in 3 seconds...' - - sleep(3) - - Process.exit -end diff --git a/vendor/gems/gems/daemons-1.0.10/lib/daemons.rb b/vendor/gems/gems/daemons-1.0.10/lib/daemons.rb deleted file mode 100644 index 2de1e0c7..00000000 --- a/vendor/gems/gems/daemons-1.0.10/lib/daemons.rb +++ /dev/null @@ -1,283 +0,0 @@ -require 'optparse' -require 'optparse/time' - - -require 'daemons/pidfile' -require 'daemons/cmdline' -require 'daemons/exceptions' -require 'daemons/monitor' - - -require 'daemons/application' -require 'daemons/application_group' -require 'daemons/controller' - - -# All functions and classes that Daemons provides reside in this module. -# -# Daemons is normally invoked by one of the following four ways: -# -# 1. Daemons.run(script, options): -# This is used in wrapper-scripts that are supposed to control other ruby scripts or -# external applications. Control is completely passed to the daemons library. -# Such wrapper script need to be invoked with command line options like 'start' or 'stop' -# to do anything useful. -# -# 2. Daemons.run_proc(app_name, options) { (...) }: -# This is used in wrapper-scripts that are supposed to control a proc. -# Control is completely passed to the daemons library. 
-# Such wrapper script need to be invoked with command line options like 'start' or 'stop' -# to do anything useful. -# -# 3. Daemons.call(options) { block }: -# Execute the block in a new daemon. Daemons.call will return immediately -# after spawning the daemon with the new Application object as a return value. -# -# 4. Daemons.daemonize(options): -# Daemonize the currently runnig process, i.e. the calling process will become a daemon. -# -# == What does daemons internally do with my daemons? -# *or*:: why do my daemons crash when they try to open a file? -# *or*:: why can I not see any output from the daemon on the console (when using for example +puts+)? -# -# From a technical aspect of view, daemons does the following when creating a daemon: -# -# 1. Forks a child (and exits the parent process, if needed) -# 2. Becomes a session leader (which detaches the program from -# the controlling terminal). -# 3. Forks another child process and exits first child. This prevents -# the potential of acquiring a controlling terminal. -# 4. Changes the current working directory to "/". -# 5. Clears the file creation mask (sets +umask+ to 0000). -# 6. Closes file descriptors (reopens +STDOUT+ and +STDERR+ to point to a logfile if -# possible). -# -# So what does this mean for your daemons: -# - the current directory is '/' -# - you cannot receive any input from the console (for example no +gets+) -# - you cannot output anything from the daemons with +puts+/+print+ unless a logfile is used -# -# == How do PidFiles work? Where are they stored? -# -# Also, you are maybe interested in reading the documentation for the class PidFile. -# There you can find out about how Daemons works internally and how and where the so -# called PidFiles are stored. -# -module Daemons - - VERSION = "1.0.10" - - require 'daemons/daemonize' - - - # Passes control to Daemons. - # This is used in wrapper-scripts that are supposed to control other ruby scripts or - # external applications. Control is completely passed to the daemons library. - # Such wrapper script should be invoked with command line options like 'start' or 'stop' - # to do anything useful. - # - # +script+:: This is the path to the script that should be run as a daemon. - # Please note that Daemons runs this script with load + + + +
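The run_proc wrapper pattern exercised by the deleted examples above reduces to handing control to Daemons.run_proc. A minimal hedged sketch, not taken from this diff: the app name 'my_background_job' is invented, and it assumes a daemons 1.0.x install.

    # Hedged illustration of the documented wrapper-script API; run it as
    #   ruby wrapper.rb start|stop|restart|status
    # matching the commands described in the deleted daemons.rb docs.
    require 'rubygems'
    require 'daemons'

    Daemons.run_proc('my_background_job', :monitor => true, :log_output => true) do
      loop do
        # Real work goes here. Once daemonized the process has cwd '/',
        # no controlling terminal, and STDOUT/STDERR redirected to a logfile.
        sleep 10
      end
    end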

[diff truncated by extraction here: the remainder of the daemons-1.0.10 lib/daemons.rb deletion and the markup of a new HTML demo page (apparently under json_pure-1.2.0/data) are missing; only the page's visible text survives: "Fetching object from server" / "Wait..."]
+ + + diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/prototype.js b/vendor/gems/gems/json_pure-1.2.0/data/prototype.js similarity index 95% rename from vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/prototype.js rename to vendor/gems/gems/json_pure-1.2.0/data/prototype.js index 546f9fe4..5c734629 100644 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/prototype.js +++ b/vendor/gems/gems/json_pure-1.2.0/data/prototype.js @@ -1,4 +1,4 @@ -/* Prototype JavaScript framework, version 1.6.0.1 +/* Prototype JavaScript framework, version 1.6.0 * (c) 2005-2007 Sam Stephenson * * Prototype is freely distributable under the terms of an MIT-style license. @@ -7,7 +7,7 @@ *--------------------------------------------------------------------------*/ var Prototype = { - Version: '1.6.0.1', + Version: '1.6.0', Browser: { IE: !!(window.attachEvent && !window.opera), @@ -36,6 +36,8 @@ var Prototype = { if (Prototype.Browser.MobileSafari) Prototype.BrowserFeatures.SpecificElementExtensions = false; +if (Prototype.Browser.WebKit) + Prototype.BrowserFeatures.XPath = false; /* Based on Alex Arnell's inheritance implementation. */ var Class = { @@ -108,7 +110,7 @@ Object.extend = function(destination, source) { Object.extend(Object, { inspect: function(object) { try { - if (Object.isUndefined(object)) return 'undefined'; + if (object === undefined) return 'undefined'; if (object === null) return 'null'; return object.inspect ? object.inspect() : object.toString(); } catch (e) { @@ -133,7 +135,7 @@ Object.extend(Object, { var results = []; for (var property in object) { var value = Object.toJSON(object[property]); - if (!Object.isUndefined(value)) + if (value !== undefined) results.push(property.toJSON() + ': ' + value); } @@ -202,7 +204,7 @@ Object.extend(Function.prototype, { }, bind: function() { - if (arguments.length < 2 && Object.isUndefined(arguments[0])) return this; + if (arguments.length < 2 && arguments[0] === undefined) return this; var __method = this, args = $A(arguments), object = args.shift(); return function() { return __method.apply(object, args.concat($A(arguments))); @@ -349,7 +351,7 @@ Object.extend(String.prototype, { sub: function(pattern, replacement, count) { replacement = this.gsub.prepareReplacement(replacement); - count = Object.isUndefined(count) ? 1 : count; + count = count === undefined ? 1 : count; return this.gsub(pattern, function(match) { if (--count < 0) return match[0]; @@ -364,7 +366,7 @@ Object.extend(String.prototype, { truncate: function(length, truncation) { length = length || 30; - truncation = Object.isUndefined(truncation) ? '...' : truncation; + truncation = truncation === undefined ? '...' : truncation; return this.length > length ? 
this.slice(0, length - truncation.length) + truncation : String(this); }, @@ -484,9 +486,7 @@ Object.extend(String.prototype, { }, isJSON: function() { - var str = this; - if (str.blank()) return false; - str = this.replace(/\\./g, '@').replace(/"[^"\\\n\r]*"/g, ''); + var str = this.replace(/\\./g, '@').replace(/"[^"\\\n\r]*"/g, ''); return (/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str); }, @@ -565,8 +565,7 @@ var Template = Class.create({ if (before == '\\') return match[2]; var ctx = object, expr = match[3]; - var pattern = /^([^.[]+|\[((?:.*?[^\\])?)\])(\.|\[|$)/; - match = pattern.exec(expr); + var pattern = /^([^.[]+|\[((?:.*?[^\\])?)\])(\.|\[|$)/, match = pattern.exec(expr); if (match == null) return before; while (match != null) { @@ -687,7 +686,7 @@ var Enumerable = { }, inGroupsOf: function(number, fillWith) { - fillWith = Object.isUndefined(fillWith) ? null : fillWith; + fillWith = fillWith === undefined ? null : fillWith; return this.eachSlice(number, function(slice) { while(slice.length < number) slice.push(fillWith); return slice; @@ -714,7 +713,7 @@ var Enumerable = { var result; this.each(function(value, index) { value = iterator(value, index); - if (result == null || value >= result) + if (result == undefined || value >= result) result = value; }); return result; @@ -725,7 +724,7 @@ var Enumerable = { var result; this.each(function(value, index) { value = iterator(value, index); - if (result == null || value < result) + if (result == undefined || value < result) result = value; }); return result; @@ -905,7 +904,7 @@ Object.extend(Array.prototype, { var results = []; this.each(function(object) { var value = Object.toJSON(object); - if (!Object.isUndefined(value)) results.push(value); + if (value !== undefined) results.push(value); }); return '[' + results.join(', ') + ']'; } @@ -985,6 +984,34 @@ function $H(object) { }; var Hash = Class.create(Enumerable, (function() { + if (function() { + var i = 0, Test = function(value) { this.key = value }; + Test.prototype.key = 'foo'; + for (var property in new Test('bar')) i++; + return i > 1; + }()) { + function each(iterator) { + var cache = []; + for (var key in this._object) { + var value = this._object[key]; + if (cache.include(key)) continue; + cache.push(key); + var pair = [key, value]; + pair.key = key; + pair.value = value; + iterator(pair); + } + } + } else { + function each(iterator) { + for (var key in this._object) { + var value = this._object[key], pair = [key, value]; + pair.key = key; + pair.value = value; + iterator(pair); + } + } + } function toQueryPair(key, value) { if (Object.isUndefined(value)) return key; @@ -996,14 +1023,7 @@ var Hash = Class.create(Enumerable, (function() { this._object = Object.isHash(object) ? 
object.toObject() : Object.clone(object); }, - _each: function(iterator) { - for (var key in this._object) { - var value = this._object[key], pair = [key, value]; - pair.key = key; - pair.value = value; - iterator(pair); - } - }, + _each: each, set: function(key, value) { return this._object[key] = value; @@ -1167,11 +1187,8 @@ Ajax.Base = Class.create({ Object.extend(this.options, options || { }); this.options.method = this.options.method.toLowerCase(); - if (Object.isString(this.options.parameters)) this.options.parameters = this.options.parameters.toQueryParams(); - else if (Object.isHash(this.options.parameters)) - this.options.parameters = this.options.parameters.toObject(); } }); @@ -1354,7 +1371,7 @@ Ajax.Response = Class.create({ if(readyState == 4) { var xml = transport.responseXML; - this.responseXML = Object.isUndefined(xml) ? null : xml; + this.responseXML = xml === undefined ? null : xml; this.responseJSON = this._getResponseJSON(); } }, @@ -1400,11 +1417,10 @@ Ajax.Response = Class.create({ _getResponseJSON: function() { var options = this.request.options; if (!options.evalJSON || (options.evalJSON != 'force' && - !(this.getHeader('Content-type') || '').include('application/json')) || - this.responseText.blank()) - return null; + !(this.getHeader('Content-type') || '').include('application/json'))) + return null; try { - return this.responseText.evalJSON(options.sanitizeJSON); + return this.transport.responseText.evalJSON(options.sanitizeJSON); } catch (e) { this.request.dispatchException(e); } @@ -1418,11 +1434,11 @@ Ajax.Updater = Class.create(Ajax.Request, { failure: (container.failure || (container.success ? null : container)) }; - options = Object.clone(options); + options = options || { }; var onComplete = options.onComplete; - options.onComplete = (function(response, json) { + options.onComplete = (function(response, param) { this.updateContent(response.responseText); - if (Object.isFunction(onComplete)) onComplete(response, json); + if (Object.isFunction(onComplete)) onComplete(response, param); }).bind(this); $super(url, options); @@ -1444,6 +1460,10 @@ Ajax.Updater = Class.create(Ajax.Request, { } else receiver.update(responseText); } + + if (this.success()) { + if (this.onComplete) this.onComplete.bind(this).defer(); + } } }); @@ -1670,7 +1690,7 @@ Element.Methods = { }, descendants: function(element) { - return $(element).getElementsBySelector("*"); + return $A($(element).getElementsByTagName('*')).each(Element.extend); }, firstDescendant: function(element) { @@ -1775,11 +1795,10 @@ Element.Methods = { var attributes = { }, t = Element._attributeTranslations.write; if (typeof name == 'object') attributes = name; - else attributes[name] = Object.isUndefined(value) ? true : value; + else attributes[name] = value === undefined ? 
true : value; for (var attr in attributes) { - name = t.names[attr] || attr; - value = attributes[attr]; + var name = t.names[attr] || attr, value = attributes[attr]; if (t.values[attr]) name = t.values[attr](element, value); if (value === false || value === null) element.removeAttribute(name); @@ -1848,7 +1867,6 @@ Element.Methods = { descendantOf: function(element, ancestor) { element = $(element), ancestor = $(ancestor); - var originalAncestor = ancestor; if (element.compareDocumentPosition) return (element.compareDocumentPosition(ancestor) & 8) === 8; @@ -1864,7 +1882,7 @@ Element.Methods = { } while (element = element.parentNode) - if (element == originalAncestor) return true; + if (element == ancestor) return true; return false; }, @@ -1903,7 +1921,7 @@ Element.Methods = { if (property == 'opacity') element.setOpacity(styles[property]); else elementStyle[(property == 'float' || property == 'cssFloat') ? - (Object.isUndefined(elementStyle.styleFloat) ? 'cssFloat' : 'styleFloat') : + (elementStyle.styleFloat === undefined ? 'cssFloat' : 'styleFloat') : property] = styles[property]; return element; @@ -2194,46 +2212,22 @@ if (!document.createRange || Prototype.Browser.Opera) { } if (Prototype.Browser.Opera) { - Element.Methods.getStyle = Element.Methods.getStyle.wrap( - function(proceed, element, style) { - switch (style) { - case 'left': case 'top': case 'right': case 'bottom': - if (proceed(element, 'position') === 'static') return null; - case 'height': case 'width': - // returns '0px' for hidden elements; we want it to return null - if (!Element.visible(element)) return null; - - // returns the border-box dimensions rather than the content-box - // dimensions, so we subtract padding and borders from the value - var dim = parseInt(proceed(element, style), 10); - - if (dim !== element['offset' + style.capitalize()]) - return dim + 'px'; - - var properties; - if (style === 'height') { - properties = ['border-top-width', 'padding-top', - 'padding-bottom', 'border-bottom-width']; - } - else { - properties = ['border-left-width', 'padding-left', - 'padding-right', 'border-right-width']; - } - return properties.inject(dim, function(memo, property) { - var val = proceed(element, property); - return val === null ? memo : memo - parseInt(val, 10); - }) + 'px'; - default: return proceed(element, style); - } + Element.Methods._getStyle = Element.Methods.getStyle; + Element.Methods.getStyle = function(element, style) { + switch(style) { + case 'left': + case 'top': + case 'right': + case 'bottom': + if (Element._getStyle(element, 'position') == 'static') return null; + default: return Element._getStyle(element, style); } - ); - - Element.Methods.readAttribute = Element.Methods.readAttribute.wrap( - function(proceed, element, attribute) { - if (attribute === 'title') return element.title; - return proceed(element, attribute); - } - ); + }; + Element.Methods._readAttribute = Element.Methods.readAttribute; + Element.Methods.readAttribute = function(element, attribute) { + if (attribute == 'title') return element.title; + return Element._readAttribute(element, attribute); + }; } else if (Prototype.Browser.IE) { @@ -2307,7 +2301,7 @@ else if (Prototype.Browser.IE) { return node ? node.value : ""; }, _getEv: function(element, attribute) { - attribute = element.getAttribute(attribute); + var attribute = element.getAttribute(attribute); return attribute ? 
attribute.toString().slice(23, -2) : null; }, _flag: function(element, attribute) { @@ -2404,7 +2398,7 @@ else if (Prototype.Browser.WebKit) { }; // Safari returns margins on body which is incorrect if the child is absolutely - // positioned. For performance reasons, redefine Element#cumulativeOffset for + // positioned. For performance reasons, redefine Position.cumulativeOffset for // KHTML/WebKit only. Element.Methods.cumulativeOffset = function(element) { var valueT = 0, valueL = 0; @@ -2693,11 +2687,10 @@ Element.addMethods = function(methods) { document.viewport = { getDimensions: function() { var dimensions = { }; - var B = Prototype.Browser; $w('width height').each(function(d) { var D = d.capitalize(); - dimensions[d] = (B.WebKit && !document.evaluate) ? self['inner' + D] : - (B.Opera) ? document.body['client' + D] : document.documentElement['client' + D]; + dimensions[d] = self['inner' + D] || + (document.documentElement['client' + D] || document.body['client' + D]); }); return dimensions; }, @@ -2726,26 +2719,9 @@ var Selector = Class.create({ this.compileMatcher(); }, - shouldUseXPath: function() { - if (!Prototype.BrowserFeatures.XPath) return false; - - var e = this.expression; - - // Safari 3 chokes on :*-of-type and :empty - if (Prototype.Browser.WebKit && - (e.include("-of-type") || e.include(":empty"))) - return false; - - // XPath can't do namespaced attributes, nor can it read - // the "checked" property from DOM nodes - if ((/(\[[\w-]*?:|:checked)/).test(this.expression)) - return false; - - return true; - }, - compileMatcher: function() { - if (this.shouldUseXPath()) + // Selectors with namespaced attributes can't use the XPath version + if (Prototype.BrowserFeatures.XPath && !(/(\[[\w-]*?:|:checked)/).test(this.expression)) return this.compileXPathMatcher(); var e = this.expression, ps = Selector.patterns, h = Selector.handlers, @@ -2868,12 +2844,8 @@ Object.extend(Selector, { }, className: "[contains(concat(' ', @class, ' '), ' #{1} ')]", id: "[@id='#{1}']", - attrPresence: function(m) { - m[1] = m[1].toLowerCase(); - return new Template("[@#{1}]").evaluate(m); - }, + attrPresence: "[@#{1}]", attr: function(m) { - m[1] = m[1].toLowerCase(); m[3] = m[5] || m[6]; return new Template(Selector.xpath.operators[m[2]]).evaluate(m); }, @@ -2902,7 +2874,7 @@ Object.extend(Selector, { 'enabled': "[not(@disabled)]", 'not': function(m) { var e = m[6], p = Selector.patterns, - x = Selector.xpath, le, v; + x = Selector.xpath, le, m, v; var exclusion = []; while (e && le != e && (/\S/).test(e)) { @@ -3079,7 +3051,7 @@ Object.extend(Selector, { child: function(nodes) { var h = Selector.handlers; for (var i = 0, results = [], node; node = nodes[i]; i++) { - for (var j = 0, child; child = node.childNodes[j]; j++) + for (var j = 0, children = [], child; child = node.childNodes[j]; j++) if (child.nodeType == 1 && child.tagName != '!') results.push(child); } return results; @@ -3351,8 +3323,7 @@ Object.extend(Selector, { }, findChildElements: function(element, expressions) { - var exprs = expressions.join(','); - expressions = []; + var exprs = expressions.join(','), expressions = []; exprs.scan(/(([\w#:.~>+()\s-]+|\*|\[.*?\])+)\s*(,|$)/, function(m) { expressions.push(m[1].strip()); }); @@ -3365,16 +3336,6 @@ Object.extend(Selector, { } }); -if (Prototype.Browser.IE) { - // IE returns comment nodes on getElementsByTagName("*"). - // Filter them out. 
- Selector.handlers.concat = function(a, b) { - for (var i = 0, node; node = b[i]; i++) - if (node.tagName !== "!") a.push(node); - return a; - }; -} - function $$() { return Selector.findChildElements(document, $A(arguments)); } @@ -3386,7 +3347,7 @@ var Form = { serializeElements: function(elements, options) { if (typeof options != 'object') options = { hash: !!options }; - else if (Object.isUndefined(options.hash)) options.hash = true; + else if (options.hash === undefined) options.hash = true; var key, value, submitted = false, submit = options.submit; var data = elements.inject({ }, function(result, element) { @@ -3584,17 +3545,17 @@ Form.Element.Serializers = { }, inputSelector: function(element, value) { - if (Object.isUndefined(value)) return element.checked ? element.value : null; + if (value === undefined) return element.checked ? element.value : null; else element.checked = !!value; }, textarea: function(element, value) { - if (Object.isUndefined(value)) return element.value; + if (value === undefined) return element.value; else element.value = value; }, select: function(element, index) { - if (Object.isUndefined(index)) + if (index === undefined) return this[element.type == 'select-one' ? 'selectOne' : 'selectMany'](element); else { @@ -3785,9 +3746,7 @@ Event.Methods = (function() { findElement: function(event, expression) { var element = Event.element(event); - if (!expression) return element; - var elements = [element].concat(element.ancestors()); - return Selector.findElement(elements, expression, 0); + return element.match(expression) ? element : element.up(expression); }, pointer: function(event) { @@ -3979,7 +3938,7 @@ Object.extend(Event, (function() { element.fireEvent(event.eventType, event); } - return Event.extend(event); + return event; } }; })()); diff --git a/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/extconf.rb b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/extconf.rb new file mode 100644 index 00000000..797b566b --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/extconf.rb @@ -0,0 +1,11 @@ +require 'mkmf' +require 'rbconfig' + +if CONFIG['CC'] =~ /gcc/ + $CFLAGS += ' -Wall' + #$CFLAGS += ' -O0 -ggdb' +end + +have_header("ruby/st.h") || have_header("st.h") +have_header("ruby/encoding.h") +create_makefile 'generator' diff --git a/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/generator.c b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/generator.c new file mode 100644 index 00000000..558f28dd --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/generator.c @@ -0,0 +1,935 @@ +#include +#include "ruby.h" +#if HAVE_RUBY_ST_H +#include "ruby/st.h" +#endif +#if HAVE_ST_H +#include "st.h" +#endif +#include "unicode.h" +#include + +#ifndef RHASH_TBL +#define RHASH_TBL(hsh) (RHASH(hsh)->tbl) +#endif + +#ifndef RHASH_SIZE +#define RHASH_SIZE(hsh) (RHASH(hsh)->tbl->num_entries) +#endif + +#ifndef RFLOAT_VALUE +#define RFLOAT_VALUE(val) (RFLOAT(val)->value) +#endif + +#ifdef HAVE_RUBY_ENCODING_H +#include "ruby/encoding.h" +#define FORCE_UTF8(obj) rb_enc_associate((obj), rb_utf8_encoding()) +static VALUE mEncoding_UTF_8; +static ID i_encoding, i_encode; +#else +#define FORCE_UTF8(obj) +#endif + +#define check_max_nesting(state, depth) do { \ + long current_nesting = 1 + depth; \ + if (state->max_nesting != 0 && current_nesting > state->max_nesting) \ + rb_raise(eNestingError, "nesting of %ld is too deep", current_nesting); \ +} while (0); + +static VALUE mJSON, mExt, mGenerator, 
cState, mGeneratorMethods, mObject, + mHash, mArray, mInteger, mFloat, mString, mString_Extend, + mTrueClass, mFalseClass, mNilClass, eGeneratorError, + eCircularDatastructure, eNestingError; + +static ID i_to_s, i_to_json, i_new, i_indent, i_space, i_space_before, + i_object_nl, i_array_nl, i_check_circular, i_max_nesting, + i_allow_nan, i_pack, i_unpack, i_create_id, i_extend; + +typedef struct JSON_Generator_StateStruct { + VALUE indent; + VALUE space; + VALUE space_before; + VALUE object_nl; + VALUE array_nl; + int check_circular; + VALUE seen; + VALUE memo; + VALUE depth; + long max_nesting; + int flag; + int allow_nan; +} JSON_Generator_State; + +#define GET_STATE(self) \ + JSON_Generator_State *state; \ + Data_Get_Struct(self, JSON_Generator_State, state); + +/* + * Document-module: JSON::Ext::Generator + * + * This is the JSON generator implemented as a C extension. It can be + * configured to be used by setting + * + * JSON.generator = JSON::Ext::Generator + * + * with the method generator= in JSON. + * + */ + +static int hash_to_json_state_i(VALUE key, VALUE value, VALUE Vstate) +{ + VALUE json, buf, Vdepth; + GET_STATE(Vstate); + buf = state->memo; + Vdepth = state->depth; + + if (key == Qundef) return ST_CONTINUE; + if (state->flag) { + state->flag = 0; + rb_str_buf_cat2(buf, ","); + if (RSTRING_LEN(state->object_nl)) rb_str_buf_append(buf, state->object_nl); + } + if (RSTRING_LEN(state->object_nl)) { + rb_str_buf_append(buf, rb_str_times(state->indent, Vdepth)); + } + json = rb_funcall(rb_funcall(key, i_to_s, 0), i_to_json, 2, Vstate, Vdepth); + Check_Type(json, T_STRING); + rb_str_buf_append(buf, json); + OBJ_INFECT(buf, json); + if (RSTRING_LEN(state->space_before)) { + rb_str_buf_append(buf, state->space_before); + } + rb_str_buf_cat2(buf, ":"); + if (RSTRING_LEN(state->space)) rb_str_buf_append(buf, state->space); + json = rb_funcall(value, i_to_json, 2, Vstate, Vdepth); + Check_Type(json, T_STRING); + state->flag = 1; + rb_str_buf_append(buf, json); + OBJ_INFECT(buf, json); + state->depth = Vdepth; + state->memo = buf; + return ST_CONTINUE; +} + +inline static VALUE mHash_json_transfrom(VALUE self, VALUE Vstate, VALUE Vdepth) { + long depth, len = RHASH_SIZE(self); + VALUE result; + GET_STATE(Vstate); + + depth = 1 + FIX2LONG(Vdepth); + result = rb_str_buf_new(len); + state->memo = result; + state->depth = LONG2FIX(depth); + state->flag = 0; + rb_str_buf_cat2(result, "{"); + if (RSTRING_LEN(state->object_nl)) rb_str_buf_append(result, state->object_nl); + rb_hash_foreach(self, hash_to_json_state_i, Vstate); + if (RSTRING_LEN(state->object_nl)) rb_str_buf_append(result, state->object_nl); + if (RSTRING_LEN(state->object_nl)) { + rb_str_buf_append(result, rb_str_times(state->indent, Vdepth)); + } + rb_str_buf_cat2(result, "}"); + return result; +} + +static int hash_to_json_i(VALUE key, VALUE value, VALUE buf) +{ + VALUE tmp; + + if (key == Qundef) return ST_CONTINUE; + if (RSTRING_LEN(buf) > 1) rb_str_buf_cat2(buf, ","); + tmp = rb_funcall(rb_funcall(key, i_to_s, 0), i_to_json, 0); + Check_Type(tmp, T_STRING); + rb_str_buf_append(buf, tmp); + OBJ_INFECT(buf, tmp); + rb_str_buf_cat2(buf, ":"); + tmp = rb_funcall(value, i_to_json, 0); + Check_Type(tmp, T_STRING); + rb_str_buf_append(buf, tmp); + OBJ_INFECT(buf, tmp); + + return ST_CONTINUE; +} + +/* + * call-seq: to_json(state = nil, depth = 0) + * + * Returns a JSON string containing a JSON object, that is unparsed from + * this Hash instance. 
+ * _state_ is a JSON::State object, that can also be used to configure the + * produced JSON string output further. + * _depth_ is used to find out nesting depth, to indent accordingly. + */ +static VALUE mHash_to_json(int argc, VALUE *argv, VALUE self) +{ + VALUE Vstate, Vdepth, result; + long depth; + + rb_scan_args(argc, argv, "02", &Vstate, &Vdepth); + depth = NIL_P(Vdepth) ? 0 : FIX2LONG(Vdepth); + if (NIL_P(Vstate)) { + long len = RHASH_SIZE(self); + result = rb_str_buf_new(len); + rb_str_buf_cat2(result, "{"); + rb_hash_foreach(self, hash_to_json_i, result); + rb_str_buf_cat2(result, "}"); + } else { + GET_STATE(Vstate); + check_max_nesting(state, depth); + if (state->check_circular) { + VALUE self_id = rb_obj_id(self); + if (RTEST(rb_hash_aref(state->seen, self_id))) { + rb_raise(eCircularDatastructure, + "circular data structures not supported!"); + } + rb_hash_aset(state->seen, self_id, Qtrue); + result = mHash_json_transfrom(self, Vstate, LONG2FIX(depth)); + rb_hash_delete(state->seen, self_id); + } else { + result = mHash_json_transfrom(self, Vstate, LONG2FIX(depth)); + } + } + OBJ_INFECT(result, self); + FORCE_UTF8(result); + return result; +} + +inline static VALUE mArray_json_transfrom(VALUE self, VALUE Vstate, VALUE Vdepth) { + long i, len = RARRAY_LEN(self); + VALUE shift, result; + long depth = NIL_P(Vdepth) ? 0 : FIX2LONG(Vdepth); + VALUE delim = rb_str_new2(","); + GET_STATE(Vstate); + + check_max_nesting(state, depth); + if (state->check_circular) { + VALUE self_id = rb_obj_id(self); + rb_hash_aset(state->seen, self_id, Qtrue); + result = rb_str_buf_new(len); + if (RSTRING_LEN(state->array_nl)) rb_str_append(delim, state->array_nl); + shift = rb_str_times(state->indent, LONG2FIX(depth + 1)); + + rb_str_buf_cat2(result, "["); + OBJ_INFECT(result, self); + rb_str_buf_append(result, state->array_nl); + for (i = 0; i < len; i++) { + VALUE element = RARRAY_PTR(self)[i]; + if (RTEST(rb_hash_aref(state->seen, rb_obj_id(element)))) { + rb_raise(eCircularDatastructure, + "circular data structures not supported!"); + } + OBJ_INFECT(result, element); + if (i > 0) rb_str_buf_append(result, delim); + rb_str_buf_append(result, shift); + element = rb_funcall(element, i_to_json, 2, Vstate, LONG2FIX(depth + 1)); + Check_Type(element, T_STRING); + rb_str_buf_append(result, element); + } + if (RSTRING_LEN(state->array_nl)) { + rb_str_buf_append(result, state->array_nl); + rb_str_buf_append(result, rb_str_times(state->indent, LONG2FIX(depth))); + } + rb_str_buf_cat2(result, "]"); + rb_hash_delete(state->seen, self_id); + } else { + result = rb_str_buf_new(len); + OBJ_INFECT(result, self); + if (RSTRING_LEN(state->array_nl)) rb_str_append(delim, state->array_nl); + shift = rb_str_times(state->indent, LONG2FIX(depth + 1)); + + rb_str_buf_cat2(result, "["); + rb_str_buf_append(result, state->array_nl); + for (i = 0; i < len; i++) { + VALUE element = RARRAY_PTR(self)[i]; + OBJ_INFECT(result, element); + if (i > 0) rb_str_buf_append(result, delim); + rb_str_buf_append(result, shift); + element = rb_funcall(element, i_to_json, 2, Vstate, LONG2FIX(depth + 1)); + Check_Type(element, T_STRING); + rb_str_buf_append(result, element); + } + rb_str_buf_append(result, state->array_nl); + if (RSTRING_LEN(state->array_nl)) { + rb_str_buf_append(result, rb_str_times(state->indent, LONG2FIX(depth))); + } + rb_str_buf_cat2(result, "]"); + } + return result; +} + +/* + * call-seq: to_json(state = nil, depth = 0) + * + * Returns a JSON string containing a JSON array, that is unparsed from + * this Array 
instance. + * _state_ is a JSON::State object, that can also be used to configure the + * produced JSON string output further. + * _depth_ is used to find out nesting depth, to indent accordingly. + */ +static VALUE mArray_to_json(int argc, VALUE *argv, VALUE self) { + VALUE Vstate, Vdepth, result; + + rb_scan_args(argc, argv, "02", &Vstate, &Vdepth); + if (NIL_P(Vstate)) { + long i, len = RARRAY_LEN(self); + result = rb_str_buf_new(2 + 2 * len); + rb_str_buf_cat2(result, "["); + OBJ_INFECT(result, self); + for (i = 0; i < len; i++) { + VALUE element = RARRAY_PTR(self)[i]; + OBJ_INFECT(result, element); + if (i > 0) rb_str_buf_cat2(result, ","); + element = rb_funcall(element, i_to_json, 0); + Check_Type(element, T_STRING); + rb_str_buf_append(result, element); + } + rb_str_buf_cat2(result, "]"); + } else { + result = mArray_json_transfrom(self, Vstate, Vdepth); + } + OBJ_INFECT(result, self); + FORCE_UTF8(result); + return result; +} + +/* + * call-seq: to_json(*) + * + * Returns a JSON string representation for this Integer number. + */ +static VALUE mInteger_to_json(int argc, VALUE *argv, VALUE self) +{ + VALUE result = rb_funcall(self, i_to_s, 0); + FORCE_UTF8(result); + return result; +} + +/* + * call-seq: to_json(*) + * + * Returns a JSON string representation for this Float number. + */ +static VALUE mFloat_to_json(int argc, VALUE *argv, VALUE self) +{ + JSON_Generator_State *state = NULL; + VALUE Vstate, rest, tmp, result; + double value = RFLOAT_VALUE(self); + rb_scan_args(argc, argv, "01*", &Vstate, &rest); + if (!NIL_P(Vstate)) Data_Get_Struct(Vstate, JSON_Generator_State, state); + if (isinf(value)) { + if (state && state->allow_nan) { + result = rb_funcall(self, i_to_s, 0); + } else { + tmp = rb_funcall(self, i_to_s, 0); + rb_raise(eGeneratorError, "%u: %s not allowed in JSON", __LINE__, StringValueCStr(tmp)); + } + } else if (isnan(value)) { + if (state && state->allow_nan) { + result = rb_funcall(self, i_to_s, 0); + } else { + tmp = rb_funcall(self, i_to_s, 0); + rb_raise(eGeneratorError, "%u: %s not allowed in JSON", __LINE__, StringValueCStr(tmp)); + } + } else { + result = rb_funcall(self, i_to_s, 0); + } + FORCE_UTF8(result); + return result; +} + +/* + * call-seq: String.included(modul) + * + * Extends _modul_ with the String::Extend module. + */ +static VALUE mString_included_s(VALUE self, VALUE modul) { + VALUE result = rb_funcall(modul, i_extend, 1, mString_Extend); + FORCE_UTF8(result); + return result; +} + +/* + * call-seq: to_json(*) + * + * This string should be encoded with UTF-8 A call to this method + * returns a JSON string encoded with UTF16 big endian characters as + * \u????. + */ +static VALUE mString_to_json(int argc, VALUE *argv, VALUE self) +{ + VALUE result = rb_str_buf_new(RSTRING_LEN(self)); + rb_str_buf_cat2(result, "\""); +#ifdef HAVE_RUBY_ENCODING_H + if (rb_funcall(self, i_encoding, 0) == mEncoding_UTF_8) { + JSON_convert_UTF8_to_JSON(result, self, strictConversion); + } else { + VALUE string = rb_funcall(self, i_encode, 1, mEncoding_UTF_8); + JSON_convert_UTF8_to_JSON(result, string, strictConversion); + } +#else + JSON_convert_UTF8_to_JSON(result, self, strictConversion); +#endif + rb_str_buf_cat2(result, "\""); + FORCE_UTF8(result); + return result; +} + +/* + * call-seq: to_json_raw_object() + * + * This method creates a raw object hash, that can be nested into + * other data structures and will be unparsed as a raw string. This + * method should be used, if you want to convert raw strings to JSON + * instead of UTF-8 strings, e. g. 
binary data. + */ +static VALUE mString_to_json_raw_object(VALUE self) { + VALUE ary; + VALUE result = rb_hash_new(); + rb_hash_aset(result, rb_funcall(mJSON, i_create_id, 0), rb_class_name(rb_obj_class(self))); + ary = rb_funcall(self, i_unpack, 1, rb_str_new2("C*")); + rb_hash_aset(result, rb_str_new2("raw"), ary); + FORCE_UTF8(result); + return result; +} + +/* + * call-seq: to_json_raw(*args) + * + * This method creates a JSON text from the result of a call to + * to_json_raw_object of this String. + */ +static VALUE mString_to_json_raw(int argc, VALUE *argv, VALUE self) { + VALUE result, obj = mString_to_json_raw_object(self); + Check_Type(obj, T_HASH); + result = mHash_to_json(argc, argv, obj); + FORCE_UTF8(result); + return result; +} + +/* + * call-seq: json_create(o) + * + * Raw Strings are JSON Objects (the raw bytes are stored in an array for the + * key "raw"). The Ruby String can be created by this module method. + */ +static VALUE mString_Extend_json_create(VALUE self, VALUE o) { + VALUE ary; + Check_Type(o, T_HASH); + ary = rb_hash_aref(o, rb_str_new2("raw")); + return rb_funcall(ary, i_pack, 1, rb_str_new2("C*")); +} + +/* + * call-seq: to_json(*) + * + * Returns a JSON string for true: 'true'. + */ +static VALUE mTrueClass_to_json(int argc, VALUE *argv, VALUE self) +{ + VALUE result = rb_str_new2("true"); + FORCE_UTF8(result); + return result; +} + +/* + * call-seq: to_json(*) + * + * Returns a JSON string for false: 'false'. + */ +static VALUE mFalseClass_to_json(int argc, VALUE *argv, VALUE self) +{ + VALUE result = rb_str_new2("false"); + FORCE_UTF8(result); + return result; +} + +/* + * call-seq: to_json(*) + * + */ +static VALUE mNilClass_to_json(int argc, VALUE *argv, VALUE self) +{ + VALUE result = rb_str_new2("null"); + FORCE_UTF8(result); + return result; +} + +/* + * call-seq: to_json(*) + * + * Converts this object to a string (calling #to_s), converts + * it to a JSON string, and returns the result. This is a fallback, if no + * special method #to_json was defined for some object. + */ +static VALUE mObject_to_json(int argc, VALUE *argv, VALUE self) +{ + VALUE result, string = rb_funcall(self, i_to_s, 0); + Check_Type(string, T_STRING); + result = mString_to_json(argc, argv, string); + FORCE_UTF8(result); + return result; +} + +/* + * Document-class: JSON::Ext::Generator::State + * + * This class is used to create State instances, that are use to hold data + * while generating a JSON text from a a Ruby data structure. + */ + +static void State_mark(JSON_Generator_State *state) +{ + rb_gc_mark_maybe(state->indent); + rb_gc_mark_maybe(state->space); + rb_gc_mark_maybe(state->space_before); + rb_gc_mark_maybe(state->object_nl); + rb_gc_mark_maybe(state->array_nl); + rb_gc_mark_maybe(state->seen); + rb_gc_mark_maybe(state->memo); + rb_gc_mark_maybe(state->depth); +} + +static JSON_Generator_State *State_allocate() +{ + JSON_Generator_State *state = ALLOC(JSON_Generator_State); + return state; +} + +static VALUE cState_s_allocate(VALUE klass) +{ + JSON_Generator_State *state = State_allocate(); + return Data_Wrap_Struct(klass, State_mark, -1, state); +} + +/* + * call-seq: configure(opts) + * + * Configure this State instance with the Hash _opts_, and return + * itself. 
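The raw-string helpers documented in this hunk are easiest to see from Ruby. A hedged sketch: it assumes the json gem (pure or compiled) is loaded, the byte values are arbitrary, and the inspected output is indicative rather than exact.

    require 'rubygems'
    require 'json'

    bytes = [222, 173, 190, 239].pack('C*')   # arbitrary binary data
    p bytes.to_json_raw_object
    # roughly: {"json_class"=>"String", "raw"=>[222, 173, 190, 239]}
    puts bytes.to_json_raw
    # roughly: {"json_class":"String","raw":[222,173,190,239]}
    # String::Extend.json_create reverses this by packing the "raw" byte
    # array back into a String, as described in the json_create doc above.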
+ */ +static VALUE cState_configure(VALUE self, VALUE opts) +{ + VALUE tmp; + GET_STATE(self); + tmp = rb_convert_type(opts, T_HASH, "Hash", "to_hash"); + if (NIL_P(tmp)) tmp = rb_convert_type(opts, T_HASH, "Hash", "to_h"); + if (NIL_P(tmp)) { + rb_raise(rb_eArgError, "opts has to be hash like or convertable into a hash"); + } + opts = tmp; + tmp = rb_hash_aref(opts, ID2SYM(i_indent)); + if (RTEST(tmp)) { + Check_Type(tmp, T_STRING); + state->indent = tmp; + } + tmp = rb_hash_aref(opts, ID2SYM(i_space)); + if (RTEST(tmp)) { + Check_Type(tmp, T_STRING); + state->space = tmp; + } + tmp = rb_hash_aref(opts, ID2SYM(i_space_before)); + if (RTEST(tmp)) { + Check_Type(tmp, T_STRING); + state->space_before = tmp; + } + tmp = rb_hash_aref(opts, ID2SYM(i_array_nl)); + if (RTEST(tmp)) { + Check_Type(tmp, T_STRING); + state->array_nl = tmp; + } + tmp = rb_hash_aref(opts, ID2SYM(i_object_nl)); + if (RTEST(tmp)) { + Check_Type(tmp, T_STRING); + state->object_nl = tmp; + } + tmp = ID2SYM(i_check_circular); + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + tmp = rb_hash_aref(opts, ID2SYM(i_check_circular)); + state->check_circular = RTEST(tmp); + } else { + state->check_circular = 1; + } + tmp = ID2SYM(i_max_nesting); + state->max_nesting = 19; + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + VALUE max_nesting = rb_hash_aref(opts, tmp); + if (RTEST(max_nesting)) { + Check_Type(max_nesting, T_FIXNUM); + state->max_nesting = FIX2LONG(max_nesting); + } else { + state->max_nesting = 0; + } + } + tmp = rb_hash_aref(opts, ID2SYM(i_allow_nan)); + state->allow_nan = RTEST(tmp); + return self; +} + +/* + * call-seq: to_h + * + * Returns the configuration instance variables as a hash, that can be + * passed to the configure method. + */ +static VALUE cState_to_h(VALUE self) +{ + VALUE result = rb_hash_new(); + GET_STATE(self); + rb_hash_aset(result, ID2SYM(i_indent), state->indent); + rb_hash_aset(result, ID2SYM(i_space), state->space); + rb_hash_aset(result, ID2SYM(i_space_before), state->space_before); + rb_hash_aset(result, ID2SYM(i_object_nl), state->object_nl); + rb_hash_aset(result, ID2SYM(i_array_nl), state->array_nl); + rb_hash_aset(result, ID2SYM(i_check_circular), state->check_circular ? Qtrue : Qfalse); + rb_hash_aset(result, ID2SYM(i_allow_nan), state->allow_nan ? Qtrue : Qfalse); + rb_hash_aset(result, ID2SYM(i_max_nesting), LONG2FIX(state->max_nesting)); + return result; +} + + +/* + * call-seq: new(opts = {}) + * + * Instantiates a new State object, configured by _opts_. + * + * _opts_ can have the following keys: + * + * * *indent*: a string used to indent levels (default: ''), + * * *space*: a string that is put after, a : or , delimiter (default: ''), + * * *space_before*: a string that is put before a : pair delimiter (default: ''), + * * *object_nl*: a string that is put at the end of a JSON object (default: ''), + * * *array_nl*: a string that is put at the end of a JSON array (default: ''), + * * *check_circular*: true if checking for circular data structures + * should be done, false (the default) otherwise. + * * *allow_nan*: true if NaN, Infinity, and -Infinity should be + * generated, otherwise an exception is thrown, if these values are + * encountered. This options defaults to false. 
+ */ +static VALUE cState_initialize(int argc, VALUE *argv, VALUE self) +{ + VALUE opts; + GET_STATE(self); + + rb_scan_args(argc, argv, "01", &opts); + state->indent = rb_str_new2(""); + state->space = rb_str_new2(""); + state->space_before = rb_str_new2(""); + state->array_nl = rb_str_new2(""); + state->object_nl = rb_str_new2(""); + if (NIL_P(opts)) { + state->check_circular = 1; + state->allow_nan = 0; + state->max_nesting = 19; + } else { + cState_configure(self, opts); + } + state->seen = rb_hash_new(); + state->memo = Qnil; + state->depth = INT2FIX(0); + return self; +} + +/* + * call-seq: from_state(opts) + * + * Creates a State object from _opts_, which ought to be Hash to create a + * new State instance configured by _opts_, something else to create an + * unconfigured instance. If _opts_ is a State object, it is just returned. + */ +static VALUE cState_from_state_s(VALUE self, VALUE opts) +{ + if (rb_obj_is_kind_of(opts, self)) { + return opts; + } else if (rb_obj_is_kind_of(opts, rb_cHash)) { + return rb_funcall(self, i_new, 1, opts); + } else { + return rb_funcall(self, i_new, 0); + } +} + +/* + * call-seq: indent() + * + * This string is used to indent levels in the JSON text. + */ +static VALUE cState_indent(VALUE self) +{ + GET_STATE(self); + return state->indent; +} + +/* + * call-seq: indent=(indent) + * + * This string is used to indent levels in the JSON text. + */ +static VALUE cState_indent_set(VALUE self, VALUE indent) +{ + GET_STATE(self); + Check_Type(indent, T_STRING); + return state->indent = indent; +} + +/* + * call-seq: space() + * + * This string is used to insert a space between the tokens in a JSON + * string. + */ +static VALUE cState_space(VALUE self) +{ + GET_STATE(self); + return state->space; +} + +/* + * call-seq: space=(space) + * + * This string is used to insert a space between the tokens in a JSON + * string. + */ +static VALUE cState_space_set(VALUE self, VALUE space) +{ + GET_STATE(self); + Check_Type(space, T_STRING); + return state->space = space; +} + +/* + * call-seq: space_before() + * + * This string is used to insert a space before the ':' in JSON objects. + */ +static VALUE cState_space_before(VALUE self) +{ + GET_STATE(self); + return state->space_before; +} + +/* + * call-seq: space_before=(space_before) + * + * This string is used to insert a space before the ':' in JSON objects. + */ +static VALUE cState_space_before_set(VALUE self, VALUE space_before) +{ + GET_STATE(self); + Check_Type(space_before, T_STRING); + return state->space_before = space_before; +} + +/* + * call-seq: object_nl() + * + * This string is put at the end of a line that holds a JSON object (or + * Hash). + */ +static VALUE cState_object_nl(VALUE self) +{ + GET_STATE(self); + return state->object_nl; +} + +/* + * call-seq: object_nl=(object_nl) + * + * This string is put at the end of a line that holds a JSON object (or + * Hash). + */ +static VALUE cState_object_nl_set(VALUE self, VALUE object_nl) +{ + GET_STATE(self); + Check_Type(object_nl, T_STRING); + return state->object_nl = object_nl; +} + +/* + * call-seq: array_nl() + * + * This string is put at the end of a line that holds a JSON array. + */ +static VALUE cState_array_nl(VALUE self) +{ + GET_STATE(self); + return state->array_nl; +} + +/* + * call-seq: array_nl=(array_nl) + * + * This string is put at the end of a line that holds a JSON array. 
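The State options listed above map directly onto hash entries at construction time. A hedged sketch of configuring them from Ruby; it assumes json/common points JSON::State at the active generator's State class, which is how the gem normally wires itself up.

    require 'rubygems'
    require 'json'

    state = JSON::State.new(
      :indent    => '  ',    # string used to indent each nesting level
      :space     => ' ',     # inserted after ':' and ',' delimiters
      :object_nl => "\n",    # emitted after '{' and between key/value pairs
      :array_nl  => "\n"     # emitted after '[' and between elements
    )

    puts({ 'list' => [1, 2, 3] }.to_json(state))   # pretty-printed output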
+ */ +static VALUE cState_array_nl_set(VALUE self, VALUE array_nl) +{ + GET_STATE(self); + Check_Type(array_nl, T_STRING); + return state->array_nl = array_nl; +} + +/* + * call-seq: check_circular? + * + * Returns true, if circular data structures should be checked, + * otherwise returns false. + */ +static VALUE cState_check_circular_p(VALUE self) +{ + GET_STATE(self); + return state->check_circular ? Qtrue : Qfalse; +} + +/* + * call-seq: max_nesting + * + * This integer returns the maximum level of data structure nesting in + * the generated JSON, max_nesting = 0 if no maximum is checked. + */ +static VALUE cState_max_nesting(VALUE self) +{ + GET_STATE(self); + return LONG2FIX(state->max_nesting); +} + +/* + * call-seq: max_nesting=(depth) + * + * This sets the maximum level of data structure nesting in the generated JSON + * to the integer depth, max_nesting = 0 if no maximum should be checked. + */ +static VALUE cState_max_nesting_set(VALUE self, VALUE depth) +{ + GET_STATE(self); + Check_Type(depth, T_FIXNUM); + state->max_nesting = FIX2LONG(depth); + return Qnil; +} + +/* + * call-seq: allow_nan? + * + * Returns true, if NaN, Infinity, and -Infinity should be generated, otherwise + * returns false. + */ +static VALUE cState_allow_nan_p(VALUE self) +{ + GET_STATE(self); + return state->allow_nan ? Qtrue : Qfalse; +} + +/* + * call-seq: seen?(object) + * + * Returns _true_, if _object_ was already seen during this generating run. + */ +static VALUE cState_seen_p(VALUE self, VALUE object) +{ + GET_STATE(self); + return rb_hash_aref(state->seen, rb_obj_id(object)); +} + +/* + * call-seq: remember(object) + * + * Remember _object_, to find out if it was already encountered (if a cyclic + * data structure is rendered). + */ +static VALUE cState_remember(VALUE self, VALUE object) +{ + GET_STATE(self); + return rb_hash_aset(state->seen, rb_obj_id(object), Qtrue); +} + +/* + * call-seq: forget(object) + * + * Forget _object_ for this generating run. 
+ */ +static VALUE cState_forget(VALUE self, VALUE object) +{ + GET_STATE(self); + return rb_hash_delete(state->seen, rb_obj_id(object)); +} + +/* + * + */ +void Init_generator() +{ + rb_require("json/common"); + mJSON = rb_define_module("JSON"); + mExt = rb_define_module_under(mJSON, "Ext"); + mGenerator = rb_define_module_under(mExt, "Generator"); + eGeneratorError = rb_path2class("JSON::GeneratorError"); + eCircularDatastructure = rb_path2class("JSON::CircularDatastructure"); + eNestingError = rb_path2class("JSON::NestingError"); + cState = rb_define_class_under(mGenerator, "State", rb_cObject); + rb_define_alloc_func(cState, cState_s_allocate); + rb_define_singleton_method(cState, "from_state", cState_from_state_s, 1); + rb_define_method(cState, "initialize", cState_initialize, -1); + + rb_define_method(cState, "indent", cState_indent, 0); + rb_define_method(cState, "indent=", cState_indent_set, 1); + rb_define_method(cState, "space", cState_space, 0); + rb_define_method(cState, "space=", cState_space_set, 1); + rb_define_method(cState, "space_before", cState_space_before, 0); + rb_define_method(cState, "space_before=", cState_space_before_set, 1); + rb_define_method(cState, "object_nl", cState_object_nl, 0); + rb_define_method(cState, "object_nl=", cState_object_nl_set, 1); + rb_define_method(cState, "array_nl", cState_array_nl, 0); + rb_define_method(cState, "array_nl=", cState_array_nl_set, 1); + rb_define_method(cState, "check_circular?", cState_check_circular_p, 0); + rb_define_method(cState, "max_nesting", cState_max_nesting, 0); + rb_define_method(cState, "max_nesting=", cState_max_nesting_set, 1); + rb_define_method(cState, "allow_nan?", cState_allow_nan_p, 0); + rb_define_method(cState, "seen?", cState_seen_p, 1); + rb_define_method(cState, "remember", cState_remember, 1); + rb_define_method(cState, "forget", cState_forget, 1); + rb_define_method(cState, "configure", cState_configure, 1); + rb_define_method(cState, "to_h", cState_to_h, 0); + + mGeneratorMethods = rb_define_module_under(mGenerator, "GeneratorMethods"); + mObject = rb_define_module_under(mGeneratorMethods, "Object"); + rb_define_method(mObject, "to_json", mObject_to_json, -1); + mHash = rb_define_module_under(mGeneratorMethods, "Hash"); + rb_define_method(mHash, "to_json", mHash_to_json, -1); + mArray = rb_define_module_under(mGeneratorMethods, "Array"); + rb_define_method(mArray, "to_json", mArray_to_json, -1); + mInteger = rb_define_module_under(mGeneratorMethods, "Integer"); + rb_define_method(mInteger, "to_json", mInteger_to_json, -1); + mFloat = rb_define_module_under(mGeneratorMethods, "Float"); + rb_define_method(mFloat, "to_json", mFloat_to_json, -1); + mString = rb_define_module_under(mGeneratorMethods, "String"); + rb_define_singleton_method(mString, "included", mString_included_s, 1); + rb_define_method(mString, "to_json", mString_to_json, -1); + rb_define_method(mString, "to_json_raw", mString_to_json_raw, -1); + rb_define_method(mString, "to_json_raw_object", mString_to_json_raw_object, 0); + mString_Extend = rb_define_module_under(mString, "Extend"); + rb_define_method(mString_Extend, "json_create", mString_Extend_json_create, 1); + mTrueClass = rb_define_module_under(mGeneratorMethods, "TrueClass"); + rb_define_method(mTrueClass, "to_json", mTrueClass_to_json, -1); + mFalseClass = rb_define_module_under(mGeneratorMethods, "FalseClass"); + rb_define_method(mFalseClass, "to_json", mFalseClass_to_json, -1); + mNilClass = rb_define_module_under(mGeneratorMethods, "NilClass"); + 
rb_define_method(mNilClass, "to_json", mNilClass_to_json, -1); + + i_to_s = rb_intern("to_s"); + i_to_json = rb_intern("to_json"); + i_new = rb_intern("new"); + i_indent = rb_intern("indent"); + i_space = rb_intern("space"); + i_space_before = rb_intern("space_before"); + i_object_nl = rb_intern("object_nl"); + i_array_nl = rb_intern("array_nl"); + i_check_circular = rb_intern("check_circular"); + i_max_nesting = rb_intern("max_nesting"); + i_allow_nan = rb_intern("allow_nan"); + i_pack = rb_intern("pack"); + i_unpack = rb_intern("unpack"); + i_create_id = rb_intern("create_id"); + i_extend = rb_intern("extend"); +#ifdef HAVE_RUBY_ENCODING_H + mEncoding_UTF_8 = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-8")); + i_encoding = rb_intern("encoding"); + i_encode = rb_intern("encode"); +#endif +} diff --git a/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/unicode.c b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/unicode.c new file mode 100644 index 00000000..3ddfbe0e --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/unicode.c @@ -0,0 +1,180 @@ +#include "unicode.h" + +#define unicode_escape(buffer, character) \ + snprintf(buf, 7, "\\u%04x", (unsigned int) (character)); \ + rb_str_buf_cat(buffer, buf, 6); + +/* + * Copyright 2001-2004 Unicode, Inc. + * + * Disclaimer + * + * This source code is provided as is by Unicode, Inc. No claims are + * made as to fitness for any particular purpose. No warranties of any + * kind are expressed or implied. The recipient agrees to determine + * applicability of information provided. If this file has been + * purchased on magnetic or optical media from Unicode, Inc., the + * sole remedy for any claim will be exchange of defective media + * within 90 days of receipt. + * + * Limitations on Rights to Redistribute This Code + * + * Unicode, Inc. hereby grants the right to freely use the information + * supplied in this file in the creation of products supporting the + * Unicode Standard, and to make copies of this file in any form + * for internal or external distribution as long as this notice + * remains attached. + */ + +/* + * Index into the table below with the first byte of a UTF-8 sequence to + * get the number of trailing bytes that are supposed to follow it. + * Note that *legal* UTF-8 values can't have 4 or 5-bytes. The table is + * left as-is for anyone who may want to do such conversion, which was + * allowed in earlier algorithms. + */ +static const char trailingBytesForUTF8[256] = { + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, + 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 3,3,3,3,3,3,3,3,4,4,4,4,5,5,5,5 +}; + +/* + * Magic values subtracted from a buffer value during UTF8 conversion. + * This table contains as many values as there might be trailing bytes + * in a UTF-8 sequence. + */ +static const UTF32 offsetsFromUTF8[6] = { 0x00000000UL, 0x00003080UL, 0x000E2080UL, + 0x03C82080UL, 0xFA082080UL, 0x82082080UL }; + +/* + * Once the bits are split out into bytes of UTF-8, this is a mask OR-ed + * into the first byte, depending on how many bytes follow. 
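Init_generator above only defines the GeneratorMethods mix-ins; the gem's Ruby layer includes them into the corresponding core classes when this extension is selected (for example via require 'json/ext'), which is what makes bare to_json calls work. A small sketch with made-up values:

    require 'json/ext'   # selects the C generator and parser

    1.to_json                            # => "1"
    nil.to_json                          # => "null"
    [1, { 'a' => nil }, true].to_json    # => "[1,{\"a\":null},true]"
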
There are + * as many entries in this table as there are UTF-8 sequence types. + * (I.e., one byte sequence, two byte... etc.). Remember that sequencs + * for *legal* UTF-8 will be 4 or fewer bytes total. + */ +static const UTF8 firstByteMark[7] = { 0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC }; + +/* + * Utility routine to tell whether a sequence of bytes is legal UTF-8. + * This must be called with the length pre-determined by the first byte. + * If not calling this from ConvertUTF8to*, then the length can be set by: + * length = trailingBytesForUTF8[*source]+1; + * and the sequence is illegal right away if there aren't that many bytes + * available. + * If presented with a length > 4, this returns 0. The Unicode + * definition of UTF-8 goes up to 4-byte sequences. + */ + +inline static unsigned char isLegalUTF8(const UTF8 *source, int length) +{ + UTF8 a; + const UTF8 *srcptr = source+length; + switch (length) { + default: return 0; + /* Everything else falls through when "1"... */ + case 4: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return 0; + case 3: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return 0; + case 2: if ((a = (*--srcptr)) > 0xBF) return 0; + + switch (*source) { + /* no fall-through in this inner switch */ + case 0xE0: if (a < 0xA0) return 0; break; + case 0xED: if (a > 0x9F) return 0; break; + case 0xF0: if (a < 0x90) return 0; break; + case 0xF4: if (a > 0x8F) return 0; break; + default: if (a < 0x80) return 0; + } + + case 1: if (*source >= 0x80 && *source < 0xC2) return 0; + } + if (*source > 0xF4) return 0; + return 1; +} + +void JSON_convert_UTF8_to_JSON(VALUE buffer, VALUE string, ConversionFlags flags) +{ + char buf[7]; + const UTF8* source = (UTF8 *) RSTRING_PTR(string); + const UTF8* sourceEnd = source + RSTRING_LEN(string); + + while (source < sourceEnd) { + UTF32 ch = 0; + unsigned short extraBytesToRead = trailingBytesForUTF8[*source]; + if (source + extraBytesToRead >= sourceEnd) { + rb_raise(rb_path2class("JSON::GeneratorError"), + "partial character in source, but hit end"); + } + if (!isLegalUTF8(source, extraBytesToRead+1)) { + rb_raise(rb_path2class("JSON::GeneratorError"), + "source sequence is illegal/malformed utf-8"); + } + /* + * The cases all fall through. See "Note A" below. 
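The conversion loop that follows decodes each UTF-8 sequence into one code point and, for code points beyond the BMP, writes a UTF-16 surrogate pair through unicode_escape, using halfBase, halfShift and halfMask from unicode.h. The same arithmetic in Ruby, for illustration only (U+1D11E is an arbitrary example):

    ch = 0x1D11E                        # a code point outside the BMP
    ch -= 0x10000                       # halfBase
    hi = (ch >> 10)   + 0xD800          # halfShift, then UNI_SUR_HIGH_START
    lo = (ch & 0x3FF) + 0xDC00          # halfMask, then UNI_SUR_LOW_START
    puts format('\u%04x\u%04x', hi, lo) # prints \ud834\udd1e, the escape written into the JSON text
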
+ */ + switch (extraBytesToRead) { + case 5: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */ + case 4: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */ + case 3: ch += *source++; ch <<= 6; + case 2: ch += *source++; ch <<= 6; + case 1: ch += *source++; ch <<= 6; + case 0: ch += *source++; + } + ch -= offsetsFromUTF8[extraBytesToRead]; + + if (ch <= UNI_MAX_BMP) { /* Target is a character <= 0xFFFF */ + /* UTF-16 surrogate values are illegal in UTF-32 */ + if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { + if (flags == strictConversion) { + source -= (extraBytesToRead+1); /* return to the illegal value itself */ + rb_raise(rb_path2class("JSON::GeneratorError"), + "source sequence is illegal/malformed utf-8"); + } else { + unicode_escape(buffer, UNI_REPLACEMENT_CHAR); + } + } else { + /* normal case */ + if (ch == '"') { + rb_str_buf_cat2(buffer, "\\\""); + } else if (ch == '\\') { + rb_str_buf_cat2(buffer, "\\\\"); + } else if (ch >= 0x20 && ch <= 0x7f) { + rb_str_buf_cat(buffer, (char *) source - 1, 1); + } else if (ch == '\n') { + rb_str_buf_cat2(buffer, "\\n"); + } else if (ch == '\r') { + rb_str_buf_cat2(buffer, "\\r"); + } else if (ch == '\t') { + rb_str_buf_cat2(buffer, "\\t"); + } else if (ch == '\f') { + rb_str_buf_cat2(buffer, "\\f"); + } else if (ch == '\b') { + rb_str_buf_cat2(buffer, "\\b"); + } else if (ch < 0x20) { + unicode_escape(buffer, (UTF16) ch); + } else { + unicode_escape(buffer, (UTF16) ch); + } + } + } else if (ch > UNI_MAX_UTF16) { + if (flags == strictConversion) { + source -= (extraBytesToRead+1); /* return to the start */ + rb_raise(rb_path2class("JSON::GeneratorError"), + "source sequence is illegal/malformed utf8"); + } else { + unicode_escape(buffer, UNI_REPLACEMENT_CHAR); + } + } else { + /* target is a character in range 0xFFFF - 0x10FFFF. */ + ch -= halfBase; + unicode_escape(buffer, (UTF16)((ch >> halfShift) + UNI_SUR_HIGH_START)); + unicode_escape(buffer, (UTF16)((ch & halfMask) + UNI_SUR_LOW_START)); + } + } +} diff --git a/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/unicode.h b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/unicode.h new file mode 100644 index 00000000..841474bc --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/generator/unicode.h @@ -0,0 +1,53 @@ +#include "ruby.h" + +#ifndef _GENERATOR_UNICODE_H_ +#define _GENERATOR_UNICODE_H_ + +typedef enum { + conversionOK = 0, /* conversion successful */ + sourceExhausted, /* partial character in source, but hit end */ + targetExhausted, /* insuff. 
room in target for conversion */ + sourceIllegal /* source sequence is illegal/malformed */ +} ConversionResult; + +typedef enum { + strictConversion = 0, + lenientConversion +} ConversionFlags; + +typedef unsigned long UTF32; /* at least 32 bits */ +typedef unsigned short UTF16; /* at least 16 bits */ +typedef unsigned char UTF8; /* typically 8 bits */ + +#define UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD +#define UNI_MAX_BMP (UTF32)0x0000FFFF +#define UNI_MAX_UTF16 (UTF32)0x0010FFFF +#define UNI_MAX_UTF32 (UTF32)0x7FFFFFFF +#define UNI_MAX_LEGAL_UTF32 (UTF32)0x0010FFFF + +#define UNI_SUR_HIGH_START (UTF32)0xD800 +#define UNI_SUR_HIGH_END (UTF32)0xDBFF +#define UNI_SUR_LOW_START (UTF32)0xDC00 +#define UNI_SUR_LOW_END (UTF32)0xDFFF + +static const int halfShift = 10; /* used for shifting by 10 bits */ + +static const UTF32 halfBase = 0x0010000UL; +static const UTF32 halfMask = 0x3FFUL; + +void JSON_convert_UTF8_to_JSON(VALUE buffer, VALUE string, ConversionFlags flags); + +#ifndef RARRAY_PTR +#define RARRAY_PTR(ARRAY) RARRAY(ARRAY)->ptr +#endif +#ifndef RARRAY_LEN +#define RARRAY_LEN(ARRAY) RARRAY(ARRAY)->len +#endif +#ifndef RSTRING_PTR +#define RSTRING_PTR(string) RSTRING(string)->ptr +#endif +#ifndef RSTRING_LEN +#define RSTRING_LEN(string) RSTRING(string)->len +#endif + +#endif diff --git a/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/extconf.rb b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/extconf.rb new file mode 100644 index 00000000..62263947 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/extconf.rb @@ -0,0 +1,11 @@ +require 'mkmf' +require 'rbconfig' + +if CONFIG['CC'] =~ /gcc/ + $CFLAGS += ' -Wall' + #$CFLAGS += ' -O0 -ggdb' +end + +have_header("ruby/st.h") || have_header("st.h") +have_header("re.h") +create_makefile 'parser' diff --git a/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/parser.c b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/parser.c new file mode 100644 index 00000000..17813818 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/parser.c @@ -0,0 +1,1880 @@ + +#line 1 "parser.rl" +#include "ruby.h" +#include "unicode.h" +#if HAVE_RE_H +#include "re.h" +#endif +#if HAVE_RUBY_ST_H +#include "ruby/st.h" +#endif +#if HAVE_ST_H +#include "st.h" +#endif + +#define EVIL 0x666 + +#ifndef RHASH_TBL +#define RHASH_TBL(hsh) (RHASH(hsh)->tbl) +#endif + +#ifdef HAVE_RUBY_ENCODING_H +#include "ruby/encoding.h" +#define FORCE_UTF8(obj) rb_enc_associate((obj), rb_utf8_encoding()) +static VALUE mEncoding_ASCII_8BIT, mEncoding_UTF_8, mEncoding_UTF_16BE, + mEncoding_UTF_16LE, mEncoding_UTF_32BE, mEncoding_UTF_32LE; +static ID i_encoding, i_encode, i_encode_bang, i_force_encoding; +#else +#define FORCE_UTF8(obj) +static ID i_iconv; +#endif + +static VALUE mJSON, mExt, cParser, eParserError, eNestingError; +static VALUE CNaN, CInfinity, CMinusInfinity; + +static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions, + i_chr, i_max_nesting, i_allow_nan, i_object_class, i_array_class; + +#define MinusInfinity "-Infinity" + +typedef struct JSON_ParserStruct { + VALUE Vsource; + char *source; + long len; + char *memo; + VALUE create_id; + int max_nesting; + int current_nesting; + int allow_nan; + VALUE object_class; + VALUE array_class; +} JSON_Parser; + +static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result); +static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result); +static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE 
*result); +static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result); +static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result); +static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result); + +#define GET_STRUCT \ + JSON_Parser *json; \ + Data_Get_Struct(self, JSON_Parser, json); + + +#line 88 "parser.rl" + + + +#line 70 "parser.c" +static const int JSON_object_start = 1; +static const int JSON_object_first_final = 27; +static const int JSON_object_error = 0; + +static const int JSON_object_en_main = 1; + + +#line 121 "parser.rl" + + +static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + VALUE last_name = Qnil; + VALUE object_class = json->object_class; + + if (json->max_nesting && json->current_nesting > json->max_nesting) { + rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting); + } + + *result = NIL_P(object_class) ? rb_hash_new() : rb_class_new_instance(0, 0, object_class); + + +#line 94 "parser.c" + { + cs = JSON_object_start; + } + +#line 136 "parser.rl" + +#line 101 "parser.c" + { + if ( p == pe ) + goto _test_eof; + switch ( cs ) + { +case 1: + if ( (*p) == 123 ) + goto st2; + goto st0; +st0: +cs = 0; + goto _out; +st2: + if ( ++p == pe ) + goto _test_eof2; +case 2: + switch( (*p) ) { + case 13: goto st2; + case 32: goto st2; + case 34: goto tr2; + case 47: goto st23; + case 125: goto tr4; + } + if ( 9 <= (*p) && (*p) <= 10 ) + goto st2; + goto st0; +tr2: +#line 107 "parser.rl" + { + char *np = JSON_parse_string(json, p, pe, &last_name); + if (np == NULL) { p--; {p++; cs = 3; goto _out;} } else {p = (( np))-1;} + } + goto st3; +st3: + if ( ++p == pe ) + goto _test_eof3; +case 3: +#line 139 "parser.c" + switch( (*p) ) { + case 13: goto st3; + case 32: goto st3; + case 47: goto st4; + case 58: goto st8; + } + if ( 9 <= (*p) && (*p) <= 10 ) + goto st3; + goto st0; +st4: + if ( ++p == pe ) + goto _test_eof4; +case 4: + switch( (*p) ) { + case 42: goto st5; + case 47: goto st7; + } + goto st0; +st5: + if ( ++p == pe ) + goto _test_eof5; +case 5: + if ( (*p) == 42 ) + goto st6; + goto st5; +st6: + if ( ++p == pe ) + goto _test_eof6; +case 6: + switch( (*p) ) { + case 42: goto st6; + case 47: goto st3; + } + goto st5; +st7: + if ( ++p == pe ) + goto _test_eof7; +case 7: + if ( (*p) == 10 ) + goto st3; + goto st7; +st8: + if ( ++p == pe ) + goto _test_eof8; +case 8: + switch( (*p) ) { + case 13: goto st8; + case 32: goto st8; + case 34: goto tr11; + case 45: goto tr11; + case 47: goto st19; + case 73: goto tr11; + case 78: goto tr11; + case 91: goto tr11; + case 102: goto tr11; + case 110: goto tr11; + case 116: goto tr11; + case 123: goto tr11; + } + if ( (*p) > 10 ) { + if ( 48 <= (*p) && (*p) <= 57 ) + goto tr11; + } else if ( (*p) >= 9 ) + goto st8; + goto st0; +tr11: +#line 96 "parser.rl" + { + VALUE v = Qnil; + char *np = JSON_parse_value(json, p, pe, &v); + if (np == NULL) { + p--; {p++; cs = 9; goto _out;} + } else { + rb_hash_aset(*result, last_name, v); + {p = (( np))-1;} + } + } + goto st9; +st9: + if ( ++p == pe ) + goto _test_eof9; +case 9: +#line 222 "parser.c" + switch( (*p) ) { + case 13: goto st9; + case 32: goto st9; + case 44: goto st10; + case 47: goto st15; + case 125: goto tr4; + } + if ( 9 <= (*p) && (*p) <= 10 ) + goto st9; + goto st0; +st10: + if ( ++p == pe ) + goto _test_eof10; +case 10: + switch( (*p) ) { + case 13: goto st10; + case 32: goto st10; + case 34: goto tr2; + case 47: goto st11; + } + if ( 9 <= (*p) 
&& (*p) <= 10 ) + goto st10; + goto st0; +st11: + if ( ++p == pe ) + goto _test_eof11; +case 11: + switch( (*p) ) { + case 42: goto st12; + case 47: goto st14; + } + goto st0; +st12: + if ( ++p == pe ) + goto _test_eof12; +case 12: + if ( (*p) == 42 ) + goto st13; + goto st12; +st13: + if ( ++p == pe ) + goto _test_eof13; +case 13: + switch( (*p) ) { + case 42: goto st13; + case 47: goto st10; + } + goto st12; +st14: + if ( ++p == pe ) + goto _test_eof14; +case 14: + if ( (*p) == 10 ) + goto st10; + goto st14; +st15: + if ( ++p == pe ) + goto _test_eof15; +case 15: + switch( (*p) ) { + case 42: goto st16; + case 47: goto st18; + } + goto st0; +st16: + if ( ++p == pe ) + goto _test_eof16; +case 16: + if ( (*p) == 42 ) + goto st17; + goto st16; +st17: + if ( ++p == pe ) + goto _test_eof17; +case 17: + switch( (*p) ) { + case 42: goto st17; + case 47: goto st9; + } + goto st16; +st18: + if ( ++p == pe ) + goto _test_eof18; +case 18: + if ( (*p) == 10 ) + goto st9; + goto st18; +tr4: +#line 112 "parser.rl" + { p--; {p++; cs = 27; goto _out;} } + goto st27; +st27: + if ( ++p == pe ) + goto _test_eof27; +case 27: +#line 318 "parser.c" + goto st0; +st19: + if ( ++p == pe ) + goto _test_eof19; +case 19: + switch( (*p) ) { + case 42: goto st20; + case 47: goto st22; + } + goto st0; +st20: + if ( ++p == pe ) + goto _test_eof20; +case 20: + if ( (*p) == 42 ) + goto st21; + goto st20; +st21: + if ( ++p == pe ) + goto _test_eof21; +case 21: + switch( (*p) ) { + case 42: goto st21; + case 47: goto st8; + } + goto st20; +st22: + if ( ++p == pe ) + goto _test_eof22; +case 22: + if ( (*p) == 10 ) + goto st8; + goto st22; +st23: + if ( ++p == pe ) + goto _test_eof23; +case 23: + switch( (*p) ) { + case 42: goto st24; + case 47: goto st26; + } + goto st0; +st24: + if ( ++p == pe ) + goto _test_eof24; +case 24: + if ( (*p) == 42 ) + goto st25; + goto st24; +st25: + if ( ++p == pe ) + goto _test_eof25; +case 25: + switch( (*p) ) { + case 42: goto st25; + case 47: goto st2; + } + goto st24; +st26: + if ( ++p == pe ) + goto _test_eof26; +case 26: + if ( (*p) == 10 ) + goto st2; + goto st26; + } + _test_eof2: cs = 2; goto _test_eof; + _test_eof3: cs = 3; goto _test_eof; + _test_eof4: cs = 4; goto _test_eof; + _test_eof5: cs = 5; goto _test_eof; + _test_eof6: cs = 6; goto _test_eof; + _test_eof7: cs = 7; goto _test_eof; + _test_eof8: cs = 8; goto _test_eof; + _test_eof9: cs = 9; goto _test_eof; + _test_eof10: cs = 10; goto _test_eof; + _test_eof11: cs = 11; goto _test_eof; + _test_eof12: cs = 12; goto _test_eof; + _test_eof13: cs = 13; goto _test_eof; + _test_eof14: cs = 14; goto _test_eof; + _test_eof15: cs = 15; goto _test_eof; + _test_eof16: cs = 16; goto _test_eof; + _test_eof17: cs = 17; goto _test_eof; + _test_eof18: cs = 18; goto _test_eof; + _test_eof27: cs = 27; goto _test_eof; + _test_eof19: cs = 19; goto _test_eof; + _test_eof20: cs = 20; goto _test_eof; + _test_eof21: cs = 21; goto _test_eof; + _test_eof22: cs = 22; goto _test_eof; + _test_eof23: cs = 23; goto _test_eof; + _test_eof24: cs = 24; goto _test_eof; + _test_eof25: cs = 25; goto _test_eof; + _test_eof26: cs = 26; goto _test_eof; + + _test_eof: {} + _out: {} + } + +#line 137 "parser.rl" + + if (cs >= JSON_object_first_final) { + if (RTEST(json->create_id)) { + VALUE klassname = rb_hash_aref(*result, json->create_id); + if (!NIL_P(klassname)) { + VALUE klass = rb_path2class(StringValueCStr(klassname)); + if RTEST(rb_funcall(klass, i_json_creatable_p, 0)) { + *result = rb_funcall(klass, i_json_create, 1, *result); + } + } + } + return p + 1; + } 
else { + return NULL; + } +} + + +#line 435 "parser.c" +static const int JSON_value_start = 1; +static const int JSON_value_first_final = 21; +static const int JSON_value_error = 0; + +static const int JSON_value_en_main = 1; + + +#line 235 "parser.rl" + + +static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + + +#line 451 "parser.c" + { + cs = JSON_value_start; + } + +#line 242 "parser.rl" + +#line 458 "parser.c" + { + if ( p == pe ) + goto _test_eof; + switch ( cs ) + { +case 1: + switch( (*p) ) { + case 34: goto tr0; + case 45: goto tr2; + case 73: goto st2; + case 78: goto st9; + case 91: goto tr5; + case 102: goto st11; + case 110: goto st15; + case 116: goto st18; + case 123: goto tr9; + } + if ( 48 <= (*p) && (*p) <= 57 ) + goto tr2; + goto st0; +st0: +cs = 0; + goto _out; +tr0: +#line 183 "parser.rl" + { + char *np = JSON_parse_string(json, p, pe, result); + if (np == NULL) { p--; {p++; cs = 21; goto _out;} } else {p = (( np))-1;} + } + goto st21; +tr2: +#line 188 "parser.rl" + { + char *np; + if(pe > p + 9 && !strncmp(MinusInfinity, p, 9)) { + if (json->allow_nan) { + *result = CMinusInfinity; + {p = (( p + 10))-1;} + p--; {p++; cs = 21; goto _out;} + } else { + rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); + } + } + np = JSON_parse_float(json, p, pe, result); + if (np != NULL) {p = (( np))-1;} + np = JSON_parse_integer(json, p, pe, result); + if (np != NULL) {p = (( np))-1;} + p--; {p++; cs = 21; goto _out;} + } + goto st21; +tr5: +#line 206 "parser.rl" + { + char *np; + json->current_nesting++; + np = JSON_parse_array(json, p, pe, result); + json->current_nesting--; + if (np == NULL) { p--; {p++; cs = 21; goto _out;} } else {p = (( np))-1;} + } + goto st21; +tr9: +#line 214 "parser.rl" + { + char *np; + json->current_nesting++; + np = JSON_parse_object(json, p, pe, result); + json->current_nesting--; + if (np == NULL) { p--; {p++; cs = 21; goto _out;} } else {p = (( np))-1;} + } + goto st21; +tr16: +#line 176 "parser.rl" + { + if (json->allow_nan) { + *result = CInfinity; + } else { + rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 8); + } + } + goto st21; +tr18: +#line 169 "parser.rl" + { + if (json->allow_nan) { + *result = CNaN; + } else { + rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 2); + } + } + goto st21; +tr22: +#line 163 "parser.rl" + { + *result = Qfalse; + } + goto st21; +tr25: +#line 160 "parser.rl" + { + *result = Qnil; + } + goto st21; +tr28: +#line 166 "parser.rl" + { + *result = Qtrue; + } + goto st21; +st21: + if ( ++p == pe ) + goto _test_eof21; +case 21: +#line 222 "parser.rl" + { p--; {p++; cs = 21; goto _out;} } +#line 573 "parser.c" + goto st0; +st2: + if ( ++p == pe ) + goto _test_eof2; +case 2: + if ( (*p) == 110 ) + goto st3; + goto st0; +st3: + if ( ++p == pe ) + goto _test_eof3; +case 3: + if ( (*p) == 102 ) + goto st4; + goto st0; +st4: + if ( ++p == pe ) + goto _test_eof4; +case 4: + if ( (*p) == 105 ) + goto st5; + goto st0; +st5: + if ( ++p == pe ) + goto _test_eof5; +case 5: + if ( (*p) == 110 ) + goto st6; + goto st0; +st6: + if ( ++p == pe ) + goto _test_eof6; +case 6: + if ( (*p) == 105 ) + goto st7; + goto st0; +st7: + if ( ++p == pe ) + goto _test_eof7; +case 7: + if ( (*p) == 116 ) + goto st8; + goto st0; +st8: + if ( ++p == pe ) + goto _test_eof8; +case 8: + if ( (*p) == 121 ) + goto tr16; + goto st0; +st9: + if ( ++p == pe ) + goto _test_eof9; +case 9: + if ( (*p) == 97 ) + goto st10; + goto st0; +st10: + if ( ++p == pe ) 
+ goto _test_eof10; +case 10: + if ( (*p) == 78 ) + goto tr18; + goto st0; +st11: + if ( ++p == pe ) + goto _test_eof11; +case 11: + if ( (*p) == 97 ) + goto st12; + goto st0; +st12: + if ( ++p == pe ) + goto _test_eof12; +case 12: + if ( (*p) == 108 ) + goto st13; + goto st0; +st13: + if ( ++p == pe ) + goto _test_eof13; +case 13: + if ( (*p) == 115 ) + goto st14; + goto st0; +st14: + if ( ++p == pe ) + goto _test_eof14; +case 14: + if ( (*p) == 101 ) + goto tr22; + goto st0; +st15: + if ( ++p == pe ) + goto _test_eof15; +case 15: + if ( (*p) == 117 ) + goto st16; + goto st0; +st16: + if ( ++p == pe ) + goto _test_eof16; +case 16: + if ( (*p) == 108 ) + goto st17; + goto st0; +st17: + if ( ++p == pe ) + goto _test_eof17; +case 17: + if ( (*p) == 108 ) + goto tr25; + goto st0; +st18: + if ( ++p == pe ) + goto _test_eof18; +case 18: + if ( (*p) == 114 ) + goto st19; + goto st0; +st19: + if ( ++p == pe ) + goto _test_eof19; +case 19: + if ( (*p) == 117 ) + goto st20; + goto st0; +st20: + if ( ++p == pe ) + goto _test_eof20; +case 20: + if ( (*p) == 101 ) + goto tr28; + goto st0; + } + _test_eof21: cs = 21; goto _test_eof; + _test_eof2: cs = 2; goto _test_eof; + _test_eof3: cs = 3; goto _test_eof; + _test_eof4: cs = 4; goto _test_eof; + _test_eof5: cs = 5; goto _test_eof; + _test_eof6: cs = 6; goto _test_eof; + _test_eof7: cs = 7; goto _test_eof; + _test_eof8: cs = 8; goto _test_eof; + _test_eof9: cs = 9; goto _test_eof; + _test_eof10: cs = 10; goto _test_eof; + _test_eof11: cs = 11; goto _test_eof; + _test_eof12: cs = 12; goto _test_eof; + _test_eof13: cs = 13; goto _test_eof; + _test_eof14: cs = 14; goto _test_eof; + _test_eof15: cs = 15; goto _test_eof; + _test_eof16: cs = 16; goto _test_eof; + _test_eof17: cs = 17; goto _test_eof; + _test_eof18: cs = 18; goto _test_eof; + _test_eof19: cs = 19; goto _test_eof; + _test_eof20: cs = 20; goto _test_eof; + + _test_eof: {} + _out: {} + } + +#line 243 "parser.rl" + + if (cs >= JSON_value_first_final) { + return p; + } else { + return NULL; + } +} + + +#line 744 "parser.c" +static const int JSON_integer_start = 1; +static const int JSON_integer_first_final = 5; +static const int JSON_integer_error = 0; + +static const int JSON_integer_en_main = 1; + + +#line 259 "parser.rl" + + +static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + + +#line 760 "parser.c" + { + cs = JSON_integer_start; + } + +#line 266 "parser.rl" + json->memo = p; + +#line 768 "parser.c" + { + if ( p == pe ) + goto _test_eof; + switch ( cs ) + { +case 1: + switch( (*p) ) { + case 45: goto st2; + case 48: goto st3; + } + if ( 49 <= (*p) && (*p) <= 57 ) + goto st4; + goto st0; +st0: +cs = 0; + goto _out; +st2: + if ( ++p == pe ) + goto _test_eof2; +case 2: + if ( (*p) == 48 ) + goto st3; + if ( 49 <= (*p) && (*p) <= 57 ) + goto st4; + goto st0; +st3: + if ( ++p == pe ) + goto _test_eof3; +case 3: + if ( 48 <= (*p) && (*p) <= 57 ) + goto st0; + goto tr4; +tr4: +#line 256 "parser.rl" + { p--; {p++; cs = 5; goto _out;} } + goto st5; +st5: + if ( ++p == pe ) + goto _test_eof5; +case 5: +#line 809 "parser.c" + goto st0; +st4: + if ( ++p == pe ) + goto _test_eof4; +case 4: + if ( 48 <= (*p) && (*p) <= 57 ) + goto st4; + goto tr4; + } + _test_eof2: cs = 2; goto _test_eof; + _test_eof3: cs = 3; goto _test_eof; + _test_eof5: cs = 5; goto _test_eof; + _test_eof4: cs = 4; goto _test_eof; + + _test_eof: {} + _out: {} + } + +#line 268 "parser.rl" + + if (cs >= JSON_integer_first_final) { + long len = p - json->memo; + *result = 
rb_Integer(rb_str_new(json->memo, len)); + return p + 1; + } else { + return NULL; + } +} + + +#line 840 "parser.c" +static const int JSON_float_start = 1; +static const int JSON_float_first_final = 10; +static const int JSON_float_error = 0; + +static const int JSON_float_en_main = 1; + + +#line 290 "parser.rl" + + +static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + + +#line 856 "parser.c" + { + cs = JSON_float_start; + } + +#line 297 "parser.rl" + json->memo = p; + +#line 864 "parser.c" + { + if ( p == pe ) + goto _test_eof; + switch ( cs ) + { +case 1: + switch( (*p) ) { + case 45: goto st2; + case 48: goto st3; + } + if ( 49 <= (*p) && (*p) <= 57 ) + goto st9; + goto st0; +st0: +cs = 0; + goto _out; +st2: + if ( ++p == pe ) + goto _test_eof2; +case 2: + if ( (*p) == 48 ) + goto st3; + if ( 49 <= (*p) && (*p) <= 57 ) + goto st9; + goto st0; +st3: + if ( ++p == pe ) + goto _test_eof3; +case 3: + switch( (*p) ) { + case 46: goto st4; + case 69: goto st6; + case 101: goto st6; + } + goto st0; +st4: + if ( ++p == pe ) + goto _test_eof4; +case 4: + if ( 48 <= (*p) && (*p) <= 57 ) + goto st5; + goto st0; +st5: + if ( ++p == pe ) + goto _test_eof5; +case 5: + switch( (*p) ) { + case 69: goto st6; + case 101: goto st6; + } + if ( (*p) > 46 ) { + if ( 48 <= (*p) && (*p) <= 57 ) + goto st5; + } else if ( (*p) >= 45 ) + goto st0; + goto tr7; +tr7: +#line 284 "parser.rl" + { p--; {p++; cs = 10; goto _out;} } + goto st10; +st10: + if ( ++p == pe ) + goto _test_eof10; +case 10: +#line 929 "parser.c" + goto st0; +st6: + if ( ++p == pe ) + goto _test_eof6; +case 6: + switch( (*p) ) { + case 43: goto st7; + case 45: goto st7; + } + if ( 48 <= (*p) && (*p) <= 57 ) + goto st8; + goto st0; +st7: + if ( ++p == pe ) + goto _test_eof7; +case 7: + if ( 48 <= (*p) && (*p) <= 57 ) + goto st8; + goto st0; +st8: + if ( ++p == pe ) + goto _test_eof8; +case 8: + switch( (*p) ) { + case 69: goto st0; + case 101: goto st0; + } + if ( (*p) > 46 ) { + if ( 48 <= (*p) && (*p) <= 57 ) + goto st8; + } else if ( (*p) >= 45 ) + goto st0; + goto tr7; +st9: + if ( ++p == pe ) + goto _test_eof9; +case 9: + switch( (*p) ) { + case 46: goto st4; + case 69: goto st6; + case 101: goto st6; + } + if ( 48 <= (*p) && (*p) <= 57 ) + goto st9; + goto st0; + } + _test_eof2: cs = 2; goto _test_eof; + _test_eof3: cs = 3; goto _test_eof; + _test_eof4: cs = 4; goto _test_eof; + _test_eof5: cs = 5; goto _test_eof; + _test_eof10: cs = 10; goto _test_eof; + _test_eof6: cs = 6; goto _test_eof; + _test_eof7: cs = 7; goto _test_eof; + _test_eof8: cs = 8; goto _test_eof; + _test_eof9: cs = 9; goto _test_eof; + + _test_eof: {} + _out: {} + } + +#line 299 "parser.rl" + + if (cs >= JSON_float_first_final) { + long len = p - json->memo; + *result = rb_Float(rb_str_new(json->memo, len)); + return p + 1; + } else { + return NULL; + } +} + + + +#line 1003 "parser.c" +static const int JSON_array_start = 1; +static const int JSON_array_first_final = 17; +static const int JSON_array_error = 0; + +static const int JSON_array_en_main = 1; + + +#line 335 "parser.rl" + + +static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + VALUE array_class = json->array_class; + + if (json->max_nesting && json->current_nesting > json->max_nesting) { + rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting); + } + *result = NIL_P(array_class) ? 
rb_ary_new() : rb_class_new_instance(0, 0, array_class); + + +#line 1025 "parser.c" + { + cs = JSON_array_start; + } + +#line 348 "parser.rl" + +#line 1032 "parser.c" + { + if ( p == pe ) + goto _test_eof; + switch ( cs ) + { +case 1: + if ( (*p) == 91 ) + goto st2; + goto st0; +st0: +cs = 0; + goto _out; +st2: + if ( ++p == pe ) + goto _test_eof2; +case 2: + switch( (*p) ) { + case 13: goto st2; + case 32: goto st2; + case 34: goto tr2; + case 45: goto tr2; + case 47: goto st13; + case 73: goto tr2; + case 78: goto tr2; + case 91: goto tr2; + case 93: goto tr4; + case 102: goto tr2; + case 110: goto tr2; + case 116: goto tr2; + case 123: goto tr2; + } + if ( (*p) > 10 ) { + if ( 48 <= (*p) && (*p) <= 57 ) + goto tr2; + } else if ( (*p) >= 9 ) + goto st2; + goto st0; +tr2: +#line 316 "parser.rl" + { + VALUE v = Qnil; + char *np = JSON_parse_value(json, p, pe, &v); + if (np == NULL) { + p--; {p++; cs = 3; goto _out;} + } else { + rb_ary_push(*result, v); + {p = (( np))-1;} + } + } + goto st3; +st3: + if ( ++p == pe ) + goto _test_eof3; +case 3: +#line 1087 "parser.c" + switch( (*p) ) { + case 13: goto st3; + case 32: goto st3; + case 44: goto st4; + case 47: goto st9; + case 93: goto tr4; + } + if ( 9 <= (*p) && (*p) <= 10 ) + goto st3; + goto st0; +st4: + if ( ++p == pe ) + goto _test_eof4; +case 4: + switch( (*p) ) { + case 13: goto st4; + case 32: goto st4; + case 34: goto tr2; + case 45: goto tr2; + case 47: goto st5; + case 73: goto tr2; + case 78: goto tr2; + case 91: goto tr2; + case 102: goto tr2; + case 110: goto tr2; + case 116: goto tr2; + case 123: goto tr2; + } + if ( (*p) > 10 ) { + if ( 48 <= (*p) && (*p) <= 57 ) + goto tr2; + } else if ( (*p) >= 9 ) + goto st4; + goto st0; +st5: + if ( ++p == pe ) + goto _test_eof5; +case 5: + switch( (*p) ) { + case 42: goto st6; + case 47: goto st8; + } + goto st0; +st6: + if ( ++p == pe ) + goto _test_eof6; +case 6: + if ( (*p) == 42 ) + goto st7; + goto st6; +st7: + if ( ++p == pe ) + goto _test_eof7; +case 7: + switch( (*p) ) { + case 42: goto st7; + case 47: goto st4; + } + goto st6; +st8: + if ( ++p == pe ) + goto _test_eof8; +case 8: + if ( (*p) == 10 ) + goto st4; + goto st8; +st9: + if ( ++p == pe ) + goto _test_eof9; +case 9: + switch( (*p) ) { + case 42: goto st10; + case 47: goto st12; + } + goto st0; +st10: + if ( ++p == pe ) + goto _test_eof10; +case 10: + if ( (*p) == 42 ) + goto st11; + goto st10; +st11: + if ( ++p == pe ) + goto _test_eof11; +case 11: + switch( (*p) ) { + case 42: goto st11; + case 47: goto st3; + } + goto st10; +st12: + if ( ++p == pe ) + goto _test_eof12; +case 12: + if ( (*p) == 10 ) + goto st3; + goto st12; +tr4: +#line 327 "parser.rl" + { p--; {p++; cs = 17; goto _out;} } + goto st17; +st17: + if ( ++p == pe ) + goto _test_eof17; +case 17: +#line 1194 "parser.c" + goto st0; +st13: + if ( ++p == pe ) + goto _test_eof13; +case 13: + switch( (*p) ) { + case 42: goto st14; + case 47: goto st16; + } + goto st0; +st14: + if ( ++p == pe ) + goto _test_eof14; +case 14: + if ( (*p) == 42 ) + goto st15; + goto st14; +st15: + if ( ++p == pe ) + goto _test_eof15; +case 15: + switch( (*p) ) { + case 42: goto st15; + case 47: goto st2; + } + goto st14; +st16: + if ( ++p == pe ) + goto _test_eof16; +case 16: + if ( (*p) == 10 ) + goto st2; + goto st16; + } + _test_eof2: cs = 2; goto _test_eof; + _test_eof3: cs = 3; goto _test_eof; + _test_eof4: cs = 4; goto _test_eof; + _test_eof5: cs = 5; goto _test_eof; + _test_eof6: cs = 6; goto _test_eof; + _test_eof7: cs = 7; goto _test_eof; + _test_eof8: cs = 8; goto _test_eof; 
+ _test_eof9: cs = 9; goto _test_eof; + _test_eof10: cs = 10; goto _test_eof; + _test_eof11: cs = 11; goto _test_eof; + _test_eof12: cs = 12; goto _test_eof; + _test_eof17: cs = 17; goto _test_eof; + _test_eof13: cs = 13; goto _test_eof; + _test_eof14: cs = 14; goto _test_eof; + _test_eof15: cs = 15; goto _test_eof; + _test_eof16: cs = 16; goto _test_eof; + + _test_eof: {} + _out: {} + } + +#line 349 "parser.rl" + + if(cs >= JSON_array_first_final) { + return p + 1; + } else { + rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); + } +} + +static VALUE json_string_unescape(char *p, char *pe) +{ + VALUE result = rb_str_buf_new(pe - p + 1); + + while (p < pe) { + if (*p == '\\') { + p++; + if (p >= pe) return Qnil; /* raise an exception later, \ at end */ + switch (*p) { + case '"': + case '\\': + rb_str_buf_cat(result, p, 1); + p++; + break; + case 'b': + rb_str_buf_cat2(result, "\b"); + p++; + break; + case 'f': + rb_str_buf_cat2(result, "\f"); + p++; + break; + case 'n': + rb_str_buf_cat2(result, "\n"); + p++; + break; + case 'r': + rb_str_buf_cat2(result, "\r"); + p++; + break; + case 't': + rb_str_buf_cat2(result, "\t"); + p++; + break; + case 'u': + if (p > pe - 4) { + return Qnil; + } else { + p = JSON_convert_UTF16_to_UTF8(result, p, pe, strictConversion); + } + break; + default: + rb_str_buf_cat(result, p, 1); + p++; + break; + } + } else { + char *q = p; + while (*q != '\\' && q < pe) q++; + rb_str_buf_cat(result, p, q - p); + p = q; + } + } + return result; +} + + +#line 1316 "parser.c" +static const int JSON_string_start = 1; +static const int JSON_string_first_final = 8; +static const int JSON_string_error = 0; + +static const int JSON_string_en_main = 1; + + +#line 433 "parser.rl" + + +static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + + *result = rb_str_new("", 0); + +#line 1333 "parser.c" + { + cs = JSON_string_start; + } + +#line 441 "parser.rl" + json->memo = p; + +#line 1341 "parser.c" + { + if ( p == pe ) + goto _test_eof; + switch ( cs ) + { +case 1: + if ( (*p) == 34 ) + goto st2; + goto st0; +st0: +cs = 0; + goto _out; +st2: + if ( ++p == pe ) + goto _test_eof2; +case 2: + switch( (*p) ) { + case 34: goto tr2; + case 92: goto st3; + } + if ( 0 <= (*p) && (*p) <= 31 ) + goto st0; + goto st2; +tr2: +#line 419 "parser.rl" + { + *result = json_string_unescape(json->memo + 1, p); + if (NIL_P(*result)) { + p--; + {p++; cs = 8; goto _out;} + } else { + FORCE_UTF8(*result); + {p = (( p + 1))-1;} + } + } +#line 430 "parser.rl" + { p--; {p++; cs = 8; goto _out;} } + goto st8; +st8: + if ( ++p == pe ) + goto _test_eof8; +case 8: +#line 1384 "parser.c" + goto st0; +st3: + if ( ++p == pe ) + goto _test_eof3; +case 3: + if ( (*p) == 117 ) + goto st4; + if ( 0 <= (*p) && (*p) <= 31 ) + goto st0; + goto st2; +st4: + if ( ++p == pe ) + goto _test_eof4; +case 4: + if ( (*p) < 65 ) { + if ( 48 <= (*p) && (*p) <= 57 ) + goto st5; + } else if ( (*p) > 70 ) { + if ( 97 <= (*p) && (*p) <= 102 ) + goto st5; + } else + goto st5; + goto st0; +st5: + if ( ++p == pe ) + goto _test_eof5; +case 5: + if ( (*p) < 65 ) { + if ( 48 <= (*p) && (*p) <= 57 ) + goto st6; + } else if ( (*p) > 70 ) { + if ( 97 <= (*p) && (*p) <= 102 ) + goto st6; + } else + goto st6; + goto st0; +st6: + if ( ++p == pe ) + goto _test_eof6; +case 6: + if ( (*p) < 65 ) { + if ( 48 <= (*p) && (*p) <= 57 ) + goto st7; + } else if ( (*p) > 70 ) { + if ( 97 <= (*p) && (*p) <= 102 ) + goto st7; + } else + goto st7; + goto st0; +st7: + if ( ++p == pe ) + goto 
_test_eof7; +case 7: + if ( (*p) < 65 ) { + if ( 48 <= (*p) && (*p) <= 57 ) + goto st2; + } else if ( (*p) > 70 ) { + if ( 97 <= (*p) && (*p) <= 102 ) + goto st2; + } else + goto st2; + goto st0; + } + _test_eof2: cs = 2; goto _test_eof; + _test_eof8: cs = 8; goto _test_eof; + _test_eof3: cs = 3; goto _test_eof; + _test_eof4: cs = 4; goto _test_eof; + _test_eof5: cs = 5; goto _test_eof; + _test_eof6: cs = 6; goto _test_eof; + _test_eof7: cs = 7; goto _test_eof; + + _test_eof: {} + _out: {} + } + +#line 443 "parser.rl" + + if (cs >= JSON_string_first_final) { + return p + 1; + } else { + return NULL; + } +} + + + +#line 1471 "parser.c" +static const int JSON_start = 1; +static const int JSON_first_final = 10; +static const int JSON_error = 0; + +static const int JSON_en_main = 1; + + +#line 477 "parser.rl" + + +/* + * Document-class: JSON::Ext::Parser + * + * This is the JSON parser implemented as a C extension. It can be configured + * to be used by setting + * + * JSON.parser = JSON::Ext::Parser + * + * with the method parser= in JSON. + * + */ + +inline static VALUE convert_encoding(VALUE source) +{ + char *ptr = RSTRING_PTR(source); + long len = RSTRING_LEN(source); + if (len < 2) { + rb_raise(eParserError, "A JSON text must at least contain two octets!"); + } +#ifdef HAVE_RUBY_ENCODING_H + { + VALUE encoding = rb_funcall(source, i_encoding, 0); + if (encoding == mEncoding_ASCII_8BIT) { + if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) { + source = rb_str_dup(source); + rb_funcall(source, i_force_encoding, 1, mEncoding_UTF_32BE); + source = rb_funcall(source, i_encode_bang, 1, mEncoding_UTF_8); + } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) { + source = rb_str_dup(source); + rb_funcall(source, i_force_encoding, 1, mEncoding_UTF_16BE); + source = rb_funcall(source, i_encode_bang, 1, mEncoding_UTF_8); + } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) { + source = rb_str_dup(source); + rb_funcall(source, i_force_encoding, 1, mEncoding_UTF_32LE); + source = rb_funcall(source, i_encode_bang, 1, mEncoding_UTF_8); + } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) { + source = rb_str_dup(source); + rb_funcall(source, i_force_encoding, 1, mEncoding_UTF_16LE); + source = rb_funcall(source, i_encode_bang, 1, mEncoding_UTF_8); + } else { + source = rb_funcall(source, i_force_encoding, 1, mEncoding_UTF_8); + } + } else { + source = rb_funcall(source, i_encode, 1, mEncoding_UTF_8); + } + } +#else + if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) { + source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-32be"), source); + } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) { + source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-16be"), source); + } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) { + source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-32le"), source); + } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) { + source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-16le"), source); + } +#endif + return source; +} + +/* + * call-seq: new(source, opts => {}) + * + * Creates a new JSON::Ext::Parser instance for the string _source_. + * + * Creates a new JSON::Ext::Parser instance for the string _source_. + * + * It will be configured by the _opts_ hash. _opts_ can have the following + * keys: + * + * _opts_ can have the following keys: + * * *max_nesting*: The maximum depth of nesting allowed in the parsed data + * structures. 
Disable depth checking with :max_nesting => false|nil|0, it + * defaults to 19. + * * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in + * defiance of RFC 4627 to be parsed by the Parser. This option defaults to + * false. + * * *create_additions*: If set to false, the Parser doesn't create + * additions even if a matchin class and create_id was found. This option + * defaults to true. + * * *object_class*: Defaults to Hash + * * *array_class*: Defaults to Array + */ +static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self) +{ + char *ptr; + long len; + VALUE source, opts; + GET_STRUCT; + rb_scan_args(argc, argv, "11", &source, &opts); + source = convert_encoding(StringValue(source)); + ptr = RSTRING_PTR(source); + len = RSTRING_LEN(source); + if (!NIL_P(opts)) { + opts = rb_convert_type(opts, T_HASH, "Hash", "to_hash"); + if (NIL_P(opts)) { + rb_raise(rb_eArgError, "opts needs to be like a hash"); + } else { + VALUE tmp = ID2SYM(i_max_nesting); + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + VALUE max_nesting = rb_hash_aref(opts, tmp); + if (RTEST(max_nesting)) { + Check_Type(max_nesting, T_FIXNUM); + json->max_nesting = FIX2INT(max_nesting); + } else { + json->max_nesting = 0; + } + } else { + json->max_nesting = 19; + } + tmp = ID2SYM(i_allow_nan); + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + VALUE allow_nan = rb_hash_aref(opts, tmp); + json->allow_nan = RTEST(allow_nan) ? 1 : 0; + } else { + json->allow_nan = 0; + } + tmp = ID2SYM(i_create_additions); + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + VALUE create_additions = rb_hash_aref(opts, tmp); + if (RTEST(create_additions)) { + json->create_id = rb_funcall(mJSON, i_create_id, 0); + } else { + json->create_id = Qnil; + } + } else { + json->create_id = rb_funcall(mJSON, i_create_id, 0); + } + tmp = ID2SYM(i_object_class); + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + json->object_class = rb_hash_aref(opts, tmp); + } else { + json->object_class = Qnil; + } + tmp = ID2SYM(i_array_class); + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + json->array_class = rb_hash_aref(opts, tmp); + } else { + json->array_class = Qnil; + } + } + } else { + json->max_nesting = 19; + json->allow_nan = 0; + json->create_id = rb_funcall(mJSON, i_create_id, 0); + json->object_class = Qnil; + json->array_class = Qnil; + } + json->current_nesting = 0; + json->len = len; + json->source = ptr; + json->Vsource = source; + return self; +} + +/* + * call-seq: parse() + * + * Parses the current JSON text _source_ and returns the complete data + * structure as a result. 
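The options handled by cParser_initialize above map directly onto the documented keys. A short usage sketch, assuming the compiled extension (require 'json/ext' also points JSON.parser at this class, as the Document-class comment notes); the input strings and values are only examples:

    require 'json/ext'

    parser = JSON::Ext::Parser.new('{"a": [1, 2.5, null]}',
                                   :max_nesting  => 4,     # false/nil/0 would disable the depth check
                                   :allow_nan    => false, # reject NaN/Infinity per RFC 4627
                                   :object_class => Hash,  # the documented defaults
                                   :array_class  => Array)
    parser.parse                # => {"a"=>[1, 2.5, nil]}

    # NaN and the infinities are only accepted when explicitly allowed:
    JSON::Ext::Parser.new('[NaN]', :allow_nan => true).parse   # => [NaN]
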
+ */ +static VALUE cParser_parse(VALUE self) +{ + char *p, *pe; + int cs = EVIL; + VALUE result = Qnil; + GET_STRUCT; + + +#line 1651 "parser.c" + { + cs = JSON_start; + } + +#line 648 "parser.rl" + p = json->source; + pe = p + json->len; + +#line 1660 "parser.c" + { + if ( p == pe ) + goto _test_eof; + switch ( cs ) + { +st1: + if ( ++p == pe ) + goto _test_eof1; +case 1: + switch( (*p) ) { + case 13: goto st1; + case 32: goto st1; + case 47: goto st2; + case 91: goto tr3; + case 123: goto tr4; + } + if ( 9 <= (*p) && (*p) <= 10 ) + goto st1; + goto st0; +st0: +cs = 0; + goto _out; +st2: + if ( ++p == pe ) + goto _test_eof2; +case 2: + switch( (*p) ) { + case 42: goto st3; + case 47: goto st5; + } + goto st0; +st3: + if ( ++p == pe ) + goto _test_eof3; +case 3: + if ( (*p) == 42 ) + goto st4; + goto st3; +st4: + if ( ++p == pe ) + goto _test_eof4; +case 4: + switch( (*p) ) { + case 42: goto st4; + case 47: goto st1; + } + goto st3; +st5: + if ( ++p == pe ) + goto _test_eof5; +case 5: + if ( (*p) == 10 ) + goto st1; + goto st5; +tr3: +#line 466 "parser.rl" + { + char *np; + json->current_nesting = 1; + np = JSON_parse_array(json, p, pe, &result); + if (np == NULL) { p--; {p++; cs = 10; goto _out;} } else {p = (( np))-1;} + } + goto st10; +tr4: +#line 459 "parser.rl" + { + char *np; + json->current_nesting = 1; + np = JSON_parse_object(json, p, pe, &result); + if (np == NULL) { p--; {p++; cs = 10; goto _out;} } else {p = (( np))-1;} + } + goto st10; +st10: + if ( ++p == pe ) + goto _test_eof10; +case 10: +#line 1737 "parser.c" + switch( (*p) ) { + case 13: goto st10; + case 32: goto st10; + case 47: goto st6; + } + if ( 9 <= (*p) && (*p) <= 10 ) + goto st10; + goto st0; +st6: + if ( ++p == pe ) + goto _test_eof6; +case 6: + switch( (*p) ) { + case 42: goto st7; + case 47: goto st9; + } + goto st0; +st7: + if ( ++p == pe ) + goto _test_eof7; +case 7: + if ( (*p) == 42 ) + goto st8; + goto st7; +st8: + if ( ++p == pe ) + goto _test_eof8; +case 8: + switch( (*p) ) { + case 42: goto st8; + case 47: goto st10; + } + goto st7; +st9: + if ( ++p == pe ) + goto _test_eof9; +case 9: + if ( (*p) == 10 ) + goto st10; + goto st9; + } + _test_eof1: cs = 1; goto _test_eof; + _test_eof2: cs = 2; goto _test_eof; + _test_eof3: cs = 3; goto _test_eof; + _test_eof4: cs = 4; goto _test_eof; + _test_eof5: cs = 5; goto _test_eof; + _test_eof10: cs = 10; goto _test_eof; + _test_eof6: cs = 6; goto _test_eof; + _test_eof7: cs = 7; goto _test_eof; + _test_eof8: cs = 8; goto _test_eof; + _test_eof9: cs = 9; goto _test_eof; + + _test_eof: {} + _out: {} + } + +#line 651 "parser.rl" + + if (cs >= JSON_first_final && p == pe) { + return result; + } else { + rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); + } +} + +inline static JSON_Parser *JSON_allocate() +{ + JSON_Parser *json = ALLOC(JSON_Parser); + MEMZERO(json, JSON_Parser, 1); + return json; +} + +static void JSON_mark(JSON_Parser *json) +{ + rb_gc_mark_maybe(json->Vsource); + rb_gc_mark_maybe(json->create_id); + rb_gc_mark_maybe(json->object_class); + rb_gc_mark_maybe(json->array_class); +} + +static void JSON_free(JSON_Parser *json) +{ + ruby_xfree(json); +} + +static VALUE cJSON_parser_s_allocate(VALUE klass) +{ + JSON_Parser *json = JSON_allocate(); + return Data_Wrap_Struct(klass, JSON_mark, JSON_free, json); +} + +/* + * call-seq: source() + * + * Returns a copy of the current _source_ string, that was used to construct + * this Parser. 
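cParser_source above returns a duplicate of the string the parser was built from, which together with parse gives the minimal round trip. A sketch under the same assumptions as before:

    require 'json/ext'

    parser = JSON::Ext::Parser.new('[1, "two", 3.0]')
    parser.parse    # => [1, "two", 3.0]
    parser.source   # => "[1, \"two\", 3.0]" -- a copy, not the original object
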
+ */ +static VALUE cParser_source(VALUE self) +{ + GET_STRUCT; + return rb_str_dup(json->Vsource); +} + +void Init_parser() +{ + rb_require("json/common"); + mJSON = rb_define_module("JSON"); + mExt = rb_define_module_under(mJSON, "Ext"); + cParser = rb_define_class_under(mExt, "Parser", rb_cObject); + eParserError = rb_path2class("JSON::ParserError"); + eNestingError = rb_path2class("JSON::NestingError"); + rb_define_alloc_func(cParser, cJSON_parser_s_allocate); + rb_define_method(cParser, "initialize", cParser_initialize, -1); + rb_define_method(cParser, "parse", cParser_parse, 0); + rb_define_method(cParser, "source", cParser_source, 0); + + CNaN = rb_const_get(mJSON, rb_intern("NaN")); + CInfinity = rb_const_get(mJSON, rb_intern("Infinity")); + CMinusInfinity = rb_const_get(mJSON, rb_intern("MinusInfinity")); + + i_json_creatable_p = rb_intern("json_creatable?"); + i_json_create = rb_intern("json_create"); + i_create_id = rb_intern("create_id"); + i_create_additions = rb_intern("create_additions"); + i_chr = rb_intern("chr"); + i_max_nesting = rb_intern("max_nesting"); + i_allow_nan = rb_intern("allow_nan"); + i_object_class = rb_intern("object_class"); + i_array_class = rb_intern("array_class"); +#ifdef HAVE_RUBY_ENCODING_H + mEncoding_UTF_8 = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-8")); + mEncoding_UTF_16BE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-16be")); + mEncoding_UTF_16LE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-16le")); + mEncoding_UTF_32BE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-32be")); + mEncoding_UTF_32LE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-32le")); + mEncoding_ASCII_8BIT = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("ascii-8bit")); + i_encoding = rb_intern("encoding"); + i_encode = rb_intern("encode"); + i_encode_bang = rb_intern("encode!"); + i_force_encoding = rb_intern("force_encoding"); +#else + i_iconv = rb_intern("iconv"); +#endif +} diff --git a/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/parser.rl b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/parser.rl new file mode 100644 index 00000000..7de7bb1c --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/parser.rl @@ -0,0 +1,737 @@ +#include "ruby.h" +#include "unicode.h" +#if HAVE_RE_H +#include "re.h" +#endif +#if HAVE_RUBY_ST_H +#include "ruby/st.h" +#endif +#if HAVE_ST_H +#include "st.h" +#endif + +#define EVIL 0x666 + +#ifndef RHASH_TBL +#define RHASH_TBL(hsh) (RHASH(hsh)->tbl) +#endif + +#ifdef HAVE_RUBY_ENCODING_H +#include "ruby/encoding.h" +#define FORCE_UTF8(obj) rb_enc_associate((obj), rb_utf8_encoding()) +static VALUE mEncoding_ASCII_8BIT, mEncoding_UTF_8, mEncoding_UTF_16BE, + mEncoding_UTF_16LE, mEncoding_UTF_32BE, mEncoding_UTF_32LE; +static ID i_encoding, i_encode, i_encode_bang, i_force_encoding; +#else +#define FORCE_UTF8(obj) +static ID i_iconv; +#endif + +static VALUE mJSON, mExt, cParser, eParserError, eNestingError; +static VALUE CNaN, CInfinity, CMinusInfinity; + +static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions, + i_chr, i_max_nesting, i_allow_nan, i_object_class, i_array_class; + +#define MinusInfinity "-Infinity" + +typedef struct JSON_ParserStruct { + VALUE Vsource; + char *source; + long len; + char *memo; + VALUE create_id; + int max_nesting; + int current_nesting; + int allow_nan; + VALUE object_class; + 
VALUE array_class; +} JSON_Parser; + +static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result); +static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result); +static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result); +static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result); +static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result); +static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result); + +#define GET_STRUCT \ + JSON_Parser *json; \ + Data_Get_Struct(self, JSON_Parser, json); + +%%{ + machine JSON_common; + + cr = '\n'; + cr_neg = [^\n]; + ws = [ \t\r\n]; + c_comment = '/*' ( any* - (any* '*/' any* ) ) '*/'; + cpp_comment = '//' cr_neg* cr; + comment = c_comment | cpp_comment; + ignore = ws | comment; + name_separator = ':'; + value_separator = ','; + Vnull = 'null'; + Vfalse = 'false'; + Vtrue = 'true'; + VNaN = 'NaN'; + VInfinity = 'Infinity'; + VMinusInfinity = '-Infinity'; + begin_value = [nft"\-[{NI] | digit; + begin_object = '{'; + end_object = '}'; + begin_array = '['; + end_array = ']'; + begin_string = '"'; + begin_name = begin_string; + begin_number = digit | '-'; +}%% + +%%{ + machine JSON_object; + include JSON_common; + + write data; + + action parse_value { + VALUE v = Qnil; + char *np = JSON_parse_value(json, fpc, pe, &v); + if (np == NULL) { + fhold; fbreak; + } else { + rb_hash_aset(*result, last_name, v); + fexec np; + } + } + + action parse_name { + char *np = JSON_parse_string(json, fpc, pe, &last_name); + if (np == NULL) { fhold; fbreak; } else fexec np; + } + + action exit { fhold; fbreak; } + + a_pair = ignore* begin_name >parse_name + ignore* name_separator ignore* + begin_value >parse_value; + + main := begin_object + (a_pair (ignore* value_separator a_pair)*)? + ignore* end_object @exit; +}%% + +static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + VALUE last_name = Qnil; + VALUE object_class = json->object_class; + + if (json->max_nesting && json->current_nesting > json->max_nesting) { + rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting); + } + + *result = NIL_P(object_class) ? 
rb_hash_new() : rb_class_new_instance(0, 0, object_class); + + %% write init; + %% write exec; + + if (cs >= JSON_object_first_final) { + if (RTEST(json->create_id)) { + VALUE klassname = rb_hash_aref(*result, json->create_id); + if (!NIL_P(klassname)) { + VALUE klass = rb_path2class(StringValueCStr(klassname)); + if RTEST(rb_funcall(klass, i_json_creatable_p, 0)) { + *result = rb_funcall(klass, i_json_create, 1, *result); + } + } + } + return p + 1; + } else { + return NULL; + } +} + +%%{ + machine JSON_value; + include JSON_common; + + write data; + + action parse_null { + *result = Qnil; + } + action parse_false { + *result = Qfalse; + } + action parse_true { + *result = Qtrue; + } + action parse_nan { + if (json->allow_nan) { + *result = CNaN; + } else { + rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 2); + } + } + action parse_infinity { + if (json->allow_nan) { + *result = CInfinity; + } else { + rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 8); + } + } + action parse_string { + char *np = JSON_parse_string(json, fpc, pe, result); + if (np == NULL) { fhold; fbreak; } else fexec np; + } + + action parse_number { + char *np; + if(pe > fpc + 9 && !strncmp(MinusInfinity, fpc, 9)) { + if (json->allow_nan) { + *result = CMinusInfinity; + fexec p + 10; + fhold; fbreak; + } else { + rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); + } + } + np = JSON_parse_float(json, fpc, pe, result); + if (np != NULL) fexec np; + np = JSON_parse_integer(json, fpc, pe, result); + if (np != NULL) fexec np; + fhold; fbreak; + } + + action parse_array { + char *np; + json->current_nesting++; + np = JSON_parse_array(json, fpc, pe, result); + json->current_nesting--; + if (np == NULL) { fhold; fbreak; } else fexec np; + } + + action parse_object { + char *np; + json->current_nesting++; + np = JSON_parse_object(json, fpc, pe, result); + json->current_nesting--; + if (np == NULL) { fhold; fbreak; } else fexec np; + } + + action exit { fhold; fbreak; } + +main := ( + Vnull @parse_null | + Vfalse @parse_false | + Vtrue @parse_true | + VNaN @parse_nan | + VInfinity @parse_infinity | + begin_number >parse_number | + begin_string >parse_string | + begin_array >parse_array | + begin_object >parse_object + ) %*exit; +}%% + +static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + + %% write init; + %% write exec; + + if (cs >= JSON_value_first_final) { + return p; + } else { + return NULL; + } +} + +%%{ + machine JSON_integer; + + write data; + + action exit { fhold; fbreak; } + + main := '-'? ('0' | [1-9][0-9]*) (^[0-9] @exit); +}%% + +static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + + %% write init; + json->memo = p; + %% write exec; + + if (cs >= JSON_integer_first_final) { + long len = p - json->memo; + *result = rb_Integer(rb_str_new(json->memo, len)); + return p + 1; + } else { + return NULL; + } +} + +%%{ + machine JSON_float; + include JSON_common; + + write data; + + action exit { fhold; fbreak; } + + main := '-'? ( + (('0' | [1-9][0-9]*) '.' [0-9]+ ([Ee] [+\-]?[0-9]+)?) 
+ | (('0' | [1-9][0-9]*) ([Ee] [+\-]?[0-9]+)) + ) (^[0-9Ee.\-] @exit ); +}%% + +static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + + %% write init; + json->memo = p; + %% write exec; + + if (cs >= JSON_float_first_final) { + long len = p - json->memo; + *result = rb_Float(rb_str_new(json->memo, len)); + return p + 1; + } else { + return NULL; + } +} + + +%%{ + machine JSON_array; + include JSON_common; + + write data; + + action parse_value { + VALUE v = Qnil; + char *np = JSON_parse_value(json, fpc, pe, &v); + if (np == NULL) { + fhold; fbreak; + } else { + rb_ary_push(*result, v); + fexec np; + } + } + + action exit { fhold; fbreak; } + + next_element = value_separator ignore* begin_value >parse_value; + + main := begin_array ignore* + ((begin_value >parse_value ignore*) + (ignore* next_element ignore*)*)? + end_array @exit; +}%% + +static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + VALUE array_class = json->array_class; + + if (json->max_nesting && json->current_nesting > json->max_nesting) { + rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting); + } + *result = NIL_P(array_class) ? rb_ary_new() : rb_class_new_instance(0, 0, array_class); + + %% write init; + %% write exec; + + if(cs >= JSON_array_first_final) { + return p + 1; + } else { + rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); + } +} + +static VALUE json_string_unescape(char *p, char *pe) +{ + VALUE result = rb_str_buf_new(pe - p + 1); + + while (p < pe) { + if (*p == '\\') { + p++; + if (p >= pe) return Qnil; /* raise an exception later, \ at end */ + switch (*p) { + case '"': + case '\\': + rb_str_buf_cat(result, p, 1); + p++; + break; + case 'b': + rb_str_buf_cat2(result, "\b"); + p++; + break; + case 'f': + rb_str_buf_cat2(result, "\f"); + p++; + break; + case 'n': + rb_str_buf_cat2(result, "\n"); + p++; + break; + case 'r': + rb_str_buf_cat2(result, "\r"); + p++; + break; + case 't': + rb_str_buf_cat2(result, "\t"); + p++; + break; + case 'u': + if (p > pe - 4) { + return Qnil; + } else { + p = JSON_convert_UTF16_to_UTF8(result, p, pe, strictConversion); + } + break; + default: + rb_str_buf_cat(result, p, 1); + p++; + break; + } + } else { + char *q = p; + while (*q != '\\' && q < pe) q++; + rb_str_buf_cat(result, p, q - p); + p = q; + } + } + return result; +} + +%%{ + machine JSON_string; + include JSON_common; + + write data; + + action parse_string { + *result = json_string_unescape(json->memo + 1, p); + if (NIL_P(*result)) { + fhold; + fbreak; + } else { + FORCE_UTF8(*result); + fexec p + 1; + } + } + + action exit { fhold; fbreak; } + + main := '"' ((^(["\\] | 0..0x1f) | '\\'["\\/bfnrt] | '\\u'[0-9a-fA-F]{4} | '\\'^(["\\/bfnrtu]|0..0x1f))* %parse_string) '"' @exit; +}%% + +static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result) +{ + int cs = EVIL; + + *result = rb_str_new("", 0); + %% write init; + json->memo = p; + %% write exec; + + if (cs >= JSON_string_first_final) { + return p + 1; + } else { + return NULL; + } +} + + +%%{ + machine JSON; + + write data; + + include JSON_common; + + action parse_object { + char *np; + json->current_nesting = 1; + np = JSON_parse_object(json, fpc, pe, &result); + if (np == NULL) { fhold; fbreak; } else fexec np; + } + + action parse_array { + char *np; + json->current_nesting = 1; + np = JSON_parse_array(json, fpc, pe, &result); + if (np == NULL) { fhold; fbreak; } else fexec np; + } + + main := 
ignore* ( + begin_object >parse_object | + begin_array >parse_array + ) ignore*; +}%% + +/* + * Document-class: JSON::Ext::Parser + * + * This is the JSON parser implemented as a C extension. It can be configured + * to be used by setting + * + * JSON.parser = JSON::Ext::Parser + * + * with the method parser= in JSON. + * + */ + +inline static VALUE convert_encoding(VALUE source) +{ + char *ptr = RSTRING_PTR(source); + long len = RSTRING_LEN(source); + if (len < 2) { + rb_raise(eParserError, "A JSON text must at least contain two octets!"); + } +#ifdef HAVE_RUBY_ENCODING_H + { + VALUE encoding = rb_funcall(source, i_encoding, 0); + if (encoding == mEncoding_ASCII_8BIT) { + if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) { + source = rb_str_dup(source); + rb_funcall(source, i_force_encoding, 1, mEncoding_UTF_32BE); + source = rb_funcall(source, i_encode_bang, 1, mEncoding_UTF_8); + } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) { + source = rb_str_dup(source); + rb_funcall(source, i_force_encoding, 1, mEncoding_UTF_16BE); + source = rb_funcall(source, i_encode_bang, 1, mEncoding_UTF_8); + } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) { + source = rb_str_dup(source); + rb_funcall(source, i_force_encoding, 1, mEncoding_UTF_32LE); + source = rb_funcall(source, i_encode_bang, 1, mEncoding_UTF_8); + } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) { + source = rb_str_dup(source); + rb_funcall(source, i_force_encoding, 1, mEncoding_UTF_16LE); + source = rb_funcall(source, i_encode_bang, 1, mEncoding_UTF_8); + } else { + source = rb_funcall(source, i_force_encoding, 1, mEncoding_UTF_8); + } + } else { + source = rb_funcall(source, i_encode, 1, mEncoding_UTF_8); + } + } +#else + if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) { + source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-32be"), source); + } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) { + source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-16be"), source); + } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) { + source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-32le"), source); + } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) { + source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-16le"), source); + } +#endif + return source; +} + +/* + * call-seq: new(source, opts => {}) + * + * Creates a new JSON::Ext::Parser instance for the string _source_. + * + * Creates a new JSON::Ext::Parser instance for the string _source_. + * + * It will be configured by the _opts_ hash. _opts_ can have the following + * keys: + * + * _opts_ can have the following keys: + * * *max_nesting*: The maximum depth of nesting allowed in the parsed data + * structures. Disable depth checking with :max_nesting => false|nil|0, it + * defaults to 19. + * * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in + * defiance of RFC 4627 to be parsed by the Parser. This option defaults to + * false. + * * *create_additions*: If set to false, the Parser doesn't create + * additions even if a matchin class and create_id was found. This option + * defaults to true. 
+ * * *object_class*: Defaults to Hash + * * *array_class*: Defaults to Array + */ +static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self) +{ + char *ptr; + long len; + VALUE source, opts; + GET_STRUCT; + rb_scan_args(argc, argv, "11", &source, &opts); + source = convert_encoding(StringValue(source)); + ptr = RSTRING_PTR(source); + len = RSTRING_LEN(source); + if (!NIL_P(opts)) { + opts = rb_convert_type(opts, T_HASH, "Hash", "to_hash"); + if (NIL_P(opts)) { + rb_raise(rb_eArgError, "opts needs to be like a hash"); + } else { + VALUE tmp = ID2SYM(i_max_nesting); + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + VALUE max_nesting = rb_hash_aref(opts, tmp); + if (RTEST(max_nesting)) { + Check_Type(max_nesting, T_FIXNUM); + json->max_nesting = FIX2INT(max_nesting); + } else { + json->max_nesting = 0; + } + } else { + json->max_nesting = 19; + } + tmp = ID2SYM(i_allow_nan); + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + VALUE allow_nan = rb_hash_aref(opts, tmp); + json->allow_nan = RTEST(allow_nan) ? 1 : 0; + } else { + json->allow_nan = 0; + } + tmp = ID2SYM(i_create_additions); + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + VALUE create_additions = rb_hash_aref(opts, tmp); + if (RTEST(create_additions)) { + json->create_id = rb_funcall(mJSON, i_create_id, 0); + } else { + json->create_id = Qnil; + } + } else { + json->create_id = rb_funcall(mJSON, i_create_id, 0); + } + tmp = ID2SYM(i_object_class); + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + json->object_class = rb_hash_aref(opts, tmp); + } else { + json->object_class = Qnil; + } + tmp = ID2SYM(i_array_class); + if (st_lookup(RHASH_TBL(opts), tmp, 0)) { + json->array_class = rb_hash_aref(opts, tmp); + } else { + json->array_class = Qnil; + } + } + } else { + json->max_nesting = 19; + json->allow_nan = 0; + json->create_id = rb_funcall(mJSON, i_create_id, 0); + json->object_class = Qnil; + json->array_class = Qnil; + } + json->current_nesting = 0; + json->len = len; + json->source = ptr; + json->Vsource = source; + return self; +} + +/* + * call-seq: parse() + * + * Parses the current JSON text _source_ and returns the complete data + * structure as a result. + */ +static VALUE cParser_parse(VALUE self) +{ + char *p, *pe; + int cs = EVIL; + VALUE result = Qnil; + GET_STRUCT; + + %% write init; + p = json->source; + pe = p + json->len; + %% write exec; + + if (cs >= JSON_first_final && p == pe) { + return result; + } else { + rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); + } +} + +inline static JSON_Parser *JSON_allocate() +{ + JSON_Parser *json = ALLOC(JSON_Parser); + MEMZERO(json, JSON_Parser, 1); + return json; +} + +static void JSON_mark(JSON_Parser *json) +{ + rb_gc_mark_maybe(json->Vsource); + rb_gc_mark_maybe(json->create_id); + rb_gc_mark_maybe(json->object_class); + rb_gc_mark_maybe(json->array_class); +} + +static void JSON_free(JSON_Parser *json) +{ + ruby_xfree(json); +} + +static VALUE cJSON_parser_s_allocate(VALUE klass) +{ + JSON_Parser *json = JSON_allocate(); + return Data_Wrap_Struct(klass, JSON_mark, JSON_free, json); +} + +/* + * call-seq: source() + * + * Returns a copy of the current _source_ string, that was used to construct + * this Parser. 
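A minimal usage sketch of the parser options read out in cParser_initialize above, assuming the vendored json_pure 1.2.0 API; the sample JSON string and values are illustrative only, not part of the diff:

    require 'json'

    # JSON.parser is whatever class was installed via JSON.parser=
    # (JSON::Ext::Parser when the C extension loads).
    parser = JSON.parser.new('{"a": [1, 2.5, null]}',
                             :max_nesting  => 4,     # false/nil/0 disables depth checking
                             :allow_nan    => false, # reject NaN/Infinity per RFC 4627
                             :object_class => Hash,  # default
                             :array_class  => Array) # default
    parser.parse    # => {"a"=>[1, 2.5, nil]}
    parser.source   # => a copy of the original source string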
+ */ +static VALUE cParser_source(VALUE self) +{ + GET_STRUCT; + return rb_str_dup(json->Vsource); +} + +void Init_parser() +{ + rb_require("json/common"); + mJSON = rb_define_module("JSON"); + mExt = rb_define_module_under(mJSON, "Ext"); + cParser = rb_define_class_under(mExt, "Parser", rb_cObject); + eParserError = rb_path2class("JSON::ParserError"); + eNestingError = rb_path2class("JSON::NestingError"); + rb_define_alloc_func(cParser, cJSON_parser_s_allocate); + rb_define_method(cParser, "initialize", cParser_initialize, -1); + rb_define_method(cParser, "parse", cParser_parse, 0); + rb_define_method(cParser, "source", cParser_source, 0); + + CNaN = rb_const_get(mJSON, rb_intern("NaN")); + CInfinity = rb_const_get(mJSON, rb_intern("Infinity")); + CMinusInfinity = rb_const_get(mJSON, rb_intern("MinusInfinity")); + + i_json_creatable_p = rb_intern("json_creatable?"); + i_json_create = rb_intern("json_create"); + i_create_id = rb_intern("create_id"); + i_create_additions = rb_intern("create_additions"); + i_chr = rb_intern("chr"); + i_max_nesting = rb_intern("max_nesting"); + i_allow_nan = rb_intern("allow_nan"); + i_object_class = rb_intern("object_class"); + i_array_class = rb_intern("array_class"); +#ifdef HAVE_RUBY_ENCODING_H + mEncoding_UTF_8 = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-8")); + mEncoding_UTF_16BE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-16be")); + mEncoding_UTF_16LE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-16le")); + mEncoding_UTF_32BE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-32be")); + mEncoding_UTF_32LE = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("utf-32le")); + mEncoding_ASCII_8BIT = rb_funcall(rb_path2class("Encoding"), rb_intern("find"), 1, rb_str_new2("ascii-8bit")); + i_encoding = rb_intern("encoding"); + i_encode = rb_intern("encode"); + i_encode_bang = rb_intern("encode!"); + i_force_encoding = rb_intern("force_encoding"); +#else + i_iconv = rb_intern("iconv"); +#endif +} diff --git a/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/unicode.c b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/unicode.c new file mode 100644 index 00000000..6bd29e2e --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/unicode.c @@ -0,0 +1,154 @@ +#include "unicode.h" + +/* + * Copyright 2001-2004 Unicode, Inc. + * + * Disclaimer + * + * This source code is provided as is by Unicode, Inc. No claims are + * made as to fitness for any particular purpose. No warranties of any + * kind are expressed or implied. The recipient agrees to determine + * applicability of information provided. If this file has been + * purchased on magnetic or optical media from Unicode, Inc., the + * sole remedy for any claim will be exchange of defective media + * within 90 days of receipt. + * + * Limitations on Rights to Redistribute This Code + * + * Unicode, Inc. hereby grants the right to freely use the information + * supplied in this file in the creation of products supporting the + * Unicode Standard, and to make copies of this file in any form + * for internal or external distribution as long as this notice + * remains attached. + */ + +/* + * Index into the table below with the first byte of a UTF-8 sequence to + * get the number of trailing bytes that are supposed to follow it. + * Note that *legal* UTF-8 values can't have 4 or 5-bytes. 
The table is + * left as-is for anyone who may want to do such conversion, which was + * allowed in earlier algorithms. + */ +static const char trailingBytesForUTF8[256] = { + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, + 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 3,3,3,3,3,3,3,3,4,4,4,4,5,5,5,5 +}; + +/* + * Magic values subtracted from a buffer value during UTF8 conversion. + * This table contains as many values as there might be trailing bytes + * in a UTF-8 sequence. + */ +static const UTF32 offsetsFromUTF8[6] = { 0x00000000UL, 0x00003080UL, 0x000E2080UL, + 0x03C82080UL, 0xFA082080UL, 0x82082080UL }; + +/* + * Once the bits are split out into bytes of UTF-8, this is a mask OR-ed + * into the first byte, depending on how many bytes follow. There are + * as many entries in this table as there are UTF-8 sequence types. + * (I.e., one byte sequence, two byte... etc.). Remember that sequencs + * for *legal* UTF-8 will be 4 or fewer bytes total. + */ +static const UTF8 firstByteMark[7] = { 0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC }; + +char *JSON_convert_UTF16_to_UTF8 ( + VALUE buffer, + char *source, + char *sourceEnd, + ConversionFlags flags) +{ + UTF16 *tmp, *tmpPtr, *tmpEnd; + char buf[5]; + long n = 0, i; + char *p = source - 1; + + while (p < sourceEnd && p[0] == '\\' && p[1] == 'u') { + p += 6; + n++; + } + p = source + 1; + buf[4] = 0; + tmpPtr = tmp = ALLOC_N(UTF16, n); + tmpEnd = tmp + n; + for (i = 0; i < n; i++) { + buf[0] = *p++; + buf[1] = *p++; + buf[2] = *p++; + buf[3] = *p++; + tmpPtr[i] = (UTF16)strtol(buf, NULL, 16); + p += 2; + } + + while (tmpPtr < tmpEnd) { + UTF32 ch; + unsigned short bytesToWrite = 0; + const UTF32 byteMask = 0xBF; + const UTF32 byteMark = 0x80; + ch = *tmpPtr++; + /* If we have a surrogate pair, convert to UTF32 first. */ + if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_HIGH_END) { + /* If the 16 bits following the high surrogate are in the source + * buffer... */ + if (tmpPtr < tmpEnd) { + UTF32 ch2 = *tmpPtr; + /* If it's a low surrogate, convert to UTF32. */ + if (ch2 >= UNI_SUR_LOW_START && ch2 <= UNI_SUR_LOW_END) { + ch = ((ch - UNI_SUR_HIGH_START) << halfShift) + + (ch2 - UNI_SUR_LOW_START) + halfBase; + ++tmpPtr; + } else if (flags == strictConversion) { /* it's an unpaired high surrogate */ + ruby_xfree(tmp); + rb_raise(rb_path2class("JSON::ParserError"), + "\\uXXXX is illegal/malformed utf-16 near %s", source); + } + } else { /* We don't have the 16 bits following the high surrogate. 
*/ + ruby_xfree(tmp); + rb_raise(rb_path2class("JSON::ParserError"), + "partial character in source, but hit end near %s", source); + break; + } + } else if (flags == strictConversion) { + /* UTF-16 surrogate values are illegal in UTF-32 */ + if (ch >= UNI_SUR_LOW_START && ch <= UNI_SUR_LOW_END) { + ruby_xfree(tmp); + rb_raise(rb_path2class("JSON::ParserError"), + "\\uXXXX is illegal/malformed utf-16 near %s", source); + } + } + /* Figure out how many bytes the result will require */ + if (ch < (UTF32) 0x80) { + bytesToWrite = 1; + } else if (ch < (UTF32) 0x800) { + bytesToWrite = 2; + } else if (ch < (UTF32) 0x10000) { + bytesToWrite = 3; + } else if (ch < (UTF32) 0x110000) { + bytesToWrite = 4; + } else { + bytesToWrite = 3; + ch = UNI_REPLACEMENT_CHAR; + } + + buf[0] = 0; + buf[1] = 0; + buf[2] = 0; + buf[3] = 0; + p = buf + bytesToWrite; + switch (bytesToWrite) { /* note: everything falls through. */ + case 4: *--p = (UTF8) ((ch | byteMark) & byteMask); ch >>= 6; + case 3: *--p = (UTF8) ((ch | byteMark) & byteMask); ch >>= 6; + case 2: *--p = (UTF8) ((ch | byteMark) & byteMask); ch >>= 6; + case 1: *--p = (UTF8) (ch | firstByteMark[bytesToWrite]); + } + rb_str_buf_cat(buffer, p, bytesToWrite); + } + ruby_xfree(tmp); + source += 5 + (n - 1) * 6; + return source; +} diff --git a/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/unicode.h b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/unicode.h new file mode 100644 index 00000000..155da0ce --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/ext/json/ext/parser/unicode.h @@ -0,0 +1,58 @@ + +#ifndef _PARSER_UNICODE_H_ +#define _PARSER_UNICODE_H_ + +#include "ruby.h" + +typedef unsigned long UTF32; /* at least 32 bits */ +typedef unsigned short UTF16; /* at least 16 bits */ +typedef unsigned char UTF8; /* typically 8 bits */ + +#define UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD +#define UNI_MAX_BMP (UTF32)0x0000FFFF +#define UNI_MAX_UTF16 (UTF32)0x0010FFFF +#define UNI_MAX_UTF32 (UTF32)0x7FFFFFFF +#define UNI_MAX_LEGAL_UTF32 (UTF32)0x0010FFFF + +#define UNI_SUR_HIGH_START (UTF32)0xD800 +#define UNI_SUR_HIGH_END (UTF32)0xDBFF +#define UNI_SUR_LOW_START (UTF32)0xDC00 +#define UNI_SUR_LOW_END (UTF32)0xDFFF + +static const int halfShift = 10; /* used for shifting by 10 bits */ + +static const UTF32 halfBase = 0x0010000UL; +static const UTF32 halfMask = 0x3FFUL; + +typedef enum { + conversionOK = 0, /* conversion successful */ + sourceExhausted, /* partial character in source, but hit end */ + targetExhausted, /* insuff. 
room in target for conversion */ + sourceIllegal /* source sequence is illegal/malformed */ +} ConversionResult; + +typedef enum { + strictConversion = 0, + lenientConversion +} ConversionFlags; + +char *JSON_convert_UTF16_to_UTF8 ( + VALUE buffer, + char *source, + char *sourceEnd, + ConversionFlags flags); + +#ifndef RARRAY_PTR +#define RARRAY_PTR(ARRAY) RARRAY(ARRAY)->ptr +#endif +#ifndef RARRAY_LEN +#define RARRAY_LEN(ARRAY) RARRAY(ARRAY)->len +#endif +#ifndef RSTRING_PTR +#define RSTRING_PTR(string) RSTRING(string)->ptr +#endif +#ifndef RSTRING_LEN +#define RSTRING_LEN(string) RSTRING(string)->len +#endif + +#endif diff --git a/vendor/gems/gems/json_pure-1.2.0/install.rb b/vendor/gems/gems/json_pure-1.2.0/install.rb new file mode 100755 index 00000000..adf77a0d --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/install.rb @@ -0,0 +1,26 @@ +#!/usr/bin/env ruby + +require 'rbconfig' +require 'fileutils' +include FileUtils::Verbose + +include Config + +bindir = CONFIG["bindir"] +cd 'bin' do + filename = 'edit_json.rb' + #install(filename, bindir) +end +sitelibdir = CONFIG["sitelibdir"] +cd 'lib' do + install('json.rb', sitelibdir) + mkdir_p File.join(sitelibdir, 'json') + for file in Dir['json/**/*.{rb,xpm}'] + d = File.join(sitelibdir, file) + mkdir_p File.dirname(d) + install(file, d) + end + install(File.join('json', 'editor.rb'), File.join(sitelibdir,'json')) + install(File.join('json', 'json.xpm'), File.join(sitelibdir,'json')) +end +warn " *** Installed PURE ruby library." diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json.rb b/vendor/gems/gems/json_pure-1.2.0/lib/json.rb new file mode 100644 index 00000000..789b0de5 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json.rb @@ -0,0 +1,10 @@ +require 'json/common' +module JSON + require 'json/version' + + begin + require 'json/ext' + rescue LoadError + require 'json/pure' + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/Array.xpm b/vendor/gems/gems/json_pure-1.2.0/lib/json/Array.xpm new file mode 100644 index 00000000..27c48011 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/Array.xpm @@ -0,0 +1,21 @@ +/* XPM */ +static char * Array_xpm[] = { +"16 16 2 1", +" c None", +". c #000000", +" ", +" ", +" ", +" .......... ", +" . . ", +" . . ", +" . . ", +" . . ", +" . . ", +" . . ", +" . . ", +" . . ", +" .......... ", +" ", +" ", +" "}; diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/FalseClass.xpm b/vendor/gems/gems/json_pure-1.2.0/lib/json/FalseClass.xpm new file mode 100644 index 00000000..25ce6083 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/FalseClass.xpm @@ -0,0 +1,21 @@ +/* XPM */ +static char * False_xpm[] = { +"16 16 2 1", +" c None", +". c #FF0000", +" ", +" ", +" ", +" ...... ", +" . ", +" . ", +" . ", +" ...... ", +" . ", +" . ", +" . ", +" . ", +" . ", +" ", +" ", +" "}; diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/Hash.xpm b/vendor/gems/gems/json_pure-1.2.0/lib/json/Hash.xpm new file mode 100644 index 00000000..cd8f6f7b --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/Hash.xpm @@ -0,0 +1,21 @@ +/* XPM */ +static char * Hash_xpm[] = { +"16 16 2 1", +" c None", +". c #000000", +" ", +" ", +" ", +" . . ", +" . . ", +" . . ", +" ......... ", +" . . ", +" . . ", +" ......... ", +" . . ", +" . . ", +" . . 
", +" ", +" ", +" "}; diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/Key.xpm b/vendor/gems/gems/json_pure-1.2.0/lib/json/Key.xpm new file mode 100644 index 00000000..9fd72813 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/Key.xpm @@ -0,0 +1,73 @@ +/* XPM */ +static char * Key_xpm[] = { +"16 16 54 1", +" c None", +". c #110007", +"+ c #0E0900", +"@ c #000013", +"# c #070600", +"$ c #F6F006", +"% c #ECE711", +"& c #E5EE00", +"* c #16021E", +"= c #120900", +"- c #EDF12B", +"; c #000033", +"> c #0F0000", +", c #FFFE03", +"' c #E6E500", +") c #16021B", +"! c #F7F502", +"~ c #000E00", +"{ c #130000", +"] c #FFF000", +"^ c #FFE711", +"/ c #140005", +"( c #190025", +"_ c #E9DD27", +": c #E7DC04", +"< c #FFEC09", +"[ c #FFE707", +"} c #FFDE10", +"| c #150021", +"1 c #160700", +"2 c #FAF60E", +"3 c #EFE301", +"4 c #FEF300", +"5 c #E7E000", +"6 c #FFFF08", +"7 c #0E0206", +"8 c #040000", +"9 c #03052E", +"0 c #041212", +"a c #070300", +"b c #F2E713", +"c c #F9DE13", +"d c #36091E", +"e c #00001C", +"f c #1F0010", +"g c #FFF500", +"h c #DEDE00", +"i c #050A00", +"j c #FAF14A", +"k c #F5F200", +"l c #040404", +"m c #1A0D00", +"n c #EDE43D", +"o c #ECE007", +" ", +" ", +" .+@ ", +" #$%&* ", +" =-;>,') ", +" >!~{]^/ ", +" (_:<[}| ", +" 1234567 ", +" 890abcd ", +" efghi ", +" >jkl ", +" mnol ", +" >kl ", +" ll ", +" ", +" "}; diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/NilClass.xpm b/vendor/gems/gems/json_pure-1.2.0/lib/json/NilClass.xpm new file mode 100644 index 00000000..3509f06c --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/NilClass.xpm @@ -0,0 +1,21 @@ +/* XPM */ +static char * False_xpm[] = { +"16 16 2 1", +" c None", +". c #000000", +" ", +" ", +" ", +" ... ", +" . . ", +" . . ", +" . . ", +" . . ", +" . . ", +" . . ", +" . . ", +" . . ", +" ... ", +" ", +" ", +" "}; diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/Numeric.xpm b/vendor/gems/gems/json_pure-1.2.0/lib/json/Numeric.xpm new file mode 100644 index 00000000..e071e2ee --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/Numeric.xpm @@ -0,0 +1,28 @@ +/* XPM */ +static char * Numeric_xpm[] = { +"16 16 9 1", +" c None", +". c #FF0000", +"+ c #0000FF", +"@ c #0023DB", +"# c #00EA14", +"$ c #00FF00", +"% c #004FAF", +"& c #0028D6", +"* c #00F20C", +" ", +" ", +" ", +" ... +++@#$$$$ ", +" .+ %& $$ ", +" . + $ ", +" . + $$ ", +" . ++$$$$ ", +" . + $$ ", +" . + $ ", +" . + $ ", +" . + $ $$ ", +" .....++++*$$ ", +" ", +" ", +" "}; diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/String.xpm b/vendor/gems/gems/json_pure-1.2.0/lib/json/String.xpm new file mode 100644 index 00000000..f79a89cd --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/String.xpm @@ -0,0 +1,96 @@ +/* XPM */ +static char * String_xpm[] = { +"16 16 77 1", +" c None", +". c #000000", +"+ c #040404", +"@ c #080806", +"# c #090606", +"$ c #EEEAE1", +"% c #E7E3DA", +"& c #E0DBD1", +"* c #D4B46F", +"= c #0C0906", +"- c #E3C072", +"; c #E4C072", +"> c #060505", +", c #0B0A08", +"' c #D5B264", +") c #D3AF5A", +"! 
c #080602", +"~ c #E1B863", +"{ c #DDB151", +"] c #DBAE4A", +"^ c #DDB152", +"/ c #DDB252", +"( c #070705", +"_ c #0C0A07", +": c #D3A33B", +"< c #020201", +"[ c #DAAA41", +"} c #040302", +"| c #E4D9BF", +"1 c #0B0907", +"2 c #030201", +"3 c #020200", +"4 c #C99115", +"5 c #080704", +"6 c #DBC8A2", +"7 c #E7D7B4", +"8 c #E0CD9E", +"9 c #080601", +"0 c #040400", +"a c #010100", +"b c #0B0B08", +"c c #DCBF83", +"d c #DCBC75", +"e c #DEB559", +"f c #040301", +"g c #BC8815", +"h c #120E07", +"i c #060402", +"j c #0A0804", +"k c #D4A747", +"l c #D6A12F", +"m c #0E0C05", +"n c #C8C1B0", +"o c #1D1B15", +"p c #D7AD51", +"q c #070502", +"r c #080804", +"s c #BC953B", +"t c #C4BDAD", +"u c #0B0807", +"v c #DBAC47", +"w c #1B150A", +"x c #B78A2C", +"y c #D8A83C", +"z c #D4A338", +"A c #0F0B03", +"B c #181105", +"C c #C59325", +"D c #C18E1F", +"E c #060600", +"F c #CC992D", +"G c #B98B25", +"H c #B3831F", +"I c #C08C1C", +"J c #060500", +"K c #0E0C03", +"L c #0D0A00", +" ", +" .+@# ", +" .$%&*= ", +" .-;>,')! ", +" .~. .{]. ", +" .^/. (_:< ", +" .[.}|$12 ", +" 345678}90 ", +" a2bcdefgh ", +" ijkl.mno ", +" tv_nsec + object['n'] = usec * 1000 + end + if respond_to?(:tv_nsec) + at(*object.values_at('s', 'n')) + else + at(object['s'], object['n'] / 1000) + end + end + + def to_json(*args) + { + 'json_class' => self.class.name, + 's' => tv_sec, + 'n' => respond_to?(:tv_nsec) ? tv_nsec : tv_usec * 1000 + }.to_json(*args) + end +end + +class Date + def self.json_create(object) + civil(*object.values_at('y', 'm', 'd', 'sg')) + end + + alias start sg unless method_defined?(:start) + + def to_json(*args) + { + 'json_class' => self.class.name, + 'y' => year, + 'm' => month, + 'd' => day, + 'sg' => start, + }.to_json(*args) + end +end + +class DateTime + def self.json_create(object) + args = object.values_at('y', 'm', 'd', 'H', 'M', 'S') + of_a, of_b = object['of'].split('/') + if of_b and of_b != '0' + args << Rational(of_a.to_i, of_b.to_i) + else + args << of_a + end + args << object['sg'] + civil(*args) + end + + alias start sg unless method_defined?(:start) + + def to_json(*args) + { + 'json_class' => self.class.name, + 'y' => year, + 'm' => month, + 'd' => day, + 'H' => hour, + 'M' => min, + 'S' => sec, + 'of' => offset.to_s, + 'sg' => start, + }.to_json(*args) + end +end + +class Range + def self.json_create(object) + new(*object['a']) + end + + def to_json(*args) + { + 'json_class' => self.class.name, + 'a' => [ first, last, exclude_end? ] + }.to_json(*args) + end +end + +class Struct + def self.json_create(object) + new(*object['v']) + end + + def to_json(*args) + klass = self.class.name + klass.to_s.empty? and raise JSON::JSONError, "Only named structs are supported!" 
+ { + 'json_class' => klass, + 'v' => values, + }.to_json(*args) + end +end + +class Exception + def self.json_create(object) + result = new(object['m']) + result.set_backtrace object['b'] + result + end + + def to_json(*args) + { + 'json_class' => self.class.name, + 'm' => message, + 'b' => backtrace, + }.to_json(*args) + end +end + +class Regexp + def self.json_create(object) + new(object['s'], object['o']) + end + + def to_json(*) + { + 'json_class' => self.class.name, + 'o' => options, + 's' => source, + }.to_json + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/add/rails.rb b/vendor/gems/gems/json_pure-1.2.0/lib/json/add/rails.rb new file mode 100644 index 00000000..e86ed1aa --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/add/rails.rb @@ -0,0 +1,58 @@ +# This file contains implementations of rails custom objects for +# serialisation/deserialisation. + +unless Object.const_defined?(:JSON) and ::JSON.const_defined?(:JSON_LOADED) and + ::JSON::JSON_LOADED + require 'json' +end + +class Object + def self.json_create(object) + obj = new + for key, value in object + next if key == 'json_class' + instance_variable_set "@#{key}", value + end + obj + end + + def to_json(*a) + result = { + 'json_class' => self.class.name + } + instance_variables.inject(result) do |r, name| + r[name[1..-1]] = instance_variable_get name + r + end + result.to_json(*a) + end +end + +class Symbol + def to_json(*a) + to_s.to_json(*a) + end +end + +module Enumerable + def to_json(*a) + to_a.to_json(*a) + end +end + +# class Regexp +# def to_json(*) +# inspect +# end +# end +# +# The above rails definition has some problems: +# +# 1. { 'foo' => /bar/ }.to_json # => "{foo: /bar/}" +# This isn't valid JSON, because the regular expression syntax is not +# defined in RFC 4627. (And unquoted strings are disallowed there, too.) +# Though it is valid Javascript. +# +# 2. { 'foo' => /bar/mix }.to_json # => "{foo: /bar/mix}" +# This isn't even valid Javascript. + diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/common.rb b/vendor/gems/gems/json_pure-1.2.0/lib/json/common.rb new file mode 100644 index 00000000..39f6336e --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/common.rb @@ -0,0 +1,371 @@ +require 'json/version' +require 'iconv' + +module JSON + class << self + # If _object_ is string-like parse the string and return the parsed result + # as a Ruby data structure. Otherwise generate a JSON text from the Ruby + # data structure object and return it. + # + # The _opts_ argument is passed through to generate/parse respectively, see + # generate and parse for their documentation. + def [](object, opts = {}) + if object.respond_to? :to_str + JSON.parse(object.to_str, opts => {}) + else + JSON.generate(object, opts => {}) + end + end + + # Returns the JSON parser class, that is used by JSON. This might be either + # JSON::Ext::Parser or JSON::Pure::Parser. + attr_reader :parser + + # Set the JSON parser class _parser_ to be used by JSON. + def parser=(parser) # :nodoc: + @parser = parser + remove_const :Parser if const_defined? :Parser + const_set :Parser, parser + end + + # Return the constant located at _path_. The format of _path_ has to be + # either ::A::B::C or A::B::C. In any case A has to be located at the top + # level (absolute namespace path?). If there doesn't exist a constant at + # the given path, an ArgumentError is raised. + def deep_const_get(path) # :nodoc: + path = path.to_s + path.split(/::/).inject(Object) do |p, c| + case + when c.empty? 
then p + when p.const_defined?(c) then p.const_get(c) + else raise ArgumentError, "can't find const #{path}" + end + end + end + + # Set the module _generator_ to be used by JSON. + def generator=(generator) # :nodoc: + @generator = generator + generator_methods = generator::GeneratorMethods + for const in generator_methods.constants + klass = deep_const_get(const) + modul = generator_methods.const_get(const) + klass.class_eval do + instance_methods(false).each do |m| + m.to_s == 'to_json' and remove_method m + end + include modul + end + end + self.state = generator::State + const_set :State, self.state + end + + # Returns the JSON generator modul, that is used by JSON. This might be + # either JSON::Ext::Generator or JSON::Pure::Generator. + attr_reader :generator + + # Returns the JSON generator state class, that is used by JSON. This might + # be either JSON::Ext::Generator::State or JSON::Pure::Generator::State. + attr_accessor :state + + # This is create identifier, that is used to decide, if the _json_create_ + # hook of a class should be called. It defaults to 'json_class'. + attr_accessor :create_id + end + self.create_id = 'json_class' + + NaN = 0.0/0 + + Infinity = 1.0/0 + + MinusInfinity = -Infinity + + # The base exception for JSON errors. + class JSONError < StandardError; end + + # This exception is raised, if a parser error occurs. + class ParserError < JSONError; end + + # This exception is raised, if the nesting of parsed datastructures is too + # deep. + class NestingError < ParserError; end + + # This exception is raised, if a generator or unparser error occurs. + class GeneratorError < JSONError; end + # For backwards compatibility + UnparserError = GeneratorError + + # If a circular data structure is encountered while unparsing + # this exception is raised. + class CircularDatastructure < GeneratorError; end + + # This exception is raised, if the required unicode support is missing on the + # system. Usually this means, that the iconv library is not installed. + class MissingUnicodeSupport < JSONError; end + + module_function + + # Parse the JSON document _source_ into a Ruby data structure and return it. + # + # _opts_ can have the following + # keys: + # * *max_nesting*: The maximum depth of nesting allowed in the parsed data + # structures. Disable depth checking with :max_nesting => false, it defaults + # to 19. + # * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in + # defiance of RFC 4627 to be parsed by the Parser. This option defaults + # to false. + # * *create_additions*: If set to false, the Parser doesn't create + # additions even if a matchin class and create_id was found. This option + # defaults to true. + def parse(source, opts = {}) + JSON.parser.new(source, opts).parse + end + + # Parse the JSON document _source_ into a Ruby data structure and return it. + # The bang version of the parse method, defaults to the more dangerous values + # for the _opts_ hash, so be sure only to parse trusted _source_ documents. + # + # _opts_ can have the following keys: + # * *max_nesting*: The maximum depth of nesting allowed in the parsed data + # structures. Enable depth checking with :max_nesting => anInteger. The parse! + # methods defaults to not doing max depth checking: This can be dangerous, + # if someone wants to fill up your stack. + # * *allow_nan*: If set to true, allow NaN, Infinity, and -Infinity in + # defiance of RFC 4627 to be parsed by the Parser. This option defaults + # to true. 
+ # * *create_additions*: If set to false, the Parser doesn't create + # additions even if a matchin class and create_id was found. This option + # defaults to true. + def parse!(source, opts = {}) + opts = { + :max_nesting => false, + :allow_nan => true + }.update(opts) + JSON.parser.new(source, opts).parse + end + + # Generate a JSON document from the Ruby data structure _obj_ and return + # it. _state_ is * a JSON::State object, + # * or a Hash like object (responding to to_hash), + # * an object convertible into a hash by a to_h method, + # that is used as or to configure a State object. + # + # It defaults to a state object, that creates the shortest possible JSON text + # in one line, checks for circular data structures and doesn't allow NaN, + # Infinity, and -Infinity. + # + # A _state_ hash can have the following keys: + # * *indent*: a string used to indent levels (default: ''), + # * *space*: a string that is put after, a : or , delimiter (default: ''), + # * *space_before*: a string that is put before a : pair delimiter (default: ''), + # * *object_nl*: a string that is put at the end of a JSON object (default: ''), + # * *array_nl*: a string that is put at the end of a JSON array (default: ''), + # * *check_circular*: true if checking for circular data structures + # should be done (the default), false otherwise. + # * *allow_nan*: true if NaN, Infinity, and -Infinity should be + # generated, otherwise an exception is thrown, if these values are + # encountered. This options defaults to false. + # * *max_nesting*: The maximum depth of nesting allowed in the data + # structures from which JSON is to be generated. Disable depth checking + # with :max_nesting => false, it defaults to 19. + # + # See also the fast_generate for the fastest creation method with the least + # amount of sanity checks, and the pretty_generate method for some + # defaults for a pretty output. + def generate(obj, state = nil) + if state + state = State.from_state(state) + else + state = State.new + end + result = obj.to_json(state) + if result !~ /\A\s*(?:\[.*\]|\{.*\})\s*\Z/m + raise GeneratorError, "only generation of JSON objects or arrays allowed" + end + result + end + + # :stopdoc: + # I want to deprecate these later, so I'll first be silent about them, and + # later delete them. + alias unparse generate + module_function :unparse + # :startdoc: + + # Generate a JSON document from the Ruby data structure _obj_ and return it. + # This method disables the checks for circles in Ruby objects. + # + # *WARNING*: Be careful not to pass any Ruby data structures with circles as + # _obj_ argument, because this will cause JSON to go into an infinite loop. + def fast_generate(obj) + result = obj.to_json(nil) + if result !~ /\A(?:\[.*\]|\{.*\})\Z/ + raise GeneratorError, "only generation of JSON objects or arrays allowed" + end + result + end + + # :stopdoc: + # I want to deprecate these later, so I'll first be silent about them, and later delete them. + alias fast_unparse fast_generate + module_function :fast_unparse + # :startdoc: + + # Generate a JSON document from the Ruby data structure _obj_ and return it. + # The returned document is a prettier form of the document returned by + # #unparse. + # + # The _opts_ argument can be used to configure the generator, see the + # generate method for a more detailed explanation. 
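A short sketch of how the parse and generate options documented above fit together, assuming the vendored json_pure 1.2.0 behaviour; the sample data is illustrative:

    require 'json'

    obj = { 'name' => 'middleman', 'deps' => ['thin', 'shotgun'] }

    JSON.generate(obj)                  # shortest one-line form, circle check enabled
    JSON.generate(obj, :indent => '  ', :space => ' ', :object_nl => "\n")
    JSON.pretty_generate(obj)           # presets the same state keys for readable output

    JSON.parse('{"n": 1}', :max_nesting => 2, :allow_nan => false)
    JSON.parse!('[NaN, Infinity]')      # parse! defaults to :allow_nan => true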
+ def pretty_generate(obj, opts = nil) + state = JSON.state.new( + :indent => ' ', + :space => ' ', + :object_nl => "\n", + :array_nl => "\n", + :check_circular => true + ) + if opts + if opts.respond_to? :to_hash + opts = opts.to_hash + elsif opts.respond_to? :to_h + opts = opts.to_h + else + raise TypeError, "can't convert #{opts.class} into Hash" + end + state.configure(opts) + end + result = obj.to_json(state) + if result !~ /\A\s*(?:\[.*\]|\{.*\})\s*\Z/m + raise GeneratorError, "only generation of JSON objects or arrays allowed" + end + result + end + + # :stopdoc: + # I want to deprecate these later, so I'll first be silent about them, and later delete them. + alias pretty_unparse pretty_generate + module_function :pretty_unparse + # :startdoc: + + # Load a ruby data structure from a JSON _source_ and return it. A source can + # either be a string-like object, an IO like object, or an object responding + # to the read method. If _proc_ was given, it will be called with any nested + # Ruby object as an argument recursively in depth first order. + # + # This method is part of the implementation of the load/dump interface of + # Marshal and YAML. + def load(source, proc = nil) + if source.respond_to? :to_str + source = source.to_str + elsif source.respond_to? :to_io + source = source.to_io.read + else + source = source.read + end + result = parse(source, :max_nesting => false, :allow_nan => true) + recurse_proc(result, &proc) if proc + result + end + + def recurse_proc(result, &proc) + case result + when Array + result.each { |x| recurse_proc x, &proc } + proc.call result + when Hash + result.each { |x, y| recurse_proc x, &proc; recurse_proc y, &proc } + proc.call result + else + proc.call result + end + end + + alias restore load + module_function :restore + + # Dumps _obj_ as a JSON string, i.e. calls generate on the object and returns + # the result. + # + # If anIO (an IO like object or an object that responds to the write method) + # was given, the resulting JSON is written to it. + # + # If the number of nested arrays or objects exceeds _limit_ an ArgumentError + # exception is raised. This argument is similar (but not exactly the + # same!) to the _limit_ argument in Marshal.dump. + # + # This method is part of the implementation of the load/dump interface of + # Marshal and YAML. + def dump(obj, anIO = nil, limit = nil) + if anIO and limit.nil? + anIO = anIO.to_io if anIO.respond_to?(:to_io) + unless anIO.respond_to?(:write) + limit = anIO + anIO = nil + end + end + limit ||= 0 + result = generate(obj, :allow_nan => true, :max_nesting => limit) + if anIO + anIO.write result + anIO + else + result + end + rescue JSON::NestingError + raise ArgumentError, "exceed depth limit" + end + + # Shortuct for iconv. + def self.iconv(to, from, string) + Iconv.iconv(to, from, string).first + end +end + +module ::Kernel + private + + # Outputs _objs_ to STDOUT as JSON strings in the shortest form, that is in + # one line. + def j(*objs) + objs.each do |obj| + puts JSON::generate(obj, :allow_nan => true, :max_nesting => false) + end + nil + end + + # Ouputs _objs_ to STDOUT as JSON strings in a pretty format, with + # indentation and over many lines. + def jj(*objs) + objs.each do |obj| + puts JSON::pretty_generate(obj, :allow_nan => true, :max_nesting => false) + end + nil + end + + # If _object_ is string-like parse the string and return the parsed result as + # a Ruby data structure. Otherwise generate a JSON text from the Ruby data + # structure object and return it. 
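A hedged round-trip sketch for the Marshal/YAML-style load/dump interface described above; the StringIO usage is illustrative:

    require 'json'
    require 'stringio'

    io = StringIO.new
    JSON.dump([1, { 'a' => 2 }], io)   # generates with :allow_nan => true and writes to io
    io.rewind
    JSON.load(io)                      # reads the IO back => [1, {"a"=>2}]

    # An optional proc is called with every nested object, depth first.
    JSON.load('[[1], 2]', lambda { |obj| p obj })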
+ # + # The _opts_ argument is passed through to generate/parse respectively, see + # generate and parse for their documentation. + def JSON(object, opts = {}) + if object.respond_to? :to_str + JSON.parse(object.to_str, opts) + else + JSON.generate(object, opts) + end + end +end + +class ::Class + # Returns true, if this class can be used to create an instance + # from a serialised JSON string. The class has to implement a class + # method _json_create_ that expects a hash as first parameter, which includes + # the required data. + def json_creatable? + respond_to?(:json_create) + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/editor.rb b/vendor/gems/gems/json_pure-1.2.0/lib/json/editor.rb new file mode 100644 index 00000000..1e13f33c --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/editor.rb @@ -0,0 +1,1371 @@ +# To use the GUI JSON editor, start the edit_json.rb executable script. It +# requires ruby-gtk to be installed. + +require 'gtk2' +require 'iconv' +require 'json' +require 'rbconfig' +require 'open-uri' + +module JSON + module Editor + include Gtk + + # Beginning of the editor window title + TITLE = 'JSON Editor'.freeze + + # Columns constants + ICON_COL, TYPE_COL, CONTENT_COL = 0, 1, 2 + + # JSON primitive types (Containers) + CONTAINER_TYPES = %w[Array Hash].sort + # All JSON primitive types + ALL_TYPES = (%w[TrueClass FalseClass Numeric String NilClass] + + CONTAINER_TYPES).sort + + # The Nodes necessary for the tree representation of a JSON document + ALL_NODES = (ALL_TYPES + %w[Key]).sort + + DEFAULT_DIALOG_KEY_PRESS_HANDLER = lambda do |dialog, event| + case event.keyval + when Gdk::Keyval::GDK_Return + dialog.response Dialog::RESPONSE_ACCEPT + when Gdk::Keyval::GDK_Escape + dialog.response Dialog::RESPONSE_REJECT + end + end + + # Returns the Gdk::Pixbuf of the icon named _name_ from the icon cache. + def Editor.fetch_icon(name) + @icon_cache ||= {} + unless @icon_cache.key?(name) + path = File.dirname(__FILE__) + @icon_cache[name] = Gdk::Pixbuf.new(File.join(path, name + '.xpm')) + end + @icon_cache[name] + end + + # Opens an error dialog on top of _window_ showing the error message + # _text_. + def Editor.error_dialog(window, text) + dialog = MessageDialog.new(window, Dialog::MODAL, + MessageDialog::ERROR, + MessageDialog::BUTTONS_CLOSE, text) + dialog.show_all + dialog.run + rescue TypeError + dialog = MessageDialog.new(Editor.window, Dialog::MODAL, + MessageDialog::ERROR, + MessageDialog::BUTTONS_CLOSE, text) + dialog.show_all + dialog.run + ensure + dialog.destroy if dialog + end + + # Opens a yes/no question dialog on top of _window_ showing the error + # message _text_. If yes was answered _true_ is returned, otherwise + # _false_. + def Editor.question_dialog(window, text) + dialog = MessageDialog.new(window, Dialog::MODAL, + MessageDialog::QUESTION, + MessageDialog::BUTTONS_YES_NO, text) + dialog.show_all + dialog.run do |response| + return Gtk::Dialog::RESPONSE_YES === response + end + ensure + dialog.destroy if dialog + end + + # Convert the tree model starting from Gtk::TreeIter _iter_ into a Ruby + # data structure and return it. + def Editor.model2data(iter) + return nil if iter.nil? 
+ case iter.type + when 'Hash' + hash = {} + iter.each { |c| hash[c.content] = Editor.model2data(c.first_child) } + hash + when 'Array' + array = Array.new(iter.n_children) + iter.each_with_index { |c, i| array[i] = Editor.model2data(c) } + array + when 'Key' + iter.content + when 'String' + iter.content + when 'Numeric' + content = iter.content + if /\./.match(content) + content.to_f + else + content.to_i + end + when 'TrueClass' + true + when 'FalseClass' + false + when 'NilClass' + nil + else + fail "Unknown type found in model: #{iter.type}" + end + end + + # Convert the Ruby data structure _data_ into tree model data for Gtk and + # returns the whole model. If the parameter _model_ wasn't given a new + # Gtk::TreeStore is created as the model. The _parent_ parameter specifies + # the parent node (iter, Gtk:TreeIter instance) to which the data is + # appended, alternativeley the result of the yielded block is used as iter. + def Editor.data2model(data, model = nil, parent = nil) + model ||= TreeStore.new(Gdk::Pixbuf, String, String) + iter = if block_given? + yield model + else + model.append(parent) + end + case data + when Hash + iter.type = 'Hash' + data.sort.each do |key, value| + pair_iter = model.append(iter) + pair_iter.type = 'Key' + pair_iter.content = key.to_s + Editor.data2model(value, model, pair_iter) + end + when Array + iter.type = 'Array' + data.each do |value| + Editor.data2model(value, model, iter) + end + when Numeric + iter.type = 'Numeric' + iter.content = data.to_s + when String, true, false, nil + iter.type = data.class.name + iter.content = data.nil? ? 'null' : data.to_s + else + iter.type = 'String' + iter.content = data.to_s + end + model + end + + # The Gtk::TreeIter class is reopened and some auxiliary methods are added. + class Gtk::TreeIter + include Enumerable + + # Traverse each of this Gtk::TreeIter instance's children + # and yield to them. + def each + n_children.times { |i| yield nth_child(i) } + end + + # Recursively traverse all nodes of this Gtk::TreeIter's subtree + # (including self) and yield to them. + def recursive_each(&block) + yield self + each do |i| + i.recursive_each(&block) + end + end + + # Remove the subtree of this Gtk::TreeIter instance from the + # model _model_. + def remove_subtree(model) + while current = first_child + model.remove(current) + end + end + + # Returns the type of this node. + def type + self[TYPE_COL] + end + + # Sets the type of this node to _value_. This implies setting + # the respective icon accordingly. + def type=(value) + self[TYPE_COL] = value + self[ICON_COL] = Editor.fetch_icon(value) + end + + # Returns the content of this node. + def content + self[CONTENT_COL] + end + + # Sets the content of this node to _value_. + def content=(value) + self[CONTENT_COL] = value + end + end + + # This module bundles some method, that can be used to create a menu. It + # should be included into the class in question. + module MenuExtension + include Gtk + + # Creates a Menu, that includes MenuExtension. _treeview_ is the + # Gtk::TreeView, on which it operates. + def initialize(treeview) + @treeview = treeview + @menu = Menu.new + end + + # Returns the Gtk::TreeView of this menu. + attr_reader :treeview + + # Returns the menu. + attr_reader :menu + + # Adds a Gtk::SeparatorMenuItem to this instance's #menu. + def add_separator + menu.append SeparatorMenuItem.new + end + + # Adds a Gtk::MenuItem to this instance's #menu. 
_label_ is the label + # string, _klass_ is the item type, and _callback_ is the procedure, that + # is called if the _item_ is activated. + def add_item(label, keyval = nil, klass = MenuItem, &callback) + label = "#{label} (C-#{keyval.chr})" if keyval + item = klass.new(label) + item.signal_connect(:activate, &callback) + if keyval + self.signal_connect(:'key-press-event') do |item, event| + if event.state & Gdk::Window::ModifierType::CONTROL_MASK != 0 and + event.keyval == keyval + callback.call item + end + end + end + menu.append item + item + end + + # This method should be implemented in subclasses to create the #menu of + # this instance. It has to be called after an instance of this class is + # created, to build the menu. + def create + raise NotImplementedError + end + + def method_missing(*a, &b) + treeview.__send__(*a, &b) + end + end + + # This class creates the popup menu, that opens when clicking onto the + # treeview. + class PopUpMenu + include MenuExtension + + # Change the type or content of the selected node. + def change_node(item) + if current = selection.selected + parent = current.parent + old_type, old_content = current.type, current.content + if ALL_TYPES.include?(old_type) + @clipboard_data = Editor.model2data(current) + type, content = ask_for_element(parent, current.type, + current.content) + if type + current.type, current.content = type, content + current.remove_subtree(model) + toplevel.display_status("Changed a node in tree.") + window.change + end + else + toplevel.display_status( + "Cannot change node of type #{old_type} in tree!") + end + end + end + + # Cut the selected node and its subtree, and save it into the + # clipboard. + def cut_node(item) + if current = selection.selected + if current and current.type == 'Key' + @clipboard_data = { + current.content => Editor.model2data(current.first_child) + } + else + @clipboard_data = Editor.model2data(current) + end + model.remove(current) + window.change + toplevel.display_status("Cut a node from tree.") + end + end + + # Copy the selected node and its subtree, and save it into the + # clipboard. + def copy_node(item) + if current = selection.selected + if current and current.type == 'Key' + @clipboard_data = { + current.content => Editor.model2data(current.first_child) + } + else + @clipboard_data = Editor.model2data(current) + end + window.change + toplevel.display_status("Copied a node from tree.") + end + end + + # Paste the data in the clipboard into the selected Array or Hash by + # appending it. + def paste_node_appending(item) + if current = selection.selected + if @clipboard_data + case current.type + when 'Array' + Editor.data2model(@clipboard_data, model, current) + expand_collapse(current) + when 'Hash' + if @clipboard_data.is_a? Hash + parent = current.parent + hash = Editor.model2data(current) + model.remove(current) + hash.update(@clipboard_data) + Editor.data2model(hash, model, parent) + if parent + expand_collapse(parent) + elsif @expanded + expand_all + end + window.change + else + toplevel.display_status( + "Cannot paste non-#{current.type} data into '#{current.type}'!") + end + else + toplevel.display_status( + "Cannot paste node below '#{current.type}'!") + end + else + toplevel.display_status("Nothing to paste in clipboard!") + end + else + toplevel.display_status("Append a node into the root first!") + end + end + + # Paste the data in the clipboard into the selected Array inserting it + # before the selected element. 
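To make the add_item and add_separator helpers documented above concrete, a small hypothetical menu in the style of the FileMenu and EditMenu classes defined later in this file; DemoMenu and its labels are illustrative only:

    # Hypothetical pulldown menu built on JSON::Editor::MenuExtension.
    class DemoMenu
      include JSON::Editor::MenuExtension

      # Each menu implements #create to assemble its items.
      def create
        title = Gtk::MenuItem.new('Demo')
        title.submenu = menu
        add_item('Reload', ?r) { |item| puts 'reload chosen (also on C-r)' }
        add_separator
        add_item('About') { |item| puts 'about chosen' }
        title
      end
    end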
+ def paste_node_inserting_before(item) + if current = selection.selected + if @clipboard_data + parent = current.parent or return + parent_type = parent.type + if parent_type == 'Array' + selected_index = parent.each_with_index do |c, i| + break i if c == current + end + Editor.data2model(@clipboard_data, model, parent) do |m| + m.insert_before(parent, current) + end + expand_collapse(current) + toplevel.display_status("Inserted an element to " + + "'#{parent_type}' before index #{selected_index}.") + window.change + else + toplevel.display_status( + "Cannot insert node below '#{parent_type}'!") + end + else + toplevel.display_status("Nothing to paste in clipboard!") + end + else + toplevel.display_status("Append a node into the root first!") + end + end + + # Append a new node to the selected Hash or Array. + def append_new_node(item) + if parent = selection.selected + parent_type = parent.type + case parent_type + when 'Hash' + key, type, content = ask_for_hash_pair(parent) + key or return + iter = create_node(parent, 'Key', key) + iter = create_node(iter, type, content) + toplevel.display_status( + "Added a (key, value)-pair to '#{parent_type}'.") + window.change + when 'Array' + type, content = ask_for_element(parent) + type or return + iter = create_node(parent, type, content) + window.change + toplevel.display_status("Appendend an element to '#{parent_type}'.") + else + toplevel.display_status("Cannot append to '#{parent_type}'!") + end + else + type, content = ask_for_element + type or return + iter = create_node(nil, type, content) + window.change + end + end + + # Insert a new node into an Array before the selected element. + def insert_new_node(item) + if current = selection.selected + parent = current.parent or return + parent_parent = parent.parent + parent_type = parent.type + if parent_type == 'Array' + selected_index = parent.each_with_index do |c, i| + break i if c == current + end + type, content = ask_for_element(parent) + type or return + iter = model.insert_before(parent, current) + iter.type, iter.content = type, content + toplevel.display_status("Inserted an element to " + + "'#{parent_type}' before index #{selected_index}.") + window.change + else + toplevel.display_status( + "Cannot insert node below '#{parent_type}'!") + end + else + toplevel.display_status("Append a node into the root first!") + end + end + + # Recursively collapse/expand a subtree starting from the selected node. + def collapse_expand(item) + if current = selection.selected + if row_expanded?(current.path) + collapse_row(current.path) + else + expand_row(current.path, true) + end + else + toplevel.display_status("Append a node into the root first!") + end + end + + # Create the menu. + def create + add_item("Change node", ?n, &method(:change_node)) + add_separator + add_item("Cut node", ?X, &method(:cut_node)) + add_item("Copy node", ?C, &method(:copy_node)) + add_item("Paste node (appending)", ?A, &method(:paste_node_appending)) + add_item("Paste node (inserting before)", ?I, + &method(:paste_node_inserting_before)) + add_separator + add_item("Append new node", ?a, &method(:append_new_node)) + add_item("Insert new node before", ?i, &method(:insert_new_node)) + add_separator + add_item("Collapse/Expand node (recursively)", ?e, + &method(:collapse_expand)) + + menu.show_all + signal_connect(:button_press_event) do |widget, event| + if event.kind_of? 
Gdk::EventButton and event.button == 3 + menu.popup(nil, nil, event.button, event.time) + end + end + signal_connect(:popup_menu) do + menu.popup(nil, nil, 0, Gdk::Event::CURRENT_TIME) + end + end + end + + # This class creates the File pulldown menu. + class FileMenu + include MenuExtension + + # Clear the model and filename, but ask to save the JSON document, if + # unsaved changes have occured. + def new(item) + window.clear + end + + # Open a file and load it into the editor. Ask to save the JSON document + # first, if unsaved changes have occured. + def open(item) + window.file_open + end + + def open_location(item) + window.location_open + end + + # Revert the current JSON document in the editor to the saved version. + def revert(item) + window.instance_eval do + @filename and file_open(@filename) + end + end + + # Save the current JSON document. + def save(item) + window.file_save + end + + # Save the current JSON document under the given filename. + def save_as(item) + window.file_save_as + end + + # Quit the editor, after asking to save any unsaved changes first. + def quit(item) + window.quit + end + + # Create the menu. + def create + title = MenuItem.new('File') + title.submenu = menu + add_item('New', &method(:new)) + add_item('Open', ?o, &method(:open)) + add_item('Open location', ?l, &method(:open_location)) + add_item('Revert', &method(:revert)) + add_separator + add_item('Save', ?s, &method(:save)) + add_item('Save As', ?S, &method(:save_as)) + add_separator + add_item('Quit', ?q, &method(:quit)) + title + end + end + + # This class creates the Edit pulldown menu. + class EditMenu + include MenuExtension + + # Copy data from model into primary clipboard. + def copy(item) + data = Editor.model2data(model.iter_first) + json = JSON.pretty_generate(data, :max_nesting => false) + c = Gtk::Clipboard.get(Gdk::Selection::PRIMARY) + c.text = json + end + + # Copy json text from primary clipboard into model. + def paste(item) + c = Gtk::Clipboard.get(Gdk::Selection::PRIMARY) + if json = c.wait_for_text + window.ask_save if @changed + begin + window.edit json + rescue JSON::ParserError + window.clear + end + end + end + + # Find a string in all nodes' contents and select the found node in the + # treeview. + def find(item) + @search = ask_for_find_term(@search) or return + iter = model.get_iter('0') or return + iter.recursive_each do |i| + if @iter + if @iter != i + next + else + @iter = nil + next + end + elsif @search.match(i[CONTENT_COL]) + set_cursor(i.path, nil, false) + @iter = i + break + end + end + end + + # Repeat the last search given by #find. + def find_again(item) + @search or return + iter = model.get_iter('0') + iter.recursive_each do |i| + if @iter + if @iter != i + next + else + @iter = nil + next + end + elsif @search.match(i[CONTENT_COL]) + set_cursor(i.path, nil, false) + @iter = i + break + end + end + end + + # Sort (Reverse sort) all elements of the selected array by the given + # expression. _x_ is the element in question. + def sort(item) + if current = selection.selected + if current.type == 'Array' + parent = current.parent + ary = Editor.model2data(current) + order, reverse = ask_for_order + order or return + begin + block = eval "lambda { |x| #{order} }" + if reverse + ary.sort! { |a,b| block[b] <=> block[a] } + else + ary.sort! 
{ |a,b| block[a] <=> block[b] } + end + rescue => e + Editor.error_dialog(self, "Failed to sort Array with #{order}: #{e}!") + else + Editor.data2model(ary, model, parent) do |m| + m.insert_before(parent, current) + end + model.remove(current) + expand_collapse(parent) + window.change + toplevel.display_status("Array has been sorted.") + end + else + toplevel.display_status("Only Array nodes can be sorted!") + end + else + toplevel.display_status("Select an Array to sort first!") + end + end + + # Create the menu. + def create + title = MenuItem.new('Edit') + title.submenu = menu + add_item('Copy', ?c, &method(:copy)) + add_item('Paste', ?v, &method(:paste)) + add_separator + add_item('Find', ?f, &method(:find)) + add_item('Find Again', ?g, &method(:find_again)) + add_separator + add_item('Sort', ?S, &method(:sort)) + title + end + end + + class OptionsMenu + include MenuExtension + + # Collapse/Expand all nodes by default. + def collapsed_nodes(item) + if expanded + self.expanded = false + collapse_all + else + self.expanded = true + expand_all + end + end + + # Toggle pretty saving mode on/off. + def pretty_saving(item) + @pretty_item.toggled + window.change + end + + attr_reader :pretty_item + + # Create the menu. + def create + title = MenuItem.new('Options') + title.submenu = menu + add_item('Collapsed nodes', nil, CheckMenuItem, &method(:collapsed_nodes)) + @pretty_item = add_item('Pretty saving', nil, CheckMenuItem, + &method(:pretty_saving)) + @pretty_item.active = true + window.unchange + title + end + end + + # This class inherits from Gtk::TreeView, to configure it and to add a lot + # of behaviour to it. + class JSONTreeView < Gtk::TreeView + include Gtk + + # Creates a JSONTreeView instance, the parameter _window_ is + # a MainWindow instance and used for self delegation. + def initialize(window) + @window = window + super(TreeStore.new(Gdk::Pixbuf, String, String)) + self.selection.mode = SELECTION_BROWSE + + @expanded = false + self.headers_visible = false + add_columns + add_popup_menu + end + + # Returns the MainWindow instance of this JSONTreeView. + attr_reader :window + + # Returns true, if nodes are autoexpanding, false otherwise. + attr_accessor :expanded + + private + + def add_columns + cell = CellRendererPixbuf.new + column = TreeViewColumn.new('Icon', cell, + 'pixbuf' => ICON_COL + ) + append_column(column) + + cell = CellRendererText.new + column = TreeViewColumn.new('Type', cell, + 'text' => TYPE_COL + ) + append_column(column) + + cell = CellRendererText.new + cell.editable = true + column = TreeViewColumn.new('Content', cell, + 'text' => CONTENT_COL + ) + cell.signal_connect(:edited, &method(:cell_edited)) + append_column(column) + end + + def unify_key(iter, key) + return unless iter.type == 'Key' + parent = iter.parent + if parent.any? { |c| c != iter and c.content == key } + old_key = key + i = 0 + begin + key = sprintf("%s.%d", old_key, i += 1) + end while parent.any? { |c| c != iter and c.content == key } + end + iter.content = key + end + + def cell_edited(cell, path, value) + iter = model.get_iter(path) + case iter.type + when 'Key' + unify_key(iter, value) + toplevel.display_status('Key has been changed.') + when 'FalseClass' + value.downcase! + if value == 'true' + iter.type, iter.content = 'TrueClass', 'true' + end + when 'TrueClass' + value.downcase! 
+ if value == 'false' + iter.type, iter.content = 'FalseClass', 'false' + end + when 'Numeric' + iter.content = + if value == 'Infinity' + value + else + (Integer(value) rescue Float(value) rescue 0).to_s + end + when 'String' + iter.content = value + when 'Hash', 'Array' + return + else + fail "Unknown type found in model: #{iter.type}" + end + window.change + end + + def configure_value(value, type) + value.editable = false + case type + when 'Array', 'Hash' + value.text = '' + when 'TrueClass' + value.text = 'true' + when 'FalseClass' + value.text = 'false' + when 'NilClass' + value.text = 'null' + when 'Numeric', 'String' + value.text ||= '' + value.editable = true + else + raise ArgumentError, "unknown type '#{type}' encountered" + end + end + + def add_popup_menu + menu = PopUpMenu.new(self) + menu.create + end + + public + + # Create a _type_ node with content _content_, and add it to _parent_ + # in the model. If _parent_ is nil, create a new model and put it into + # the editor treeview. + def create_node(parent, type, content) + iter = if parent + model.append(parent) + else + new_model = Editor.data2model(nil) + toplevel.view_new_model(new_model) + new_model.iter_first + end + iter.type, iter.content = type, content + expand_collapse(parent) if parent + iter + end + + # Ask for a hash key, value pair to be added to the Hash node _parent_. + def ask_for_hash_pair(parent) + key_input = type_input = value_input = nil + + dialog = Dialog.new("New (key, value) pair for Hash", nil, nil, + [ Stock::OK, Dialog::RESPONSE_ACCEPT ], + [ Stock::CANCEL, Dialog::RESPONSE_REJECT ] + ) + dialog.width_request = 640 + + hbox = HBox.new(false, 5) + hbox.pack_start(Label.new("Key:"), false) + hbox.pack_start(key_input = Entry.new) + key_input.text = @key || '' + dialog.vbox.pack_start(hbox, false) + key_input.signal_connect(:activate) do + if parent.any? { |c| c.content == key_input.text } + toplevel.display_status('Key already exists in Hash!') + key_input.text = '' + else + toplevel.display_status('Key has been changed.') + end + end + + hbox = HBox.new(false, 5) + hbox.pack_start(Label.new("Type:"), false) + hbox.pack_start(type_input = ComboBox.new(true)) + ALL_TYPES.each { |t| type_input.append_text(t) } + type_input.active = @type || 0 + dialog.vbox.pack_start(hbox, false) + + type_input.signal_connect(:changed) do + value_input.editable = false + case ALL_TYPES[type_input.active] + when 'Array', 'Hash' + value_input.text = '' + when 'TrueClass' + value_input.text = 'true' + when 'FalseClass' + value_input.text = 'false' + when 'NilClass' + value_input.text = 'null' + else + value_input.text = '' + value_input.editable = true + end + end + + hbox = HBox.new(false, 5) + hbox.pack_start(Label.new("Value:"), false) + hbox.pack_start(value_input = Entry.new) + value_input.width_chars = 60 + value_input.text = @value || '' + dialog.vbox.pack_start(hbox, false) + + dialog.signal_connect(:'key-press-event', &DEFAULT_DIALOG_KEY_PRESS_HANDLER) + dialog.show_all + self.focus = dialog + dialog.run do |response| + if response == Dialog::RESPONSE_ACCEPT + @key = key_input.text + type = ALL_TYPES[@type = type_input.active] + content = value_input.text + return @key, type, content + end + end + return + ensure + dialog.destroy + end + + # Ask for an element to be appended _parent_. + def ask_for_element(parent = nil, default_type = nil, value_text = @content) + type_input = value_input = nil + + dialog = Dialog.new( + "New element into #{parent ? 
parent.type : 'root'}", + nil, nil, + [ Stock::OK, Dialog::RESPONSE_ACCEPT ], + [ Stock::CANCEL, Dialog::RESPONSE_REJECT ] + ) + hbox = HBox.new(false, 5) + hbox.pack_start(Label.new("Type:"), false) + hbox.pack_start(type_input = ComboBox.new(true)) + default_active = 0 + types = parent ? ALL_TYPES : CONTAINER_TYPES + types.each_with_index do |t, i| + type_input.append_text(t) + if t == default_type + default_active = i + end + end + type_input.active = default_active + dialog.vbox.pack_start(hbox, false) + type_input.signal_connect(:changed) do + configure_value(value_input, types[type_input.active]) + end + + hbox = HBox.new(false, 5) + hbox.pack_start(Label.new("Value:"), false) + hbox.pack_start(value_input = Entry.new) + value_input.width_chars = 60 + value_input.text = value_text if value_text + configure_value(value_input, types[type_input.active]) + + dialog.vbox.pack_start(hbox, false) + + dialog.signal_connect(:'key-press-event', &DEFAULT_DIALOG_KEY_PRESS_HANDLER) + dialog.show_all + self.focus = dialog + dialog.run do |response| + if response == Dialog::RESPONSE_ACCEPT + type = types[type_input.active] + @content = case type + when 'Numeric' + if (t = value_input.text) == 'Infinity' + 1 / 0.0 + else + Integer(t) rescue Float(t) rescue 0 + end + else + value_input.text + end.to_s + return type, @content + end + end + return + ensure + dialog.destroy if dialog + end + + # Ask for an order criteria for sorting, using _x_ for the element in + # question. Returns the order criterium, and true/false for reverse + # sorting. + def ask_for_order + dialog = Dialog.new( + "Give an order criterium for 'x'.", + nil, nil, + [ Stock::OK, Dialog::RESPONSE_ACCEPT ], + [ Stock::CANCEL, Dialog::RESPONSE_REJECT ] + ) + hbox = HBox.new(false, 5) + + hbox.pack_start(Label.new("Order:"), false) + hbox.pack_start(order_input = Entry.new) + order_input.text = @order || 'x' + order_input.width_chars = 60 + + hbox.pack_start(reverse_checkbox = CheckButton.new('Reverse'), false) + + dialog.vbox.pack_start(hbox, false) + + dialog.signal_connect(:'key-press-event', &DEFAULT_DIALOG_KEY_PRESS_HANDLER) + dialog.show_all + self.focus = dialog + dialog.run do |response| + if response == Dialog::RESPONSE_ACCEPT + return @order = order_input.text, reverse_checkbox.active? + end + end + return + ensure + dialog.destroy if dialog + end + + # Ask for a find term to search for in the tree. Returns the term as a + # string. + def ask_for_find_term(search = nil) + dialog = Dialog.new( + "Find a node matching regex in tree.", + nil, nil, + [ Stock::OK, Dialog::RESPONSE_ACCEPT ], + [ Stock::CANCEL, Dialog::RESPONSE_REJECT ] + ) + hbox = HBox.new(false, 5) + + hbox.pack_start(Label.new("Regex:"), false) + hbox.pack_start(regex_input = Entry.new) + hbox.pack_start(icase_checkbox = CheckButton.new('Icase'), false) + regex_input.width_chars = 60 + if search + regex_input.text = search.source + icase_checkbox.active = search.casefold? + end + + dialog.vbox.pack_start(hbox, false) + + dialog.signal_connect(:'key-press-event', &DEFAULT_DIALOG_KEY_PRESS_HANDLER) + dialog.show_all + self.focus = dialog + dialog.run do |response| + if response == Dialog::RESPONSE_ACCEPT + begin + return Regexp.new(regex_input.text, icase_checkbox.active? ? Regexp::IGNORECASE : 0) + rescue => e + Editor.error_dialog(self, "Evaluation of regex /#{regex_input.text}/ failed: #{e}!") + return + end + end + end + return + ensure + dialog.destroy if dialog + end + + # Expand or collapse row pointed to by _iter_ according + # to the #expanded attribute. 
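+    # Illustrative usage (a sketch, not part of the original file; +view+
+    # stands for a JSONTreeView instance): with autoexpansion switched on
+    # the call expands the whole subtree, otherwise it collapses the row:
+    #
+    #   view.expanded = true
+    #   view.expand_collapse(iter)   # expands iter recursively
+    #   view.expanded = false
+    #   view.expand_collapse(iter)   # collapses iter again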
+ def expand_collapse(iter) + if expanded + expand_row(iter.path, true) + else + collapse_row(iter.path) + end + end + end + + # The editor main window + class MainWindow < Gtk::Window + include Gtk + + def initialize(encoding) + @changed = false + @encoding = encoding + super(TOPLEVEL) + display_title + set_default_size(800, 600) + signal_connect(:delete_event) { quit } + + vbox = VBox.new(false, 0) + add(vbox) + #vbox.border_width = 0 + + @treeview = JSONTreeView.new(self) + @treeview.signal_connect(:'cursor-changed') do + display_status('') + end + + menu_bar = create_menu_bar + vbox.pack_start(menu_bar, false, false, 0) + + sw = ScrolledWindow.new(nil, nil) + sw.shadow_type = SHADOW_ETCHED_IN + sw.set_policy(POLICY_AUTOMATIC, POLICY_AUTOMATIC) + vbox.pack_start(sw, true, true, 0) + sw.add(@treeview) + + @status_bar = Statusbar.new + vbox.pack_start(@status_bar, false, false, 0) + + @filename ||= nil + if @filename + data = read_data(@filename) + view_new_model Editor.data2model(data) + end + + signal_connect(:button_release_event) do |_,event| + if event.button == 2 + c = Gtk::Clipboard.get(Gdk::Selection::PRIMARY) + if url = c.wait_for_text + location_open url + end + false + else + true + end + end + end + + # Creates the menu bar with the pulldown menus and returns it. + def create_menu_bar + menu_bar = MenuBar.new + @file_menu = FileMenu.new(@treeview) + menu_bar.append @file_menu.create + @edit_menu = EditMenu.new(@treeview) + menu_bar.append @edit_menu.create + @options_menu = OptionsMenu.new(@treeview) + menu_bar.append @options_menu.create + menu_bar + end + + # Sets editor status to changed, to indicate that the edited data + # containts unsaved changes. + def change + @changed = true + display_title + end + + # Sets editor status to unchanged, to indicate that the edited data + # doesn't containt unsaved changes. + def unchange + @changed = false + display_title + end + + # Puts a new model _model_ into the Gtk::TreeView to be edited. + def view_new_model(model) + @treeview.model = model + @treeview.expanded = true + @treeview.expand_all + unchange + end + + # Displays _text_ in the status bar. + def display_status(text) + @cid ||= nil + @status_bar.pop(@cid) if @cid + @cid = @status_bar.get_context_id('dummy') + @status_bar.push(@cid, text) + end + + # Opens a dialog, asking, if changes should be saved to a file. + def ask_save + if Editor.question_dialog(self, + "Unsaved changes to JSON model. Save?") + if @filename + file_save + else + file_save_as + end + end + end + + # Quit this editor, that is, leave this editor's main loop. + def quit + ask_save if @changed + if Gtk.main_level > 0 + destroy + Gtk.main_quit + end + nil + end + + # Display the new title according to the editor's current state. + def display_title + title = TITLE.dup + title << ": #@filename" if @filename + title << " *" if @changed + self.title = title + end + + # Clear the current model, after asking to save all unsaved changes. + def clear + ask_save if @changed + @filename = nil + self.view_new_model nil + end + + def check_pretty_printed(json) + pretty = !!((nl_index = json.index("\n")) && nl_index != json.size - 1) + @options_menu.pretty_item.active = pretty + end + private :check_pretty_printed + + # Open the data at the location _uri_, if given. Otherwise open a dialog + # to ask for the _uri_. 
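+    # Hypothetical call (illustration only, not in the original source;
+    # +window+ stands for a MainWindow instance):
+    #
+    #   window.location_open('http://example.com/sample.json')
+    #
+    # The URI ends up in #read_data, which reads it via Kernel#open, so
+    # remote URLs are assumed to work only when open-uri has been
+    # required; passing no argument opens the location dialog instead.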
+ def location_open(uri = nil) + uri = ask_for_location unless uri + uri or return + ask_save if @changed + data = load_location(uri) or return + view_new_model Editor.data2model(data) + end + + # Open the file _filename_ or call the #select_file method to ask for a + # filename. + def file_open(filename = nil) + filename = select_file('Open as a JSON file') unless filename + data = load_file(filename) or return + view_new_model Editor.data2model(data) + end + + # Edit the string _json_ in the editor. + def edit(json) + if json.respond_to? :read + json = json.read + end + data = parse_json json + view_new_model Editor.data2model(data) + end + + # Save the current file. + def file_save + if @filename + store_file(@filename) + else + file_save_as + end + end + + # Save the current file as the filename + def file_save_as + filename = select_file('Save as a JSON file') + store_file(filename) + end + + # Store the current JSON document to _path_. + def store_file(path) + if path + data = Editor.model2data(@treeview.model.iter_first) + File.open(path + '.tmp', 'wb') do |output| + data or break + if @options_menu.pretty_item.active? + output.puts JSON.pretty_generate(data, :max_nesting => false) + else + output.write JSON.generate(data, :max_nesting => false) + end + end + File.rename path + '.tmp', path + @filename = path + toplevel.display_status("Saved data to '#@filename'.") + unchange + end + rescue SystemCallError => e + Editor.error_dialog(self, "Failed to store JSON file: #{e}!") + end + + # Load the file named _filename_ into the editor as a JSON document. + def load_file(filename) + if filename + if File.directory?(filename) + Editor.error_dialog(self, "Try to select a JSON file!") + nil + else + @filename = filename + if data = read_data(filename) + toplevel.display_status("Loaded data from '#@filename'.") + end + display_title + data + end + end + end + + # Load the data at location _uri_ into the editor as a JSON document. + def load_location(uri) + data = read_data(uri) or return + @filename = nil + toplevel.display_status("Loaded data from '#{uri}'.") + display_title + data + end + + def parse_json(json) + check_pretty_printed(json) + if @encoding && !/^utf8$/i.match(@encoding) + iconverter = Iconv.new('utf8', @encoding) + json = iconverter.iconv(json) + end + JSON::parse(json, :max_nesting => false, :create_additions => false) + end + private :parse_json + + # Read a JSON document from the file named _filename_, parse it into a + # ruby data structure, and return the data. + def read_data(filename) + open(filename) do |f| + json = f.read + return parse_json(json) + end + rescue => e + Editor.error_dialog(self, "Failed to parse JSON file: #{e}!") + return + end + + # Open a file selecton dialog, displaying _message_, and return the + # selected filename or nil, if no file was selected. + def select_file(message) + filename = nil + fs = FileSelection.new(message) + fs.set_modal(true) + @default_dir = File.join(Dir.pwd, '') unless @default_dir + fs.set_filename(@default_dir) + fs.set_transient_for(self) + fs.signal_connect(:destroy) { Gtk.main_quit } + fs.ok_button.signal_connect(:clicked) do + filename = fs.filename + @default_dir = File.join(File.dirname(filename), '') + fs.destroy + Gtk.main_quit + end + fs.cancel_button.signal_connect(:clicked) do + fs.destroy + Gtk.main_quit + end + fs.show_all + Gtk.main + filename + end + + # Ask for location URI a to load data from. Returns the URI as a string. 
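+    # Return value sketch (illustrative only):
+    #
+    #   ask_for_location   # => "http://example.com/doc.json"  (OK pressed)
+    #   ask_for_location   # => nil                            (cancelled)
+    #
+    # The last entered location is remembered in @location and used to
+    # prefill the entry the next time the dialog is shown.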
+ def ask_for_location + dialog = Dialog.new( + "Load data from location...", + nil, nil, + [ Stock::OK, Dialog::RESPONSE_ACCEPT ], + [ Stock::CANCEL, Dialog::RESPONSE_REJECT ] + ) + hbox = HBox.new(false, 5) + + hbox.pack_start(Label.new("Location:"), false) + hbox.pack_start(location_input = Entry.new) + location_input.width_chars = 60 + location_input.text = @location || '' + + dialog.vbox.pack_start(hbox, false) + + dialog.signal_connect(:'key-press-event', &DEFAULT_DIALOG_KEY_PRESS_HANDLER) + dialog.show_all + dialog.run do |response| + if response == Dialog::RESPONSE_ACCEPT + return @location = location_input.text + end + end + return + ensure + dialog.destroy if dialog + end + end + + class << self + # Starts a JSON Editor. If a block was given, it yields + # to the JSON::Editor::MainWindow instance. + def start(encoding = 'utf8') # :yield: window + Gtk.init + @window = Editor::MainWindow.new(encoding) + @window.icon_list = [ Editor.fetch_icon('json') ] + yield @window if block_given? + @window.show_all + Gtk.main + end + + # Edit the string _json_ with encoding _encoding_ in the editor. + def edit(json, encoding = 'utf8') + start(encoding) do |window| + window.edit json + end + end + + attr_reader :window + end + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/ext.rb b/vendor/gems/gems/json_pure-1.2.0/lib/json/ext.rb new file mode 100644 index 00000000..719e5602 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/ext.rb @@ -0,0 +1,15 @@ +require 'json/common' + +module JSON + # This module holds all the modules/classes that implement JSON's + # functionality as C extensions. + module Ext + require 'json/ext/parser' + require 'json/ext/generator' + $DEBUG and warn "Using c extension for JSON." + JSON.parser = Parser + JSON.generator = Generator + end + + JSON_LOADED = true +end diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/json.xpm b/vendor/gems/gems/json_pure-1.2.0/lib/json/json.xpm new file mode 100644 index 00000000..2cb626bb --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/json.xpm @@ -0,0 +1,1499 @@ +/* XPM */ +static char * json_xpm[] = { +"64 64 1432 2", +" c None", +". c #641839", +"+ c #CF163C", +"@ c #D31C3B", +"# c #E11A38", +"$ c #5F242D", +"% c #320C22", +"& c #9B532D", +"* c #F32E34", +"= c #820F33", +"- c #4B0F34", +"; c #8E1237", +"> c #944029", +", c #961325", +"' c #A00C24", +") c #872C23", +"! 
c #694021", +"~ c #590D1F", +"{ c #420528", +"] c #D85A2D", +"^ c #7E092B", +"/ c #0E0925", +"( c #0D081F", +"_ c #0F081E", +": c #12071F", +"< c #360620", +"[ c #682A21", +"} c #673F21", +"| c #780E21", +"1 c #A82320", +"2 c #8D1D1F", +"3 c #970127", +"4 c #0D0123", +"5 c #0D0324", +"6 c #3B1E28", +"7 c #C28429", +"8 c #0C0523", +"9 c #0C041E", +"0 c #0E031A", +"a c #11031A", +"b c #13031B", +"c c #13031C", +"d c #11031D", +"e c #19051E", +"f c #390E20", +"g c #9C0C20", +"h c #C00721", +"i c #980320", +"j c #14031E", +"k c #CD9F32", +"l c #C29F2E", +"m c #0F0325", +"n c #0D0321", +"o c #0E0324", +"p c #D08329", +"q c #9D1B27", +"r c #1C0320", +"s c #0D011A", +"t c #120117", +"u c #130017", +"v c #150018", +"w c #160119", +"x c #17021A", +"y c #15021B", +"z c #11021E", +"A c #0F021F", +"B c #8C1821", +"C c #CF4522", +"D c #831821", +"E c #BA7033", +"F c #EDB339", +"G c #C89733", +"H c #280727", +"I c #0F051F", +"J c #0E0420", +"K c #591F27", +"L c #E47129", +"M c #612224", +"N c #0C021D", +"O c #120018", +"P c #140017", +"Q c #170017", +"R c #190018", +"S c #1B0019", +"T c #1B011A", +"U c #18011B", +"V c #15011C", +"W c #12031E", +"X c #460A21", +"Y c #A13823", +"Z c #784323", +"` c #5A0C21", +" . c #BC4530", +".. c #EB5B38", +"+. c #CE4E3B", +"@. c #DD9334", +"#. c #751A27", +"$. c #11071E", +"%. c #0F041C", +"&. c #1E0824", +"*. c #955A28", +"=. c #9A5027", +"-. c #1E0321", +";. c #11011A", +">. c #140018", +",. c #180018", +"'. c #1F001A", +"). c #20001B", +"!. c #1E001A", +"~. c #1B001A", +"{. c #16021B", +"]. c #16041E", +"^. c #220622", +"/. c #5F3525", +"(. c #DE5724", +"_. c #611021", +":. c #0F0925", +"<. c #D1892E", +"[. c #F27036", +"}. c #EC633B", +"|. c #DA293C", +"1. c #E64833", +"2. c #912226", +"3. c #11081C", +"4. c #110419", +"5. c #0F041E", +"6. c #451425", +"7. c #BF6F28", +"8. c #332225", +"9. c #0E021E", +"0. c #13001B", +"a. c #17001A", +"b. c #1C001B", +"c. c #21001C", +"d. c #23001C", +"e. c #21001B", +"f. c #19021A", +"g. c #17041E", +"h. c #150721", +"i. c #602424", +"j. c #D51223", +"k. c #540820", +"l. c #D04D2D", +"m. c #EA8933", +"n. c #875637", +"o. c #88543A", +"p. c #E5923A", +"q. c #891931", +"r. c #130B25", +"s. c #10051B", +"t. c #110217", +"u. c #12021A", +"v. c #761826", +"w. c #E2A728", +"x. c #300224", +"y. c #10011E", +"z. c #16001B", +"A. c #1B001B", +"B. c #21001A", +"C. c #1E0019", +"D. c #1D0019", +"E. c #1A011A", +"F. c #17031C", +"G. c #120720", +"H. c #4E0822", +"I. c #670721", +"J. c #C07630", +"K. c #F59734", +"L. c #BE1B35", +"M. c #0E1435", +"N. c #522037", +"O. c #DB8039", +"P. c #D45933", +"Q. c #420927", +"R. c #0F041D", +"S. c #140118", +"T. c #13021D", +"U. c #100423", +"V. c #7B6227", +"W. c #C04326", +"X. c #0E0020", +"Y. c #13001D", +"Z. c #18001B", +"`. 
c #1E001B", +" + c #22001C", +".+ c #22001B", +"++ c #1B011B", +"@+ c #16041D", +"#+ c #130520", +"$+ c #860521", +"%+ c #710520", +"&+ c #670A2A", +"*+ c #A66431", +"=+ c #E97536", +"-+ c #F8833A", +";+ c #F77A3A", +">+ c #C45337", +",+ c #0A1C35", +"'+ c #993638", +")+ c #F7863B", +"!+ c #F49736", +"~+ c #94462B", +"{+ c #0E031F", +"]+ c #130119", +"^+ c #160018", +"/+ c #16011B", +"(+ c #15021F", +"_+ c #120123", +":+ c #A65C28", +"<+ c #5C4D23", +"[+ c #0F001F", +"}+ c #14001D", +"|+ c #1A001B", +"1+ c #1F001B", +"2+ c #24001D", +"3+ c #25001D", +"4+ c #24001C", +"5+ c #1F001C", +"6+ c #1A011C", +"7+ c #16021E", +"8+ c #3F0421", +"9+ c #BC0522", +"0+ c #1C041E", +"a+ c #7F5531", +"b+ c #E68A38", +"c+ c #F8933E", +"d+ c #FA7942", +"e+ c #FB7543", +"f+ c #FA6F41", +"g+ c #F1793D", +"h+ c #7D3B3A", +"i+ c #28263B", +"j+ c #D45441", +"k+ c #F8A238", +"l+ c #996B2D", +"m+ c #0E0421", +"n+ c #12011A", +"o+ c #180019", +"p+ c #17001C", +"q+ c #12001F", +"r+ c #4C2B2A", +"s+ c #DB8130", +"t+ c #540023", +"u+ c #0F0120", +"v+ c #16011C", +"w+ c #22001D", +"x+ c #25001F", +"y+ c #26001F", +"z+ c #25001E", +"A+ c #24001E", +"B+ c #1D001C", +"C+ c #18011D", +"D+ c #16031F", +"E+ c #3C0522", +"F+ c #9B0821", +"G+ c #13041E", +"H+ c #F6462E", +"I+ c #E6AB37", +"J+ c #E7A03E", +"K+ c #FA9F44", +"L+ c #FB8A48", +"M+ c #FD7A4A", +"N+ c #FD794A", +"O+ c #FD7748", +"P+ c #FD7E45", +"Q+ c #FD8343", +"R+ c #FB5D42", +"S+ c #6E3A40", +"T+ c #EE8A37", +"U+ c #7E252B", +"V+ c #100520", +"W+ c #13011A", +"X+ c #170019", +"Y+ c #15001C", +"Z+ c #0F0020", +"`+ c #564427", +" @ c #E0BA29", +".@ c #5E2B25", +"+@ c #10011F", +"@@ c #17011C", +"#@ c #1E001D", +"$@ c #23001F", +"%@ c #250020", +"&@ c #24001F", +"*@ c #23001E", +"=@ c #21001E", +"-@ c #1B001C", +";@ c #17021D", +">@ c #14041E", +",@ c #AC0B25", +"'@ c #5E1420", +")@ c #F28635", +"!@ c #C2733E", +"~@ c #984C44", +"{@ c #EA9148", +"]@ c #FB844B", +"^@ c #FD7E4C", +"/@ c #FE7E4C", +"(@ c #FE7E4B", +"_@ c #FE7749", +":@ c #FD7148", +"<@ c #FB7D46", +"[@ c #F89641", +"}@ c #B95634", +"|@ c #0D0927", +"1@ c #11041D", +"2@ c #150119", +"3@ c #180017", +"4@ c #16001A", +"5@ c #13001E", +"6@ c #110023", +"7@ c #944C29", +"8@ c #EE6229", +"9@ c #3D0324", +"0@ c #12021F", +"a@ c #19011D", +"b@ c #21001F", +"c@ c #22001F", +"d@ c #20001E", +"e@ c #1F001D", +"f@ c #1C001C", +"g@ c #19011C", +"h@ c #3D1621", +"i@ c #B53622", +"j@ c #31061F", +"k@ c #841D34", +"l@ c #F2703F", +"m@ c #C14445", +"n@ c #E67349", +"o@ c #FB8E4B", +"p@ c #FD834C", +"q@ c #FE834D", +"r@ c #FE834C", +"s@ c #FE804C", +"t@ c #FD814B", +"u@ c #FB7D49", +"v@ c #F79B43", +"w@ c #AF1234", +"x@ c #0D0625", +"y@ c #13021C", +"z@ c #1A0019", +"A@ c #190019", +"B@ c #410225", +"C@ c #D39729", +"D@ c #AA5927", +"E@ c #0E0422", +"F@ c #15021E", +"G@ c #1A011D", +"H@ c #1D001D", +"I@ c #15031D", +"J@ c #240820", +"K@ c #A01023", +"L@ c #670B21", +"M@ c #3D0D33", +"N@ c #E63C3E", +"O@ c #EF7C45", +"P@ c #F59048", +"Q@ c #FB944A", +"R@ c #FD904A", +"S@ c #FE8E4B", +"T@ c #FE854A", +"U@ c #FE854B", +"V@ c #FE884C", +"W@ c #FC954B", +"X@ c #F8AB45", +"Y@ c #C37A35", +"Z@ c #0D0425", +"`@ c #13011B", +" # c #170018", +".# c #1A0018", +"+# c #1C0019", +"@# c #15001B", +"## c #100120", +"$# c #311F25", +"%# c #E68E28", +"&# c #7A1425", +"*# c #130321", +"=# c #17011E", +"-# c #1A001D", +";# c #19001B", +"># c #16021C", +",# c #130521", +"'# c #6F3123", +")# c #6D3022", +"!# c #C89433", +"~# c #EA7E3E", +"{# c #DB2943", +"]# c #EF7745", +"^# c #FB8544", +"/# c #FD9A43", +"(# c #FE9941", +"_# c #FE9D43", +":# c 
#FEA548", +"<# c #FEAE49", +"[# c #FCB944", +"}# c #CA9F35", +"|# c #0E0225", +"1# c #11001B", +"2# c #160019", +"3# c #12011B", +"4# c #0F0220", +"5# c #351D26", +"6# c #D85B28", +"7# c #6C0F26", +"8# c #190121", +"9# c #1B001E", +"0# c #1A001C", +"a# c #1D001B", +"b# c #130220", +"c# c #703A23", +"d# c #713A23", +"e# c #140327", +"f# c #411B36", +"g# c #C8713E", +"h# c #7A3A3F", +"i# c #CE2C3C", +"j# c #E77338", +"k# c #9C6535", +"l# c #9C6233", +"m# c #9C6332", +"n# c #9C6A35", +"o# c #C37D3C", +"p# c #FEAC41", +"q# c #FEC23E", +"r# c #826330", +"s# c #100122", +"t# c #120019", +"u# c #150017", +"v# c #190017", +"w# c #1B0018", +"x# c #12001A", +"y# c #10021F", +"z# c #1A0326", +"A# c #5F292A", +"B# c #7B4E29", +"C# c #3C0E25", +"D# c #1A0020", +"E# c #14021F", +"F# c #723B23", +"G# c #14001A", +"H# c #58042A", +"I# c #A28337", +"J# c #C8813B", +"K# c #B14B38", +"L# c #761231", +"M# c #5A132A", +"N# c #0D0726", +"O# c #0C0623", +"P# c #0B0723", +"Q# c #0B0A26", +"R# c #321C2D", +"S# c #C45B33", +"T# c #FEBB33", +"U# c #13052A", +"V# c #13011F", +"W# c #160017", +"X# c #15001A", +"Y# c #12001D", +"Z# c #94062A", +"`# c #630D2C", +" $ c #85292B", +".$ c #AA5E29", +"+$ c #1F0123", +"@$ c #19011F", +"#$ c #1E001C", +"$$ c #15031F", +"%$ c #712122", +"&$ c #712223", +"*$ c #14011B", +"=$ c #110321", +"-$ c #AF0C2B", +";$ c #E7D534", +">$ c #EAC934", +",$ c #84582D", +"'$ c #1B0824", +")$ c #11041E", +"!$ c #10021B", +"~$ c #100119", +"{$ c #100218", +"]$ c #0F041A", +"^$ c #0E0720", +"/$ c #2C1026", +"($ c #D8A328", +"_$ c #140322", +":$ c #160016", +"<$ c #14001F", +"[$ c #120024", +"}$ c #100128", +"|$ c #3C032F", +"1$ c #2C062E", +"2$ c #29022B", +"3$ c #A31D29", +"4$ c #976A25", +"5$ c #1A0321", +"6$ c #17031E", +"7$ c #1B021D", +"8$ c #20001C", +"9$ c #14041F", +"0$ c #703422", +"a$ c #6F3522", +"b$ c #8D0328", +"c$ c #920329", +"d$ c #0F0326", +"e$ c #100321", +"f$ c #11021B", +"g$ c #130117", +"h$ c #140016", +"i$ c #150015", +"j$ c #140015", +"k$ c #130116", +"l$ c #120219", +"m$ c #11031C", +"n$ c #12031D", +"o$ c #170016", +"p$ c #160020", +"q$ c #250029", +"r$ c #670033", +"s$ c #DCA238", +"t$ c #F5C736", +"u$ c #9A732E", +"v$ c #110227", +"w$ c #110324", +"x$ c #811924", +"y$ c #A04323", +"z$ c #250721", +"A$ c #1A041F", +"B$ c #1E011D", +"C$ c #1C011C", +"D$ c #18031D", +"E$ c #130721", +"F$ c #6F3623", +"G$ c #6B3622", +"H$ c #1A001A", +"I$ c #14011F", +"J$ c #12011E", +"K$ c #11011C", +"L$ c #140117", +"M$ c #170015", +"N$ c #150016", +"O$ c #120119", +"P$ c #11011B", +"Q$ c #11001A", +"R$ c #130018", +"S$ c #170118", +"T$ c #170119", +"U$ c #18021E", +"V$ c #1A0126", +"W$ c #6F2332", +"X$ c #E5563B", +"Y$ c #F1B83F", +"Z$ c #F6CC38", +"`$ c #9D7A2D", +" % c #130123", +".% c #130320", +"+% c #2A0721", +"@% c #B00E24", +"#% c #7D0B23", +"$% c #1F0522", +"%% c #1E0220", +"&% c #1D011E", +"*% c #1A031E", +"=% c #15051F", +"-% c #241322", +";% c #A32F23", +">% c #670E21", +",% c #1C001A", +"'% c #19001A", +")% c #180016", +"!% c #160118", +"~% c #140219", +"{% c #11021C", +"]% c #10021E", +"^% c #0F011D", +"/% c #170117", +"(% c #160219", +"_% c #17041D", +":% c #190523", +"<% c #8C042E", +"[% c #B65838", +"}% c #E9D73F", +"|% c #EED43E", +"1% c #D85538", +"2% c #493129", +"3% c #130120", +"4% c #15021D", +"5% c #330822", +"6% c #8A0825", +"7% c #3C0424", +"8% c #1E0322", +"9% c #1C0321", +"0% c #180421", +"a% c #130822", +"b% c #AF2D24", +"c% c #BC5623", +"d% c #2F071F", +"e% c #1A041C", +"f% c #1C031C", +"g% c #1D011C", +"h% c #160117", +"i% c #150419", +"j% c #12081D", +"k% c 
#0F0923", +"l% c #A77027", +"m% c #A60525", +"n% c #11021A", +"o% c #130218", +"p% c #150319", +"q% c #16061D", +"r% c #180923", +"s% c #9C1D2B", +"t% c #A32636", +"u% c #A66E3B", +"v% c #4B2E3C", +"w% c #412C36", +"x% c #36012D", +"y% c #140123", +"z% c #17001E", +"A% c #19011B", +"B% c #1A0421", +"C% c #340425", +"D% c #9E0326", +"E% c #1F0424", +"F% c #1C0524", +"G% c #180724", +"H% c #A91024", +"I% c #D55D24", +"J% c #90071E", +"K% c #3C051D", +"L% c #1C021C", +"M% c #1C011A", +"N% c #1D001A", +"O% c #160116", +"P% c #150216", +"Q% c #140217", +"R% c #140618", +"S% c #120D1D", +"T% c #231925", +"U% c #B16A2E", +"V% c #FDAC34", +"W% c #D58631", +"X% c #280E2A", +"Y% c #0D0A23", +"Z% c #0F0920", +"`% c #120C21", +" & c #1F1026", +".& c #A3352E", +"+& c #EE9F36", +"@& c #5D2A3C", +"#& c #960D3C", +"$& c #970638", +"%& c #A00330", +"&& c #4D0126", +"*& c #1C001F", +"=& c #280120", +"-& c #290223", +";& c #1F0425", +">& c #260726", +",& c #340A26", +"'& c #850925", +")& c #3A0823", +"!& c #82071D", +"~& c #5E071D", +"{& c #18051C", +"]& c #18021A", +"^& c #190118", +"/& c #160217", +"(& c #150418", +"_& c #130618", +":& c #110718", +"<& c #10081A", +"[& c #110D1D", +"}& c #291C24", +"|& c #A73B2D", +"1& c #FD6B36", +"2& c #FD853C", +"3& c #FD863B", +"4& c #C24A35", +"5& c #6B442F", +"6& c #6D302D", +"7& c #6E252E", +"8& c #8E3B32", +"9& c #DE7739", +"0& c #F48E3F", +"a& c #DD8D41", +"b& c #854F3D", +"c& c #7E2D35", +"d& c #33082B", +"e& c #1C0222", +"f& c #20001F", +"g& c #1F0222", +"h& c #1A0524", +"i& c #440C27", +"j& c #BC1427", +"k& c #20041B", +"l& c #53061C", +"m& c #25071B", +"n& c #11061A", +"o& c #130418", +"p& c #140317", +"q& c #150217", +"r& c #160318", +"s& c #12051B", +"t& c #100C1D", +"u& c #0E101E", +"v& c #0C121F", +"w& c #0C1321", +"x& c #781725", +"y& c #B25D2C", +"z& c #FA6335", +"A& c #FD633C", +"B& c #FE6D42", +"C& c #FE7C42", +"D& c #FE813F", +"E& c #FE873C", +"F& c #FD743B", +"G& c #FB683B", +"H& c #FA7A3E", +"I& c #F98242", +"J& c #F97844", +"K& c #F98943", +"L& c #F79C3D", +"M& c #A25133", +"N& c #280B28", +"O& c #1D021F", +"P& c #1F011C", +"Q& c #280321", +"R& c #1C0724", +"S& c #3F1C27", +"T& c #D33C27", +"U& c #0E061B", +"V& c #0C091C", +"W& c #0C0A1B", +"X& c #0E091A", +"Y& c #11081B", +"Z& c #100A20", +"`& c #0E0D23", +" * c #551227", +".* c #B21829", +"+* c #C42329", +"@* c #C62C29", +"#* c #C55429", +"$* c #E76F2B", +"%* c #F14232", +"&* c #F95E3A", +"** c #FC6740", +"=* c #FE6E45", +"-* c #FE7246", +";* c #FE7545", +">* c #FE7744", +",* c #FD7745", +"'* c #FD7845", +")* c #FD7847", +"!* c #FD7948", +"~* c #FD7B44", +"{* c #FC7C3B", +"]* c #6F3130", +"^* c #140B24", +"/* c #19031D", +"(* c #1C011B", +"_* c #5A011F", +":* c #B70421", +"<* c #380824", +"[* c #3E2626", +"}* c #9F5626", +"|* c #13051E", +"1* c #360A21", +"2* c #361223", +"3* c #371724", +"4* c #381824", +"5* c #3B1524", +"6* c #3E1E26", +"7* c #471A29", +"8* c #DB252E", +"9* c #ED2733", +"0* c #EE5436", +"a* c #F04237", +"b* c #F33934", +"c* c #F53D2F", +"d* c #D7312B", +"e* c #AF212B", +"f* c #3A2C31", +"g* c #F65F39", +"h* c #FB6F41", +"i* c #FD6D45", +"j* c #FE7047", +"k* c #FE7647", +"l* c #FE7847", +"m* c #FE7848", +"n* c #FE7748", +"o* c #FE7948", +"p* c #FE7C48", +"q* c #FE7C47", +"r* c #FE7642", +"s* c #FE7439", +"t* c #6D332C", +"u* c #100B21", +"v* c #16031B", +"w* c #2B001B", +"x* c #22011F", +"y* c #220521", +"z* c #1B0A23", +"A* c #421425", +"B* c #951924", +"C* c #381023", +"D* c #E94028", +"E* c #E7302B", +"F* c #EF432D", +"G* c #F4302E", +"H* c #F32C30", +"I* c #CB4432", +"J* c 
#DD3235", +"K* c #EF4B3A", +"L* c #F0333E", +"M* c #CC3D3F", +"N* c #E4313C", +"O* c #F34834", +"P* c #D13E2C", +"Q* c #431825", +"R* c #0E1424", +"S* c #3C202C", +"T* c #F15537", +"U* c #F97140", +"V* c #FC6E45", +"W* c #FE7547", +"X* c #FE7947", +"Y* c #FE7B48", +"Z* c #FE7D48", +"`* c #FE8047", +" = c #FE7A42", +".= c #FE7A38", +"+= c #6D442B", +"@= c #0F0B21", +"#= c #15031A", +"$= c #49001B", +"%= c #2F001C", +"&= c #21021E", +"*= c #220620", +"== c #1B0D23", +"-= c #641625", +";= c #951823", +">= c #390F25", +",= c #AC3A2A", +"'= c #B6492E", +")= c #ED7531", +"!= c #F45A34", +"~= c #F54C36", +"{= c #C72D39", +"]= c #DE283C", +"^= c #F33B40", +"/= c #F34142", +"(= c #D0393F", +"_= c #E72E39", +":= c #DB3C2E", +"<= c #461724", +"[= c #0F0D1E", +"}= c #140B1E", +"|= c #341427", +"1= c #CB4834", +"2= c #F7743F", +"3= c #FB7145", +"4= c #FE7747", +"5= c #FE7A47", +"6= c #FF7B48", +"7= c #FF7C48", +"8= c #FE7F47", +"9= c #FE8247", +"0= c #FE8642", +"a= c #FE8439", +"b= c #6D442D", +"c= c #0F0A21", +"d= c #14031A", +"e= c #20031D", +"f= c #210821", +"g= c #191024", +"h= c #CC1C25", +"i= c #961423", +"j= c #2C162C", +"k= c #BD242E", +"l= c #EF2C31", +"m= c #F54C34", +"n= c #F34037", +"o= c #F5353A", +"p= c #F7413D", +"q= c #F8423D", +"r= c #F93A39", +"s= c #F95731", +"t= c #341425", +"u= c #110A1D", +"v= c #140619", +"w= c #18051B", +"x= c #200F26", +"y= c #864833", +"z= c #F8773F", +"A= c #FC7445", +"B= c #FF7E48", +"C= c #FF7E49", +"D= c #FF7D49", +"E= c #FF7D48", +"F= c #FE8347", +"G= c #FE8743", +"H= c #FE893B", +"I= c #6E452F", +"J= c #100E23", +"K= c #14041A", +"L= c #55041D", +"M= c #540921", +"N= c #161124", +"O= c #CE6A25", +"P= c #3F1129", +"Q= c #170A29", +"R= c #0F0F29", +"S= c #15132B", +"T= c #1E182D", +"U= c #A82B3D", +"V= c #CB6633", +"W= c #CC6932", +"X= c #CC3D2D", +"Y= c #331225", +"Z= c #0F091C", +"`= c #120417", +" - c #160216", +".- c #190419", +"+- c #210F26", +"@- c #8C4934", +"#- c #F97A40", +"$- c #FC7545", +"%- c #FF7B49", +"&- c #FE7D46", +"*- c #FE7E43", +"=- c #FD7B3E", +"-- c #FA6934", +";- c #532328", +">- c #130B1D", +",- c #150519", +"'- c #14041C", +")- c #120920", +"!- c #C43624", +"~- c #A21E23", +"{- c #F87C30", +"]- c #C9302D", +"^- c #300F2A", +"/- c #591129", +"(- c #171328", +"_- c #171628", +":- c #141829", +"<- c #101A2B", +"[- c #0F172B", +"}- c #0F1226", +"|- c #0E0C20", +"1- c #100619", +"2- c #140316", +"3- c #19051B", +"4- c #3C1428", +"5- c #E04B36", +"6- c #FA7B41", +"7- c #FD7346", +"8- c #FE7548", +"9- c #FF7849", +"0- c #FF7749", +"a- c #FE7B47", +"b- c #FE7945", +"c- c #FC7740", +"d- c #FA7E39", +"e- c #C1432F", +"f- c #131523", +"g- c #130A1C", +"h- c #420621", +"i- c #D08423", +"j- c #F87739", +"k- c #C03D37", +"l- c #962B34", +"m- c #A14332", +"n- c #E54B30", +"o- c #9E3E2F", +"p- c #7F262E", +"q- c #922D2E", +"r- c #9C4B2E", +"s- c #65212C", +"t- c #101628", +"u- c #101022", +"v- c #11091C", +"w- c #130619", +"x- c #160A1E", +"y- c #43252C", +"z- c #F66439", +"A- c #FA6942", +"B- c #FD6C47", +"C- c #FE6E48", +"D- c #FE6F48", +"E- c #FE7049", +"F- c #FE714A", +"G- c #FE744A", +"H- c #FE7846", +"I- c #FD7243", +"J- c #FC703E", +"K- c #FA6C37", +"L- c #81312B", +"M- c #121123", +"N- c #15071D", +"O- c #16031A", +"P- c #17021B", +"Q- c #8F3D22", +"R- c #F8393E", +"S- c #E42A3D", +"T- c #E7473B", +"U- c #FB503B", +"V- c #FB4F3A", +"W- c #F95439", +"X- c #ED4C38", +"Y- c #F45938", +"Z- c #FB6537", +"`- c #EA5236", +" ; c #CE6232", +".; c #CD392C", +"+; c #181425", +"@; c #120F21", +"#; c #130D20", +"$; c #151225", +"%; c #903431", +"&; c 
#F8703D", +"*; c #FB6344", +"=; c #FD6748", +"-; c #FE6849", +";; c #FE6949", +">; c #FE6A49", +",; c #FE6C4A", +"'; c #FE704A", +"); c #FE734A", +"!; c #FE7449", +"~; c #FE7347", +"{; c #FE7145", +"]; c #FD6C42", +"^; c #FD753D", +"/; c #F36E35", +"(; c #CB452C", +"_; c #600D24", +":; c #1C061F", +"<; c #1E031F", +"[; c #5B3821", +"}; c #CE9822", +"|; c #FA4341", +"1; c #FB4341", +"2; c #FC4541", +"3; c #FC4542", +"4; c #FC4143", +"5; c #FC4D42", +"6; c #FB5042", +"7; c #FB5342", +"8; c #FC5242", +"9; c #FD4F40", +"0; c #FD503E", +"a; c #FB6339", +"b; c #F45E33", +"c; c #A12A2E", +"d; c #401E2C", +"e; c #452D2F", +"f; c #F74F38", +"g; c #FA5940", +"h; c #FC6245", +"i; c #FE6447", +"j; c #FE6449", +"k; c #FE6549", +"l; c #FE6749", +"m; c #FE6B49", +"n; c #FE6D49", +"o; c #FE6D48", +"p; c #FE6D47", +"q; c #FE6D45", +"r; c #FE6C44", +"s; c #FE6A42", +"t; c #FE663C", +"u; c #FC6233", +"v; c #752129", +"w; c #1F0922", +"x; c #750520", +"y; c #81061F", +"z; c #FA3D42", +"A; c #FB4142", +"B; c #FD4543", +"C; c #FD4844", +"D; c #FD4A45", +"E; c #FD4D45", +"F; c #FD5045", +"G; c #FD5345", +"H; c #FE5346", +"I; c #FE5445", +"J; c #FD5444", +"K; c #FC4F41", +"L; c #FA513D", +"M; c #F95339", +"N; c #F63736", +"O; c #F75737", +"P; c #F95F3B", +"Q; c #FB5840", +"R; c #FD5F43", +"S; c #FE6345", +"T; c #FE6547", +"U; c #FE6548", +"V; c #FE6448", +"W; c #FE6248", +"X; c #FE6348", +"Y; c #FE6748", +"Z; c #FE6848", +"`; c #FE6846", +" > c #FE6A45", +".> c #FE6D43", +"+> c #FE703F", +"@> c #FC6F36", +"#> c #6F302B", +"$> c #140A22", +"%> c #FA3B42", +"&> c #FC4243", +"*> c #FD4744", +"=> c #FE4A45", +"-> c #FE4C47", +";> c #FE4D47", +">> c #FE5047", +",> c #FE5347", +"'> c #FE5447", +")> c #FD5246", +"!> c #FB503F", +"~> c #FA543D", +"{> c #9B3D3B", +"]> c #A3433B", +"^> c #F9683D", +"/> c #FC6940", +"(> c #FE6342", +"_> c #FE6645", +":> c #FE6646", +"<> c #FE6147", +"[> c #FE6048", +"}> c #FE6148", +"|> c #FE6746", +"1> c #FE6A46", +"2> c #FE6F45", +"3> c #FE7441", +"4> c #FC7D39", +"5> c #6C422E", +"6> c #0F0F23", +"7> c #FA4142", +"8> c #FC4643", +"9> c #FE4D46", +"0> c #FE4E47", +"a> c #FE4F48", +"b> c #FE5148", +"c> c #FE5348", +"d> c #FE5548", +"e> c #FE5247", +"f> c #FD5445", +"g> c #FC5544", +"h> c #F96041", +"i> c #D33F3D", +"j> c #392D39", +"k> c #973C38", +"l> c #F94E3A", +"m> c #FD693E", +"n> c #FE6C43", +"o> c #FE6047", +"p> c #FE5D47", +"q> c #FE5E48", +"r> c #FE6948", +"s> c #FE6947", +"t> c #FE6B47", +"u> c #FE6E46", +"v> c #FD6D43", +"w> c #FB723D", +"x> c #D54A33", +"y> c #301C29", +"z> c #FB4A42", +"A> c #FD4B44", +"B> c #FE4F47", +"C> c #FE5048", +"D> c #FE5648", +"E> c #FE5848", +"F> c #FE5747", +"G> c #FE5547", +"H> c #FC5945", +"I> c #F95742", +"J> c #F3543D", +"K> c #A33336", +"L> c #302032", +"M> c #152433", +"N> c #CD3E38", +"O> c #FD5A3F", +"P> c #FE6343", +"Q> c #FE6446", +"R> c #FE6247", +"S> c #FE6A47", +"T> c #FC6542", +"U> c #FB6A3B", +"V> c #FA6D34", +"W> c #D73C2D", +"X> c #442428", +"Y> c #281323", +"Z> c #FD4E42", +"`> c #FD4D43", +" , c #FE4D45", +"., c #FE5248", +"+, c #FE5947", +"@, c #FE5C47", +"#, c #FE5B47", +"$, c #FE5A47", +"%, c #FE5847", +"&, c #FC5C45", +"*, c #F95B43", +"=, c #F3613F", +"-, c #E74F37", +";, c #8C2431", +">, c #161E2F", +",, c #CD4E33", +"', c #FD503A", +"), c #FE5D40", +"!, c #FE6445", +"~, c #FE6946", +"{, c #FE6847", +"], c #FE6747", +"^, c #FD6644", +"/, c #FD6241", +"(, c #FD5B3D", +"_, c #FE6739", +":, c #FE6135", +"<, c #AB4830", +"[, c #733E2A", +"}, c #161224", +"|, c #FC4E42", +"1, c #FE4D44", +"2, c #FE4E46", +"3, c #FE5147", +"4, c 
#FE5E47", +"5, c #FD5C46", +"6, c #FA5B44", +"7, c #F45441", +"8, c #EB393A", +"9, c #CC3433", +"0, c #47212F", +"a, c #59242F", +"b, c #FC6734", +"c, c #FC6F3A", +"d, c #FC723E", +"e, c #FD6540", +"f, c #FE6442", +"g, c #FE6643", +"h, c #FE6944", +"i, c #FE6546", +"j, c #FE6444", +"k, c #FE6143", +"l, c #FE5E41", +"m, c #FE613F", +"n, c #FE683C", +"o, c #FE7937", +"p, c #A25030", +"q, c #692629", +"r, c #151122", +"s, c #FA573F", +"t, c #FB4D40", +"u, c #FC4F43", +"v, c #FE5246", +"w, c #FF6347", +"x, c #FE5F48", +"y, c #F65942", +"z, c #F0493D", +"A, c #ED3736", +"B, c #73262F", +"C, c #10152C", +"D, c #3B292F", +"E, c #363034", +"F, c #AC3938", +"G, c #FC6B3B", +"H, c #FD763C", +"I, c #FE6D3F", +"J, c #FE6341", +"K, c #FE6642", +"L, c #FE6745", +"M, c #FE6245", +"N, c #FE6244", +"O, c #FE6841", +"P, c #FF683B", +"Q, c #EC7035", +"R, c #D0412D", +"S, c #3A1627", +"T, c #CF3938", +"U, c #F6543C", +"V, c #FB5040", +"W, c #FD5544", +"X, c #FE5A48", +"Y, c #FE5D48", +"Z, c #FE5F47", +"`, c #FF6147", +" ' c #FD5C45", +".' c #FB5B43", +"+' c #FA5A42", +"@' c #F76040", +"#' c #F4623D", +"$' c #F26D38", +"%' c #EC4130", +"&' c #380E2B", +"*' c #13122C", +"=' c #362D31", +"-' c #353435", +";' c #352E37", +">' c #2D3337", +",' c #CC5838", +"'' c #CD6F3A", +")' c #CE6E3D", +"!' c #FE793F", +"~' c #FD7541", +"{' c #FD6243", +"]' c #FE6545", +"^' c #FF6543", +"/' c #FF6240", +"(' c #FE723B", +"_' c #FE8034", +":' c #442D2C", +"<' c #311725", +"[' c #222830", +"}' c #B73B36", +"|' c #F94C3D", +"1' c #FD5543", +"2' c #FE5B48", +"3' c #FF5E47", +"4' c #FE5C48", +"5' c #FC5B44", +"6' c #F95640", +"7' c #C34E3D", +"8' c #A45A3A", +"9' c #F37438", +"0' c #F28935", +"a' c #AF422F", +"b' c #240D2B", +"c' c #88292F", +"d' c #FA8E34", +"e' c #FC7E38", +"f' c #FC5939", +"g' c #694A37", +"h' c #693437", +"i' c #382638", +"j' c #142439", +"k' c #9F483A", +"l' c #C45E3C", +"m' c #FD7240", +"n' c #FF6645", +"o' c #FF6245", +"p' c #FF6045", +"q' c #FF6146", +"r' c #FF6246", +"s' c #FF6446", +"t' c #FF6545", +"u' c #FE763F", +"v' c #FE7237", +"w' c #C65331", +"x' c #3D272A", +"y' c #0D1E2B", +"z' c #683032", +"A' c #F9453A", +"B' c #FD5341", +"C' c #FE5A46", +"D' c #FF5A48", +"E' c #FE5948", +"F' c #FD5A47", +"G' c #FC5D43", +"H' c #F95B3D", +"I' c #713F37", +"J' c #1E2D32", +"K' c #C44531", +"L' c #EF7A2F", +"M' c #6B2E2C", +"N' c #0F0E2C", +"O' c #F56633", +"P' c #FA803A", +"Q' c #FC673E", +"R' c #FD673E", +"S' c #FC6F3C", +"T' c #FA6E3B", +"U' c #C6633A", +"V' c #A06739", +"W' c #835638", +"X' c #381F38", +"Y' c #713B38", +"Z' c #7B503C", +"`' c #FE7741", +" ) c #FE7344", +".) c #FE6D46", +"+) c #FF6946", +"@) c #FF5E46", +"#) c #FF5D46", +"$) c #FF5D47", +"%) c #FF5F48", +"&) c #FF6248", +"*) c #FE6941", +"=) c #FC783C", +"-) c #C46B35", +";) c #892730", +">) c #111629", +",) c #1F2630", +"') c #AD3939", +")) c #FC5D41", +"!) c #FE5946", +"~) c #FF5848", +"{) c #FE5549", +"]) c #FC5E42", +"^) c #FA673B", +"/) c #DB7033", +"() c #392E2B", +"_) c #311A28", +":) c #3C2127", +"<) c #1D1027", +"[) c #92102C", +"}) c #F58336", +"|) c #FA673E", +"1) c #FD6642", +"2) c #FD5A41", +"3) c #FC6D41", +"4) c #FC6D3F", +"5) c #FD683E", +"6) c #F38C39", +"7) c #CE6535", +"8) c #612E34", +"9) c #1D2637", +"0) c #71513E", +"a) c #FF6847", +"b) c #FF5F47", +"c) c #FF5A46", +"d) c #FF5847", +"e) c #FF5748", +"f) c #FF594A", +"g) c #FF5E4B", +"h) c #FE654C", +"i) c #FE694B", +"j) c #FE6B48", +"k) c #FC6A43", +"l) c #F7683E", +"m) c #EC6E39", +" ", +" ", +" ", +" ", +" ", +" ", +" ", +" ", +" . + @ # $ % ", +" & * = - ; > , ' ) ! 
~ ", +" { ] ^ / ( _ : < [ } | 1 2 ", +" 3 4 5 6 7 8 9 0 a b c d e f g h i j ", +" k l m n o p q r s t u v w x y z A B C D ", +" E F G H I J K L M N O P Q R S T U V W X Y Z ` ", +" ...+.@.#.$.%.&.*.=.-.;.>.,.S '.).!.~.{.].^./.(._. ", +" :.<.[.}.|.1.2.3.4.5.6.7.8.9.0.a.b.c.d.e.!.S f.g.h.i.j.k. ", +" l.m.n.o.p.q.r.s.t.u.J v.w.x.y.z.A.c.d.d.B.C.D.E.F.G.H.I. ", +" J.K.L.M.N.O.P.Q.R.t S.T.U.V.W.X.Y.Z.`. +d.d..+B.'.++@+#+$+%+ ", +" &+*+=+-+;+>+,+'+)+!+~+{+]+^+/+(+_+:+<+[+}+|+1+d.2+3+4+d.5+6+7+8+9+0+ ", +" a+b+c+d+e+f+g+h+i+j+k+l+m+n+^+o+p+q+r+s+t+u+v+b.w+x+y+z+A+w+B+C+D+E+F+G+ ", +" H+I+J+K+L+M+N+O+P+Q+R+S+T+U+V+W+Q ,.X+Y+Z+`+ @.@+@@@#@$@%@&@*@=@#@-@;@>@,@'@ ", +" )@!@~@{@]@^@/@(@_@:@<@[@}@|@1@2@3@R ,.4@5@6@7@8@9@0@a@#@b@c@=@d@e@f@g@>@h@i@j@ ", +" k@l@m@n@o@p@q@r@s@t@u@v@w@x@y@^+R S z@A@z.q+B@C@D@E@F@G@H@#@e@#@#@f@g@I@J@K@L@ ", +" M@N@O@P@Q@R@S@T@U@V@W@X@Y@Z@`@ #.#+#+#S A@@###$#%#&#*#=#-#f@B+B+B+f@;#>#,#'#)# ", +" !#~#{#]#^#/#(#(#_#:#<#[#}#|#1#^+.#S +#+#z@2#3#4#5#6#7#8#9#0#A.B+B+a#A.@@b#c#d# ", +" e#f#g#h#i#j#k#l#m#n#o#p#q#r#s#t#u#v#.#w#S R ^+x#y#z#A#B#C#D#-#A.a#`.`.b.g@E#d#F# ", +" G#0@H#I#J#K#L#M#N#O#P#Q#R#S#T#U#V#>.W#3@v#R R X+X#Y#s#Z#`# $.$+$@$g@f@5+5+#$6+$$%$&$ ", +" *$=$-$;$>$,$'$)$!$~${$]$^$/$($_$*$u#:$Q 3@,.X+z.<$[$}$|$1$2$3$4$5$6$7$e@8$#$G@9$0$a$ ", +" ,.4@E#b$c$d$e$f$g$h$i$j$k$l$m$n$`@>.:$o$3@,. #a.p$q$r$s$t$u$v$w$x$y$z$A$B$#@C$D$E$F$G$ ", +" R S H$v+I$J$K$n+L$:$o$o$M$N$L$O$P$Q$R$N$o$3@S$T$U$V$W$X$Y$Z$`$ %.%+%@%#%$%%%&%*%=%-%;%>% ", +" E.,%~.'%Z.4@v W#o$)%)%)%Q !%~%{%]%^%Q$u u#/%(%_%:%<%[%}%|%1%2%3%4%=%5%6%7%8%9%0%a%b%c%d% ", +" e%f%g%a#,%,%z@R 3@3@3@)%Q h%i%j%k%l%m%{+n%o%p%q%r%s%t%u%v%w%x%y%z%A%*%B%C%D%E%F%G%H%I% ", +" J%K%L%M%N%D.S v#)%)%O%P%Q%R%S%T%U%V%W%X%Y%Z%`% &.&+&@&#&$&%&&&*&f@a##@=&-&;&>&,&'&)& ", +" !&~&{&]&^&.#w#^&/%/&(&_&:&<&[&}&|&1&2&3&4&5&6&7&8&9&0&a&b&c&d&e&e@1+5+e@f&g&h&i&j& ", +" k&l&m&n&o&p&q&r&i%s&3.t&u&v&w&x&y&z&A&B&C&D&E&F&G&H&I&J&K&L&M&N&O&P&1+`.e@f&Q&R&S&T& ", +" 0 U&V&W&X&<&Y&j%Z&`& *.*+*@*#*$*%*&***=*-*;*>*>*,*'*)*!*~*{*]*^*/*(*a#B+#@_*:*<*[*}* ", +" |*1*2*3*4*5*6*7*8*9*0*a*b*c*d*e*f*g*h*i*j*k*l*m*n*o*p*q*r*s*t*u*v*E.w*d.e@x*y*z*A*B* ", +" C*D*E*F*G*H*I*J*K*L*M*N*O*P*Q*R*S*T*U*V*W*l*X*o*o*Y*Z*`* =.=+=@=#='%$=%=e@&=*===-=;= ", +" >=,='=)=!=~={=]=^=/=(=_=:=<=[=}=|=1=2=3=4=5=p*6=6=7=8=9=0=a=b=c=d=A@~.b.B+e=f=g=h=i= ", +" j=k=l=m=n=o=p=q=r=s=t=u=v=w=x=y=z=A=5=Z*B=C=D=E=8=F=G=H=I=J=K=S$R z@'%L=M=N=O= ", +" P=Q=R=S=T=U=V=W=X=Y=Z=`= -.-+-@-#-$-5=p*E=D=%-%-q*&-*-=---;->-,-/%3@^+'-)-!-~- ", +" {-]-^-/-(-_-:-<-[-}-|-1-2- -3-4-5-6-7-8-n*m*9-0-9-o*a-b-c-d-e-f-g-(&h%w c h-i- ", +" j-k-l-m-n-o-p-q-r-s-t-u-v-w-,-x-y-z-A-B-C-D-E-E-F-G-_@m*H-I-J-K-L-M-N-O-P-(+Q- ", +" R-S-T-U-V-W-X-Y-Z-`- ;.;+;@;#;$;%;&;*;=;-;-;;;>;,;';);!;~;{;];^;/;(;_;:;<;[;}; ", +" |;1;2;3;4;5;6;7;8;9;0;a;b;c;d;e;f;g;h;i;j;j;k;k;l;m;n;o;p;q;r;s;t;u;v;w;x;y; ", +" z;A;B;C;D;E;F;G;H;I;J;K;L;M;N;O;P;Q;R;S;T;U;V;W;X;k;Y;Z;`; >r;.>+>@>#>$> ", +" %>&>*>=>->;>>>,>'>,>)>F;8;!>~>{>]>^>/>(>_>:>i;<>[>X;}>i;|>1>q;2>3>4>5>6> ", +" 7>8>=>9>0>a>b>c>d>,>e>e>f>g>h>i>j>k>l>m>n>:>i;o>p>q>W;r>s>t>p;u>v>w>x>y> ", +" z>A>9>0>B>C>c>D>E>F>G>G>F>H>I>J>K>L>M>N>O>P>Q>R>o>R>T;s>S>S>S>t>1>T>U>V>W>X>Y> ", +" Z>`> ,9>B>.,D>+,@,#,$,%,$,&,*,=,-,;,>,,,',),P>!,!,_>~,t>s>{,],{,],^,/,(,_,:,<,[,}, ", +" |,`>1,2,3,G>+,4,o>o>4,@,@,5,6,7,8,9,0,a,b,c,d,e,f,g,h, >~,|>T;T;T;i,j,k,l,m,n,o,p,q,r, ", +" s,t,u,v,G>%,@,o>w,R>x,p>@,5,6,y,z,A,B,C,D,E,F,G,H,I,J,K,L,L,i,i;i;i;Q>S;M,N,P>O,P,Q,R,S, ", +" T,U,V,W,%,X,Y,Z,`,[>q>@, '.'+'@'#'$'%'&'*'='-';'>',''')'!'~'{'N,i,:>_>]'M,M,Q>_>^'/'('_':'<' ", +" 
['}'|'1'$,X,2'p>3'4'2'@,5'6'7'8'9'0'a'b'c'd'e'f'g'h'i'j'k'l'd,m'g, > >n'o'p'q'r's't'.>u'v'w'x' ", +" y'z'A'B'C'X,X,2'D'E'E'F'G'H'I'J'K'L'M'N'O'P'Q'R'S'T'U'V'W'X'Y'Z'`' ).)+)r'@)#)$)%)&)l;1>*)=)-);) ", +" >),)')))!)X,E'X,~){)d>!)])^)/)()_):)<)[)})|)1)f,2)3)4)5)6)7)8)9)0)*--*a)b)c)d)e)f)g)h)i)j)k)l)m) ", +" ", +" ", +" ", +" ", +" ", +" ", +" ", +" "}; diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/pure.rb b/vendor/gems/gems/json_pure-1.2.0/lib/json/pure.rb new file mode 100644 index 00000000..565ef0c5 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/pure.rb @@ -0,0 +1,77 @@ +require 'json/common' +require 'json/pure/parser' +require 'json/pure/generator' + +module JSON + begin + require 'iconv' + # An iconv instance to convert from UTF8 to UTF16 Big Endian. + UTF16toUTF8 = Iconv.new('utf-8', 'utf-16be') # :nodoc: + # An iconv instance to convert from UTF16 Big Endian to UTF8. + UTF8toUTF16 = Iconv.new('utf-16be', 'utf-8') # :nodoc: + UTF8toUTF16.iconv('no bom') + rescue LoadError + raise MissingUnicodeSupport, + "iconv couldn't be loaded, which is required for UTF-8/UTF-16 conversions" + rescue Errno::EINVAL, Iconv::InvalidEncoding + # Iconv doesn't support big endian utf-16. Let's try to hack this manually + # into the converters. + begin + old_verbose, $VERBSOSE = $VERBOSE, nil + # An iconv instance to convert from UTF8 to UTF16 Big Endian. + UTF16toUTF8 = Iconv.new('utf-8', 'utf-16') # :nodoc: + # An iconv instance to convert from UTF16 Big Endian to UTF8. + UTF8toUTF16 = Iconv.new('utf-16', 'utf-8') # :nodoc: + UTF8toUTF16.iconv('no bom') + if UTF8toUTF16.iconv("\xe2\x82\xac") == "\xac\x20" + swapper = Class.new do + def initialize(iconv) # :nodoc: + @iconv = iconv + end + + def iconv(string) # :nodoc: + result = @iconv.iconv(string) + JSON.swap!(result) + end + end + UTF8toUTF16 = swapper.new(UTF8toUTF16) # :nodoc: + end + if UTF16toUTF8.iconv("\xac\x20") == "\xe2\x82\xac" + swapper = Class.new do + def initialize(iconv) # :nodoc: + @iconv = iconv + end + + def iconv(string) # :nodoc: + string = JSON.swap!(string.dup) + @iconv.iconv(string) + end + end + UTF16toUTF8 = swapper.new(UTF16toUTF8) # :nodoc: + end + rescue Errno::EINVAL, Iconv::InvalidEncoding + raise MissingUnicodeSupport, "iconv doesn't seem to support UTF-8/UTF-16 conversions" + ensure + $VERBOSE = old_verbose + end + end + + # Swap consecutive bytes of _string_ in place. + def self.swap!(string) # :nodoc: + 0.upto(string.size / 2) do |i| + break unless string[2 * i + 1] + string[2 * i], string[2 * i + 1] = string[2 * i + 1], string[2 * i] + end + string + end + + # This module holds all the modules/classes that implement JSON's + # functionality in pure ruby. + module Pure + $DEBUG and warn "Using pure library for JSON." 
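+    # The assignments below register the pure-Ruby backend with the JSON
+    # front end, so JSON.parse and JSON.generate dispatch to these classes.
+    # A minimal sketch of what this enables (illustrative only):
+    #
+    #   require 'json/pure'
+    #   JSON.parse('{"a":[1,2,3]}')   # => {"a"=>[1, 2, 3]}
+    #   JSON.generate('a' => 1)       # => '{"a":1}'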
+ JSON.parser = Parser + JSON.generator = Generator + end + + JSON_LOADED = true +end diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/pure/generator.rb b/vendor/gems/gems/json_pure-1.2.0/lib/json/pure/generator.rb new file mode 100644 index 00000000..57ef4838 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/pure/generator.rb @@ -0,0 +1,443 @@ +module JSON + MAP = { + "\x0" => '\u0000', + "\x1" => '\u0001', + "\x2" => '\u0002', + "\x3" => '\u0003', + "\x4" => '\u0004', + "\x5" => '\u0005', + "\x6" => '\u0006', + "\x7" => '\u0007', + "\b" => '\b', + "\t" => '\t', + "\n" => '\n', + "\xb" => '\u000b', + "\f" => '\f', + "\r" => '\r', + "\xe" => '\u000e', + "\xf" => '\u000f', + "\x10" => '\u0010', + "\x11" => '\u0011', + "\x12" => '\u0012', + "\x13" => '\u0013', + "\x14" => '\u0014', + "\x15" => '\u0015', + "\x16" => '\u0016', + "\x17" => '\u0017', + "\x18" => '\u0018', + "\x19" => '\u0019', + "\x1a" => '\u001a', + "\x1b" => '\u001b', + "\x1c" => '\u001c', + "\x1d" => '\u001d', + "\x1e" => '\u001e', + "\x1f" => '\u001f', + '"' => '\"', + '\\' => '\\\\', + } # :nodoc: + + # Convert a UTF8 encoded Ruby string _string_ to a JSON string, encoded with + # UTF16 big endian characters as \u????, and return it. + if defined?(::Encoding) + def utf8_to_json(string) # :nodoc: + string = string.dup + string << '' # XXX workaround: avoid buffer sharing + string.force_encoding(::Encoding::ASCII_8BIT) + string.gsub!(/["\\\x0-\x1f]/) { MAP[$&] } + string.gsub!(/( + (?: + [\xc2-\xdf][\x80-\xbf] | + [\xe0-\xef][\x80-\xbf]{2} | + [\xf0-\xf4][\x80-\xbf]{3} + )+ | + [\x80-\xc1\xf5-\xff] # invalid + )/nx) { |c| + c.size == 1 and raise GeneratorError, "invalid utf8 byte: '#{c}'" + s = JSON::UTF8toUTF16.iconv(c).unpack('H*')[0] + s.gsub!(/.{4}/n, '\\\\u\&') + } + string.force_encoding(::Encoding::UTF_8) + string + rescue Iconv::Failure => e + raise GeneratorError, "Caught #{e.class}: #{e}" + end + else + def utf8_to_json(string) # :nodoc: + string = string.gsub(/["\\\x0-\x1f]/) { MAP[$&] } + string.gsub!(/( + (?: + [\xc2-\xdf][\x80-\xbf] | + [\xe0-\xef][\x80-\xbf]{2} | + [\xf0-\xf4][\x80-\xbf]{3} + )+ | + [\x80-\xc1\xf5-\xff] # invalid + )/nx) { |c| + c.size == 1 and raise GeneratorError, "invalid utf8 byte: '#{c}'" + s = JSON::UTF8toUTF16.iconv(c).unpack('H*')[0] + s.gsub!(/.{4}/n, '\\\\u\&') + } + string + rescue Iconv::Failure => e + raise GeneratorError, "Caught #{e.class}: #{e}" + end + end + module_function :utf8_to_json + + module Pure + module Generator + # This class is used to create State instances, that are use to hold data + # while generating a JSON text from a a Ruby data structure. + class State + # Creates a State object from _opts_, which ought to be Hash to create + # a new State instance configured by _opts_, something else to create + # an unconfigured instance. If _opts_ is a State object, it is just + # returned. + def self.from_state(opts) + case opts + when self + opts + when Hash + new(opts) + else + new + end + end + + # Instantiates a new State object, configured by _opts_. 
+ # + # _opts_ can have the following keys: + # + # * *indent*: a string used to indent levels (default: ''), + # * *space*: a string that is put after, a : or , delimiter (default: ''), + # * *space_before*: a string that is put before a : pair delimiter (default: ''), + # * *object_nl*: a string that is put at the end of a JSON object (default: ''), + # * *array_nl*: a string that is put at the end of a JSON array (default: ''), + # * *check_circular*: true if checking for circular data structures + # should be done (the default), false otherwise. + # * *check_circular*: true if checking for circular data structures + # should be done, false (the default) otherwise. + # * *allow_nan*: true if NaN, Infinity, and -Infinity should be + # generated, otherwise an exception is thrown, if these values are + # encountered. This options defaults to false. + def initialize(opts = {}) + @seen = {} + @indent = '' + @space = '' + @space_before = '' + @object_nl = '' + @array_nl = '' + @check_circular = true + @allow_nan = false + configure opts + end + + # This string is used to indent levels in the JSON text. + attr_accessor :indent + + # This string is used to insert a space between the tokens in a JSON + # string. + attr_accessor :space + + # This string is used to insert a space before the ':' in JSON objects. + attr_accessor :space_before + + # This string is put at the end of a line that holds a JSON object (or + # Hash). + attr_accessor :object_nl + + # This string is put at the end of a line that holds a JSON array. + attr_accessor :array_nl + + # This integer returns the maximum level of data structure nesting in + # the generated JSON, max_nesting = 0 if no maximum is checked. + attr_accessor :max_nesting + + def check_max_nesting(depth) # :nodoc: + return if @max_nesting.zero? + current_nesting = depth + 1 + current_nesting > @max_nesting and + raise NestingError, "nesting of #{current_nesting} is too deep" + end + + # Returns true, if circular data structures should be checked, + # otherwise returns false. + def check_circular? + @check_circular + end + + # Returns true if NaN, Infinity, and -Infinity should be considered as + # valid JSON and output. + def allow_nan? + @allow_nan + end + + # Returns _true_, if _object_ was already seen during this generating + # run. + def seen?(object) + @seen.key?(object.__id__) + end + + # Remember _object_, to find out if it was already encountered (if a + # cyclic data structure is if a cyclic data structure is rendered). + def remember(object) + @seen[object.__id__] = true + end + + # Forget _object_ for this generating run. + def forget(object) + @seen.delete object.__id__ + end + + # Configure this State instance with the Hash _opts_, and return + # itself. + def configure(opts) + @indent = opts[:indent] if opts.key?(:indent) + @space = opts[:space] if opts.key?(:space) + @space_before = opts[:space_before] if opts.key?(:space_before) + @object_nl = opts[:object_nl] if opts.key?(:object_nl) + @array_nl = opts[:array_nl] if opts.key?(:array_nl) + @check_circular = !!opts[:check_circular] if opts.key?(:check_circular) + @allow_nan = !!opts[:allow_nan] if opts.key?(:allow_nan) + if !opts.key?(:max_nesting) # defaults to 19 + @max_nesting = 19 + elsif opts[:max_nesting] + @max_nesting = opts[:max_nesting] + else + @max_nesting = 0 + end + self + end + + # Returns the configuration instance variables as a hash, that can be + # passed to the configure method. 
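+        # A small round-trip illustration (assumed behaviour, not from the
+        # original file):
+        #
+        #   state = State.new(:indent => '  ')
+        #   state.to_h[:indent]        # => "  "
+        #   state.to_h[:max_nesting]   # => 19  (the default set in #configure)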
+ def to_h + result = {} + for iv in %w[indent space space_before object_nl array_nl check_circular allow_nan max_nesting] + result[iv.intern] = instance_variable_get("@#{iv}") + end + result + end + end + + module GeneratorMethods + module Object + # Converts this object to a string (calling #to_s), converts + # it to a JSON string, and returns the result. This is a fallback, if no + # special method #to_json was defined for some object. + def to_json(*) to_s.to_json end + end + + module Hash + # Returns a JSON string containing a JSON object, that is unparsed from + # this Hash instance. + # _state_ is a JSON::State object, that can also be used to configure the + # produced JSON string output further. + # _depth_ is used to find out nesting depth, to indent accordingly. + def to_json(state = nil, depth = 0, *) + if state + state = JSON.state.from_state(state) + state.check_max_nesting(depth) + json_check_circular(state) { json_transform(state, depth) } + else + json_transform(state, depth) + end + end + + private + + def json_check_circular(state) + if state and state.check_circular? + state.seen?(self) and raise JSON::CircularDatastructure, + "circular data structures not supported!" + state.remember self + end + yield + ensure + state and state.forget self + end + + def json_shift(state, depth) + state and not state.object_nl.empty? or return '' + state.indent * depth + end + + def json_transform(state, depth) + delim = ',' + if state + delim << state.object_nl + result = '{' + result << state.object_nl + result << map { |key,value| + s = json_shift(state, depth + 1) + s << key.to_s.to_json(state, depth + 1) + s << state.space_before + s << ':' + s << state.space + s << value.to_json(state, depth + 1) + }.join(delim) + result << state.object_nl + result << json_shift(state, depth) + result << '}' + else + result = '{' + result << map { |key,value| + key.to_s.to_json << ':' << value.to_json + }.join(delim) + result << '}' + end + result + end + end + + module Array + # Returns a JSON string containing a JSON array, that is unparsed from + # this Array instance. + # _state_ is a JSON::State object, that can also be used to configure the + # produced JSON string output further. + # _depth_ is used to find out nesting depth, to indent accordingly. + def to_json(state = nil, depth = 0, *) + if state + state = JSON.state.from_state(state) + state.check_max_nesting(depth) + json_check_circular(state) { json_transform(state, depth) } + else + json_transform(state, depth) + end + end + + private + + def json_check_circular(state) + if state and state.check_circular? + state.seen?(self) and raise JSON::CircularDatastructure, + "circular data structures not supported!" + state.remember self + end + yield + ensure + state and state.forget self + end + + def json_shift(state, depth) + state and not state.array_nl.empty? or return '' + state.indent * depth + end + + def json_transform(state, depth) + delim = ',' + if state + delim << state.array_nl + result = '[' + result << state.array_nl + result << map { |value| + json_shift(state, depth + 1) << value.to_json(state, depth + 1) + }.join(delim) + result << state.array_nl + result << json_shift(state, depth) + result << ']' + else + '[' << map { |value| value.to_json }.join(delim) << ']' + end + end + end + + module Integer + # Returns a JSON string representation for this Integer number. + def to_json(*) to_s end + end + + module Float + # Returns a JSON string representation for this Float number. 
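+          # Behaviour sketch under the assumption that json/pure has been
+          # loaded and extends Float (illustrative, not part of the file):
+          #
+          #   (1.0/0).to_json
+          #   # => raises JSON::GeneratorError, "Infinity not allowed in JSON"
+          #   state = JSON::Pure::Generator::State.new(:allow_nan => true)
+          #   (1.0/0).to_json(state)   # => "Infinity"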
+ def to_json(state = nil, *) + case + when infinite? + if state && state.allow_nan? + to_s + else + raise GeneratorError, "#{self} not allowed in JSON" + end + when nan? + if state && state.allow_nan? + to_s + else + raise GeneratorError, "#{self} not allowed in JSON" + end + else + to_s + end + end + end + + module String + if defined?(::Encoding) + # This string should be encoded with UTF-8 A call to this method + # returns a JSON string encoded with UTF16 big endian characters as + # \u????. + def to_json(*) + if encoding == ::Encoding::UTF_8 + '"' << JSON.utf8_to_json(self) << '"' + else + string = encode(::Encoding::UTF_8) + '"' << JSON.utf8_to_json(string) << '"' + end + end + else + # This string should be encoded with UTF-8 A call to this method + # returns a JSON string encoded with UTF16 big endian characters as + # \u????. + def to_json(*) + '"' << JSON.utf8_to_json(self) << '"' + end + end + + # Module that holds the extinding methods if, the String module is + # included. + module Extend + # Raw Strings are JSON Objects (the raw bytes are stored in an array for the + # key "raw"). The Ruby String can be created by this module method. + def json_create(o) + o['raw'].pack('C*') + end + end + + # Extends _modul_ with the String::Extend module. + def self.included(modul) + modul.extend Extend + end + + # This method creates a raw object hash, that can be nested into + # other data structures and will be unparsed as a raw string. This + # method should be used, if you want to convert raw strings to JSON + # instead of UTF-8 strings, e. g. binary data. + def to_json_raw_object + { + JSON.create_id => self.class.name, + 'raw' => self.unpack('C*'), + } + end + + # This method creates a JSON text from the result of + # a call to to_json_raw_object of this String. + def to_json_raw(*args) + to_json_raw_object.to_json(*args) + end + end + + module TrueClass + # Returns a JSON string for true: 'true'. + def to_json(*) 'true' end + end + + module FalseClass + # Returns a JSON string for false: 'false'. + def to_json(*) 'false' end + end + + module NilClass + # Returns a JSON string for nil: 'null'. + def to_json(*) 'null' end + end + end + end + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/pure/parser.rb b/vendor/gems/gems/json_pure-1.2.0/lib/json/pure/parser.rb new file mode 100644 index 00000000..7a09f2fe --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/pure/parser.rb @@ -0,0 +1,303 @@ +require 'strscan' + +module JSON + module Pure + # This class implements the JSON parser that is used to parse a JSON string + # into a Ruby data structure. + class Parser < StringScanner + STRING = /" ((?:[^\x0-\x1f"\\] | + # escaped special characters: + \\["\\\/bfnrt] | + \\u[0-9a-fA-F]{4} | + # match all but escaped special characters: + \\[\x20-\x21\x23-\x2e\x30-\x5b\x5d-\x61\x63-\x65\x67-\x6d\x6f-\x71\x73\x75-\xff])*) + "/nx + INTEGER = /(-?0|-?[1-9]\d*)/ + FLOAT = /(-? 
+ (?:0|[1-9]\d*) + (?: + \.\d+(?i:e[+-]?\d+) | + \.\d+ | + (?i:e[+-]?\d+) + ) + )/x + NAN = /NaN/ + INFINITY = /Infinity/ + MINUS_INFINITY = /-Infinity/ + OBJECT_OPEN = /\{/ + OBJECT_CLOSE = /\}/ + ARRAY_OPEN = /\[/ + ARRAY_CLOSE = /\]/ + PAIR_DELIMITER = /:/ + COLLECTION_DELIMITER = /,/ + TRUE = /true/ + FALSE = /false/ + NULL = /null/ + IGNORE = %r( + (?: + //[^\n\r]*[\n\r]| # line comments + /\* # c-style comments + (?: + [^*/]| # normal chars + /[^*]| # slashes that do not start a nested comment + \*[^/]| # asterisks that do not end this comment + /(?=\*/) # single slash before this comment's end + )* + \*/ # the End of this comment + |[ \t\r\n]+ # whitespaces: space, horicontal tab, lf, cr + )+ + )mx + + UNPARSED = Object.new + + # Creates a new JSON::Pure::Parser instance for the string _source_. + # + # It will be configured by the _opts_ hash. _opts_ can have the following + # keys: + # * *max_nesting*: The maximum depth of nesting allowed in the parsed data + # structures. Disable depth checking with :max_nesting => false|nil|0, + # it defaults to 19. + # * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in + # defiance of RFC 4627 to be parsed by the Parser. This option defaults + # to false. + # * *create_additions*: If set to false, the Parser doesn't create + # additions even if a matchin class and create_id was found. This option + # defaults to true. + # * *object_class*: Defaults to Hash + # * *array_class*: Defaults to Array + def initialize(source, opts = {}) + if defined?(::Encoding) + if source.encoding == Encoding::ASCII_8BIT + b = source[0, 4].bytes.to_a + source = case + when b.size >= 4 && b[0] == 0 && b[1] == 0 && b[2] == 0 + source.dup.force_encoding(Encoding::UTF_32BE).encode!(Encoding::UTF_8) + when b.size >= 4 && b[0] == 0 && b[2] == 0 + source.dup.force_encoding(Encoding::UTF_16BE).encode!(Encoding::UTF_8) + when b.size >= 4 && b[1] == 0 && b[2] == 0 && b[3] == 0 + source.dup.force_encoding(Encoding::UTF_32LE).encode!(Encoding::UTF_8) + when b.size >= 4 && b[1] == 0 && b[3] == 0 + source.dup.force_encoding(Encoding::UTF_16LE).encode!(Encoding::UTF_8) + else + source.dup + end + else + source = source.encode(Encoding::UTF_8) + end + source.force_encoding(Encoding::ASCII_8BIT) + else + b = source + source = case + when b.size >= 4 && b[0] == 0 && b[1] == 0 && b[2] == 0 + JSON.iconv('utf-8', 'utf-32be', b) + when b.size >= 4 && b[0] == 0 && b[2] == 0 + JSON.iconv('utf-8', 'utf-16be', b) + when b.size >= 4 && b[1] == 0 && b[2] == 0 && b[3] == 0 + JSON.iconv('utf-8', 'utf-32le', b) + when b.size >= 4 && b[1] == 0 && b[3] == 0 + JSON.iconv('utf-8', 'utf-16le', b) + else + b + end + end + super source + if !opts.key?(:max_nesting) # defaults to 19 + @max_nesting = 19 + elsif opts[:max_nesting] + @max_nesting = opts[:max_nesting] + else + @max_nesting = 0 + end + @allow_nan = !!opts[:allow_nan] + ca = true + ca = opts[:create_additions] if opts.key?(:create_additions) + @create_id = ca ? JSON.create_id : nil + @object_class = opts[:object_class] || Hash + @array_class = opts[:array_class] || Array + end + + alias source string + + # Parses the current JSON string _source_ and returns the complete data + # structure as a result. + def parse + reset + obj = nil + until eos? + case + when scan(OBJECT_OPEN) + obj and raise ParserError, "source '#{peek(20)}' not in JSON!" + @current_nesting = 1 + obj = parse_object + when scan(ARRAY_OPEN) + obj and raise ParserError, "source '#{peek(20)}' not in JSON!" 
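The parser options listed above are easiest to see in use. The sketch below only repeats calls that appear in the gem's test suite later in this patch (allow_nan, max_nesting, and the custom container classes), so the behaviour shown is the documented one rather than an assumption.

    require 'json/pure'

    # NaN/Infinity are rejected by default; :allow_nan admits them.
    JSON.parse('[NaN]', :allow_nan => true).first.nan?  # => true

    # :max_nesting bounds the depth of nested structures (default 19).
    JSON.parse('[[]]', :max_nesting => 2)               # => [[]]
    # JSON.parse('[[]]', :max_nesting => 1)             # raises JSON::NestingError

    # :object_class / :array_class swap in custom result containers.
    class SubHash < Hash; end
    JSON.parse('{}', :object_class => SubHash).class    # => SubHash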
+ @current_nesting = 1 + obj = parse_array + when skip(IGNORE) + ; + else + raise ParserError, "source '#{peek(20)}' not in JSON!" + end + end + obj or raise ParserError, "source did not contain any JSON!" + obj + end + + private + + # Unescape characters in strings. + UNESCAPE_MAP = Hash.new { |h, k| h[k] = k.chr } + UNESCAPE_MAP.update({ + ?" => '"', + ?\\ => '\\', + ?/ => '/', + ?b => "\b", + ?f => "\f", + ?n => "\n", + ?r => "\r", + ?t => "\t", + ?u => nil, + }) + + def parse_string + if scan(STRING) + return '' if self[1].empty? + string = self[1].gsub(%r((?:\\[\\bfnrt"/]|(?:\\u(?:[A-Fa-f\d]{4}))+|\\[\x20-\xff]))n) do |c| + if u = UNESCAPE_MAP[$&[1]] + u + else # \uXXXX + bytes = '' + i = 0 + while c[6 * i] == ?\\ && c[6 * i + 1] == ?u + bytes << c[6 * i + 2, 2].to_i(16) << c[6 * i + 4, 2].to_i(16) + i += 1 + end + JSON::UTF16toUTF8.iconv(bytes) + end + end + if string.respond_to?(:force_encoding) + string.force_encoding(Encoding::UTF_8) + end + string + else + UNPARSED + end + rescue Iconv::Failure => e + raise GeneratorError, "Caught #{e.class}: #{e}" + end + + def parse_value + case + when scan(FLOAT) + Float(self[1]) + when scan(INTEGER) + Integer(self[1]) + when scan(TRUE) + true + when scan(FALSE) + false + when scan(NULL) + nil + when (string = parse_string) != UNPARSED + string + when scan(ARRAY_OPEN) + @current_nesting += 1 + ary = parse_array + @current_nesting -= 1 + ary + when scan(OBJECT_OPEN) + @current_nesting += 1 + obj = parse_object + @current_nesting -= 1 + obj + when @allow_nan && scan(NAN) + NaN + when @allow_nan && scan(INFINITY) + Infinity + when @allow_nan && scan(MINUS_INFINITY) + MinusInfinity + else + UNPARSED + end + end + + def parse_array + raise NestingError, "nesting of #@current_nesting is too deep" if + @max_nesting.nonzero? && @current_nesting > @max_nesting + result = @array_class.new + delim = false + until eos? + case + when (value = parse_value) != UNPARSED + delim = false + result << value + skip(IGNORE) + if scan(COLLECTION_DELIMITER) + delim = true + elsif match?(ARRAY_CLOSE) + ; + else + raise ParserError, "expected ',' or ']' in array at '#{peek(20)}'!" + end + when scan(ARRAY_CLOSE) + if delim + raise ParserError, "expected next element in array at '#{peek(20)}'!" + end + break + when skip(IGNORE) + ; + else + raise ParserError, "unexpected token in array at '#{peek(20)}'!" + end + end + result + end + + def parse_object + raise NestingError, "nesting of #@current_nesting is too deep" if + @max_nesting.nonzero? && @current_nesting > @max_nesting + result = @object_class.new + delim = false + until eos? + case + when (string = parse_string) != UNPARSED + skip(IGNORE) + unless scan(PAIR_DELIMITER) + raise ParserError, "expected ':' in object at '#{peek(20)}'!" + end + skip(IGNORE) + unless (value = parse_value).equal? UNPARSED + result[string] = value + delim = false + skip(IGNORE) + if scan(COLLECTION_DELIMITER) + delim = true + elsif match?(OBJECT_CLOSE) + ; + else + raise ParserError, "expected ',' or '}' in object at '#{peek(20)}'!" + end + else + raise ParserError, "expected value in object at '#{peek(20)}'!" + end + when scan(OBJECT_CLOSE) + if delim + raise ParserError, "expected next name, value pair in object at '#{peek(20)}'!" + end + if @create_id and klassname = result[@create_id] + klass = JSON.deep_const_get klassname + break unless klass and klass.json_creatable? + result = klass.json_create(result) + end + break + when skip(IGNORE) + ; + else + raise ParserError, "unexpected token in object at '#{peek(20)}'!" 
+ end + end + result + end + end + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/lib/json/version.rb b/vendor/gems/gems/json_pure-1.2.0/lib/json/version.rb new file mode 100644 index 00000000..ff48b9b7 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/lib/json/version.rb @@ -0,0 +1,8 @@ +module JSON + # JSON version + VERSION = '1.2.0' + VERSION_ARRAY = VERSION.split(/\./).map { |x| x.to_i } # :nodoc: + VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc: + VERSION_MINOR = VERSION_ARRAY[1] # :nodoc: + VERSION_BUILD = VERSION_ARRAY[2] # :nodoc: +end diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail1.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail1.json new file mode 100644 index 00000000..6216b865 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail10.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail10.json new file mode 100644 index 00000000..5d8c0047 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail11.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail11.json new file mode 100644 index 00000000..76eb95b4 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail12.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail12.json new file mode 100644 index 00000000..77580a45 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail13.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail13.json new file mode 100644 index 00000000..379406b5 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail14.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail14.json new file mode 100644 index 00000000..0ed366b3 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail18.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail18.json new file mode 100644 index 00000000..e2d130c6 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail19.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail19.json new file mode 100644 index 00000000..3b9c46fa --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail2.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail2.json new file mode 100644 index 00000000..6b7c11e5 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail2.json @@ 
-0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail20.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail20.json new file mode 100644 index 00000000..27c1af3e --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail21.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail21.json new file mode 100644 index 00000000..62474573 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail22.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail22.json new file mode 100644 index 00000000..a7752581 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail23.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail23.json new file mode 100644 index 00000000..494add1c --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail24.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail24.json new file mode 100644 index 00000000..caff239b --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail25.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail25.json new file mode 100644 index 00000000..2dfbd259 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail25.json @@ -0,0 +1 @@ +["tab character in string "] diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail27.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail27.json new file mode 100644 index 00000000..6b01a2ca --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail28.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail28.json new file mode 100644 index 00000000..621a0101 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail3.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail3.json new file mode 100644 index 00000000..168c81eb --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail4.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail4.json new file mode 100644 index 00000000..9de168bf --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail5.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail5.json new file mode 100644 index 00000000..ddf3ce3d --- /dev/null +++ 
b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail6.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail6.json new file mode 100644 index 00000000..ed91580e --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail7.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail7.json new file mode 100644 index 00000000..8a96af3e --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail8.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail8.json new file mode 100644 index 00000000..b28479c6 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail9.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail9.json new file mode 100644 index 00000000..5815574f --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass1.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass1.json new file mode 100644 index 00000000..7828fcc1 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass1.json @@ -0,0 +1,56 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E666, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ], + "compact": [1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" 
+: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066 + + +,"rosebud"] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass15.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass15.json new file mode 100644 index 00000000..fc8376b6 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass16.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass16.json new file mode 100644 index 00000000..c43ae3c2 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass16.json @@ -0,0 +1 @@ +["Illegal backslash escape: \'"] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass17.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass17.json new file mode 100644 index 00000000..62b9214a --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass2.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass2.json new file mode 100644 index 00000000..d3c63c7a --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass26.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass26.json new file mode 100644 index 00000000..845d26a6 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass3.json b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass3.json new file mode 100644 index 00000000..4528d51f --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/fixtures/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/test_json.rb b/vendor/gems/gems/json_pure-1.2.0/tests/test_json.rb new file mode 100755 index 00000000..736c57b8 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/test_json.rb @@ -0,0 +1,320 @@ +#!/usr/bin/env ruby +# -*- coding: utf-8 -*- + +require 'test/unit' +case ENV['JSON'] +when 'pure' then require 'json/pure' +when 'ext' then require 'json/ext' +else require 'json' +end +require 'stringio' + +unless Array.method_defined?(:permutation) + begin + require 'enumerator' + require 'permutation' + class Array + def permutation + Permutation.for(self).to_enum.map { |x| x.project } + end + end + rescue LoadError + warn "Skipping permutation tests." 
+ end +end + +class TC_JSON < Test::Unit::TestCase + include JSON + + def setup + @ary = [1, "foo", 3.14, 4711.0, 2.718, nil, [1,-2,3], false, true].map do + |x| [x] + end + @ary_to_parse = ["1", '"foo"', "3.14", "4711.0", "2.718", "null", + "[1,-2,3]", "false", "true"].map do + |x| "[#{x}]" + end + @hash = { + 'a' => 2, + 'b' => 3.141, + 'c' => 'c', + 'd' => [ 1, "b", 3.14 ], + 'e' => { 'foo' => 'bar' }, + 'g' => "\"\0\037", + 'h' => 1000.0, + 'i' => 0.001 + } + @json = '{"a":2,"b":3.141,"c":"c","d":[1,"b",3.14],"e":{"foo":"bar"},'\ + '"g":"\\"\\u0000\\u001f","h":1.0E3,"i":1.0E-3}' + end + + def test_construction + parser = JSON::Parser.new('test') + assert_equal 'test', parser.source + end + + def assert_equal_float(expected, is) + assert_in_delta(expected.first, is.first, 1e-2) + end + + def test_parse_simple_arrays + assert_equal([], parse('[]')) + assert_equal([], parse(' [ ] ')) + assert_equal([nil], parse('[null]')) + assert_equal([false], parse('[false]')) + assert_equal([true], parse('[true]')) + assert_equal([-23], parse('[-23]')) + assert_equal([23], parse('[23]')) + assert_equal([0.23], parse('[0.23]')) + assert_equal([0.0], parse('[0e0]')) + assert_raises(JSON::ParserError) { parse('[+23.2]') } + assert_raises(JSON::ParserError) { parse('[+23]') } + assert_raises(JSON::ParserError) { parse('[.23]') } + assert_raises(JSON::ParserError) { parse('[023]') } + assert_equal_float [3.141], parse('[3.141]') + assert_equal_float [-3.141], parse('[-3.141]') + assert_equal_float [3.141], parse('[3141e-3]') + assert_equal_float [3.141], parse('[3141.1e-3]') + assert_equal_float [3.141], parse('[3141E-3]') + assert_equal_float [3.141], parse('[3141.0E-3]') + assert_equal_float [-3.141], parse('[-3141.0e-3]') + assert_equal_float [-3.141], parse('[-3141e-3]') + assert_raises(ParserError) { parse('[NaN]') } + assert parse('[NaN]', :allow_nan => true).first.nan? 
+ assert_raises(ParserError) { parse('[Infinity]') } + assert_equal [1.0/0], parse('[Infinity]', :allow_nan => true) + assert_raises(ParserError) { parse('[-Infinity]') } + assert_equal [-1.0/0], parse('[-Infinity]', :allow_nan => true) + assert_equal([""], parse('[""]')) + assert_equal(["foobar"], parse('["foobar"]')) + assert_equal([{}], parse('[{}]')) + end + + def test_parse_simple_objects + assert_equal({}, parse('{}')) + assert_equal({}, parse(' { } ')) + assert_equal({ "a" => nil }, parse('{ "a" : null}')) + assert_equal({ "a" => nil }, parse('{"a":null}')) + assert_equal({ "a" => false }, parse('{ "a" : false } ')) + assert_equal({ "a" => false }, parse('{"a":false}')) + assert_raises(JSON::ParserError) { parse('{false}') } + assert_equal({ "a" => true }, parse('{"a":true}')) + assert_equal({ "a" => true }, parse(' { "a" : true } ')) + assert_equal({ "a" => -23 }, parse(' { "a" : -23 } ')) + assert_equal({ "a" => -23 }, parse(' { "a" : -23 } ')) + assert_equal({ "a" => 23 }, parse('{"a":23 } ')) + assert_equal({ "a" => 23 }, parse(' { "a" : 23 } ')) + assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } ')) + assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } ')) + end + + if Array.method_defined?(:permutation) + def test_parse_more_complex_arrays + a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "nۧt€ð2" => {} }] + a.permutation.each do |perm| + json = pretty_generate(perm) + assert_equal perm, parse(json) + end + end + + def test_parse_complex_objects + a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "nۧt€ð2" => {} }] + a.permutation.each do |perm| + s = "a" + orig_obj = perm.inject({}) { |h, x| h[s.dup] = x; s = s.succ; h } + json = pretty_generate(orig_obj) + assert_equal orig_obj, parse(json) + end + end + end + + def test_parse_arrays + assert_equal([1,2,3], parse('[1,2,3]')) + assert_equal([1.2,2,3], parse('[1.2,2,3]')) + assert_equal([[],[[],[]]], parse('[[],[[],[]]]')) + end + + def test_parse_values + assert_equal([""], parse('[""]')) + assert_equal(["\\"], parse('["\\\\"]')) + assert_equal(['"'], parse('["\""]')) + assert_equal(['\\"\\'], parse('["\\\\\\"\\\\"]')) + assert_equal(["\"\b\n\r\t\0\037"], + parse('["\"\b\n\r\t\u0000\u001f"]')) + for i in 0 ... 
@ary.size + assert_equal(@ary[i], parse(@ary_to_parse[i])) + end + end + + def test_parse_array + assert_equal([], parse('[]')) + assert_equal([], parse(' [ ] ')) + assert_equal([1], parse('[1]')) + assert_equal([1], parse(' [ 1 ] ')) + assert_equal(@ary, + parse('[[1],["foo"],[3.14],[47.11e+2],[2718.0E-3],[null],[[1,-2,3]]'\ + ',[false],[true]]')) + assert_equal(@ary, parse(%Q{ [ [1] , ["foo"] , [3.14] \t , [47.11e+2] + , [2718.0E-3 ],\r[ null] , [[1, -2, 3 ]], [false ],[ true]\n ] })) + end + + class SubArray < Array; end + + def test_parse_array_custom_class + res = parse('[]', :array_class => SubArray) + assert_equal([], res) + assert_equal(SubArray, res.class) + end + + def test_parse_object + assert_equal({}, parse('{}')) + assert_equal({}, parse(' { } ')) + assert_equal({'foo'=>'bar'}, parse('{"foo":"bar"}')) + assert_equal({'foo'=>'bar'}, parse(' { "foo" : "bar" } ')) + end + + class SubHash < Hash; end + + def test_parse_object_custom_class + res = parse('{}', :object_class => SubHash) + assert_equal({}, res) + assert_equal(SubHash, res.class) + end + + def test_parser_reset + parser = Parser.new(@json) + assert_equal(@hash, parser.parse) + assert_equal(@hash, parser.parse) + end + + def test_comments + json = < "value1", "key2" => "value2", "key3" => "value3" }, + parse(json)) + json = < "value1" }, parse(json)) + end + + def test_backslash + data = [ '\\.(?i:gif|jpe?g|png)$' ] + json = '["\\\\.(?i:gif|jpe?g|png)$"]' + assert_equal json, JSON.generate(data) + assert_equal data, JSON.parse(json) + # + data = [ '\\"' ] + json = '["\\\\\""]' + assert_equal json, JSON.generate(data) + assert_equal data, JSON.parse(json) + # + json = '["/"]' + data = JSON.parse(json) + assert_equal ['/'], data + assert_equal json, JSON.generate(data) + # + json = '["\""]' + data = JSON.parse(json) + assert_equal ['"'], data + assert_equal json, JSON.generate(data) + json = '["\\\'"]' + data = JSON.parse(json) + assert_equal ["'"], data + assert_equal '["\'"]', JSON.generate(data) + end + + def test_wrong_inputs + assert_raises(ParserError) { JSON.parse('"foo"') } + assert_raises(ParserError) { JSON.parse('123') } + assert_raises(ParserError) { JSON.parse('[] bla') } + assert_raises(ParserError) { JSON.parse('[] 1') } + assert_raises(ParserError) { JSON.parse('[] []') } + assert_raises(ParserError) { JSON.parse('[] {}') } + assert_raises(ParserError) { JSON.parse('{} []') } + assert_raises(ParserError) { JSON.parse('{} {}') } + assert_raises(ParserError) { JSON.parse('[NULL]') } + assert_raises(ParserError) { JSON.parse('[FALSE]') } + assert_raises(ParserError) { JSON.parse('[TRUE]') } + assert_raises(ParserError) { JSON.parse('[07] ') } + assert_raises(ParserError) { JSON.parse('[0a]') } + assert_raises(ParserError) { JSON.parse('[1.]') } + assert_raises(ParserError) { JSON.parse(' ') } + end + + def test_nesting + assert_raises(JSON::NestingError) { JSON.parse '[[]]', :max_nesting => 1 } + assert_raises(JSON::NestingError) { JSON.parser.new('[[]]', :max_nesting => 1).parse } + assert_equal [[]], JSON.parse('[[]]', :max_nesting => 2) + too_deep = '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]' + too_deep_ary = eval too_deep + assert_raises(JSON::NestingError) { JSON.parse too_deep } + assert_raises(JSON::NestingError) { JSON.parser.new(too_deep).parse } + assert_raises(JSON::NestingError) { JSON.parse too_deep, :max_nesting => 19 } + ok = JSON.parse too_deep, :max_nesting => 20 + assert_equal too_deep_ary, ok + ok = JSON.parse too_deep, :max_nesting => nil + assert_equal too_deep_ary, ok + ok = 
JSON.parse too_deep, :max_nesting => false + assert_equal too_deep_ary, ok + ok = JSON.parse too_deep, :max_nesting => 0 + assert_equal too_deep_ary, ok + assert_raises(JSON::NestingError) { JSON.generate [[]], :max_nesting => 1 } + assert_equal '[[]]', JSON.generate([[]], :max_nesting => 2) + assert_raises(JSON::NestingError) { JSON.generate too_deep_ary } + assert_raises(JSON::NestingError) { JSON.generate too_deep_ary, :max_nesting => 19 } + ok = JSON.generate too_deep_ary, :max_nesting => 20 + assert_equal too_deep, ok + ok = JSON.generate too_deep_ary, :max_nesting => nil + assert_equal too_deep, ok + ok = JSON.generate too_deep_ary, :max_nesting => false + assert_equal too_deep, ok + ok = JSON.generate too_deep_ary, :max_nesting => 0 + assert_equal too_deep, ok + end + + def test_load_dump + too_deep = '[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]' + assert_equal too_deep, JSON.dump(eval(too_deep)) + assert_kind_of String, Marshal.dump(eval(too_deep)) + assert_raises(ArgumentError) { JSON.dump(eval(too_deep), 19) } + assert_raises(ArgumentError) { Marshal.dump(eval(too_deep), 19) } + assert_equal too_deep, JSON.dump(eval(too_deep), 20) + assert_kind_of String, Marshal.dump(eval(too_deep), 20) + output = StringIO.new + JSON.dump(eval(too_deep), output) + assert_equal too_deep, output.string + output = StringIO.new + JSON.dump(eval(too_deep), output, 20) + assert_equal too_deep, output.string + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/test_json_addition.rb b/vendor/gems/gems/json_pure-1.2.0/tests/test_json_addition.rb new file mode 100755 index 00000000..51e4a67f --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/test_json_addition.rb @@ -0,0 +1,164 @@ +#!/usr/bin/env ruby +# -*- coding:utf-8 -*- + +require 'test/unit' +case ENV['JSON'] +when 'pure' then require 'json/pure' +when 'ext' then require 'json/ext' +else require 'json' +end +require 'json/add/core' +require 'date' + +class TC_JSONAddition < Test::Unit::TestCase + include JSON + + class A + def initialize(a) + @a = a + end + + attr_reader :a + + def ==(other) + a == other.a + end + + def self.json_create(object) + new(*object['args']) + end + + def to_json(*args) + { + 'json_class' => self.class.name, + 'args' => [ @a ], + }.to_json(*args) + end + end + + class B + def self.json_creatable? + false + end + + def to_json(*args) + { + 'json_class' => self.class.name, + }.to_json(*args) + end + end + + class C + def self.json_creatable? + false + end + + def to_json(*args) + { + 'json_class' => 'TC_JSONAddition::Nix', + }.to_json(*args) + end + end + + def test_extended_json + a = A.new(666) + assert A.json_creatable? + json = generate(a) + a_again = JSON.parse(json) + assert_kind_of a.class, a_again + assert_equal a, a_again + end + + def test_extended_json_disabled + a = A.new(666) + assert A.json_creatable? + json = generate(a) + a_again = JSON.parse(json, :create_additions => true) + assert_kind_of a.class, a_again + assert_equal a, a_again + a_hash = JSON.parse(json, :create_additions => false) + assert_kind_of Hash, a_hash + assert_equal( + {"args"=>[666], "json_class"=>"TC_JSONAddition::A"}.sort_by { |k,| k }, + a_hash.sort_by { |k,| k } + ) + end + + def test_extended_json_fail1 + b = B.new + assert !B.json_creatable? + json = generate(b) + assert_equal({ "json_class"=>"TC_JSONAddition::B" }, JSON.parse(json)) + end + + def test_extended_json_fail2 + c = C.new + assert !C.json_creatable? 
+ json = generate(c) + assert_raises(ArgumentError) { JSON.parse(json) } + end + + def test_raw_strings + raw = '' + raw.respond_to?(:encode!) and raw.encode!(Encoding::ASCII_8BIT) + raw_array = [] + for i in 0..255 + raw << i + raw_array << i + end + json = raw.to_json_raw + json_raw_object = raw.to_json_raw_object + hash = { 'json_class' => 'String', 'raw'=> raw_array } + assert_equal hash, json_raw_object + json_raw = < e + e_json = JSON.generate e + e_again = JSON e_json + assert_kind_of TypeError, e_again + assert_equal e.message, e_again.message + assert_equal e.backtrace, e_again.backtrace + end + assert_equal(/foo/, JSON(JSON(/foo/))) + assert_equal(/foo/i, JSON(JSON(/foo/i))) + end + + def test_utc_datetime + now = Time.now + d = DateTime.parse(now.to_s) # usual case + assert d, JSON.parse(d.to_json) + d = DateTime.parse(now.utc.to_s) # of = 0 + assert d, JSON.parse(d.to_json) + d = DateTime.civil(2008, 6, 17, 11, 48, 32, 1) # of = 1 / 12 => 1/12 + assert d, JSON.parse(d.to_json) + d = DateTime.civil(2008, 6, 17, 11, 48, 32, 12) # of = 12 / 12 => 12 + assert d, JSON.parse(d.to_json) + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/test_json_encoding.rb b/vendor/gems/gems/json_pure-1.2.0/tests/test_json_encoding.rb new file mode 100644 index 00000000..bfb3e60b --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/test_json_encoding.rb @@ -0,0 +1,67 @@ +#!/usr/bin/env ruby +# -*- coding: utf-8 -*- + +require 'test/unit' +case ENV['JSON'] +when 'pure' then require 'json/pure' +when 'ext' then require 'json/ext' +else require 'json' +end +require 'iconv' + +class TC_JSONEncoding < Test::Unit::TestCase + include JSON + + def setup + @utf_8 = '["© ≠ €!"]' + @parsed = [ "© ≠ €!" ] + @utf_16_data = Iconv.iconv('utf-16be', 'utf-8', @parsed.first) + @generated = '["\u00a9 \u2260 \u20ac!"]' + if defined?(::Encoding) + @utf_8_ascii_8bit = @utf_8.dup.force_encoding(Encoding::ASCII_8BIT) + @utf_16be, = Iconv.iconv('utf-16be', 'utf-8', @utf_8) + @utf_16be_ascii_8bit = @utf_16be.dup.force_encoding(Encoding::ASCII_8BIT) + @utf_16le, = Iconv.iconv('utf-16le', 'utf-8', @utf_8) + @utf_16le_ascii_8bit = @utf_16le.dup.force_encoding(Encoding::ASCII_8BIT) + @utf_32be, = Iconv.iconv('utf-32be', 'utf-8', @utf_8) + @utf_32be_ascii_8bit = @utf_32be.dup.force_encoding(Encoding::ASCII_8BIT) + @utf_32le, = Iconv.iconv('utf-32le', 'utf-8', @utf_8) + @utf_32le_ascii_8bit = @utf_32le.dup.force_encoding(Encoding::ASCII_8BIT) + else + @utf_8_ascii_8bit = @utf_8.dup + @utf_16be, = Iconv.iconv('utf-16be', 'utf-8', @utf_8) + @utf_16be_ascii_8bit = @utf_16be.dup + @utf_16le, = Iconv.iconv('utf-16le', 'utf-8', @utf_8) + @utf_16le_ascii_8bit = @utf_16le.dup + @utf_32be, = Iconv.iconv('utf-32be', 'utf-8', @utf_8) + @utf_32be_ascii_8bit = @utf_32be.dup + @utf_32le, = Iconv.iconv('utf-32le', 'utf-8', @utf_8) + @utf_32le_ascii_8bit = @utf_32le.dup + end + end + + def test_parse + assert_equal @parsed, JSON.parse(@utf_8) + assert_equal @parsed, JSON.parse(@utf_16be) + assert_equal @parsed, JSON.parse(@utf_16le) + assert_equal @parsed, JSON.parse(@utf_32be) + assert_equal @parsed, JSON.parse(@utf_32le) + end + + def test_parse_ascii_8bit + assert_equal @parsed, JSON.parse(@utf_8_ascii_8bit) + assert_equal @parsed, JSON.parse(@utf_16be_ascii_8bit) + assert_equal @parsed, JSON.parse(@utf_16le_ascii_8bit) + assert_equal @parsed, JSON.parse(@utf_32be_ascii_8bit) + assert_equal @parsed, JSON.parse(@utf_32le_ascii_8bit) + end + + def test_generate + assert_equal @generated, JSON.generate(@parsed) + if 
defined?(::Encoding) + assert_equal @generated, JSON.generate(@utf_16_data) + else + assert_raises(JSON::GeneratorError) { JSON.generate(@utf_16_data) } + end + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/test_json_fixtures.rb b/vendor/gems/gems/json_pure-1.2.0/tests/test_json_fixtures.rb new file mode 100755 index 00000000..95e57ebf --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/test_json_fixtures.rb @@ -0,0 +1,34 @@ +#!/usr/bin/env ruby +# -*- coding: utf-8 -*- + +require 'test/unit' +case ENV['JSON'] +when 'pure' then require 'json/pure' +when 'ext' then require 'json/ext' +else require 'json' +end + +class TC_JSONFixtures < Test::Unit::TestCase + def setup + fixtures = File.join(File.dirname(__FILE__), 'fixtures/*.json') + passed, failed = Dir[fixtures].partition { |f| f['pass'] } + @passed = passed.inject([]) { |a, f| a << [ f, File.read(f) ] }.sort + @failed = failed.inject([]) { |a, f| a << [ f, File.read(f) ] }.sort + end + + def test_passing + for name, source in @passed + assert JSON.parse(source), + "Did not pass for fixture '#{name}'" + end + end + + def test_failing + for name, source in @failed + assert_raises(JSON::ParserError, JSON::NestingError, + "Did not fail for fixture '#{name}'") do + JSON.parse(source) + end + end + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/test_json_generate.rb b/vendor/gems/gems/json_pure-1.2.0/tests/test_json_generate.rb new file mode 100755 index 00000000..e725e6ff --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/test_json_generate.rb @@ -0,0 +1,120 @@ +#!/usr/bin/env ruby +# -*- coding: utf-8 -*- + +require 'test/unit' +case ENV['JSON'] +when 'pure' then require 'json/pure' +when 'ext' then require 'json/ext' +else require 'json' +end + +class TC_JSONGenerate < Test::Unit::TestCase + include JSON + + def setup + @hash = { + 'a' => 2, + 'b' => 3.141, + 'c' => 'c', + 'd' => [ 1, "b", 3.14 ], + 'e' => { 'foo' => 'bar' }, + 'g' => "\"\0\037", + 'h' => 1000.0, + 'i' => 0.001 + } + @json2 = '{"a":2,"b":3.141,"c":"c","d":[1,"b",3.14],"e":{"foo":"bar"},' + + '"g":"\\"\\u0000\\u001f","h":1000.0,"i":0.001}' + @json3 = <<'EOT'.chomp +{ + "a": 2, + "b": 3.141, + "c": "c", + "d": [ + 1, + "b", + 3.14 + ], + "e": { + "foo": "bar" + }, + "g": "\"\u0000\u001f", + "h": 1000.0, + "i": 0.001 +} +EOT + end + + def test_generate + json = generate(@hash) + assert_equal(JSON.parse(@json2), JSON.parse(json)) + parsed_json = parse(json) + assert_equal(@hash, parsed_json) + json = generate({1=>2}) + assert_equal('{"1":2}', json) + parsed_json = parse(json) + assert_equal({"1"=>2}, parsed_json) + assert_raise(GeneratorError) { generate(666) } + end + + def test_generate_pretty + json = pretty_generate(@hash) + assert_equal(JSON.parse(@json3), JSON.parse(json)) + parsed_json = parse(json) + assert_equal(@hash, parsed_json) + json = pretty_generate({1=>2}) + assert_equal(<<'EOT'.chomp, json) +{ + "1": 2 +} +EOT + parsed_json = parse(json) + assert_equal({"1"=>2}, parsed_json) + assert_raise(GeneratorError) { pretty_generate(666) } + end + + def test_fast_generate + json = fast_generate(@hash) + assert_equal(JSON.parse(@json2), JSON.parse(json)) + parsed_json = parse(json) + assert_equal(@hash, parsed_json) + json = fast_generate({1=>2}) + assert_equal('{"1":2}', json) + parsed_json = parse(json) + assert_equal({"1"=>2}, parsed_json) + assert_raise(GeneratorError) { fast_generate(666) } + end + + def test_states + json = generate({1=>2}, nil) + assert_equal('{"1":2}', json) + s = JSON.state.new(:check_circular => 
true) + #assert s.check_circular + h = { 1=>2 } + h[3] = h + assert_raises(JSON::CircularDatastructure) { generate(h) } + assert_raises(JSON::CircularDatastructure) { generate(h, s) } + s = JSON.state.new(:check_circular => true) + #assert s.check_circular + a = [ 1, 2 ] + a << a + assert_raises(JSON::CircularDatastructure) { generate(a, s) } + end + + def test_allow_nan + assert_raises(GeneratorError) { generate([JSON::NaN]) } + assert_equal '[NaN]', generate([JSON::NaN], :allow_nan => true) + assert_raises(GeneratorError) { fast_generate([JSON::NaN]) } + assert_raises(GeneratorError) { pretty_generate([JSON::NaN]) } + assert_equal "[\n NaN\n]", pretty_generate([JSON::NaN], :allow_nan => true) + assert_raises(GeneratorError) { generate([JSON::Infinity]) } + assert_equal '[Infinity]', generate([JSON::Infinity], :allow_nan => true) + assert_raises(GeneratorError) { fast_generate([JSON::Infinity]) } + assert_raises(GeneratorError) { pretty_generate([JSON::Infinity]) } + assert_equal "[\n Infinity\n]", pretty_generate([JSON::Infinity], :allow_nan => true) + assert_raises(GeneratorError) { generate([JSON::MinusInfinity]) } + assert_equal '[-Infinity]', generate([JSON::MinusInfinity], :allow_nan => true) + assert_raises(GeneratorError) { fast_generate([JSON::MinusInfinity]) } + assert_raises(GeneratorError) { pretty_generate([JSON::MinusInfinity]) } + assert_equal "[\n -Infinity\n]", pretty_generate([JSON::MinusInfinity], :allow_nan => true) + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/tests/test_json_rails.rb b/vendor/gems/gems/json_pure-1.2.0/tests/test_json_rails.rb new file mode 100755 index 00000000..d33402da --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tests/test_json_rails.rb @@ -0,0 +1,146 @@ +#!/usr/bin/env ruby +# -*- coding: utf-8 -*- + +require 'test/unit' +case ENV['JSON'] +when 'pure' then require 'json/pure' +when 'ext' then require 'json/ext' +else require 'json' +end +require 'json/add/rails' +require 'date' + +class TC_JSONRails < Test::Unit::TestCase + include JSON + + class A + def initialize(a) + @a = a + end + + attr_reader :a + + def ==(other) + a == other.a + end + + def self.json_create(object) + new(*object['args']) + end + + def to_json(*args) + { + 'json_class' => self.class.name, + 'args' => [ @a ], + }.to_json(*args) + end + end + + class B + def self.json_creatable? + false + end + + def to_json(*args) + { + 'json_class' => self.class.name, + }.to_json(*args) + end + end + + class C + def to_json(*args) + { + 'json_class' => 'TC_JSONRails::Nix', + }.to_json(*args) + end + end + + class D + def initialize + @foo = 666 + end + + attr_reader :foo + + def ==(other) + foo == other.foo + end + end + + def test_extended_json + a = A.new(666) + assert A.json_creatable? + assert_equal 666, a.a + json = generate(a) + a_again = JSON.parse(json) + assert_kind_of a.class, a_again + assert_equal a, a_again + assert_equal 666, a_again.a + end + + def test_extended_json_generic_object + d = D.new + assert D.json_creatable? + assert_equal 666, d.foo + json = generate(d) + d_again = JSON.parse(json) + assert_kind_of d.class, d_again + assert_equal d, d_again + assert_equal 666, d_again.foo + end + + def test_extended_json_disabled + a = A.new(666) + assert A.json_creatable? 
+ json = generate(a) + a_again = JSON.parse(json, :create_additions => true) + assert_kind_of a.class, a_again + assert_equal a, a_again + a_hash = JSON.parse(json, :create_additions => false) + assert_kind_of Hash, a_hash + assert_equal( + {"args"=>[666], "json_class"=>"TC_JSONRails::A"}.sort_by { |k,| k }, + a_hash.sort_by { |k,| k } + ) + end + + def test_extended_json_fail1 + b = B.new + assert !B.json_creatable? + json = generate(b) + assert_equal({ 'json_class' => B.name }, JSON.parse(json)) + end + + def test_extended_json_fail2 + c = C.new # with rails addition all objects are theoretically creatable + assert C.json_creatable? + json = generate(c) + assert_raises(ArgumentError) { JSON.parse(json) } + end + + def test_raw_strings + raw = '' + raw.respond_to?(:encode!) and raw.encode!(Encoding::ASCII_8BIT) + raw_array = [] + for i in 0..255 + raw << i + raw_array << i + end + json = raw.to_json_raw + json_raw_object = raw.to_json_raw_object + hash = { 'json_class' => 'String', 'raw'=> raw_array } + assert_equal hash, json_raw_object + json_raw = <= "1.9." + i = i.chr + end + assert_equal i, JSON.parse(json).first[0] + if i == ?\b + generated = JSON.generate(["" << i]) + assert '["\b"]' == generated || '["\10"]' == generated + elsif [?\n, ?\r, ?\t, ?\f].include?(i) + assert_equal '[' << ('' << i).dump << ']', JSON.generate(["" << i]) + elsif i.chr < 0x20.chr + assert_equal json, JSON.generate(["" << i]) + end + end + assert_raise(JSON::GeneratorError) do + JSON.generate(["\x80"]) + end + assert_equal "\302\200", JSON.parse('["\u0080"]').first + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/tools/fuzz.rb b/vendor/gems/gems/json_pure-1.2.0/tools/fuzz.rb new file mode 100755 index 00000000..4dacd958 --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tools/fuzz.rb @@ -0,0 +1,139 @@ +require 'json' + +require 'iconv' +ISO_8859_1_TO_UTF8 = Iconv.new('utf-8', 'iso-8859-15') +class ::String + def to_utf8 + ISO_8859_1_TO_UTF8.iconv self + end +end + +class Fuzzer + def initialize(n, freqs = {}) + sum = freqs.inject(0.0) { |s, x| s + x.last } + freqs.each_key { |x| freqs[x] /= sum } + s = 0.0 + freqs.each_key do |x| + freqs[x] = s .. (s + t = freqs[x]) + s += t + end + @freqs = freqs + @n = n + @alpha = (0..0xff).to_a + end + + def random_string + s = '' + 30.times { s << @alpha[rand(@alpha.size)] } + s.to_utf8 + end + + def pick + r = rand + found = @freqs.find { |k, f| f.include? 
rand } + found && found.first + end + + def make_pick + k = pick + case + when k == Hash, k == Array + k.new + when k == true, k == false, k == nil + k + when k == String + random_string + when k == Fixnum + rand(2 ** 30) - 2 ** 29 + when k == Bignum + rand(2 ** 70) - 2 ** 69 + end + end + + def fuzz(current = nil) + if @n > 0 + case current + when nil + @n -= 1 + current = fuzz [ Hash, Array ][rand(2)].new + when Array + while @n > 0 + @n -= 1 + current << case p = make_pick + when Array, Hash + fuzz(p) + else + p + end + end + when Hash + while @n > 0 + @n -= 1 + current[random_string] = case p = make_pick + when Array, Hash + fuzz(p) + else + p + end + end + end + end + current + end +end + +class MyState < JSON.state + WS = " \r\t\n" + + def initialize + super( + :indent => make_spaces, + :space => make_spaces, + :space_before => make_spaces, + :object_nl => make_spaces, + :array_nl => make_spaces, + :max_nesting => false + ) + end + + def make_spaces + s = '' + rand(1).times { s << WS[rand(WS.size)] } + s + end +end + +n = (ARGV.shift || 500).to_i +loop do + fuzzer = Fuzzer.new(n, + Hash => 25, + Array => 25, + String => 10, + Fixnum => 10, + Bignum => 10, + nil => 5, + true => 5, + false => 5 + ) + o1 = fuzzer.fuzz + json = JSON.generate o1, MyState.new + if $DEBUG + puts "-" * 80 + puts json, json.size + else + puts json.size + end + begin + o2 = JSON.parse(json, :max_nesting => false) + rescue JSON::ParserError => e + puts "Caught #{e.class}: #{e.message}\n#{e.backtrace * "\n"}" + puts "o1 = #{o1.inspect}", "json = #{json}", "json_str = #{json.inspect}" + puts "locals = #{local_variables.inspect}" + exit + end + if o1 != o2 + puts "mismatch", "o1 = #{o1.inspect}", "o2 = #{o2.inspect}", + "json = #{json}", "json_str = #{json.inspect}" + puts "locals = #{local_variables.inspect}" + end +end diff --git a/vendor/gems/gems/json_pure-1.2.0/tools/server.rb b/vendor/gems/gems/json_pure-1.2.0/tools/server.rb new file mode 100755 index 00000000..084377fa --- /dev/null +++ b/vendor/gems/gems/json_pure-1.2.0/tools/server.rb @@ -0,0 +1,61 @@ +#!/usr/bin/env ruby + +require 'webrick' +include WEBrick +$:.unshift 'ext' +$:.unshift 'lib' +require 'json' + +class JSONServlet < HTTPServlet::AbstractServlet + @@count = 1 + + def do_GET(req, res) + obj = { + "TIME" => Time.now.strftime("%FT%T"), + "foo" => "Bär", + "bar" => "© ≠ €!", + 'a' => 2, + 'b' => 3.141, + 'COUNT' => @@count += 1, + 'c' => 'c', + 'd' => [ 1, "b", 3.14 ], + 'e' => { 'foo' => 'bar' }, + 'g' => "æ¾æœ¬è¡Œå¼˜", + 'h' => 1000.0, + 'i' => 0.001, + 'j' => "\xf0\xa0\x80\x81", + } + res.body = JSON.generate obj + res['Content-Type'] = "application/json" + end +end + +def create_server(err, dir, port) + dir = File.expand_path(dir) + err.puts "Surf to:", "http://#{Socket.gethostname}:#{port}" + + s = HTTPServer.new( + :Port => port, + :DocumentRoot => dir, + :Logger => WEBrick::Log.new(err), + :AccessLog => [ + [ err, WEBrick::AccessLog::COMMON_LOG_FORMAT ], + [ err, WEBrick::AccessLog::REFERER_LOG_FORMAT ], + [ err, WEBrick::AccessLog::AGENT_LOG_FORMAT ] + ] + ) + s.mount("/json", JSONServlet) + s +end + +default_dir = File.expand_path(File.join(File.dirname(__FILE__), '..', 'data')) +dir = ARGV.shift || default_dir +port = (ARGV.shift || 6666).to_i +s = create_server(STDERR, dir, 6666) +t = Thread.new { s.start } +trap(:INT) do + s.shutdown + t.join + exit +end +sleep diff --git a/vendor/gems/gems/markaby-0.5/README b/vendor/gems/gems/markaby-0.5/README new file mode 100644 index 00000000..8c6bdd34 --- /dev/null +++ 
b/vendor/gems/gems/markaby-0.5/README @@ -0,0 +1,255 @@ += Markaby (Markup as Ruby) + +Markaby is a very short bit of code for writing HTML pages in pure Ruby. +It is an alternative to ERb which weaves the two languages together. +Also a replacement for templating languages which use primitive languages +that blend with HTML. + +== Using Markaby as a Rails plugin + +Write Rails templates in pure Ruby. Example layout: + + html do + head do + title 'Products: ' + action_name + stylesheet_link_tag 'scaffold' + end + + body do + p flash[:notice], :style => "color: green" + + self << content_for_layout + end + end + +== Using Markaby as a Ruby class + +Markaby is flaming easy to call from your Ruby classes. + + require 'markaby' + + mab = Markaby::Builder.new + mab.html do + head { title "Boats.com" } + body do + h1 "Boats.com has great deals" + ul do + li "$49 for a canoe" + li "$39 for a raft" + li "$29 for a huge boot that floats and can fit 5 people" + end + end + end + puts mab.to_s + +Markaby::Builder.new does take two arguments for passing in variables and +a helper object. You can also affix the block right on to the class. + +See Markaby::Builder for all of that. + += A Note About instance_eval + +The Markaby::Builder class is different from the normal Builder class, +since it uses instance_eval when running blocks. This cleans +up the appearance of the Markaby code you write. If instance_eval +was not used, the code would look like this: + + mab = Markaby::Builder.new + mab.html do + mab.head { mab.title "Boats.com" } + mab.body do + mab.h1 "Boats.com has great deals" + end + end + puts mab.to_s + +So, the advantage is the cleanliness of your code. The disadvantage is that +the block will run inside the Markaby::Builder object's scope. This means +that inside these blocks, self will be your Markaby::Builder object. +When you use instance variables in these blocks, they will be instance variables +of the Markaby::Builder object. + +This doesn't effect Rails users, but when used in regular Ruby code, it can +be a bit disorienting. You are recommended to put your Markaby code in a +module where it won't mix with anything. + += The Six Steps of Markaby + +If you dive right into Markaby, it'll probably make good sense, but you're +likely to run into a few kinks. Why not review these six steps and commit +them memory so you can really *know* what you're doing? + +== 1. Element Classes + +Element classes may be added by hooking methods onto container elements: + + div.entry do + h2.entryTitle 'Son of WebPage' + div.entrySection %{by Anthony} + div.entryContent 'Okay, once again, the idea here is ...' + end + +Which results in: + +
+  <div class="entry">
+    <h2 class="entryTitle">Son of WebPage</h2>
+    <div class="entrySection">by Anthony</div>
+    <div class="entryContent">Okay, once again, the idea here is ...</div>
+  </div>
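Putting the element-class syntax together with the plain-Ruby builder shown earlier in this README, a round trip looks roughly like the sketch below; the exact whitespace of the output depends on the builder's :indent setting, so it is shown unindented here.

    require 'markaby'

    mab = Markaby::Builder.new {
      div.entry do
        h2.entryTitle 'Son of WebPage'
        div.entrySection 'by Anthony'
      end
    }
    puts mab.to_s
    # => <div class="entry"><h2 class="entryTitle">Son of WebPage</h2><div class="entrySection">by Anthony</div></div>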
+ +== 2. Element IDs + +IDs may be added by the use of bang methods: + + div.page! { + div.content! { + h1 "A Short Short Saintly Dog" + } + } + +Which results in: + +
+  <div id="page">
+    <div id="content">
+      <h1>A Short Short Saintly Dog</h1>
+    </div>
+  </div>
+ +== 3. Validate Your XHTML 1.0 Output + +If you'd like Markaby to help you assemble valid XHTML documents, +you can use the xhtml_transitional or xhtml_strict +methods in place of the normal html tag. + + xhtml_strict do + head { ... } + body { ... } + end + +This will add the XML instruction and the doctype tag to your document. +Also, a character set meta tag will be placed inside your head +tag. + +Now, since Markaby knows which doctype you're using, it checks a big +list of valid tags and attributes before printing anything. + + >> div :styl => "padding: 10px" do + >> img :src => "samorost.jpg" + >> end + InvalidHtmlError: no such attribute `styl' + +Markaby will also make sure you don't use the same element ID twice! + +== 4. Escape or No Escape? + +Markaby uses a simple convention for escaping stuff: if a string +is an argument, it gets escaped. If the string is in a block, it +doesn't. + +This is handy if you're using something like RedCloth or +RDoc inside an element. Pass the string back through the block +and it'll skip out of escaping. + + div.comment { RedCloth.new(str).to_html } + +But, if we have some raw text that needs escaping, pass it in +as an argument: + + div.comment raw_str + +One caveat: if you have other tags inside a block, the string +passed back will be ignored. + + div.comment { + div.author "_why" + div.says "Torpedoooooes!" + "
<div>Silence.</div>
" + } + +The final div above won't appear in the output. You can't mix +tag modes like that, friend. + +== 5. Auto-stringification + +If you end up using any of your Markaby "tags" as a string, the +tag won't be output. It'll be up to you to add the new string +back into the HTML output. + +This means if you call to_s, you'll get a string back. + + div.title { "Rock Bottom" + span(" by Robert Wyatt").to_s } + +But, when you're adding strings in Ruby, to_s happens automatically. + + div.title { "Rock Bottom" + span(" by Robert Wyatt") } + +Interpolation works fine. + + div.title { "Rock Bottom #{span(" by Robert Wyatt")}" } + +And any other operation you might perform on a string. + + div.menu! \ + ['5.gets', 'bits', 'cult', 'inspect', '-h'].map do |category| + link_to category + end. + join( " | " ) + +== 6. The tag! Method + +If you need to force a tag at any time, call tag! with the +tag name followed by the possible arguments and block. The CssProxy +won't work with this technique. + + tag! :select, :id => "country_list" do + countries.each do |country| + tag! :option, country + end + end + += A Note About Rails Helpers + +When used in Rails templates, the Rails helper object is passed into +Markaby::Builder. When you call helper methods inside Markaby, the output +from those methods will be output to the stream. This is incredibly +handy, since most Rails helpers output HTML tags. + + head do + javascript_include_tag 'prototype' + autodiscovery_link_tag + end + +However, some methods are designed to give back a String which you can use +elsewhere. That's okay! Every method returns a Fragment object, which can +be used as a string. + + p { "Total is: #{number_to_human_size @file_bytes}" } + +Also see the Quick Tour above, specifically the stuff about auto-stringification. + +If for any reason you have trouble with fragments, you can just +call the @helpers object with the method and you'll get +the String back and nothing will be output. + + p { "Total is: #{@helper.number_to_human_size @file_bytes}" } + +Conversely, you may call instance variables from your controller by using +a method and its value will be returned, nothing will be output. + + # Inside imaginary ProductController + def list + @products = Product.find :all + end + + # Inside app/views/product/list.mab + products.each do |product| + p product.title + end + += Credits + +Markaby is a work of immense hope by Tim Fletcher and why the lucky stiff. +Thankyou for giving it a whirl. + +Markaby is inspired by the HTML library within cgi.rb. Hopefully it will +turn around and take some cues. diff --git a/vendor/gems/gems/markaby-0.5/Rakefile b/vendor/gems/gems/markaby-0.5/Rakefile new file mode 100644 index 00000000..299b6804 --- /dev/null +++ b/vendor/gems/gems/markaby-0.5/Rakefile @@ -0,0 +1,20 @@ +require 'rake' +require 'rake/testtask' +require 'rake/clean' +require 'rake/gempackagetask' +require 'rake/rdoctask' +require 'tools/rakehelp' +require 'fileutils' +include FileUtils + +REV = File.read(".svn/entries")[/committed-rev="(\d+)"/, 1] rescue nil +VERS = ENV['VERSION'] || "0.5" + (REV ? 
".#{REV}" : "") + +task :default => [:package] + +setup_tests +setup_rdoc ['README', 'CHANGELOG', 'lib/**/*.rb'] + +summary = "Markup as Ruby, write HTML in your native Ruby tongue" +test_file = "test/test_markaby.rb" +setup_gem("markaby", VERS, "Tim Fletcher and _why", summary, [['builder', '>=2.0.0']], test_file) diff --git a/vendor/gems/gems/markaby-0.5/lib/markaby.rb b/vendor/gems/gems/markaby-0.5/lib/markaby.rb new file mode 100644 index 00000000..5ac76d4a --- /dev/null +++ b/vendor/gems/gems/markaby-0.5/lib/markaby.rb @@ -0,0 +1,35 @@ +# = About lib/markaby.rb +# +# By requiring lib/markaby, you can load Markaby's dependency (the Builder library,) +# as well as the full set of Markaby classes. +# +# For a full list of features and instructions, see the README. +$:.unshift File.expand_path(File.dirname(__FILE__)) + +# Markaby is a module containing all of the great Markaby classes that +# do such an excellent job. +# +# * Markaby::Builder: the class for actually calling the Ruby methods +# which write the HTML. +# * Markaby::CSSProxy: a class which adds element classes and IDs to +# elements when used within Markaby::Builder. +# * Markaby::MetAid: metaprogramming helper methods. +# * Markaby::Tags: lists the roles of various XHTML tags to help Builder +# use these tags as they are intended. +# * Markaby::Template: a class for hooking Markaby into Rails as a +# proper templating language. +module Markaby + VERSION = '0.5' + + class InvalidXhtmlError < Exception; end +end + +unless defined?(Builder) + require 'rubygems' + require 'builder' +end + +require 'markaby/builder' +require 'markaby/cssproxy' +require 'markaby/metaid' +require 'markaby/template' diff --git a/vendor/gems/gems/markaby-0.5/lib/markaby/builder.rb b/vendor/gems/gems/markaby-0.5/lib/markaby/builder.rb new file mode 100644 index 00000000..9dfeab11 --- /dev/null +++ b/vendor/gems/gems/markaby-0.5/lib/markaby/builder.rb @@ -0,0 +1,288 @@ +require 'markaby/tags' + +module Markaby + # The Markaby::Builder class is the central gear in the system. When using + # from Ruby code, this is the only class you need to instantiate directly. + # + # mab = Markaby::Builder.new + # mab.html do + # head { title "Boats.com" } + # body do + # h1 "Boats.com has great deals" + # ul do + # li "$49 for a canoe" + # li "$39 for a raft" + # li "$29 for a huge boot that floats and can fit 5 people" + # end + # end + # end + # puts mab.to_s + # + class Builder + + @@default = { + :indent => 0, + :output_helpers => true, + :output_xml_instruction => true, + :output_meta_tag => true, + :auto_validation => true, + :tagset => Markaby::XHTMLTransitional + } + + def self.set(option, value) + @@default[option] = value + end + + def self.ignored_helpers + @@ignored_helpers ||= [] + end + + def self.ignore_helpers(*helpers) + ignored_helpers.concat helpers + end + + attr_accessor :output_helpers, :tagset + + # Create a Markaby builder object. Pass in a hash of variable assignments to + # +assigns+ which will be available as instance variables inside tag construction + # blocks. If an object is passed in to +helpers+, its methods will be available + # from those same blocks. + # + # Pass in a +block+ to new and the block will be evaluated. 
+ # + # mab = Markaby::Builder.new { + # html do + # body do + # h1 "Matching Mole" + # end + # end + # } + # + def initialize(assigns = {}, helpers = nil, &block) + @streams = [[]] + @assigns = assigns + @elements = {} + + @@default.each do |k, v| + instance_variable_set("@#{k}", @assigns[k] || v) + end + + if helpers.nil? + @helpers = nil + else + @helpers = helpers.dup + for iv in helpers.instance_variables + instance_variable_set(iv, helpers.instance_variable_get(iv)) + end + end + + unless assigns.nil? || assigns.empty? + for iv, val in assigns + instance_variable_set("@#{iv}", val) + unless @helpers.nil? + @helpers.instance_variable_set("@#{iv}", val) + end + end + end + + @builder = ::Builder::XmlMarkup.new(:indent => @indent, :target => @streams.last) + class << @builder + attr_accessor :target, :level + end + + if block + text(capture(&block)) + end + end + + # Returns a string containing the HTML stream. Internally, the stream is stored as an Array. + def to_s + @streams.last.to_s + end + + # Write a +string+ to the HTML stream without escaping it. + def text(string) + @builder << "#{string}" + nil + end + alias_method :<<, :text + alias_method :concat, :text + + # Emulate ERB to satisfy helpers like form_for. + def _erbout; self end + + # Captures the HTML code built inside the +block+. This is done by creating a new + # stream for the builder object, running the block and passing back its stream as a string. + # + # >> Markaby::Builder.new.capture { h1 "TEST"; h2 "CAPTURE ME" } + # => "

<h1>TEST</h1>\n<h2>CAPTURE ME</h2>
\n" + # + def capture(&block) + @streams.push(builder.target = []) + @builder.level += 1 + str = instance_eval(&block) + str = @streams.last.join if @streams.last.any? + @streams.pop + @builder.level -= 1 + builder.target = @streams.last + str + end + + # Create a tag named +tag+. Other than the first argument which is the tag name, + # the arguments are the same as the tags implemented via method_missing. + def tag!(tag, *args, &block) + ele_id = nil + if @auto_validation and @tagset + if !@tagset.tagset.has_key?(tag) + raise InvalidXhtmlError, "no element `#{tag}' for #{tagset.doctype}" + elsif args.last.respond_to?(:to_hash) + attrs = args.last.to_hash + attrs.each do |k, v| + atname = k.to_s.downcase.intern + unless k =~ /:/ or @tagset.tagset[tag].include? atname + raise InvalidXhtmlError, "no attribute `#{k}' on #{tag} elements" + end + if atname == :id + ele_id = v.to_s + if @elements.has_key? ele_id + raise InvalidXhtmlError, "id `#{ele_id}' already used (id's must be unique)." + end + end + end + end + end + if block + str = capture &block + block = proc { text(str) } + end + + f = fragment { @builder.method_missing(tag, *args, &block) } + @elements[ele_id] = f if ele_id + f + end + + # This method is used to intercept calls to helper methods and instance + # variables. Here is the order of interception: + # + # * If +sym+ is a helper method, the helper method is called + # and output to the stream. + # * If +sym+ is a Builder::XmlMarkup method, it is passed on to the builder object. + # * If +sym+ is also the name of an instance variable, the + # value of the instance variable is returned. + # * If +sym+ has come this far and no +tagset+ is found, +sym+ and its arguments are passed to tag! + # * If a tagset is found, though, +NoMethodError+ is raised. + # + # method_missing used to be the lynchpin in Markaby, but it's no longer used to handle + # HTML tags. See html_tag for that. + def method_missing(sym, *args, &block) + if @helpers.respond_to?(sym, true) && !self.class.ignored_helpers.include?(sym) + r = @helpers.send(sym, *args, &block) + if @output_helpers and r.respond_to? :to_str + fragment { @builder << r } + else + r + end + elsif ::Builder::XmlMarkup.instance_methods.include?(sym.to_s) + @builder.__send__(sym, *args, &block) + elsif instance_variables.include?("@#{sym}") + instance_variable_get("@#{sym}") + elsif @tagset.nil? + tag!(sym, *args, &block) + else + raise NoMethodError, "no such method `#{sym}'" + end + end + + # Every HTML tag method goes through an html_tag call. So, calling div is equivalent + # to calling html_tag(:div). All HTML tags in Markaby's list are given generated wrappers + # for this method. + # + # If the @auto_validation setting is on, this method will check for many common mistakes which + # could lead to invalid XHTML. + def html_tag(sym, *args, &block) + if @auto_validation and @tagset.self_closing.include?(sym) and block + raise InvalidXhtmlError, "the `\#{sym}' element is self-closing, please remove the block" + end + if args.empty? and block.nil? 
and not NO_PROXY.include?(sym) + return CssProxy.new do |args, block| + if @tagset.forms.include?(sym) and args.last.respond_to?(:to_hash) and args.last[:id] + args.last[:name] ||= args.last[:id] + end + tag!(sym, *args, &block) + end + end + if not @tagset.self_closing.include?(sym) and args.first.respond_to?(:to_hash) + block ||= proc{} + end + tag!(sym, *args, &block) + end + + XHTMLTransitional.tags.each do |k| + class_eval %{ + def #{k}(*args, &block) + html_tag(#{k.inspect}, *args, &block) + end + } + end + + # Builds a head tag. Adds a meta tag inside with Content-Type + # set to text/html; charset=utf-8. + def head(*args, &block) + tag!(:head, *args) do + tag!(:meta, "http-equiv" => "Content-Type", "content" => "text/html; charset=utf-8") if @output_meta_tag + instance_eval(&block) + end + end + + # Builds an html tag. An XML 1.0 instruction and an XHTML 1.0 Transitional doctype + # are prepended. Also assumes :xmlns => "http://www.w3.org/1999/xhtml", + # :lang => "en". + def xhtml_transitional(&block) + self.tagset = Markaby::XHTMLTransitional + xhtml_html &block + end + + # Builds an html tag with XHTML 1.0 Strict doctype instead. + def xhtml_strict(&block) + self.tagset = Markaby::XHTMLStrict + xhtml_html &block + end + + private + + def xhtml_html(&block) + instruct! if @output_xml_instruction + declare!(:DOCTYPE, :html, :PUBLIC, *tagset.doctype) + tag!(:html, :xmlns => "http://www.w3.org/1999/xhtml", "xml:lang" => "en", :lang => "en", &block) + end + + def fragment + stream = @streams.last + f1 = stream.length + yield + f2 = stream.length - f1 + Fragment.new(stream, f1, f2) + end + + end + + # Every tag method in Markaby returns a Fragment. If any method gets called on the Fragment, + # the tag is removed from the Markaby stream and given back as a string. Usually the fragment + # is never used, though, and the stream stays intact. + # + # For a more practical explanation, check out the README. + class Fragment < ::Builder::BlankSlate + def initialize(s, a, b) + @s, @f1, @f2 = s, a, b + end + def method_missing(*a) + unless @str + @str = @s[@f1, @f2].to_s + @s[@f1, @f2] = [nil] * @f2 + @str + end + @str.send(*a) + end + end + +end diff --git a/vendor/gems/gems/markaby-0.5/lib/markaby/cssproxy.rb b/vendor/gems/gems/markaby-0.5/lib/markaby/cssproxy.rb new file mode 100644 index 00000000..a0e606b8 --- /dev/null +++ b/vendor/gems/gems/markaby-0.5/lib/markaby/cssproxy.rb @@ -0,0 +1,53 @@ +module Markaby + # Class used by Markaby::Builder to store element options. Methods called + # against the CssProxy object are added as element classes or IDs. + # + # See the README for examples. + class CssProxy + + # Creates a CssProxy object. The +opts+ and +block+ passed in are + # stored until the element is created by Builder.tag! + def initialize(opts = {}, &blk) + @opts = opts + @blk = blk + end + + # Adds attributes to an element, for internal use only. For example, if you + # want to write a wrapper which sets a bunch of default attributes for a certain + # tag. Like the default `img' method included with Markaby automatically sets an + # empty alt attribute. + def merge!(opts) + @opts.merge! opts + self + end + + # Adds attributes to an element. Bang methods set the :id attribute. + # Other methods add to the :class attribute. If a block is supplied, + # it is executed with a merged hash (@opts + args). 
+ def method_missing(id_or_class, *args, &blk) + idc = id_or_class.to_s + case idc + when "pass" + when /!$/ + @opts[:id] = $` + else + @opts[:class] = "#{@opts[:class]} #{idc}".strip + end + if args.empty? and blk.nil? + self + else + if args.last.respond_to? :to_hash + @opts.merge!(args.pop.to_hash) + end + args.push @opts + @blk.call(args, blk) + end + end + + def to_str + @blk.call([[@opts]]).to_s + end + alias_method :to_s, :to_str + + end +end diff --git a/vendor/gems/gems/markaby-0.5/lib/markaby/metaid.rb b/vendor/gems/gems/markaby-0.5/lib/markaby/metaid.rb new file mode 100644 index 00000000..87df746b --- /dev/null +++ b/vendor/gems/gems/markaby-0.5/lib/markaby/metaid.rb @@ -0,0 +1,16 @@ +# metaprogramming assistant -- metaid.rb +class Object + # The hidden singleton lurks behind everyone + def metaclass; class << self; self; end; end + def meta_eval &blk; metaclass.instance_eval &blk; end + + # Adds methods to a metaclass + def meta_def name, &blk + meta_eval { define_method name, &blk } + end + + # Defines an instance method within a class + def class_def name, &blk + class_eval { define_method name, &blk } + end +end diff --git a/vendor/gems/gems/markaby-0.5/lib/markaby/rails.rb b/vendor/gems/gems/markaby-0.5/lib/markaby/rails.rb new file mode 100644 index 00000000..05c6782f --- /dev/null +++ b/vendor/gems/gems/markaby-0.5/lib/markaby/rails.rb @@ -0,0 +1,46 @@ +module Markaby + + # Markaby helpers for Rails. + module ActionControllerHelpers + # Returns a string of HTML built from the attached +block+. Any +options+ are + # passed into the render method. + # + # Use this method in your controllers to output Markaby directly from inside. + def render_markaby(options = {}, &block) + render options.merge({ :text => Builder.new({}, self, &block).to_s }) + end + end + + class ActionViewTemplateHandler + def initialize(action_view) + @action_view = action_view + end + def render(template, local_assigns = {}) + Template.new(template).render(@action_view.assigns.merge(local_assigns), @action_view) + end + end + + class Builder + # Emulate ERB to satisfy helpers like form_for. + def _erbout; self end + + # Content_for will store the given block in an instance variable for later use + # in another template or in the layout. + # + # The name of the instance variable is content_for_ to stay consistent + # with @content_for_layout which is used by ActionView's layouts. + # + # Example: + # + # content_for("header") do + # h1 "Half Shark and Half Lion" + # end + # + # If used several times, the variable will contain all the parts concatenated. + def content_for(name, &block) + @helpers.assigns["content_for_#{name}"] = + eval("@content_for_#{name} = (@content_for_#{name} || '') + capture(&block)") + end + end + +end diff --git a/vendor/gems/gems/markaby-0.5/lib/markaby/tags.rb b/vendor/gems/gems/markaby-0.5/lib/markaby/tags.rb new file mode 100644 index 00000000..3d0315ba --- /dev/null +++ b/vendor/gems/gems/markaby-0.5/lib/markaby/tags.rb @@ -0,0 +1,165 @@ +module Markaby + + FORM_TAGS = [ :form, :input, :select, :textarea ] + SELF_CLOSING_TAGS = [ :base, :meta, :link, :hr, :br, :param, :img, :area, :input, :col ] + NO_PROXY = [ :hr, :br ] + + # Common sets of attributes. 
+ AttrCore = [:id, :class, :style, :title] + AttrI18n = [:lang, 'xml:lang'.intern, :dir] + AttrEvents = [:onclick, :ondblclick, :onmousedown, :onmouseup, :onmouseover, :onmousemove, + :onmouseout, :onkeypress, :onkeydown, :onkeyup] + AttrFocus = [:accesskey, :tabindex, :onfocus, :onblur] + AttrHAlign = [:align, :char, :charoff] + AttrVAlign = [:valign] + Attrs = AttrCore + AttrI18n + AttrEvents + + # All the tags and attributes from XHTML 1.0 Strict + class XHTMLStrict + class << self + attr_accessor :tags, :tagset, :forms, :self_closing, :doctype + end + @doctype = ["-//W3C//DTD XHTML 1.0 Strict//EN", "DTD/xhtml1-strict.dtd"] + @tagset = { + :html => AttrI18n + [:id, :xmlns], + :head => AttrI18n + [:id, :profile], + :title => AttrI18n + [:id], + :base => [:href, :id], + :meta => AttrI18n + [:id, :http, :name, :content, :scheme, 'http-equiv'.intern], + :link => Attrs + [:charset, :href, :hreflang, :type, :rel, :rev, :media], + :style => AttrI18n + [:id, :type, :media, :title, 'xml:space'.intern], + :script => [:id, :charset, :type, :src, :defer, 'xml:space'.intern], + :noscript => Attrs, + :body => Attrs + [:onload, :onunload], + :div => Attrs, + :p => Attrs, + :ul => Attrs, + :ol => Attrs, + :li => Attrs, + :dl => Attrs, + :dt => Attrs, + :dd => Attrs, + :address => Attrs, + :hr => Attrs, + :pre => Attrs + ['xml:space'.intern], + :blockquote => Attrs + [:cite], + :ins => Attrs + [:cite, :datetime], + :del => Attrs + [:cite, :datetime], + :a => Attrs + AttrFocus + [:charset, :type, :name, :href, :hreflang, :rel, :rev, :shape, :coords], + :span => Attrs, + :bdo => AttrCore + AttrEvents + [:lang, 'xml:lang'.intern, :dir], + :br => AttrCore, + :em => Attrs, + :strong => Attrs, + :dfn => Attrs, + :code => Attrs, + :samp => Attrs, + :kbd => Attrs, + :var => Attrs, + :cite => Attrs, + :abbr => Attrs, + :acronym => Attrs, + :q => Attrs + [:cite], + :sub => Attrs, + :sup => Attrs, + :tt => Attrs, + :i => Attrs, + :b => Attrs, + :big => Attrs, + :small => Attrs, + :object => Attrs + [:declare, :classid, :codebase, :data, :type, :codetype, :archive, :standby, :height, :width, :usemap, :name, :tabindex], + :param => [:id, :name, :value, :valuetype, :type], + :img => Attrs + [:src, :alt, :longdesc, :height, :width, :usemap, :ismap], + :map => AttrI18n + AttrEvents + [:id, :class, :style, :title, :name], + :area => Attrs + AttrFocus + [:shape, :coords, :href, :nohref, :alt], + :form => Attrs + [:action, :method, :enctype, :onsubmit, :onreset, :accept, :accept], + :label => Attrs + [:for, :accesskey, :onfocus, :onblur], + :input => Attrs + AttrFocus + [:type, :name, :value, :checked, :disabled, :readonly, :size, :maxlength, :src, :alt, :usemap, :onselect, :onchange, :accept], + :select => Attrs + [:name, :size, :multiple, :disabled, :tabindex, :onfocus, :onblur, :onchange], + :optgroup => Attrs + [:disabled, :label], + :option => Attrs + [:selected, :disabled, :label, :value], + :textarea => Attrs + AttrFocus + [:name, :rows, :cols, :disabled, :readonly, :onselect, :onchange], + :fieldset => Attrs, + :legend => Attrs + [:accesskey], + :button => Attrs + AttrFocus + [:name, :value, :type, :disabled], + :table => Attrs + [:summary, :width, :border, :frame, :rules, :cellspacing, :cellpadding], + :caption => Attrs, + :colgroup => Attrs + AttrHAlign + AttrVAlign + [:span, :width], + :col => Attrs + AttrHAlign + AttrVAlign + [:span, :width], + :thead => Attrs + AttrHAlign + AttrVAlign, + :tfoot => Attrs + AttrHAlign + AttrVAlign, + :tbody => Attrs + AttrHAlign + AttrVAlign, + :tr => Attrs + AttrHAlign + 
AttrVAlign, + :th => Attrs + AttrHAlign + AttrVAlign + [:abbr, :axis, :headers, :scope, :rowspan, :colspan], + :td => Attrs + AttrHAlign + AttrVAlign + [:abbr, :axis, :headers, :scope, :rowspan, :colspan], + :h1 => Attrs, + :h2 => Attrs, + :h3 => Attrs, + :h4 => Attrs, + :h5 => Attrs, + :h6 => Attrs + } + + @tags = @tagset.keys + @forms = @tags & FORM_TAGS + @self_closing = @tags & SELF_CLOSING_TAGS + end + + # Additional tags found in XHTML 1.0 Transitional + class XHTMLTransitional + class << self + attr_accessor :tags, :tagset, :forms, :self_closing, :doctype + end + @doctype = ["-//W3C//DTD XHTML 1.0 Transitional//EN", "DTD/xhtml1-transitional.dtd"] + @tagset = XHTMLStrict.tagset.merge \ + :strike => Attrs, + :center => Attrs, + :dir => Attrs + [:compact], + :noframes => Attrs, + :basefont => [:id, :size, :color, :face], + :u => Attrs, + :menu => Attrs + [:compact], + :iframe => AttrCore + [:longdesc, :name, :src, :frameborder, :marginwidth, :marginheight, :scrolling, :align, :height, :width], + :font => AttrCore + AttrI18n + [:size, :color, :face], + :s => Attrs, + :applet => AttrCore + [:codebase, :archive, :code, :object, :alt, :name, :width, :height, :align, :hspace, :vspace], + :isindex => AttrCore + AttrI18n + [:prompt] + + # Additional attributes found in XHTML 1.0 Transitional + { :script => [:language], + :a => [:target], + :td => [:bgcolor, :nowrap, :width, :height], + :p => [:align], + :h5 => [:align], + :h3 => [:align], + :li => [:type, :value], + :div => [:align], + :pre => [:width], + :body => [:background, :bgcolor, :text, :link, :vlink, :alink], + :ol => [:type, :compact, :start], + :h4 => [:align], + :h2 => [:align], + :object => [:align, :border, :hspace, :vspace], + :img => [:name, :align, :border, :hspace, :vspace], + :link => [:target], + :legend => [:align], + :dl => [:compact], + :input => [:align], + :h6 => [:align], + :hr => [:align, :noshade, :size, :width], + :base => [:target], + :ul => [:type, :compact], + :br => [:clear], + :form => [:name, :target], + :area => [:target], + :h1 => [:align] + }.each do |k, v| + @tagset[k] += v + end + + @tags = @tagset.keys + @forms = @tags & FORM_TAGS + @self_closing = @tags & SELF_CLOSING_TAGS + end + +end diff --git a/vendor/gems/gems/markaby-0.5/lib/markaby/template.rb b/vendor/gems/gems/markaby-0.5/lib/markaby/template.rb new file mode 100644 index 00000000..681ec7c9 --- /dev/null +++ b/vendor/gems/gems/markaby-0.5/lib/markaby/template.rb @@ -0,0 +1,12 @@ +module Markaby + class Template + def initialize(template) + @template = template + end + def render(*args) + output = Builder.new(*args) + output.instance_eval @template + return output.to_s + end + end +end diff --git a/vendor/gems/gems/markaby-0.5/setup.rb b/vendor/gems/gems/markaby-0.5/setup.rb new file mode 100644 index 00000000..8f2397ad --- /dev/null +++ b/vendor/gems/gems/markaby-0.5/setup.rb @@ -0,0 +1,1551 @@ +# +# setup.rb +# +# Copyright (c) 2000-2005 Minero Aoki +# +# This program is free software. +# You can distribute/modify this program under the terms of +# the GNU LGPL, Lesser General Public License version 2.1. +# + +unless Enumerable.method_defined?(:map) # Ruby 1.4.6 + module Enumerable + alias map collect + end +end + +unless File.respond_to?(:read) # Ruby 1.6 + def File.read(fname) + open(fname) {|f| + return f.read + } + end +end + +unless Errno.const_defined?(:ENOTEMPTY) # Windows? + module Errno + class ENOTEMPTY + # We do not raise this exception, implementation is not needed. 
+ end + end +end + +def File.binread(fname) + open(fname, 'rb') {|f| + return f.read + } +end + +# for corrupted Windows' stat(2) +def File.dir?(path) + File.directory?((path[-1,1] == '/') ? path : path + '/') +end + + +class ConfigTable + + include Enumerable + + def initialize(rbconfig) + @rbconfig = rbconfig + @items = [] + @table = {} + # options + @install_prefix = nil + @config_opt = nil + @verbose = true + @no_harm = false + @libsrc_pattern = '*.rb' + end + + attr_accessor :install_prefix + attr_accessor :config_opt + + attr_writer :verbose + + def verbose? + @verbose + end + + attr_writer :no_harm + + def no_harm? + @no_harm + end + + attr_accessor :libsrc_pattern + + def [](key) + lookup(key).resolve(self) + end + + def []=(key, val) + lookup(key).set val + end + + def names + @items.map {|i| i.name } + end + + def each(&block) + @items.each(&block) + end + + def key?(name) + @table.key?(name) + end + + def lookup(name) + @table[name] or setup_rb_error "no such config item: #{name}" + end + + def add(item) + @items.push item + @table[item.name] = item + end + + def remove(name) + item = lookup(name) + @items.delete_if {|i| i.name == name } + @table.delete_if {|name, i| i.name == name } + item + end + + def load_script(path, inst = nil) + if File.file?(path) + MetaConfigEnvironment.new(self, inst).instance_eval File.read(path), path + end + end + + def savefile + '.config' + end + + def load_savefile + begin + File.foreach(savefile()) do |line| + k, v = *line.split(/=/, 2) + self[k] = v.strip + end + rescue Errno::ENOENT + setup_rb_error $!.message + "\n#{File.basename($0)} config first" + end + end + + def save + @items.each {|i| i.value } + File.open(savefile(), 'w') {|f| + @items.each do |i| + f.printf "%s=%s\n", i.name, i.value if i.value? and i.value + end + } + end + + def load_standard_entries + standard_entries(@rbconfig).each do |ent| + add ent + end + end + + def standard_entries(rbconfig) + c = rbconfig + + rubypath = c['bindir'] + '/' + c['ruby_install_name'] + + major = c['MAJOR'].to_i + minor = c['MINOR'].to_i + teeny = c['TEENY'].to_i + version = "#{major}.#{minor}" + + # ruby ver. >= 1.4.4? 
+ newpath_p = ((major >= 2) or + ((major == 1) and + ((minor >= 5) or + ((minor == 4) and (teeny >= 4))))) + + if c['rubylibdir'] + # V > 1.6.3 + libruby = "#{c['prefix']}/lib/ruby" + librubyver = c['rubylibdir'] + librubyverarch = c['archdir'] + siteruby = c['sitedir'] + siterubyver = c['sitelibdir'] + siterubyverarch = c['sitearchdir'] + elsif newpath_p + # 1.4.4 <= V <= 1.6.3 + libruby = "#{c['prefix']}/lib/ruby" + librubyver = "#{c['prefix']}/lib/ruby/#{version}" + librubyverarch = "#{c['prefix']}/lib/ruby/#{version}/#{c['arch']}" + siteruby = c['sitedir'] + siterubyver = "$siteruby/#{version}" + siterubyverarch = "$siterubyver/#{c['arch']}" + else + # V < 1.4.4 + libruby = "#{c['prefix']}/lib/ruby" + librubyver = "#{c['prefix']}/lib/ruby/#{version}" + librubyverarch = "#{c['prefix']}/lib/ruby/#{version}/#{c['arch']}" + siteruby = "#{c['prefix']}/lib/ruby/#{version}/site_ruby" + siterubyver = siteruby + siterubyverarch = "$siterubyver/#{c['arch']}" + end + parameterize = lambda {|path| + path.sub(/\A#{Regexp.quote(c['prefix'])}/, '$prefix') + } + + if arg = c['configure_args'].split.detect {|arg| /--with-make-prog=/ =~ arg } + makeprog = arg.sub(/'/, '').split(/=/, 2)[1] + else + makeprog = 'make' + end + + [ + ExecItem.new('installdirs', 'std/site/home', + 'std: install under libruby; site: install under site_ruby; home: install under $HOME')\ + {|val, table| + case val + when 'std' + table['rbdir'] = '$librubyver' + table['sodir'] = '$librubyverarch' + when 'site' + table['rbdir'] = '$siterubyver' + table['sodir'] = '$siterubyverarch' + when 'home' + setup_rb_error '$HOME was not set' unless ENV['HOME'] + table['prefix'] = ENV['HOME'] + table['rbdir'] = '$libdir/ruby' + table['sodir'] = '$libdir/ruby' + end + }, + PathItem.new('prefix', 'path', c['prefix'], + 'path prefix of target environment'), + PathItem.new('bindir', 'path', parameterize.call(c['bindir']), + 'the directory for commands'), + PathItem.new('libdir', 'path', parameterize.call(c['libdir']), + 'the directory for libraries'), + PathItem.new('datadir', 'path', parameterize.call(c['datadir']), + 'the directory for shared data'), + PathItem.new('mandir', 'path', parameterize.call(c['mandir']), + 'the directory for man pages'), + PathItem.new('sysconfdir', 'path', parameterize.call(c['sysconfdir']), + 'the directory for system configuration files'), + PathItem.new('localstatedir', 'path', parameterize.call(c['localstatedir']), + 'the directory for local state data'), + PathItem.new('libruby', 'path', libruby, + 'the directory for ruby libraries'), + PathItem.new('librubyver', 'path', librubyver, + 'the directory for standard ruby libraries'), + PathItem.new('librubyverarch', 'path', librubyverarch, + 'the directory for standard ruby extensions'), + PathItem.new('siteruby', 'path', siteruby, + 'the directory for version-independent aux ruby libraries'), + PathItem.new('siterubyver', 'path', siterubyver, + 'the directory for aux ruby libraries'), + PathItem.new('siterubyverarch', 'path', siterubyverarch, + 'the directory for aux ruby binaries'), + PathItem.new('rbdir', 'path', '$siterubyver', + 'the directory for ruby scripts'), + PathItem.new('sodir', 'path', '$siterubyverarch', + 'the directory for ruby extentions'), + PathItem.new('rubypath', 'path', rubypath, + 'the path to set to #! 
line'), + ProgramItem.new('rubyprog', 'name', rubypath, + 'the ruby program using for installation'), + ProgramItem.new('makeprog', 'name', makeprog, + 'the make program to compile ruby extentions'), + SelectItem.new('shebang', 'all/ruby/never', 'ruby', + 'shebang line (#!) editing mode'), + BoolItem.new('without-ext', 'yes/no', 'no', + 'does not compile/install ruby extentions') + ] + end + private :standard_entries + + def load_multipackage_entries + multipackage_entries().each do |ent| + add ent + end + end + + def multipackage_entries + [ + PackageSelectionItem.new('with', 'name,name...', '', 'ALL', + 'package names that you want to install'), + PackageSelectionItem.new('without', 'name,name...', '', 'NONE', + 'package names that you do not want to install') + ] + end + private :multipackage_entries + + ALIASES = { + 'std-ruby' => 'librubyver', + 'stdruby' => 'librubyver', + 'rubylibdir' => 'librubyver', + 'archdir' => 'librubyverarch', + 'site-ruby-common' => 'siteruby', # For backward compatibility + 'site-ruby' => 'siterubyver', # For backward compatibility + 'bin-dir' => 'bindir', + 'bin-dir' => 'bindir', + 'rb-dir' => 'rbdir', + 'so-dir' => 'sodir', + 'data-dir' => 'datadir', + 'ruby-path' => 'rubypath', + 'ruby-prog' => 'rubyprog', + 'ruby' => 'rubyprog', + 'make-prog' => 'makeprog', + 'make' => 'makeprog' + } + + def fixup + ALIASES.each do |ali, name| + @table[ali] = @table[name] + end + @items.freeze + @table.freeze + @options_re = /\A--(#{@table.keys.join('|')})(?:=(.*))?\z/ + end + + def parse_opt(opt) + m = @options_re.match(opt) or setup_rb_error "config: unknown option #{opt}" + m.to_a[1,2] + end + + def dllext + @rbconfig['DLEXT'] + end + + def value_config?(name) + lookup(name).value? + end + + class Item + def initialize(name, template, default, desc) + @name = name.freeze + @template = template + @value = default + @default = default + @description = desc + end + + attr_reader :name + attr_reader :description + + attr_accessor :default + alias help_default default + + def help_opt + "--#{@name}=#{@template}" + end + + def value? + true + end + + def value + @value + end + + def resolve(table) + @value.gsub(%r<\$([^/]+)>) { table[$1] } + end + + def set(val) + @value = check(val) + end + + private + + def check(val) + setup_rb_error "config: --#{name} requires argument" unless val + val + end + end + + class BoolItem < Item + def config_type + 'bool' + end + + def help_opt + "--#{@name}" + end + + private + + def check(val) + return 'yes' unless val + unless /\A(y(es)?|n(o)?|t(rue)?|f(alse))\z/i =~ val + setup_rb_error "config: --#{@name} accepts only yes/no for argument" + end + (/\Ay(es)?|\At(rue)/i =~ value) ? 'yes' : 'no' + end + end + + class PathItem < Item + def config_type + 'path' + end + + private + + def check(path) + setup_rb_error "config: --#{@name} requires argument" unless path + path[0,1] == '$' ? path : File.expand_path(path) + end + end + + class ProgramItem < Item + def config_type + 'program' + end + end + + class SelectItem < Item + def initialize(name, selection, default, desc) + super + @ok = selection.split('/') + end + + def config_type + 'select' + end + + private + + def check(val) + unless @ok.include?(val.strip) + setup_rb_error "config: use --#{@name}=#{@template} (#{val})" + end + val.strip + end + end + + class ExecItem < Item + def initialize(name, selection, desc, &block) + super name, selection, nil, desc + @ok = selection.split('/') + @action = block + end + + def config_type + 'exec' + end + + def value? 
+ false + end + + def resolve(table) + setup_rb_error "$#{name()} wrongly used as option value" + end + + undef set + + def evaluate(val, table) + v = val.strip.downcase + unless @ok.include?(v) + setup_rb_error "invalid option --#{@name}=#{val} (use #{@template})" + end + @action.call v, table + end + end + + class PackageSelectionItem < Item + def initialize(name, template, default, help_default, desc) + super name, template, default, desc + @help_default = help_default + end + + attr_reader :help_default + + def config_type + 'package' + end + + private + + def check(val) + unless File.dir?("packages/#{val}") + setup_rb_error "config: no such package: #{val}" + end + val + end + end + + class MetaConfigEnvironment + def intiailize(config, installer) + @config = config + @installer = installer + end + + def config_names + @config.names + end + + def config?(name) + @config.key?(name) + end + + def bool_config?(name) + @config.lookup(name).config_type == 'bool' + end + + def path_config?(name) + @config.lookup(name).config_type == 'path' + end + + def value_config?(name) + @config.lookup(name).config_type != 'exec' + end + + def add_config(item) + @config.add item + end + + def add_bool_config(name, default, desc) + @config.add BoolItem.new(name, 'yes/no', default ? 'yes' : 'no', desc) + end + + def add_path_config(name, default, desc) + @config.add PathItem.new(name, 'path', default, desc) + end + + def set_config_default(name, default) + @config.lookup(name).default = default + end + + def remove_config(name) + @config.remove(name) + end + + # For only multipackage + def packages + raise '[setup.rb fatal] multi-package metaconfig API packages() called for single-package; contact application package vendor' unless @installer + @installer.packages + end + + # For only multipackage + def declare_packages(list) + raise '[setup.rb fatal] multi-package metaconfig API declare_packages() called for single-package; contact application package vendor' unless @installer + @installer.packages = list + end + end + +end # class ConfigTable + + +# This module requires: #verbose?, #no_harm? +module FileOperations + + def mkdir_p(dirname, prefix = nil) + dirname = prefix + File.expand_path(dirname) if prefix + $stderr.puts "mkdir -p #{dirname}" if verbose? + return if no_harm? + + # Does not check '/', it's too abnormal. + dirs = File.expand_path(dirname).split(%r<(?=/)>) + if /\A[a-z]:\z/i =~ dirs[0] + disk = dirs.shift + dirs[0] = disk + dirs[0] + end + dirs.each_index do |idx| + path = dirs[0..idx].join('') + Dir.mkdir path unless File.dir?(path) + end + end + + def rm_f(path) + $stderr.puts "rm -f #{path}" if verbose? + return if no_harm? + force_remove_file path + end + + def rm_rf(path) + $stderr.puts "rm -rf #{path}" if verbose? + return if no_harm? + remove_tree path + end + + def remove_tree(path) + if File.symlink?(path) + remove_file path + elsif File.dir?(path) + remove_tree0 path + else + force_remove_file path + end + end + + def remove_tree0(path) + Dir.foreach(path) do |ent| + next if ent == '.' + next if ent == '..' 
+ entpath = "#{path}/#{ent}" + if File.symlink?(entpath) + remove_file entpath + elsif File.dir?(entpath) + remove_tree0 entpath + else + force_remove_file entpath + end + end + begin + Dir.rmdir path + rescue Errno::ENOTEMPTY + # directory may not be empty + end + end + + def move_file(src, dest) + force_remove_file dest + begin + File.rename src, dest + rescue + File.open(dest, 'wb') {|f| + f.write File.binread(src) + } + File.chmod File.stat(src).mode, dest + File.unlink src + end + end + + def force_remove_file(path) + begin + remove_file path + rescue + end + end + + def remove_file(path) + File.chmod 0777, path + File.unlink path + end + + def install(from, dest, mode, prefix = nil) + $stderr.puts "install #{from} #{dest}" if verbose? + return if no_harm? + + realdest = prefix ? prefix + File.expand_path(dest) : dest + realdest = File.join(realdest, File.basename(from)) if File.dir?(realdest) + str = File.binread(from) + if diff?(str, realdest) + verbose_off { + rm_f realdest if File.exist?(realdest) + } + File.open(realdest, 'wb') {|f| + f.write str + } + File.chmod mode, realdest + + File.open("#{objdir_root()}/InstalledFiles", 'a') {|f| + if prefix + f.puts realdest.sub(prefix, '') + else + f.puts realdest + end + } + end + end + + def diff?(new_content, path) + return true unless File.exist?(path) + new_content != File.binread(path) + end + + def command(*args) + $stderr.puts args.join(' ') if verbose? + system(*args) or raise RuntimeError, + "system(#{args.map{|a| a.inspect }.join(' ')}) failed" + end + + def ruby(*args) + command config('rubyprog'), *args + end + + def make(task = nil) + command(*[config('makeprog'), task].compact) + end + + def extdir?(dir) + File.exist?("#{dir}/MANIFEST") or File.exist?("#{dir}/extconf.rb") + end + + def files_of(dir) + Dir.open(dir) {|d| + return d.select {|ent| File.file?("#{dir}/#{ent}") } + } + end + + DIR_REJECT = %w( . .. CVS SCCS RCS CVS.adm .svn ) + + def directories_of(dir) + Dir.open(dir) {|d| + return d.select {|ent| File.dir?("#{dir}/#{ent}") } - DIR_REJECT + } + end + +end + + +# This module requires: #srcdir_root, #objdir_root, #relpath +module HookScriptAPI + + def get_config(key) + @config[key] + end + + alias config get_config + + # obsolete: use metaconfig to change configuration + def set_config(key, val) + @config[key] = val + end + + # + # srcdir/objdir (works only in the package directory) + # + + def curr_srcdir + "#{srcdir_root()}/#{relpath()}" + end + + def curr_objdir + "#{objdir_root()}/#{relpath()}" + end + + def srcfile(path) + "#{curr_srcdir()}/#{path}" + end + + def srcexist?(path) + File.exist?(srcfile(path)) + end + + def srcdirectory?(path) + File.dir?(srcfile(path)) + end + + def srcfile?(path) + File.file?(srcfile(path)) + end + + def srcentries(path = '.') + Dir.open("#{curr_srcdir()}/#{path}") {|d| + return d.to_a - %w(. ..) 
+ } + end + + def srcfiles(path = '.') + srcentries(path).select {|fname| + File.file?(File.join(curr_srcdir(), path, fname)) + } + end + + def srcdirectories(path = '.') + srcentries(path).select {|fname| + File.dir?(File.join(curr_srcdir(), path, fname)) + } + end + +end + + +class ToplevelInstaller + + Version = '3.4.0' + Copyright = 'Copyright (c) 2000-2005 Minero Aoki' + + TASKS = [ + [ 'all', 'do config, setup, then install' ], + [ 'config', 'saves your configurations' ], + [ 'show', 'shows current configuration' ], + [ 'setup', 'compiles ruby extentions and others' ], + [ 'install', 'installs files' ], + [ 'test', 'run all tests in test/' ], + [ 'clean', "does `make clean' for each extention" ], + [ 'distclean',"does `make distclean' for each extention" ] + ] + + def ToplevelInstaller.invoke + config = ConfigTable.new(load_rbconfig()) + config.load_standard_entries + config.load_multipackage_entries if multipackage? + config.fixup + klass = (multipackage?() ? ToplevelInstallerMulti : ToplevelInstaller) + klass.new(File.dirname($0), config).invoke + end + + def ToplevelInstaller.multipackage? + File.dir?(File.dirname($0) + '/packages') + end + + def ToplevelInstaller.load_rbconfig + if arg = ARGV.detect {|arg| /\A--rbconfig=/ =~ arg } + ARGV.delete(arg) + load File.expand_path(arg.split(/=/, 2)[1]) + $".push 'rbconfig.rb' + else + require 'rbconfig' + end + ::Config::CONFIG + end + + def initialize(ardir_root, config) + @ardir = File.expand_path(ardir_root) + @config = config + # cache + @valid_task_re = nil + end + + def config(key) + @config[key] + end + + def inspect + "#<#{self.class} #{__id__()}>" + end + + def invoke + run_metaconfigs + case task = parsearg_global() + when nil, 'all' + parsearg_config + init_installers + exec_config + exec_setup + exec_install + else + case task + when 'config', 'test' + ; + when 'clean', 'distclean' + @config.load_savefile if File.exist?(@config.savefile) + else + @config.load_savefile + end + __send__ "parsearg_#{task}" + init_installers + __send__ "exec_#{task}" + end + end + + def run_metaconfigs + @config.load_script "#{@ardir}/metaconfig" + end + + def init_installers + @installer = Installer.new(@config, @ardir, File.expand_path('.')) + end + + # + # Hook Script API bases + # + + def srcdir_root + @ardir + end + + def objdir_root + '.' + end + + def relpath + '.' + end + + # + # Option Parsing + # + + def parsearg_global + while arg = ARGV.shift + case arg + when /\A\w+\z/ + setup_rb_error "invalid task: #{arg}" unless valid_task?(arg) + return arg + when '-q', '--quiet' + @config.verbose = false + when '--verbose' + @config.verbose = true + when '--help' + print_usage $stdout + exit 0 + when '--version' + puts "#{File.basename($0)} version #{Version}" + exit 0 + when '--copyright' + puts Copyright + exit 0 + else + setup_rb_error "unknown global option '#{arg}'" + end + end + nil + end + + def valid_task?(t) + valid_task_re() =~ t + end + + def valid_task_re + @valid_task_re ||= /\A(?:#{TASKS.map {|task,desc| task }.join('|')})\z/ + end + + def parsearg_no_options + unless ARGV.empty? 
+ setup_rb_error "#{task}: unknown options: #{ARGV.join(' ')}" + end + end + + alias parsearg_show parsearg_no_options + alias parsearg_setup parsearg_no_options + alias parsearg_test parsearg_no_options + alias parsearg_clean parsearg_no_options + alias parsearg_distclean parsearg_no_options + + def parsearg_config + evalopt = [] + set = [] + @config.config_opt = [] + while i = ARGV.shift + if /\A--?\z/ =~ i + @config.config_opt = ARGV.dup + break + end + name, value = *@config.parse_opt(i) + if @config.value_config?(name) + @config[name] = value + else + evalopt.push [name, value] + end + set.push name + end + evalopt.each do |name, value| + @config.lookup(name).evaluate value, @config + end + # Check if configuration is valid + set.each do |n| + @config[n] if @config.value_config?(n) + end + end + + def parsearg_install + @config.no_harm = false + @config.install_prefix = '' + while a = ARGV.shift + case a + when '--no-harm' + @config.no_harm = true + when /\A--prefix=/ + path = a.split(/=/, 2)[1] + path = File.expand_path(path) unless path[0,1] == '/' + @config.install_prefix = path + else + setup_rb_error "install: unknown option #{a}" + end + end + end + + def print_usage(out) + out.puts 'Typical Installation Procedure:' + out.puts " $ ruby #{File.basename $0} config" + out.puts " $ ruby #{File.basename $0} setup" + out.puts " # ruby #{File.basename $0} install (may require root privilege)" + out.puts + out.puts 'Detailed Usage:' + out.puts " ruby #{File.basename $0} " + out.puts " ruby #{File.basename $0} [] []" + + fmt = " %-24s %s\n" + out.puts + out.puts 'Global options:' + out.printf fmt, '-q,--quiet', 'suppress message outputs' + out.printf fmt, ' --verbose', 'output messages verbosely' + out.printf fmt, ' --help', 'print this message' + out.printf fmt, ' --version', 'print version and quit' + out.printf fmt, ' --copyright', 'print copyright and quit' + out.puts + out.puts 'Tasks:' + TASKS.each do |name, desc| + out.printf fmt, name, desc + end + + fmt = " %-24s %s [%s]\n" + out.puts + out.puts 'Options for CONFIG or ALL:' + @config.each do |item| + out.printf fmt, item.help_opt, item.description, item.help_default + end + out.printf fmt, '--rbconfig=path', 'rbconfig.rb to load',"running ruby's" + out.puts + out.puts 'Options for INSTALL:' + out.printf fmt, '--no-harm', 'only display what to do if given', 'off' + out.printf fmt, '--prefix=path', 'install path prefix', '' + out.puts + end + + # + # Task Handlers + # + + def exec_config + @installer.exec_config + @config.save # must be final + end + + def exec_setup + @installer.exec_setup + end + + def exec_install + @installer.exec_install + end + + def exec_test + @installer.exec_test + end + + def exec_show + @config.each do |i| + printf "%-20s %s\n", i.name, i.value if i.value? + end + end + + def exec_clean + @installer.exec_clean + end + + def exec_distclean + @installer.exec_distclean + end + +end # class ToplevelInstaller + + +class ToplevelInstallerMulti < ToplevelInstaller + + include FileOperations + + def initialize(ardir_root, config) + super + @packages = directories_of("#{@ardir}/packages") + raise 'no package exists' if @packages.empty? + @root_installer = Installer.new(@config, @ardir, File.expand_path('.')) + end + + def run_metaconfigs + @config.load_script "#{@ardir}/metaconfig", self + @packages.each do |name| + @config.load_script "#{@ardir}/packages/#{name}/metaconfig" + end + end + + attr_reader :packages + + def packages=(list) + raise 'package list is empty' if list.empty? 
+ list.each do |name| + raise "directory packages/#{name} does not exist"\ + unless File.dir?("#{@ardir}/packages/#{name}") + end + @packages = list + end + + def init_installers + @installers = {} + @packages.each do |pack| + @installers[pack] = Installer.new(@config, + "#{@ardir}/packages/#{pack}", + "packages/#{pack}") + end + with = extract_selection(config('with')) + without = extract_selection(config('without')) + @selected = @installers.keys.select {|name| + (with.empty? or with.include?(name)) \ + and not without.include?(name) + } + end + + def extract_selection(list) + a = list.split(/,/) + a.each do |name| + setup_rb_error "no such package: #{name}" unless @installers.key?(name) + end + a + end + + def print_usage(f) + super + f.puts 'Inluded packages:' + f.puts ' ' + @packages.sort.join(' ') + f.puts + end + + # + # Task Handlers + # + + def exec_config + run_hook 'pre-config' + each_selected_installers {|inst| inst.exec_config } + run_hook 'post-config' + @config.save # must be final + end + + def exec_setup + run_hook 'pre-setup' + each_selected_installers {|inst| inst.exec_setup } + run_hook 'post-setup' + end + + def exec_install + run_hook 'pre-install' + each_selected_installers {|inst| inst.exec_install } + run_hook 'post-install' + end + + def exec_test + run_hook 'pre-test' + each_selected_installers {|inst| inst.exec_test } + run_hook 'post-test' + end + + def exec_clean + rm_f @config.savefile + run_hook 'pre-clean' + each_selected_installers {|inst| inst.exec_clean } + run_hook 'post-clean' + end + + def exec_distclean + rm_f @config.savefile + run_hook 'pre-distclean' + each_selected_installers {|inst| inst.exec_distclean } + run_hook 'post-distclean' + end + + # + # lib + # + + def each_selected_installers + Dir.mkdir 'packages' unless File.dir?('packages') + @selected.each do |pack| + $stderr.puts "Processing the package `#{pack}' ..." if verbose? + Dir.mkdir "packages/#{pack}" unless File.dir?("packages/#{pack}") + Dir.chdir "packages/#{pack}" + yield @installers[pack] + Dir.chdir '../..' + end + end + + def run_hook(id) + @root_installer.run_hook id + end + + # module FileOperations requires this + def verbose? + @config.verbose? + end + + # module FileOperations requires this + def no_harm? + @config.no_harm? + end + +end # class ToplevelInstallerMulti + + +class Installer + + FILETYPES = %w( bin lib ext data conf man ) + + include FileOperations + include HookScriptAPI + + def initialize(config, srcroot, objroot) + @config = config + @srcdir = File.expand_path(srcroot) + @objdir = File.expand_path(objroot) + @currdir = '.' + end + + def inspect + "#<#{self.class} #{File.basename(@srcdir)}>" + end + + # + # Hook Script API base methods + # + + def srcdir_root + @srcdir + end + + def objdir_root + @objdir + end + + def relpath + @currdir + end + + # + # Config Access + # + + # module FileOperations requires this + def verbose? + @config.verbose? + end + + # module FileOperations requires this + def no_harm? + @config.no_harm? 
+ end + + def verbose_off + begin + save, @config.verbose = @config.verbose?, false + yield + ensure + @config.verbose = save + end + end + + # + # TASK config + # + + def exec_config + exec_task_traverse 'config' + end + + def config_dir_bin(rel) + end + + def config_dir_lib(rel) + end + + def config_dir_man(rel) + end + + def config_dir_ext(rel) + extconf if extdir?(curr_srcdir()) + end + + def extconf + ruby "#{curr_srcdir()}/extconf.rb", *@config.config_opt + end + + def config_dir_data(rel) + end + + def config_dir_conf(rel) + end + + # + # TASK setup + # + + def exec_setup + exec_task_traverse 'setup' + end + + def setup_dir_bin(rel) + files_of(curr_srcdir()).each do |fname| + adjust_shebang "#{curr_srcdir()}/#{fname}" + end + end + + def adjust_shebang(path) + return if no_harm? + tmpfile = File.basename(path) + '.tmp' + begin + File.open(path, 'rb') {|r| + first = r.gets + return unless File.basename(first.sub(/\A\#!/, '').split[0].to_s) == 'ruby' + $stderr.puts "adjusting shebang: #{File.basename(path)}" if verbose? + File.open(tmpfile, 'wb') {|w| + w.print first.sub(/\A\#!\s*\S+/, '#! ' + config('rubypath')) + w.write r.read + } + } + move_file tmpfile, File.basename(path) + ensure + File.unlink tmpfile if File.exist?(tmpfile) + end + end + + def setup_dir_lib(rel) + end + + def setup_dir_man(rel) + end + + def setup_dir_ext(rel) + make if extdir?(curr_srcdir()) + end + + def setup_dir_data(rel) + end + + def setup_dir_conf(rel) + end + + # + # TASK install + # + + def exec_install + rm_f 'InstalledFiles' + exec_task_traverse 'install' + end + + def install_dir_bin(rel) + install_files targetfiles(), "#{config('bindir')}/#{rel}", 0755 + end + + def install_dir_lib(rel) + install_files rubyscripts(), "#{config('rbdir')}/#{rel}", 0644 + end + + def install_dir_ext(rel) + return unless extdir?(curr_srcdir()) + install_files rubyextentions('.'), + "#{config('sodir')}/#{File.dirname(rel)}", + 0555 + end + + def install_dir_data(rel) + install_files targetfiles(), "#{config('datadir')}/#{rel}", 0644 + end + + def install_dir_conf(rel) + # FIXME: should not remove current config files + # (rename previous file to .old/.org) + install_files targetfiles(), "#{config('sysconfdir')}/#{rel}", 0644 + end + + def install_dir_man(rel) + install_files targetfiles(), "#{config('mandir')}/#{rel}", 0644 + end + + def install_files(list, dest, mode) + mkdir_p dest, @config.install_prefix + list.each do |fname| + install fname, dest, mode, @config.install_prefix + end + end + + def rubyscripts + glob_select(@config.libsrc_pattern, targetfiles()) + end + + def rubyextentions(dir) + ents = glob_select("*.#{@config.dllext}", targetfiles()) + if ents.empty? 
+ setup_rb_error "no ruby extention exists: 'ruby #{$0} setup' first" + end + ents + end + + def targetfiles + mapdir(existfiles() - hookfiles()) + end + + def mapdir(ents) + ents.map {|ent| + if File.exist?(ent) + then ent # objdir + else "#{curr_srcdir()}/#{ent}" # srcdir + end + } + end + + # picked up many entries from cvs-1.11.1/src/ignore.c + JUNK_FILES = %w( + core RCSLOG tags TAGS .make.state + .nse_depinfo #* .#* cvslog.* ,* .del-* *.olb + *~ *.old *.bak *.BAK *.orig *.rej _$* *$ + + *.org *.in .* + ) + + def existfiles + glob_reject(JUNK_FILES, (files_of(curr_srcdir()) | files_of('.'))) + end + + def hookfiles + %w( pre-%s post-%s pre-%s.rb post-%s.rb ).map {|fmt| + %w( config setup install clean ).map {|t| sprintf(fmt, t) } + }.flatten + end + + def glob_select(pat, ents) + re = globs2re([pat]) + ents.select {|ent| re =~ ent } + end + + def glob_reject(pats, ents) + re = globs2re(pats) + ents.reject {|ent| re =~ ent } + end + + GLOB2REGEX = { + '.' => '\.', + '$' => '\$', + '#' => '\#', + '*' => '.*' + } + + def globs2re(pats) + /\A(?:#{ + pats.map {|pat| pat.gsub(/[\.\$\#\*]/) {|ch| GLOB2REGEX[ch] } }.join('|') + })\z/ + end + + # + # TASK test + # + + TESTDIR = 'test' + + def exec_test + unless File.directory?('test') + $stderr.puts 'no test in this package' if verbose? + return + end + $stderr.puts 'Running tests...' if verbose? + require 'test/unit' + runner = Test::Unit::AutoRunner.new(true) + runner.to_run << TESTDIR + runner.run + end + + # + # TASK clean + # + + def exec_clean + exec_task_traverse 'clean' + rm_f @config.savefile + rm_f 'InstalledFiles' + end + + def clean_dir_bin(rel) + end + + def clean_dir_lib(rel) + end + + def clean_dir_ext(rel) + return unless extdir?(curr_srcdir()) + make 'clean' if File.file?('Makefile') + end + + def clean_dir_data(rel) + end + + def clean_dir_conf(rel) + end + + # + # TASK distclean + # + + def exec_distclean + exec_task_traverse 'distclean' + rm_f @config.savefile + rm_f 'InstalledFiles' + end + + def distclean_dir_bin(rel) + end + + def distclean_dir_lib(rel) + end + + def distclean_dir_ext(rel) + return unless extdir?(curr_srcdir()) + make 'distclean' if File.file?('Makefile') + end + + def distclean_dir_data(rel) + end + + def distclean_dir_conf(rel) + end + + # + # lib + # + + def exec_task_traverse(task) + run_hook "pre-#{task}" + FILETYPES.each do |type| + if config('without-ext') == 'yes' and type == 'ext' + $stderr.puts 'skipping ext/* by user option' if verbose? + next + end + traverse task, type, "#{task}_dir_#{type}" + end + run_hook "post-#{task}" + end + + def traverse(task, rel, mid) + dive_into(rel) { + run_hook "pre-#{task}" + __send__ mid, rel.sub(%r[\A.*?(?:/|\z)], '') + directories_of(curr_srcdir()).each do |d| + traverse task, "#{rel}/#{d}", mid + end + run_hook "post-#{task}" + } + end + + def dive_into(rel) + return unless File.dir?("#{@srcdir}/#{rel}") + + dir = File.basename(rel) + Dir.mkdir dir unless File.dir?(dir) + prevdir = Dir.pwd + Dir.chdir dir + $stderr.puts '---> ' + rel if verbose? + @currdir = rel + yield + Dir.chdir prevdir + $stderr.puts '<--- ' + rel if verbose? 
+ @currdir = File.dirname(rel) + end + + def run_hook(id) + path = [ "#{curr_srcdir()}/#{id}", + "#{curr_srcdir()}/#{id}.rb" ].detect {|cand| File.file?(cand) } + return unless path + begin + instance_eval File.read(path), path, 1 + rescue + raise if $DEBUG + setup_rb_error "hook #{path} failed:\n" + $!.message + end + end + +end # class Installer + + +class SetupError < StandardError; end + +def setup_rb_error(msg) + raise SetupError, msg +end + +if $0 == __FILE__ + begin + ToplevelInstaller.invoke + rescue SetupError + raise if $DEBUG + $stderr.puts $!.message + $stderr.puts "Try 'ruby #{$0} --help' for detailed usage." + exit 1 + end +end diff --git a/vendor/gems/gems/markaby-0.5/test/test_markaby.rb b/vendor/gems/gems/markaby-0.5/test/test_markaby.rb new file mode 100644 index 00000000..944ebe02 --- /dev/null +++ b/vendor/gems/gems/markaby-0.5/test/test_markaby.rb @@ -0,0 +1,109 @@ +require 'test/unit' +require File.expand_path(File.join(File.dirname(__FILE__), '..', 'lib', 'markaby')) + +module MarkabyTestHelpers + def link_to(obj) + %{#{obj}} + end + def pluralize(string) + string + "s" + end + module_function :link_to, :pluralize +end + +class MarkabyTest < Test::Unit::TestCase + + def mab(*args, &block) + Markaby::Builder.new(*args, &block).to_s + end + + def assert_exception(exclass, exmsg, *mab_args, &block) + begin + mab(*mab_args, &block) + rescue Exception => e + assert_equal exclass, e.class + assert_equal exmsg, e.message + end + end + + def test_simple + assert_equal "
", mab { hr } + assert_equal "

foo

", mab { p 'foo' } + assert_equal "

foo

", mab { p { 'foo' } } + end + + def test_classes_and_ids + assert_equal %{
<div class="one"></div>}, mab { div.one '' } + assert_equal %{<div class="one two"></div>}, mab { div.one.two '' } + assert_equal %{<div id="three"></div>
}, mab { div.three! '' } + end + + def test_escaping + assert_equal "

<h1>Apples &amp; Oranges</h1>", mab { h1 'Apples & Oranges' } + assert_equal "<h1>Apples & Oranges</h1>", mab { h1 { 'Apples & Oranges' } } + assert_equal "<h1 class=\"fruits&amp;floots\">Apples</h1>
", mab { h1 'Apples', :class => 'fruits&floots' } + end + + def test_capture + builder = Markaby::Builder.new + assert builder.to_s.empty? + assert_equal "

<h1>TEST</h1>
", builder.capture { h1 'TEST' } + assert builder.to_s.empty? + assert mab { capture { h1 'hello world' }; nil }.empty? + assert_equal mab { div { h1 'TEST' } }, mab { div { capture { h1 'TEST' } } } + end + + def test_ivars + html = "

<div><h1>Steve</h1><div><h2>Gerald</h2></div><h3>Gerald</h3></div>
" + assert_equal html, mab { div { @name = 'Steve'; h1 @name; div { @name = 'Gerald'; h2 @name }; h3 @name } } + assert_equal html, mab { div { @name = 'Steve'; h1 @name; self << capture { div { @name = 'Gerald'; h2 @name } }; h3 @name } } + assert_equal html, mab(:name => 'Steve') { div { h1 @name; self << capture { div { @name = 'Gerald'; h2 @name } }; h3 @name } } + end + + def test_ivars_without_at_symbol + assert_equal "

<h1>Hello World</h1>
", mab { @message = 'Hello World'; h1 message } + end + + def test_helpers + Markaby::Builder.ignored_helpers.clear + assert_equal %{squirrels}, mab({}, MarkabyTestHelpers) { pluralize('squirrel') } + assert_equal %{edit}, mab({}, MarkabyTestHelpers) { link_to('edit') } + assert mab({}, MarkabyTestHelpers) { @output_helpers = false; link_to('edit'); nil }.empty? + Markaby::Builder.ignore_helpers :pluralize + assert_exception(NoMethodError, "no such method `pluralize'", {}, MarkabyTestHelpers) { pluralize('squirrel') } + end + + def test_builder_bang_methods + assert_equal "", mab { instruct! } + end + + def test_fragments + assert_equal %{

<div><h1>Monkeys</h1><h2>Giraffes <small>Miniature</small> and <strong>Large</strong></h2><h3>Donkeys</h3><h4>Parakeet <b><i>Innocent IV</i></b> in Classic Chartreuse</h4></div>
}, + mab { div { h1 "Monkeys"; h2 { "Giraffes #{small 'Miniature' } and #{strong 'Large'}" }; h3 "Donkeys"; h4 { "Parakeet #{b { i 'Innocent IV' }} in Classic Chartreuse" } } } + assert_equal %{

<div><h1>Monkeys</h1><h2>Giraffes <strong>Miniature</strong></h2><h3>Donkeys</h3></div>
}, + mab { div { h1 "Monkeys"; h2 { "Giraffes #{strong 'Miniature' }" }; h3 "Donkeys" } } + assert_equal %{

<div><h1>Monkeys</h1><h2>Giraffes <small>Miniature</small> and <strong>Large</strong></h2><h3>Donkeys</h3><h4>Parakeet <strong>Large</strong> as well...</h4></div>
}, + mab { div { @a = small 'Miniature'; @b = strong 'Large'; h1 "Monkeys"; h2 { "Giraffes #{@a} and #{@b}" }; h3 "Donkeys"; h4 { "Parakeet #{@b} as well..." } } } + end + + def test_invalid_xhtml + assert_exception(NoMethodError, "no such method `dav'") { dav {} } + assert_exception(Markaby::InvalidXhtmlError, "no attribute `styl' on div elements") { div(:styl => 'ok') {} } + assert_exception(Markaby::InvalidXhtmlError, "no attribute `class' on tbody elements") { tbody.okay {} } + end + + def test_full_doc_transitional + doc = mab { instruct!; xhtml_transitional { head { title 'OKay' } } } + assert doc =~ /^<\?xml version="1.0" encoding="UTF-8"\?>/ + assert doc.include?(%{"-//W3C//DTD XHTML 1.0 Transitional//EN" "DTD/xhtml1-transitional.dtd">}) + assert doc.include?(%{OKay}) + end + + def test_full_doc_strict + doc = mab { xhtml_strict { head { title 'OKay' } } } + assert doc =~ /^<\?xml version="1.0" encoding="UTF-8"\?>/ + assert doc.include?(%{"-//W3C//DTD XHTML 1.0 Strict//EN" "DTD/xhtml1-strict.dtd">}) + assert doc.include?(%{OKay}) + end + +end diff --git a/vendor/gems/gems/markaby-0.5/tools/rakehelp.rb b/vendor/gems/gems/markaby-0.5/tools/rakehelp.rb new file mode 100644 index 00000000..6a97c1ca --- /dev/null +++ b/vendor/gems/gems/markaby-0.5/tools/rakehelp.rb @@ -0,0 +1,106 @@ + +def make(makedir) + Dir.chdir(makedir) do + sh 'make' + end +end + + +def extconf(dir) + Dir.chdir(dir) do ruby "extconf.rb" end +end + + +def setup_tests + Rake::TestTask.new do |t| + t.libs << "test" + t.test_files = FileList['test/test*.rb'] + t.verbose = true + end +end + + +def setup_clean otherfiles + files = ['build/*', '**/*.o', '**/*.so', '**/*.a', 'lib/*-*', '**/*.log'] + otherfiles + CLEAN.include(files) +end + + +def setup_rdoc files + Rake::RDocTask.new do |rdoc| + rdoc.rdoc_dir = 'doc/rdoc' + rdoc.options << '--line-numbers' + rdoc.rdoc_files.add(files) + end +end + + +def setup_extension(dir, extension) + ext = "ext/#{dir}" + ext_so = "#{ext}/#{extension}.#{Config::CONFIG['DLEXT']}" + ext_files = FileList[ + "#{ext}/*.c", + "#{ext}/*.h", + "#{ext}/extconf.rb", + "#{ext}/Makefile", + "lib" + ] + + task "lib" do + directory "lib" + end + + desc "Builds just the #{extension} extension" + task extension.to_sym => ["#{ext}/Makefile", ext_so ] + + file "#{ext}/Makefile" => ["#{ext}/extconf.rb"] do + extconf "#{ext}" + end + + file ext_so => ext_files do + make "#{ext}" + cp ext_so, "lib" + end +end + + +def setup_gem(pkg_name, pkg_version, author, summary, dependencies, test_file) + pkg_version = pkg_version + pkg_name = pkg_name + pkg_file_name = "#{pkg_name}-#{pkg_version}" + + spec = Gem::Specification.new do |s| + s.name = pkg_name + s.version = pkg_version + s.platform = Gem::Platform::RUBY + s.author = author + s.summary = summary + s.test_file = test_file + s.has_rdoc = true + s.extra_rdoc_files = [ "README" ] + dependencies.each do |dep| + s.add_dependency(*dep) + end + s.files = %w(README Rakefile setup.rb) + + Dir.glob("{bin,doc,test,lib}/**/*") + + Dir.glob("ext/**/*.{h,c,rb}") + + Dir.glob("examples/**/*.rb") + + Dir.glob("tools/*.rb") + + s.require_path = "lib" + s.extensions = FileList["ext/**/extconf.rb"].to_a + + s.bindir = "bin" + end + + Rake::GemPackageTask.new(spec) do |p| + p.gem_spec = spec + p.need_tar = true + end + + task :install do + sh %{rake package} + sh %{gem install pkg/#{pkg_name}-#{pkg_version}} + end + +end diff --git a/vendor/gems/gems/maruku-0.6.0/Rakefile b/vendor/gems/gems/maruku-0.6.0/Rakefile new file mode 100644 index 00000000..9413e4d1 --- /dev/null +++ 
b/vendor/gems/gems/maruku-0.6.0/Rakefile @@ -0,0 +1,73 @@ +require 'rubygems' +Gem::manage_gems +require 'rake/gempackagetask' + +require 'maruku_gem' + +task :default => [:package] + +Rake::GemPackageTask.new($spec) do |pkg| + pkg.need_zip = true + pkg.need_tar = true +end + +PKG_NAME = 'maruku' +PKG_FILE_NAME = "#{PKG_NAME}-#{MaRuKu::Version}" +RUBY_FORGE_PROJECT = PKG_NAME +RUBY_FORGE_USER = 'andrea' + +RELEASE_NAME = MaRuKu::Version +RUBY_FORGE_GROUPID = '2795' +RUBY_FORGE_PACKAGEID = '3292' + +desc "Publish the release files to RubyForge." +task :release => [:gem, :package] do + system("rubyforge login --username #{RUBY_FORGE_USER}") + + gem = "pkg/#{PKG_FILE_NAME}.gem" + # -n notes/#{Maruku::Version}.txt + cmd = "rubyforge add_release %s %s \"%s\" %s" % + [RUBY_FORGE_GROUPID, RUBY_FORGE_PACKAGEID, RELEASE_NAME, gem] + + puts cmd + system(cmd) + + files = ["gem", "tgz", "zip"].map { |ext| "pkg/#{PKG_FILE_NAME}.#{ext}" } + files.each do |file| +# system("rubyforge add_file %s %s %s %s" % +# [RUBY_FORGE_GROUPID, RUBY_FORGE_PACKAGEID, RELEASE_NAME, file]) + end +end + +task :test => [:markdown_span_tests, :markdown_block_tests] + +task :markdown_block_tests do + tests = Dir['tests/unittest/**/*.md'].join(' ') + puts "Executing tests #{tests}" +# ok = marutest(tests) + ok = system "ruby -Ilib bin/marutest #{tests}" + raise "Failed block unittest" if not ok +end + +task :markdown_span_tests do + ok = system( "ruby -Ilib lib/maruku/tests/new_parser.rb v b") + raise "Failed span unittest" if not ok +end + +require 'rake/rdoctask' + +Rake::RDocTask.new do |rdoc| + files = [#'README', 'LICENSE', 'COPYING', + 'lib/**/*.rb', + 'rdoc/*.rdoc'#, 'test/*.rb' + ] + rdoc.rdoc_files.add(files) + rdoc.main = "rdoc/main.rdoc" # page to start on + rdoc.title = "Maruku Documentation" + rdoc.template = "jamis.rb" + rdoc.rdoc_dir = 'doc' # rdoc output folder + rdoc.options << '--line-numbers' << '--inline-source' +end + + + diff --git a/vendor/gems/gems/maruku-0.6.0/bin/marudown b/vendor/gems/gems/maruku-0.6.0/bin/marudown new file mode 100644 index 00000000..c5f52527 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/bin/marudown @@ -0,0 +1,29 @@ +#!/usr/bin/env ruby + +require 'maruku' + + # If we are given filenames, convert each file + if not ARGV.empty? + ARGV.each do |f| + puts "Opening #{f}" + + # read file content + input = File.open(f,'r').read + + # create Maruku + doc = Maruku.new(input, {:on_error=>:warning}) + # convert to a complete html document + output = doc.to_md + + # write to file + dir = File.dirname(f) + filename = File.basename(f, File.extname(f)) + ".txt" + + output = File.join(dir, filename) + File.open(output,'w') do |f| f.puts html end + end + else + # else, act as a filter + data = $stdin.read + puts Maruku.new(data, {:on_error=>:warning}).to_md + end diff --git a/vendor/gems/gems/maruku-0.6.0/bin/maruku b/vendor/gems/gems/maruku-0.6.0/bin/maruku new file mode 100755 index 00000000..66fbc1dd --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/bin/maruku @@ -0,0 +1,181 @@ +#!/usr/bin/env ruby + +require 'maruku' +require 'optparse' + + +def cli_puts(s) + $stderr.puts(s) if MaRuKu::Globals[:verbose] +end + + +export = :html +break_on_error = false +using_math = false +using_mathml = false +output_file = nil + +opt = OptionParser.new do |opts| + opts.banner = "Usage: maruku [options] [file1.md [file2.md ..." 
+ + opts.on("-v", "--[no-]verbose", "Run verbosely") do |v| + MaRuKu::Globals[:verbose] = v end + opts.on("-u", "--[no-]unsafe", "Use unsafe features") do |v| + MaRuKu::Globals[:unsafe_features] = v end + + opts.on("-b", "Break on error") do |v| + break_on_error = true end + + + opts.on("-i", "--math-images ENGINE", "Uses ENGINE to render TeX to PNG.") do |s| + using_math = true + MaRuKu::Globals[:html_math_output_png] = true + MaRuKu::Globals[:html_math_output_mathml] = false + MaRuKu::Globals[:html_png_engine] = s + cli_puts "Using png engine #{s}." + end + + opts.on("-m", "--math-engine ENGINE", "Uses ENGINE to render MathML") do |s| + MaRuKu::Globals[:html_math_output_png] = false + MaRuKu::Globals[:html_math_output_mathml] = true + using_math = true + using_mathml = true + MaRuKu::Globals[:html_math_engine] = s + end + + opts.on("-o", "--output FILE", "Output filename") do |s| + output_file = s + end + + opts.on_tail("--pdf", "Write PDF","First writes LaTeX, then calls pdflatex." ) do export = :pdf end + opts.on_tail("--s5", "Write S5 slideshow") do export = :s5 end + opts.on_tail("--html", "Write HTML") do export = :html end + opts.on_tail("--html-frag", "Write the contents of the BODY.") do export = :html_frag end + opts.on_tail("--tex", "Write LaTeX" ) do export = :tex end + opts.on_tail("--inspect", "Shows the parsing result" ) do export = :inspect end + + opts.on_tail("--version", "Show version") do + puts "Maruku #{MaRuKu::Version}"; exit + end + + opts.on_tail("--ext EXTENSIONS", "Use maruku extensions (comma separated)" ) do |s| + s.split(",").each do |e| require "maruku/ext/#{e}"; end + end + + opts.on_tail("-h", "--help", "Show this message") do + puts opts + exit + end + +end + +begin +opt.parse! +rescue OptionParser::InvalidOption=>e + $stderr.puts e + $stderr.puts opt + exit +end + + +if using_math + cli_puts "Using Math extensions." + require 'maruku/ext/math' +end + +#p ARGV +#p MaRuKu::Globals + + +inputs = +# If we are given filenames, convert each file +if not ARGV.empty? + ARGV.map do |f| + # read file content + cli_puts "Reading from file #{f.inspect}." + [f, File.open(f,'r').read] + end +else + export = :html_frag if export == :html + export = :tex_frag if export == :tex + + cli_puts "Reading from standard input." + [[nil, $stdin.read]] +end + +inputs.each do |f, input| + + # create Maruku + params = {} + params[:on_error] = break_on_error ? :raise : :warning + + t = Time.now + doc = Maruku.new(input, params) + + cli_puts ("Parsing in %.2f seconds." % (Time.now-t)) + + out=""; suffix = "?" + t = Time.now + case export + when :html + suffix = using_mathml ? '.xhtml' : '.html' + out = doc.to_html_document( {:indent => -1}) + when :html_frag + suffix='.html_frag' + out = doc.to_html( {:indent => -1}) + when :pdf, :tex + suffix='.tex' + out = doc.to_latex_document + when :tex_frag + suffix='.tex_frag' + out = doc.to_latex + when :inspect + suffix='.txt' + out = doc.inspect + when :markdown + suffix='.pretty_md' + out = doc.to_markdown + when :s5 + suffix='_s5slides.html' + out = doc.to_s5({:content_only => false}) + end + + cli_puts("Rendering in %.2f seconds." 
% (Time.now-t)) + + # write to file or stdout + if f + + if not output_file + dir = File.dirname(f) + job = File.join(dir, File.basename(f, File.extname(f))) + output_file = job + suffix + else + job = File.basename(output_file, File.extname(output_file)) + end + + if output_file == "-" + cli_puts "Writing to standard output" + $stdout.puts out + else + + if not (export == :pdf) + cli_puts "Writing to #{output_file}" + File.open(output_file,'w') do |f| f.puts out end + else + cli_puts "Writing to #{job}.tex" + File.open("#{job}.tex",'w') do |f| f.puts out end + cmd = "pdflatex '#{job}.tex' -interaction=nonstopmode "+ + "'-output-directory=#{dir}' " + cli_puts "maruku: executing $ #{cmd}" + # run twice for cross references + system cmd + system cmd + end + + end + else # write to stdout + cli_puts "Writing to standard output" + $stdout.puts out + end + end + diff --git a/vendor/gems/gems/maruku-0.6.0/bin/marutest b/vendor/gems/gems/maruku-0.6.0/bin/marutest new file mode 100644 index 00000000..4ff49dc3 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/bin/marutest @@ -0,0 +1,345 @@ +#!/usr/bin/env ruby + +require 'maruku' +require 'maruku/textile2' + +$marutest_language = :markdown + +#MARKER = "\n***EOF***\n" +SPLIT = %r{\n\*\*\*[^\*]+\*\*\*\n}m + +def marker(x) + "\n*** Output of #{x} ***\n" +end + +def write_lines(i, j, lines, prefix, i_star) + i = [i, 0].max + j = [j, lines.size-1].min + for a in i..j + l = lines[a].gsub(/\t/,' ') + puts( ("%s %3d" % [prefix, a]) + + (a==i_star ? " -->" : " ")+lines[a]) + end +end + +# a = expected b = found +def equals(a, b) + a = a.split("\n") + b = b.split("\n") + + for i in 0..([a.size-1,b.size-1].max) + la = a[i] + lb = b[i] + if la != lb + puts "\n" + + + write_lines(i-3, i+3, a, "expected", i ) + write_lines(i-3, i+3, b, " found", i ) + return false + end + end + return true +end + +TOTEST = [:inspect,:to_html,:to_latex,:to_md,:to_s] + +def run_test(filename, its_ok, verbose=true) + # read file content + input = (f=File.open(filename,'r')).read; f.close + + output_html = File.join(File.dirname(filename), + File.basename(filename, File.extname(filename)) + ".html") + + # split the input in sections + + stuff = input.split(SPLIT) + if stuff.size == 1 + stuff[2] = stuff[0] + stuff[0] = "Write a comment here" + stuff[1] = "{} # params " + end + + comment = stuff.shift + params_s = stuff.shift + + params = eval(params_s||'{}') + if params == nil + raise "Null params? #{params_s.inspect}" + end + + markdown = stuff.shift + +# puts "comment: #{markdown.inspect}" +# puts "markdown: #{markdown.inspect}" + + failed = [] + relaxed = [] + crashed = [] + actual = {} + + doc = + if $marutest_language == :markdown + Maruku.new(markdown, params) + else + MaRuKu.textile2(markdown, params) + end + + for s in TOTEST + begin + if s==:to_html + actual[s] = doc.to_html + else + actual[s] = doc.send s + raise "Methods #{s} gave nil" if not actual[s] + end + rescue Exception => e + crashed << s + actual[s] = e.inspect+ "\n"+ e.backtrace.join("\n") + puts actual[s] + end + end + + File.open(output_html, 'w') do |f| + f.write doc.to_html_document + end + + begin + m = Maruku.new + d = m.instance_eval(actual[:inspect]) + rescue Exception => e + s = e.inspect + e.backtrace.join("\n") + raise "Inspect is not correct:\n ========\n#{actual[:inspect]}"+ + "============\n #{s}" + end + + expected = {} + if (stuff.size < TOTEST.size) + $stdout.write " (first time!) 
" + TOTEST.each do |x| expected[x] = actual[x] end + else + TOTEST.each_index do |i| + symbol = TOTEST[i] + expected[symbol] = stuff[i] +# puts "symbol: #{symbol.inspect} = #{stuff[i].inspect}" + end + end + + m = Maruku.new + + + if not its_ok.include? :inspect + begin + d = m.instance_eval(expected[:inspect]) + # puts "Eval: #{d.inspect}" + expected[:inspect] = d.inspect + rescue Exception => e + s = e.inspect + e.backtrace.join("\n") + raise "Cannot eval user-provided string:\n #{expected[:inspect].to_s}"+ + "\n #{s}" + end + end + +# m.instance_eval(actual[:inspect]) != m.instance_eval(expected[:inspect]) + +# actual[:inspect] = m.instance_eval(actual[:inspect]) +# expected[:inspect] = m.instance_eval(expected[:inspect]) + + + TOTEST.each do |x| + expected[x].strip! + actual[x].strip! + if not equals(expected[x], actual[x]) + if its_ok.include? x + expected[x] = actual[x] + $stdout.write " relax:#{x} " + relaxed << x + else + actual[x] = "-----| WARNING | -----\n" + actual[x].to_s + failed << x + end + end + end + + f = File.open(filename, 'w') + + f.write comment + f.write "\n*** Parameters: ***\n" + f.write params_s + f.write "\n*** Markdown input: ***\n" + f.write markdown + + TOTEST.each do |x| + f.write marker(x) + f.write expected[x] + end + f.write "\n*** EOF ***\n" + + if not failed.empty? or not crashed.empty? + + f.puts "\n\n\n\nFailed tests: #{failed.inspect} \n" + + TOTEST.each do |x| + f.write marker(x) + f.write actual[x] + end + + else + f.puts "\n\n\n\tOK!\n\n\n" + end + + + if false + md_pl = markdown_pl(markdown) + + f.write "\n*** Output of Markdown.pl ***\n" + f.write md_pl + + f.write "\n*** Output of Markdown.pl (parsed) ***\n" + begin + doc = REXML::Document.new("
<div>#{md_pl}</div>
",{ + :compress_whitespace=>['div','p'], + :ignore_whitespace_nodes=>['div','p'], + :respect_whitespace=>['pre','code'] + }) + div = doc.root + xml ="" + div.write_children(xml,indent=1,transitive=true,ie_hack=false) + f.write xml + rescue Exception=>e + f.puts "Error: #{e.inspect}" + end + f.close + else + f.write "\n*** Output of Markdown.pl ***\n" + f.write "(not used anymore)" + + f.write "\n*** Output of Markdown.pl (parsed) ***\n" + f.write "(not used anymore)" + end + + return failed, relaxed, crashed +end + +def markdown_pl(markdown) + tmp1 = "/tmp/marutest1" + tmp2 = "/tmp/marutest2" + File.open(tmp1,'w') do |f| f.puts markdown end + system "Markdown.pl < #{tmp1} > #{tmp2}" + f = File.open(tmp2,'r') + s = f.read + f.close + s +end + +def passed?(args, arg) + if args.include? arg + args.delete arg + true + else + false + end +end + +def marutest(args) + dont_worry = [] + TOTEST.each do |x| + arg = "ok:#{x}" + # puts arg + if passed?(args, arg) + dont_worry << x + end + end + + if passed?(args, 'ok') + dont_worry = TOTEST.clone + end + + if dont_worry.size > 0 + puts "Relaxed on #{dont_worry.inspect}" + end + + + failed = {} + relaxed = {} + + args.each do |f| + $stdout.write f + ' '*(50-f.size) + " " + $stdout.flush + tf, tr, tcrashed = run_test(f, dont_worry) + + tf = tf + tcrashed + + + if tr.size > 0 + $stdout.write " relax #{tr.inspect} " + end + + if tf.size>0 + $stdout.write " failed on #{tf.inspect} " + else + $stdout.write " OK " + end + + if tcrashed.size > 0 + $stdout.write " CRASHED on #{tcrashed.inspect}" + end + + $stdout.write "\n" + + failed[f] = tf + relaxed[f] = tr + end + + num_failed = 0 + failed_cat = {} + + puts "\n\n\n**** FINAL REPORT ****\n\n" + + + if failed.size > 0 + failed.each do |file, fl| + num_failed += fl.size + if fl.size > 0 + puts "\t#{file}\tfailed on #{fl.inspect}" + end + fl.each do |x| + failed_cat[x] = failed_cat[x] || 0 + failed_cat[x] = failed_cat[x] + 1 + end + end + end + + if dont_worry.size > 0 + puts "Relaxed on #{dont_worry.inspect}" + end + + if relaxed.size > 0 + relaxed.each do |file, r| + if r.size > 0 + puts "\t#{file}\t\trelaxed on #{r.inspect}" + end + end + end + + if failed_cat.size > 0 + puts "\nCategories:\n" + + failed_cat.each do |x, num| + puts "\t#{x.inspect} \tfailed #{num}/#{args.size}" + end + end + + return num_failed == 0 +end + +if File.basename(__FILE__) == 'marutest' + if ARGV.empty? + puts "marutest is a tool for running Maruku's unittest." + exit 1 + end + ok = marutest(ARGV.clone) + + exit ok ? 
0 : -1 +end + diff --git a/vendor/gems/gems/maruku-0.6.0/bin/marutex b/vendor/gems/gems/maruku-0.6.0/bin/marutex new file mode 100755 index 00000000..b3ed099d --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/bin/marutex @@ -0,0 +1,31 @@ +#!/usr/bin/env ruby + +require 'maruku' + +if File.basename($0) =~ /^marutex/ + # Convert each file + ARGV.each do |f| + puts "Opening #{f}" + + # read file content + input = File.open(f,'r').read + + # create Maruku + doc = Maruku.new(input) + # convert to a complete html document + latex = doc.to_latex_document + + # write to file + dir = File.dirname(f) + job = File.join(dir, File.basename(f, File.extname(f))) + filename = job + ".tex" + + File.open(filename,'w') do |f| f.puts latex end + + # run twice for cross references + system "pdflatex '#{job}' '-output-directory=#{dir}' " + system "pdflatex '#{job}' '-output-directory=#{dir}' " + +# system "open #{job}.pdf" + end +end \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/docs/changelog.md b/vendor/gems/gems/maruku-0.6.0/docs/changelog.md new file mode 100644 index 00000000..4b4aec73 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/docs/changelog.md @@ -0,0 +1,334 @@ +CSS: style.css +LaTeX CJK: true +HTML use syntax: true + + + +#### Changes in 0.5.6 #### {#stable} + +* News: + + - Now Maruku is in the official Gentoo Portage tree (done by [Aggelos Orfanakos]) + +* New stuff: + + - Attribute `maruku_signature` defaults to false. (many people asked this) + - unittests scripts are included in the distribution. + - New attribute `filter_html`: if true, raw HTML/XML is discarded. (asked by Marik) + - Command line: if output file is `-`, Maruku writes to stdout. + +* Bug fixes: + + * Another tiny bug in HTML parsing. + * In latex, `\linebreak` was used instead of `\newline` (reported by Sam Kleinman) + * Fixed bug with non-alpha numeric characters in ref.ids (reported by Aggelos Orfanakos) + + +* Pending bugs/feature requests: + + - Maruku does not allow 3-space indented lists. + - Lists item whose first character is UTF8 are not recognized (reported by Aggelos Orfanakos) + - Maruku cannot output `"`-delimited attributes, because `REXML` does not support it. + +[Aggelos Orfanakos]: http://agorf.gr/ + +#### Changes in 0.5.5 #### + +* Features: + + * Input of HTML numeric entities: + + Examples of numeric character references include © or © + for the copyright symbol, Α or Α for the Greek capital + letter alpha, and ا or ا for the Arabic letter alef. + + > Examples of numeric character references include © or © + > for the copyright symbol, Α or Α for the Greek capital + > letter alpha, and ا or ا for the Arabic letter alef. + +* Bug fixes: + + * Alt text was ignored for images. + * Fixed minor bug in reading HTML inside paragraph. + * Changed rules for block-level HTML to make it similar to Markdown.pl. + For example: + + Paragraph +
<div/>
+
+	will be translated to
+
+		<p>Paragraph
+		<div /></p>
+
+	while this:
+
+		Paragraph
+
+		<div/>
+
+	becomes
+
+		<p>Paragraph</p>
+
+		<div />
+ +* **Pending bugs**: there are some problems when parsing lists. It is difficult + to get it right because the spec is very fuzzy. At the moment, list items + cannot be indented by more than 1 space. + +#### Changes in 0.5.4 #### + +* Features: + + * [All HTML attributes](http://www.w3.org/TR/html4/index/attributes.html) are supported. + + > Science is a wonderful thing if one does not + > have to earn one's living at it. + {: cite="http://en.wikiquote.org/wiki/Albert_Einstein"} + + * Attribute `doc_prefix`. + + * Math: + + * `\begin{equation}` and `\end{equation}` are understood. + * Math parsing enabled per-instance using the `math_enabled` attribute. + * `math_numbered` attribute. + +* Bug fixes: + + * Runs quietly with `ruby -w`. + * Fixed a bug which could cause data-loss when reading indented lines. + + +#### Changes in 0.5.3 #### + +* Features: + + * [All HTML `table` attributes](http://www.w3.org/TR/html4/struct/tables.html#h-11.2.1) + can be used (`summary`, `width`, `frame`, `rules`, + `border`, `cellspacing`, `cellpadding`). + + The next version will hopefully use all HTML attributes. + + + + +* Bug fixes: + + * Crash on this line: (found by Aggelos Orfanakos) + + [test][]: + + * Regression with attribute system (found by Charles) + +#### Changes in 0.5.1 #### + +* Bug fixes: + + * Workaround for Internet Explorer bug: + be very sure that `'` is always written as `'`. + + * Support for empty images ref: `![image]` and `![image][]`. + + * Fixed bug in parsing attribute lists definitions. + +* Minor things: + + * Now code blocks are written as a `` element inside a `
<pre>`, and
+		`<code>` elements have both `class` and `lang` attributes set 
+		to the specified language.
+		
+		Example:
+
+			    Example
+			{:lang=ruby}
+		{:lang=markdown}
+		
+		produces:
+		
+			<pre><code class='ruby' lang='ruby'>Example</code></pre>
+ {:lang=xml} + +#### Changes in 0.5.0 #### + +* Syntax changes: + + * Compatibility with newest Markdown.pl: `[text]` as a synonim of `[text][]`. + + * Meta data: the first IAL in a span environment now refers to the parent. + This makes it possible to set attributes for cells: + + Head | Head | + ---------------+-------+-- + {:r} Hello + ... + + {:r: scope='row'} + + The first cell will have the `scope` attribute set to `row`. + +* New settings: + + * Disable the Maruku signature by setting `maruku signature: false` + +* Stricter doctype. By the way -- did I mention it? -- + **Maruku HTML has always been proper validating XHTML strict** + (if a page does not validate, please report it as a bug). + + Of course, this only matters when using `maruku` as a standalone + program. + + * I have updated the XHTML DTD used to support MathML: + currently using XHTML+MathML+SVG. + * Content-type set to `application/xhtml+xml` + * All entities are written as numeric entities. + +* Bug fixes + + * Many fixes in the code handling the sanitizing of inline HTML. + * `markdown=1` did not propagate to children. + * LaTeX: An exception was raised if an unknown entity was used. + +#### Changes in 0.4.2 #### + +* Adapted syntax to the [new meta-data proposal][proposal]. + +* Changes in LaTeX export: + + * Links to external URLs are blue by default. + + * New attributes: `latex_preamble` to add a custom preamble, + and `latex_cjk` to add packages for UTF-8 Japanese characters. + (**support for this is still shaky**). Example: + + Title: my document + LaTeX CJK: true + LaTeX preamble: preamble.tex + + Content + +* Bug fixes + + + Images were not given `id` or `class` attributes. + + + Fixed bug in LaTeX export with handling of `<`,`>` enclosed URLs: ``. + +#### Changes in 0.4.1 aka "Typographer" #### + +* Implemented SmartyPants support: + + 'Twas a "test" to 'remember' -- in the '90s + --- while I was <>. She was 6\"12\'. + > 'Twas a "test" to 'remember' -- in the '90s --- while I was <>. + > She was 6\"12\'. + + I adapted the code from RubyPants. + +* Server directives between `` are properly preserved. +* Changes in LaTeX export: + + * Now Japanese text rendering sort of works, using the following packages: + + \usepackage[C40]{fontenc} + \usepackage[cjkjis]{ucs} + \usepackage[utf8x]{inputenc} + + Nevertheless, I could only get bitmap fonts working -- probably it's a problem + with my setup. + + A quick test: 日本ã€ä¸­å›½ã€ã²ã‚‰ãŒãªã€ã‚«ã‚¿ã‚«ãƒŠã€‚ + + * Fixed bugs in rendering of immediate links. + * External packages are `require`d only if needed. + * More symbols supported. + See the symbol list + [in HTML](http://maruku.rubyforge.org/entity_test.html) and + [in PDF](http://maruku.rubyforge.org/entity_test.pdf). + + +#### Changes in 0.4 #### + +* First implementation of [the new meta-data syntax][meta]. +* General refactorization of the code and much cleaner error reporting. +* Created [the RDOC documentation][rdoc]. +* The `add_whitespace` method took too much time -- it was O(n^2). +* Added unit-tests for block-level elements. + +[rdoc]: http://maruku.rubyforge.org/rdoc/ +[meta]: http://maruku.rubyforge.org/proposal.html + + + +[Jacques Distler]: http://golem.ph.utexas.edu/~distler +[itex2MML]: http://golem.ph.utexas.edu/~distler/blog/itex2MML.html +[math]: http://rubyforge.maruku.org/math.html + + +#### Changes in 0.3 #### + +* A real parser is used instead of a regexp-based system, also for span-level + elements. + + Now Maruku is almost 2x faster than Bluecloth, while having more features. 
+ + Here are some benchmarks: + + BlueCloth (to_html): parsing 0.00 sec + rendering 1.54 sec = 1.55 sec + Maruku (to_html): parsing 0.47 sec + rendering 0.38 sec = 0.85 sec + Maruku (to_latex): parsing 0.49 sec + rendering 0.25 sec = 0.73 sec + + This is the result of running `lib/maruku/tests/benchmark.rb` on the Markdown + specification. + +* Prettier HTML output by adding whitespace. + +* Added a full suite of unit-tests for the span-level parser. + +* Error management: Having a real parser, Maruku warns you about syntax issues. + + The default action is to warn and try to continue. If you do this: + + Maruku.new(string, {:on_error => :raise}) + + then syntax errors will cause an exception to be raised (you can catch this + and retry). + +* Fixed a series of bugs in handling inline HTML code. + +Immediate TODO-list: + +* UTF-8 input/output works OK for HTML, however I am having pain trying to export + to LaTeX. I want at least Japanese characters support, so if you know how to + do this you are very welcome to give me an hand. + + For example: in the HTML version, you should see accented characters in this + parenthesis: + + > (àèìòù) + + and Japanese text in these other parentheses: + + > (カタカナ㧠ç§ã® åå‰ã¯ アンドレア ãƒã‚§ãƒ³ã‚· ã§ã™). + > + > (日本ã®ã‚¬ãƒ«ã¯ 大好ãã€ã§ã‚‚ã€æ—¥æœ¬èªžã¯é›£ã—ã§ã™ã‹ã‚‰ã€ãã†ãž 英語話ã™ã‚¬ãƒ«ã‚’ ãŠã—ãˆã¦ãã ã•ã„). + + In the LaTeX version, these do not appear. I know how to do LaTeX with + ISO-8859-1 encoding (European characters), but I'm struggling with half-baked + solutions for UTF-8 encoded documents. + +* Implement the [new meta-data proposal][proposal]. + +* Exporting to Markdown (pretty printing). + +* Exporting to HTML splitting in multiple files. + +* RubyPants. + +* Support for images in PDF. + + +[proposal]: http://maruku.rubyforge.org/proposal.html +[contact]: http://www.dis.uniroma1.it/~acensi/contact.html +[markdown-discuss]: http://six.pairlist.net/mailman/listinfo/markdown-discuss +[tracker]: http://rubyforge.org/tracker/?group_id=2795 + diff --git a/vendor/gems/gems/maruku-0.6.0/docs/div_syntax.md b/vendor/gems/gems/maruku-0.6.0/docs/div_syntax.md new file mode 100644 index 00000000..0f08278a --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/docs/div_syntax.md @@ -0,0 +1,36 @@ +## Option number 1 ## + +* `[ ]{0,3}\+={2,}` pushes the stack +* `[ ]{0,3}\-={2,}` pops the stack + + +================ {#id} + IAL can be put on the same line of a push + + +== {#id2} + Or on the same line of a pop: + -== + + -============== + +## Option number 2 ## + +Double braces: + + {{ + + }}{} + +I don't like, it gets too messy because there are +too many braces. 
+ + + + +================ {#id} + + nested div: + + +======================== + inside nested DIV + -======================== + + -============== diff --git a/vendor/gems/gems/maruku-0.6.0/docs/entity_test.md b/vendor/gems/gems/maruku-0.6.0/docs/entity_test.md new file mode 100644 index 00000000..ac943188 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/docs/entity_test.md @@ -0,0 +1,23 @@ + +List of symbols supported by Maruku +=================================== + + + diff --git a/vendor/gems/gems/maruku-0.6.0/docs/markdown_syntax.md b/vendor/gems/gems/maruku-0.6.0/docs/markdown_syntax.md new file mode 100644 index 00000000..b8b2a4db --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/docs/markdown_syntax.md @@ -0,0 +1,899 @@ +css: style.css + +Markdown: Syntax +================ + + + + +* [Overview](#overview) + * [Philosophy](#philosophy) + * [Inline HTML](#html) + * [Automatic Escaping for Special Characters](#autoescape) +* [Block Elements](#block) + * [Paragraphs and Line Breaks](#p) + * [Headers](#header) + * [Blockquotes](#blockquote) + * [Lists](#list) + * [Code Blocks](#precode) + * [Horizontal Rules](#hr) +* [Span Elements](#span) + * [Links](#link) + * [Emphasis](#em) + * [Code](#code) + * [Images](#img) +* [Miscellaneous](#misc) + * [Backslash Escapes](#backslash) + * [Automatic Links](#autolink) + + +**Note:** This document is itself written using Markdown; you +can [see the source for it by adding '.text' to the URL][src]. + + [src]: /projects/markdown/syntax.text + +* * * + +

<h2 id="overview">Overview</h2>

+ +

<h3 id="philosophy">Philosophy</h3>

+ +Markdown is intended to be as easy-to-read and easy-to-write as is feasible. + +Readability, however, is emphasized above all else. A Markdown-formatted +document should be publishable as-is, as plain text, without looking +like it's been marked up with tags or formatting instructions. While +Markdown's syntax has been influenced by several existing text-to-HTML +filters -- including [Setext] [1], [atx] [2], [Textile] [3], [reStructuredText] [4], +[Grutatext] [5], and [EtText] [6] -- the single biggest source of +inspiration for Markdown's syntax is the format of plain text email. + + [1]: http://docutils.sourceforge.net/mirror/setext.html + [2]: http://www.aaronsw.com/2002/atx/ + [3]: http://textism.com/tools/textile/ + [4]: http://docutils.sourceforge.net/rst.html + [5]: http://www.triptico.com/software/grutatxt.html + [6]: http://ettext.taint.org/doc/ + +To this end, Markdown's syntax is comprised entirely of punctuation +characters, which punctuation characters have been carefully chosen so +as to look like what they mean. E.g., asterisks around a word actually +look like \*emphasis\*. Markdown lists look like, well, lists. Even +blockquotes look like quoted passages of text, assuming you've ever +used email. + + + +

<h3 id="html">Inline HTML</h3>

+ +Markdown's syntax is intended for one purpose: to be used as a +format for *writing* for the web. + +Markdown is not a replacement for HTML, or even close to it. Its +syntax is very small, corresponding only to a very small subset of +HTML tags. The idea is *not* to create a syntax that makes it easier +to insert HTML tags. In my opinion, HTML tags are already easy to +insert. The idea for Markdown is to make it easy to read, write, and +edit prose. HTML is a *publishing* format; Markdown is a *writing* +format. Thus, Markdown's formatting syntax only addresses issues that +can be conveyed in plain text. + +For any markup that is not covered by Markdown's syntax, you simply +use HTML itself. There's no need to preface it or delimit it to +indicate that you're switching from Markdown to HTML; you just use +the tags. + +The only restrictions are that block-level HTML elements -- e.g. `
<div>`,
+`<table>`, `<pre>`, `<p>`, etc. -- must be separated from surrounding
+content by blank lines, and the start and end tags of the block should
+not be indented with tabs or spaces. Markdown is smart enough not
+to add extra (unwanted) `<p>` tags around HTML block-level tags.
+
+For example, to add an HTML table to a Markdown article:
+
+    This is a regular paragraph.
+
+    <table>
+        <tr>
+            <td>Foo</td>
+        </tr>
+    </table>
+ + This is another regular paragraph. + +Note that Markdown formatting syntax is not processed within block-level +HTML tags. E.g., you can't use Markdown-style `*emphasis*` inside an +HTML block. + +Span-level HTML tags -- e.g. ``, ``, or `` -- can be +used anywhere in a Markdown paragraph, list item, or header. If you +want, you can even use HTML tags instead of Markdown formatting; e.g. if +you'd prefer to use HTML `` or `` tags instead of Markdown's +link or image syntax, go right ahead. + +Unlike block-level HTML tags, Markdown syntax *is* processed within +span-level tags. + + +

<h3 id="autoescape">Automatic Escaping for Special Characters</h3>

+ +In HTML, there are two characters that demand special treatment: `<` +and `&`. Left angle brackets are used to start tags; ampersands are +used to denote HTML entities. If you want to use them as literal +characters, you must escape them as entities, e.g. `<`, and +`&`. + +Ampersands in particular are bedeviling for web writers. If you want to +write about 'AT&T', you need to write '`AT&T`'. You even need to +escape ampersands within URLs. Thus, if you want to link to: + + http://images.google.com/images?num=30&q=larry+bird + +you need to encode the URL as: + + http://images.google.com/images?num=30&q=larry+bird + +in your anchor tag `href` attribute. Needless to say, this is easy to +forget, and is probably the single most common source of HTML validation +errors in otherwise well-marked-up web sites. + +Markdown allows you to use these characters naturally, taking care of +all the necessary escaping for you. If you use an ampersand as part of +an HTML entity, it remains unchanged; otherwise it will be translated +into `&`. + +So, if you want to include a copyright symbol in your article, you can write: + + © + +and Markdown will leave it alone. But if you write: + + AT&T + +Markdown will translate it to: + + AT&T + +Similarly, because Markdown supports [inline HTML](#html), if you use +angle brackets as delimiters for HTML tags, Markdown will treat them as +such. But if you write: + + 4 < 5 + +Markdown will translate it to: + + 4 < 5 + +However, inside Markdown code spans and blocks, angle brackets and +ampersands are *always* encoded automatically. This makes it easy to use +Markdown to write about HTML code. (As opposed to raw HTML, which is a +terrible format for writing about HTML syntax, because every single `<` +and `&` in your example code needs to be escaped.) + + +* * * + + +

<h2 id="block">Block Elements</h2>

+ + +

<h3 id="p">Paragraphs and Line Breaks</h3>

+ +A paragraph is simply one or more consecutive lines of text, separated +by one or more blank lines. (A blank line is any line that looks like a +blank line -- a line containing nothing but spaces or tabs is considered +blank.) Normal paragraphs should not be indented with spaces or tabs. + +The implication of the "one or more consecutive lines of text" rule is +that Markdown supports "hard-wrapped" text paragraphs. This differs +significantly from most other text-to-HTML formatters (including Movable +Type's "Convert Line Breaks" option) which translate every line break +character in a paragraph into a `
<br />` tag. + +When you *do* want to insert a `<br />` break tag using Markdown, you +end a line with two or more spaces, then type return. + +Yes, this takes a tad more effort to create a `<br />`, but a simplistic +"every line break is a `<br />
`" rule wouldn't work for Markdown. +Markdown's email-style [blockquoting][bq] and multi-paragraph [list items][l] +work best -- and look better -- when you format them with hard breaks. + + [bq]: #blockquote + [l]: #list + + + + + +Markdown supports two styles of headers, [Setext] [1] and [atx] [2]. + +Setext-style headers are "underlined" using equal signs (for first-level +headers) and dashes (for second-level headers). For example: + + This is an H1 + ============= + + This is an H2 + ------------- + +Any number of underlining `=`'s or `-`'s will work. + +Atx-style headers use 1-6 hash characters at the start of the line, +corresponding to header levels 1-6. For example: + + # This is an H1 + + ## This is an H2 + + ###### This is an H6 + +Optionally, you may "close" atx-style headers. This is purely +cosmetic -- you can use this if you think it looks better. The +closing hashes don't even need to match the number of hashes +used to open the header. (The number of opening hashes +determines the header level.) : + + # This is an H1 # + + ## This is an H2 ## + + ### This is an H3 ###### + + +

<h3 id="blockquote">Blockquotes</h3>

+ +Markdown uses email-style `>` characters for blockquoting. If you're +familiar with quoting passages of text in an email message, then you +know how to create a blockquote in Markdown. It looks best if you hard +wrap the text and put a `>` before every line: + + > This is a blockquote with two paragraphs. Lorem ipsum dolor sit amet, + > consectetuer adipiscing elit. Aliquam hendrerit mi posuere lectus. + > Vestibulum enim wisi, viverra nec, fringilla in, laoreet vitae, risus. + > + > Donec sit amet nisl. Aliquam semper ipsum sit amet velit. Suspendisse + > id sem consectetuer libero luctus adipiscing. + +Markdown allows you to be lazy and only put the `>` before the first +line of a hard-wrapped paragraph: + + > This is a blockquote with two paragraphs. Lorem ipsum dolor sit amet, + consectetuer adipiscing elit. Aliquam hendrerit mi posuere lectus. + Vestibulum enim wisi, viverra nec, fringilla in, laoreet vitae, risus. + + > Donec sit amet nisl. Aliquam semper ipsum sit amet velit. Suspendisse + id sem consectetuer libero luctus adipiscing. + +Blockquotes can be nested (i.e. a blockquote-in-a-blockquote) by +adding additional levels of `>`: + + > This is the first level of quoting. + > + > > This is nested blockquote. + > + > Back to the first level. + +Blockquotes can contain other Markdown elements, including headers, lists, +and code blocks: + + > ## This is a header. + > + > 1. This is the first list item. + > 2. This is the second list item. + > + > Here's some example code: + > + > return shell_exec("echo $input | $markdown_script"); + +Any decent text editor should make email-style quoting easy. For +example, with BBEdit, you can make a selection and choose Increase +Quote Level from the Text menu. + + +

<h3 id="list">Lists</h3>

+ +Markdown supports ordered (numbered) and unordered (bulleted) lists. + +Unordered lists use asterisks, pluses, and hyphens -- interchangably +-- as list markers: + + * Red + * Green + * Blue + +is equivalent to: + + + Red + + Green + + Blue + +and: + + - Red + - Green + - Blue + +Ordered lists use numbers followed by periods: + + 1. Bird + 2. McHale + 3. Parish + +It's important to note that the actual numbers you use to mark the +list have no effect on the HTML output Markdown produces. The HTML +Markdown produces from the above list is: + +
    <ol>
+    <li>Bird</li>
+    <li>McHale</li>
+    <li>Parish</li>
+    </ol>
+ +If you instead wrote the list in Markdown like this: + + 1. Bird + 1. McHale + 1. Parish + +or even: + + 3. Bird + 1. McHale + 8. Parish + +you'd get the exact same HTML output. The point is, if you want to, +you can use ordinal numbers in your ordered Markdown lists, so that +the numbers in your source match the numbers in your published HTML. +But if you want to be lazy, you don't have to. + +If you do use lazy list numbering, however, you should still start the +list with the number 1. At some point in the future, Markdown may support +starting ordered lists at an arbitrary number. + +List markers typically start at the left margin, but may be indented by +up to three spaces. List markers must be followed by one or more spaces +or a tab. + +To make lists look nice, you can wrap items with hanging indents: + + * Lorem ipsum dolor sit amet, consectetuer adipiscing elit. + Aliquam hendrerit mi posuere lectus. Vestibulum enim wisi, + viverra nec, fringilla in, laoreet vitae, risus. + * Donec sit amet nisl. Aliquam semper ipsum sit amet velit. + Suspendisse id sem consectetuer libero luctus adipiscing. + +But if you want to be lazy, you don't have to: + + * Lorem ipsum dolor sit amet, consectetuer adipiscing elit. + Aliquam hendrerit mi posuere lectus. Vestibulum enim wisi, + viverra nec, fringilla in, laoreet vitae, risus. + * Donec sit amet nisl. Aliquam semper ipsum sit amet velit. + Suspendisse id sem consectetuer libero luctus adipiscing. + +If list items are separated by blank lines, Markdown will wrap the +items in `

<p>` tags in the HTML output. For example, this input: + + * Bird + * Magic + +will turn into: + +    <ul>
+    <li>Bird</li>
+    <li>Magic</li>
+    </ul>
+ +But this: + + * Bird + + * Magic + +will turn into: + +
    <ul>
+    <li><p>Bird</p></li>
+    <li><p>Magic</p></li>
+    </ul>
+ +List items may consist of multiple paragraphs. Each subsequent +paragraph in a list item must be intended by either 4 spaces +or one tab: + + 1. This is a list item with two paragraphs. Lorem ipsum dolor + sit amet, consectetuer adipiscing elit. Aliquam hendrerit + mi posuere lectus. + + Vestibulum enim wisi, viverra nec, fringilla in, laoreet + vitae, risus. Donec sit amet nisl. Aliquam semper ipsum + sit amet velit. + + 2. Suspendisse id sem consectetuer libero luctus adipiscing. + +It looks nice if you indent every line of the subsequent +paragraphs, but here again, Markdown will allow you to be +lazy: + + * This is a list item with two paragraphs. + + This is the second paragraph in the list item. You're + only required to indent the first line. Lorem ipsum dolor + sit amet, consectetuer adipiscing elit. + + * Another item in the same list. + +To put a blockquote within a list item, the blockquote's `>` +delimiters need to be indented: + + * A list item with a blockquote: + + > This is a blockquote + > inside a list item. + +To put a code block within a list item, the code block needs +to be indented *twice* -- 8 spaces or two tabs: + + * A list item with a code block: + + + + +It's worth noting that it's possible to trigger an ordered list by +accident, by writing something like this: + + 1986. What a great season. + +In other words, a *number-period-space* sequence at the beginning of a +line. To avoid this, you can backslash-escape the period: + + 1986\. What a great season. + + + +

<h3 id="precode">Code Blocks</h3>

+ +Pre-formatted code blocks are used for writing about programming or +markup source code. Rather than forming normal paragraphs, the lines +of a code block are interpreted literally. Markdown wraps a code block +in both `
<pre>` and `<code>` tags.
+
+To produce a code block in Markdown, simply indent every line of the
+block by at least 4 spaces or 1 tab. For example, given this input:
+
+    This is a normal paragraph:
+
+        This is a code block.
+
+Markdown will generate:
+
+    <p>This is a normal paragraph:</p>
+
+    <pre><code>This is a code block.
+    </code></pre>
+ +One level of indentation -- 4 spaces or 1 tab -- is removed from each +line of the code block. For example, this: + + Here is an example of AppleScript: + + tell application "Foo" + beep + end tell + +will turn into: + +

<p>Here is an example of AppleScript:</p>
+
+    <pre><code>tell application "Foo"
+        beep
+    end tell
+    </code></pre>
+ +A code block continues until it reaches a line that is not indented +(or the end of the article). + +Within a code block, ampersands (`&`) and angle brackets (`<` and `>`) +are automatically converted into HTML entities. This makes it very +easy to include example HTML source code using Markdown -- just paste +it and indent it, and Markdown will handle the hassle of encoding the +ampersands and angle brackets. For example, this: + +    <div class="footer"> +        &copy; 2004 Foo Corporation +    </div> + +will turn into: + +
    <pre><code>&lt;div class="footer"&gt;
+        &amp;copy; 2004 Foo Corporation
+    &lt;/div&gt;
+    </code></pre>
+ +Regular Markdown syntax is not processed within code blocks. E.g., +asterisks are just literal asterisks within a code block. This means +it's also easy to use Markdown to write about Markdown's own syntax. + + + +

<h3 id="hr">Horizontal Rules</h3>

+ +You can produce a horizontal rule tag (`
<hr />`) by placing three or +more hyphens, asterisks, or underscores on a line by themselves. If you +wish, you may use spaces between the hyphens or asterisks. Each of the +following lines will produce a horizontal rule: + + * * * + + *** + + ***** + + - - - + + --------------------------------------- + + +* * * + +

<h2 id="span">Span Elements</h2>
+
+<h3 id="link">Links</h3>
+ + + +Markdown supports two style of links: *inline* and *reference*. + +In both styles, the link text is delimited by [square brackets]. + +To create an inline link, use a set of regular parentheses immediately +after the link text's closing square bracket. Inside the parentheses, +put the URL where you want the link to point, along with an *optional* +title for the link, surrounded in quotes. For example: + + This is [an example](http://example.com/ "Title") inline link. + + [This link](http://example.net/) has no title attribute. + +Will produce: + +

<p>This is <a href="http://example.com/" title="Title">
+    an example</a> inline link.</p>

+ +

<p><a href="http://example.net/">This link</a> has no
+    title attribute.</p>

+ +If you're referring to a local resource on the same server, you can +use relative paths: + + See my [About](/about/) page for details. + +Reference-style links use a second set of square brackets, inside +which you place a label of your choosing to identify the link: + + This is [an example][id] reference-style link. + +You can optionally use a space to separate the sets of brackets: + + This is [an example] [id] reference-style link. + +Then, anywhere in the document, you define your link label like this, +on a line by itself: + + [id]: http://example.com/ "Optional Title Here" + +That is: + +* Square brackets containing the link identifier (optionally + indented from the left margin using up to three spaces); +* followed by a colon; +* followed by one or more spaces (or tabs); +* followed by the URL for the link; +* optionally followed by a title attribute for the link, enclosed + in double or single quotes, or enclosed in parentheses. + +The following three link definitions are equivalent: + + [foo]: http://example.com/ "Optional Title Here" + [foo]: http://example.com/ 'Optional Title Here' + [foo]: http://example.com/ (Optional Title Here) + +**Note:** There is a known bug in Markdown.pl 1.0.1 which prevents +single quotes from being used to delimit link titles. + +The link URL may, optionally, be surrounded by angle brackets: + + [id]: "Optional Title Here" + +You can put the title attribute on the next line and use extra spaces +or tabs for padding, which tends to look better with longer URLs: + + [id]: http://example.com/longish/path/to/resource/here + "Optional Title Here" + +Link definitions are only used for creating links during Markdown +processing, and are stripped from your document in the HTML output. + +Link definition names may constist of letters, numbers, spaces, and +punctuation -- but they are *not* case sensitive. E.g. these two +links: + + [link text][a] + [link text][A] + +are equivalent. + +The *implicit link name* shortcut allows you to omit the name of the +link, in which case the link text itself is used as the name. +Just use an empty set of square brackets -- e.g., to link the word +"Google" to the google.com web site, you could simply write: + + [Google][] + +And then define the link: + + [Google]: http://google.com/ + +Because link names may contain spaces, this shortcut even works for +multiple words in the link text: + + Visit [Daring Fireball][] for more information. + +And then define the link: + + [Daring Fireball]: http://daringfireball.net/ + +Link definitions can be placed anywhere in your Markdown document. I +tend to put them immediately after each paragraph in which they're +used, but if you want, you can put them all at the end of your +document, sort of like footnotes. + +Here's an example of reference links in action: + + I get 10 times more traffic from [Google] [1] than from + [Yahoo] [2] or [MSN] [3]. + + [1]: http://google.com/ "Google" + [2]: http://search.yahoo.com/ "Yahoo Search" + [3]: http://search.msn.com/ "MSN Search" + +Using the implicit link name shortcut, you could instead write: + + I get 10 times more traffic from [Google][] than from + [Yahoo][] or [MSN][]. + + [google]: http://google.com/ "Google" + [yahoo]: http://search.yahoo.com/ "Yahoo Search" + [msn]: http://search.msn.com/ "MSN Search" + +Both of the above examples will produce the following HTML output: + +

<p>I get 10 times more traffic from <a href="http://google.com/"
+    title="Google">Google</a> than from
+    <a href="http://search.yahoo.com/" title="Yahoo Search">Yahoo</a>
+    or <a href="http://search.msn.com/" title="MSN Search">MSN</a>.</p>

+ +For comparison, here is the same paragraph written using +Markdown's inline link style: + + I get 10 times more traffic from [Google](http://google.com/ "Google") + than from [Yahoo](http://search.yahoo.com/ "Yahoo Search") or + [MSN](http://search.msn.com/ "MSN Search"). + +The point of reference-style links is not that they're easier to +write. The point is that with reference-style links, your document +source is vastly more readable. Compare the above examples: using +reference-style links, the paragraph itself is only 81 characters +long; with inline-style links, it's 176 characters; and as raw HTML, +it's 234 characters. In the raw HTML, there's more markup than there +is text. + +With Markdown's reference-style links, a source document much more +closely resembles the final output, as rendered in a browser. By +allowing you to move the markup-related metadata out of the paragraph, +you can add links without interrupting the narrative flow of your +prose. + + +

<h3 id="em">Emphasis</h3>

+ +Markdown treats asterisks (`*`) and underscores (`_`) as indicators of +emphasis. Text wrapped with one `*` or `_` will be wrapped with an +HTML `` tag; double `*`'s or `_`'s will be wrapped with an HTML +`` tag. E.g., this input: + + *single asterisks* + + _single underscores_ + + **double asterisks** + + __double underscores__ + +will produce: + + single asterisks + + single underscores + + double asterisks + + double underscores + +You can use whichever style you prefer; the lone restriction is that +the same character must be used to open and close an emphasis span. + +Emphasis can be used in the middle of a word: + + un*fucking*believable + +But if you surround an `*` or `_` with spaces, it'll be treated as a +literal asterisk or underscore. + +To produce a literal asterisk or underscore at a position where it +would otherwise be used as an emphasis delimiter, you can backslash +escape it: + + \*this text is surrounded by literal asterisks\* + + + +

<h3 id="code">Code</h3>

+ +To indicate a span of code, wrap it with backtick quotes (`` ` ``). +Unlike a pre-formatted code block, a code span indicates code within a +normal paragraph. For example: + + Use the `printf()` function. + +will produce: + +

<p>Use the <code>printf()</code> function.</p>

+ +To include a literal backtick character within a code span, you can use +multiple backticks as the opening and closing delimiters: + + ``There is a literal backtick (`) here.`` + +which will produce this: + +

<p><code>There is a literal backtick (`) here.</code></p>

+ +The backtick delimiters surrounding a code span may include spaces -- +one after the opening, one before the closing. This allows you to place +literal backtick characters at the beginning or end of a code span: + + A single backtick in a code span: `` ` `` + + A backtick-delimited string in a code span: `` `foo` `` + +will produce: + +

<p>A single backtick in a code span: <code>`</code></p>

+ +

<p>A backtick-delimited string in a code span: <code>`foo`</code></p>

+ +With a code span, ampersands and angle brackets are encoded as HTML +entities automatically, which makes it easy to include example HTML +tags. Markdown will turn this: + + Please don't use any `<blink>` tags. + +into: + +

<p>Please don't use any <code>&lt;blink&gt;</code> tags.</p>

+ +You can write this: + + `&#8212;` is the decimal-encoded equivalent of `&mdash;`. + +to produce: + +

<p><code>&amp;#8212;</code> is the decimal-encoded
+    equivalent of <code>&amp;mdash;</code>.</p>

+ + + +

<h3 id="img">Images</h3>

+ +Admittedly, it's fairly difficult to devise a "natural" syntax for +placing images into a plain text document format. + +Markdown uses an image syntax that is intended to resemble the syntax +for links, allowing for two styles: *inline* and *reference*. + +Inline image syntax looks like this: + + ![Alt text](/path/to/img.jpg) + + ![Alt text](/path/to/img.jpg "Optional title") + +That is: + +* An exclamation mark: `!`; +* followed by a set of square brackets, containing the `alt` + attribute text for the image; +* followed by a set of parentheses, containing the URL or path to + the image, and an optional `title` attribute enclosed in double + or single quotes. + +Reference-style image syntax looks like this: + + ![Alt text][id] + +Where "id" is the name of a defined image reference. Image references +are defined using syntax identical to link references: + + [id]: url/to/image "Optional title attribute" + +As of this writing, Markdown has no syntax for specifying the +dimensions of an image; if this is important to you, you can simply +use regular HTML `` tags. + + +* * * + + +

<h2 id="misc">Miscellaneous</h2>
+
+<h3 id="autolink">Automatic Links</h3>
+ + + +Markdown supports a shortcut style for creating "automatic" links for URLs and email addresses: simply surround the URL or email address with angle brackets. What this means is that if you want to show the actual text of a URL or email address, and also have it be a clickable link, you can do this: + + + +Markdown will turn this into: + + http://example.com/ + +Automatic links for email addresses work similarly, except that +Markdown will also perform a bit of randomized decimal and hex +entity-encoding to help obscure your address from address-harvesting +spambots. For example, Markdown will turn this: + + + +into something like this: + + address@exa + mple.com + +which will render in a browser as a clickable link to "address@example.com". + +(This sort of entity-encoding trick will indeed fool many, if not +most, address-harvesting bots, but it definitely won't fool all of +them. It's better than nothing, but an address published in this way +will probably eventually start receiving spam.) + + + +

<h3 id="backslash">Backslash Escapes</h3>

+ +Markdown allows you to use backslash escapes to generate literal +characters which would otherwise have special meaning in Markdown's +formatting syntax. For example, if you wanted to surround a word with +literal asterisks (instead of an HTML `` tag), you can backslashes +before the asterisks, like this: + + \*literal asterisks\* + +Markdown provides backslash escapes for the following characters: + + \ backslash + ` backtick + * asterisk + _ underscore + {} curly braces + [] square brackets + () parentheses + # hash mark + + plus sign + - minus sign (hyphen) + . dot + ! exclamation mark + diff --git a/vendor/gems/gems/maruku-0.6.0/docs/maruku.md b/vendor/gems/gems/maruku-0.6.0/docs/maruku.md new file mode 100644 index 00000000..62a0a6bc --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/docs/maruku.md @@ -0,0 +1,346 @@ +CSS: style.css +Use numbered headers: true +HTML use syntax: true +LaTeX use listings: true +LaTeX CJK: false +LaTeX preamble: preamble.tex + +![MaRuKu](logo.png){#logo} + + +Mar**u**k**u**: a Markdown-superset interpreter +=============================================== + +[Maruku] is a Markdown interpreter written in [Ruby]. + +> [Last release](#release_notes) is version 0.5.6 -- 2007-05-22. +> +> Install using [rubygems]: +> +> $ gem install maruku +> +> Use this command to update to latest version: +> +> $ gem update maruku +> +{#news} + +[rubygems]: http://rubygems.org + +* * * + + +Maruku allows you to write in an easy-to-read-and-write syntax, like this: + +> [This document in Markdown][this_md] + +Then it can be translated to HTML: + +> [This document in HTML][this_html] + +or LaTeX, which is then converted to PDF: + +> [This document in PDF][this_pdf] + +Maruku implements: + +* the original [Markdown syntax][markdown_html] + ([HTML][markdown_html] or [PDF][markdown_pdf]), translated by Maruku). + +* all the improvements in [PHP Markdown Extra]. + +* a new [meta-data syntax][meta_data_proposal] + + +__Authors__: Maruku has been developed so far by [Andrea Censi]. +Contributors are most welcome! + +__The name of the game__: Maruku is the [romaji] transliteration of +the [katakana] transliteration +of "Mark", the first word in Markdown. I chose this name because Ruby +is Japanese, and also the sillable "ru" appears in Maruku. 
+ + +[romaji]: http://en.wikipedia.org/wiki/Romaji +[katakana]: http://en.wikipedia.org/wiki/Katakana + +[tests]: http://maruku.rubyforge.org/tests/ +[maruku]: http://maruku.rubyforge.org/ +[markdown_html]: http://maruku.rubyforge.org/markdown_syntax.html +[markdown_pdf]: http://maruku.rubyforge.org/markdown_syntax.pdf +[this_md]: http://maruku.rubyforge.org/maruku.md +[this_html]: http://maruku.rubyforge.org/maruku.html +[this_pdf]: http://maruku.rubyforge.org/maruku.pdf +[Andrea Censi]: http://www.dis.uniroma1.it/~acensi/ + +[contact]: http://www.dis.uniroma1.it/~acensi/contact.html +[gem]: http://rubygems.rubyforge.org/ +[tracker]: http://rubyforge.org/tracker/?group_id=2795 + + +[ruby]: http://www.ruby-lang.org +[bluecloth]: http://www.deveiate.org/projects/BlueCloth +[Markdown syntax]: http://daringfireball.net/projects/markdown/syntax +[PHP Markdown Extra]: http://www.michelf.com/projects/php-markdown/extra/ +[math syntax]: http://maruku.rubyforge.org/math.xhtml +[blahtex]: http://www.blahtex.org +[ritex]: http://ritex.rubyforge.org +[itex2mml]: http://golem.ph.utexas.edu/~distler/code/itexToMML/ +[syntax]: http://syntax.rubyforge.org/ + +[listings]: http://www.ctan.org/tex-archive/macros/latex/contrib/listings/ +[meta_data_proposal]: http://maruku.rubyforge.org/proposal.html +[markdown-discuss]: http://six.pairlist.net/mailman/listinfo/markdown-discuss + +* * * + +Table of contents: (**auto-generated by Maruku!**) + +* This list will contain the toc (it doesn't matter what you write here) +{:toc} + +* * * + +{:ruby: lang=ruby code_background_color='#efffef'} +{:shell: lang=sh code_background_color='#efefff'} +{:markdown: code_background_color='#ffefef'} +{:html: lang=xml} + + +Release notes {#release_notes} +-------------- + +Note: Maruku seems to be very robust, nevertheless it is still beta-level +software. So if you want to use it in production environments, please +check back in a month or so, while we squash the remaining bugs. + +In the meantime, feel free to toy around, and please signal problems, +request features, by [contacting me][contact] or using the [tracker][tracker]. +For issues about the Markdown syntax itself and improvements to it, +please write to the [Markdown-discuss mailing list][markdown-discuss]. + +Have fun! + +See the [changelog](http://maruku.rubyforge.org/changelog.html#stable). + + +Usage +-------- + +### Embedded Maruku ### + +This is the basic usage: + + require 'rubygems' + require 'maruku' + + doc = Maruku.new(markdown_string) + puts doc.to_html +{:ruby} + +The method `to_html` outputs only an HTML fragment, while the method `to_html_document` outputs a complete XHTML 1.0 document: + + puts doc.to_html_document +{:ruby} + +You can have the REXML document tree with: + + tree = doc.to_html_document_tree +{:ruby} + +### From the command line ### + +There is one command-line program installed: `maruku`. + +Without arguments, it converts Markdown to HTML: + + $ maruku file.md # creates file.html +{:shell} + +With the `--pdf` arguments, it converts Markdown to LaTeX, then calls `pdflatex` to +transform to PDF: + + $ maruku --pdf file.md # creates file.tex and file.pdf +{:shell} + + + +Maruku summary of features {#features} +-------------------------- + +* Supported syntax + + * [Basic Markdown][markdown_syntax] + * [Markdown Extra](#extra) + * [Meta-data syntax](#meta) + +* Output + + * XHTML + + * Syntax highlighting via the [`syntax`][syntax] library. 
+ + * LaTeX + + * [Translation of HTML entities to LaTeX](#entities) + * Syntax highlighting via the [`listings`][listings] package. + +* Misc + + * [Documentation for supported attributes][supported_attributes] + + * [Automatic generation of the TOC](#toc-generation) + + +[supported_attributes]: exd.html + +**Experimental features (not released yet)** + +* [LaTeX Math syntax][math_syntax] (not enabled by default) +* An extension system for adding new syntax is available, + but the API is bound to change in the future, + so please don't use it. +* LaTeX to MathML using either one of [`ritex`][ritex], [`itex2mml`][itex2mml], + [`blahtex`][blahtex]. +* LaTeX to PNG using [`blahtex`][blahtex]. + +### New meta-data syntax {#meta} + +Maruku implements a syntax that allows to attach "meta" information +to objects. + +See [this proposal][meta_data_proposal] for how to attach +metadata to the elements. + +See the [documentation for supported attributes][supported_attributes]. + +Meta-data for the document itself is specified through the use +of email headers: + + Title: A simple document containing meta-headers + CSS: style.css + + Content of the document +{:markdown} + +When creating the document through + + Maruku.new(s).to_html_document +{:ruby} + +the title and stylesheet are added as expected. + +Meta-data keys are assumed to be case-insensitive. + + +### Automatic generation of the table of contents ### {#toc-generation} + +If you create a list, and then set the `toc` attribute, when rendering +Maruku will create an auto-generated table of contents. + + * This will become a table of contents (this text will be scraped). + {:toc} + +You can see an example of this at the beginning of this document. + +### Use HTML entities ### {#entities} + +If you want to use HTML entities, go on! We will take care +of the translation to LaTeX: + +Entity | Result +------------|---------- +`©` | © +`£` | £ +`λ` | λ +`—` | — + +See the [list of supported entities][ent_html] ([pdf][ent_pdf]). + +[ent_html]: http://maruku.rubyforge.org/entity_test.html +[ent_pdf]: http://maruku.rubyforge.org/entity_test.pdf + + +### This header contains *emphasis* **strong text** and `code` #### + +Note that this header contains formatting and it still works, also in the table of contents. + +And [This is a *link* with **all** ***sort*** of `weird stuff`](#features) in the text. + + +Examples of PHP Markdown Extra syntax {#extra} +------------------------------------- + +* tables + + Col1 | Very very long head | Very very long head| + -----|:-------------------:|-------------------:| + cell | center-align | right-align | + {:markdown} + + Col1 | Very very long head | Very very long head| + -----|:-------------------:|-------------------:| + cell | center-align | right-align | + + +* footnotes [^foot] + + * footnotes [^foot] + + [^foot]: I really was missing those. + {:markdown} + +[^foot]: I really was missing those. + +* Markdown inside HTML elements + +
<div markdown="1"> + This is a div with Markdown **strong text** + </div> + {:html} + + <div markdown="1"> + This is a div with Markdown **strong text** + </div>
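  A quick sketch of converting such a snippet programmatically, using the embedded interface shown earlier (this assumes the gem is installed; the string below is simply the example above):

      require 'rubygems'
      require 'maruku'

      # A block-level HTML element with markdown="1" gets its contents
      # processed as Markdown.
      snippet = "<div markdown=\"1\">\nThis is a div with Markdown **strong text**\n</div>\n"
      puts Maruku.new(snippet).to_html
  {:ruby}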
+ + +* header ids + + ## Download ## {#download} + {:markdown} + + For example, [a link to the download](#download) header. + + +* definition lists + + Definition list + : something very hard to parse + {:markdown} + + Definition list + : something very hard to parse + +* abbreviations or ABB for short. + +*[ABB]: Simply an abbreviation + + + + + + diff --git a/vendor/gems/gems/maruku-0.6.0/docs/math.md b/vendor/gems/gems/maruku-0.6.0/docs/math.md new file mode 100644 index 00000000..16bed4ae --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/docs/math.md @@ -0,0 +1,194 @@ +Title: Math support in Maruku +LaTeX preamble: math_preamble.tex +LaTeX use listings: true +CSS: math.css style.css +use numbered headers: true + +Math support in Maruku +====================== + +This document describes Maruku's support of inline LaTeX-style math. + +At the moment, **these features are experimental**, are probably +buggy and the syntax and implementation are bound to change in +the near future. + +Also, there are many subtleties of which one must care for +correctly serving the XHTML+MathML document to browsers. +In fact, *this documentation is __not__ enough to get you started*, +unless you feel very adventurous. + +* toc +{:toc} + +Syntax +--------------------------------------- + +### Inline math + +Inline math is contained inside couples of `$`. + +Everything inside will be passed as-is to LaTeX: no Markdown +interpretation will take place. + + Example: $x^{n}+y^{n} \neq z^{n}$ for $n \geq 3$ + +> Example: $x^{n}+y^{n} \neq z^{n}$ for $n \geq 3$ + +### Equations + +Equations are specified using either the `$$ ... $$` or `\[ ... \]` +LaTeX notation. Equations can span multiple lines. + + \[ + \sum_{n=1}^\infty \frac{1}{n} + \text{ is divergent, but } + \lim_{n \to \infty} \sum_{i=1}^n \frac{1}{i} - \ln n \text{exists.} + \] + +> \[ +> \sum_{n=1}^\infty \frac{1}{n} +> \text{ is divergent, but } +> \lim_{n \to \infty} \sum_{i=1}^n \frac{1}{i} - \ln n \quad \text{exists.} +> \] + +Some random AMSTeX symbols: + + $$ \beth \Subset \bigtriangleup \bumpeq \ggg \pitchfork $$ + +$$ \beth \Subset \bigtriangleup \bumpeq \ggg \pitchfork $$ + + +## Cross references ## {#cross} + +Create a label for an equation in two ways: + +* LaTeX style: + + Consider \eqref{a}: + + $$ \alpha = \beta \label{a} $$ + +* More readable style: + + Consider (eq:a): + + $$ \alpha = \beta $$ (a) + +You can mix the two. + +Labels will work as expected also in the PDF output, whatever +style you use: Maruku will insert the necessary `\label` commands. + +The following are 4 equations, labeled A,B,C,D: + +$$ \alpha $$ (A) + +\[ + \beta +\] (B) + +$$ \gamma \label{C} $$ + +\[ + \delta \label{D} +\] + +You can now refer to (eq:A), (eq:B), \eqref{C}, \eqref{D}. + + +Enabling the extension +--------------------------------------- + +### On the command line + +Use the `-m` option to choose the kind of output. Possible choices are: + +`--math-engine itex2mml` : Outputs MathML using [itex2mml](#using_itex2mml). +`--math-engine ritex` : Outputs MathML using [ritex](#using_ritex). +`--math-engine blahtex` : Outputs MathML using [blahtex](#using_blahtex). +`--math-images blahtex` : Outputs PNGs using [blahtex](#using_blahtex). 
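The same engines can also be selected from Ruby code; the next subsection describes each setting in detail, but as a rough end-to-end sketch (assuming the `ritex` gem is installed):

    require 'rubygems'
    require 'maruku'
    require 'maruku/ext/math'   # enables the math extension

    MaRuKu::Globals[:html_math_engine] = 'ritex'

    doc = Maruku.new("Euler's identity: $e^{i\\pi} + 1 = 0$")
    puts doc.to_html
{:ruby}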
+ +### With embedded Maruku + +You have to enable the math extension like this: + + require 'maruku' # loads maruku + require 'maruku/ext/math' # loads the math extension + +Use the following to choose the engine: + + MaRuKu::Globals[:html_math_engine] = 'ritex' + MaRuKu::Globals[:html_png_engine] = 'blahtex' + +Available MathML engines are 'none', 'itex2mml', 'blahtex'. +'blahtex' is the only PNG engine available. + +External libraries needed +------------------------- + +To output MathML or PNGs, it is needed to install one of the following libraries + +### Using `ritex` ### {#using_ritex} + +Install with + + $ gem install ritex + +ritex's abilities are very limited, but it is the easiest to install. + +### Using `itex2mml` ### {#using_itex2mml} + +itex2mml supports much more LaTeX commands/environments than ritex. + +Install itex2mml using the instructions at: + +> + +This is a summary of the available LaTeX commands: + +> + +Moreover, [Jacques Distler] is integrating Maruku+itex2mml+[Instiki]. +You can find more information here: + +> + +[Jacques Distler]: http://golem.ph.utexas.edu/~distler +[instiki]: http://www.instiki.org + +### Using `blahtex` ### {#using_blahtex} + +Download from . Make sure you have +the command-line `blahtex` in your path. + + +Subtleties +---------- + +### Serving the right content/type ### + + +* Mozilla wants files to have the `.xhtml` extension. + +... + +### Where PNGS are put ### + +* `Globals[:math_png_dir]` + +* `Globals[:math_png_dir_url]` + + +### Styling equations #### + +... + +### Aligning PNGs #### + + +* using `ex` + +* **IE7 bug** + +... diff --git a/vendor/gems/gems/maruku-0.6.0/docs/other_stuff.md b/vendor/gems/gems/maruku-0.6.0/docs/other_stuff.md new file mode 100644 index 00000000..1528630a --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/docs/other_stuff.md @@ -0,0 +1,51 @@ +* *Jan. 22* With very minimal changes, Maruku now works in JRuby. + It is very slow, though. + + Some benchmarks: + + * G4 1.5GhZ, Ruby 1.8.5: + + Maruku (to_html): parsing 0.65 sec + rendering 0.40 sec = 1.04 sec + Maruku (to_latex): parsing 0.70 sec + rendering 0.21 sec = 0.91 sec + + * G4 1.5GhZ, JRuby 1.9.2: + + Maruku (to_html): parsing 4.77 sec + rendering 2.24 sec = 7.01 sec + Maruku (to_latex): parsing 4.04 sec + rendering 1.12 sec = 5.16 sec + +* *Jan. 21* Integration of Blahtex. PNG export of formula and alignment works + ok in Mozilla, Safari, Camino, Opera. IE7 is acting strangely. + +* Support for LaTeX-style formula input, and export to MathML. + + [Jacques Distler] is integrating Maruku into Instiki (a Ruby On Rails-based wiki software), as to have a Ruby wiki with proper math support. You know, these physicists like all those funny symbols. + + * To have the MathML export, it is needed to install one of: + + * [RiTeX] (`gem install ritex`) + * [itex2MML] supports much more complex formulas than Ritex. + * PNG for old browser is not here yet. The plan is to use + BlahTeX. + + +* Command line options for the `maruku` command: + + Usage: maruku [options] [file1.md [file2.md ... + -v, --[no-]verbose Run verbosely + -u, --[no-]unsafe Use unsafe features + -b Break on error + -m, --math-engine ENGINE Uses ENGINE to render MathML + --pdf Write PDF + --html Write HTML + --tex Write LaTeX + --inspect Shows the parsing result + --version Show version + -h, --help Show this message + +* Other things: + + * Created the embryo of an extension system. Please don't use it + yet, as probably the API is bound to change. 
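    For the curious, registration currently looks roughly like this (a sketch based on `input/extensions.rb` and the bundled math extension; the `%%...%%` syntax here is purely hypothetical, and the API may well change):

        MaRuKu::In::Markdown::register_span_extension(
          :chars   => ?%,                      # trigger character
          :regexp  => /%%(.+?)%%/,             # pattern to recognise
          :handler => lambda { |doc, src, con|
            m = src.read_regexp(/%%(.+?)%%/)   # consume the matched text
            con.push doc.md_em(m[1])           # emit an emphasis element
            true                               # signal that we handled it
          })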
+ + * There are a couple of hidden, unsafe, features that are not enabled by default. + diff --git a/vendor/gems/gems/maruku-0.6.0/docs/proposal.md b/vendor/gems/gems/maruku-0.6.0/docs/proposal.md new file mode 100644 index 00000000..ff40284b --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/docs/proposal.md @@ -0,0 +1,309 @@ +CSS: style.css +LaTeX_use_listings: true +html_use_syntax: true +use_numbered_headers: true + +Proposal for adding a meta-data syntax to Markdown +============================================= + +This document describes a syntax for attaching meta-data to +block-level elements (headers, paragraphs, code blocks,…), +and to span-level elements (links, images,…). + +***Note: this is an evolving proposal*** + +Last updated **January 10th, 2007**: + +* Changed the syntax for compatibility with a future extension mechanism. + + The first character in the curly braces must be a colon, optionally + followed by a space: + + {: ref .class #id} + + The old syntax was `{ref .class #id}`. + + For ALDs, the new syntax is: + + {:ref_id: key=val .class #id } + + instead of: + + {ref_id}: key=val .class #id + + Converters that don't use this syntax may just ignore everything + which is in curly braces and starts with ":". + +* IAL can be put both *before* and *after* the element. + There is no ambiguity as a blank line is needed between elements: + + Paragraph 1 + + {:par2} + Paragraph 2 + + is equivalent to: + + Paragraph 1 + + Paragraph 2 + {:par2} + +* Simplified rules for escaping. + +*Table of contents:* + +> * Table of contents +> {:toc} + +Overview +-------- + +This proposal describes two additions to the Markdown syntax: + +1. inline attribute lists (IAL) + + ## Header ## {: key=val .class #id ref_id} + +2. attribute lists definitions (ALD) + + {:ref_id: key=val .class #id} + +Every span-level or block-level element can be followed by an IAL: + + ### Header ### {: #header1 class=c1} + + Paragraph *with emphasis*{: class=c1} + second line of paragraph + {: class=c1} + +In this example, the three IALs refer to the header, the emphasis span, and the entire paragraph, respectively. + +IALs can reference ALDs. The result of the following example is the same as the previous one: + + ### Header ### {: #header1 c1} + + Paragraph *with emphasis*{:c1} + second line of paragraph + {:c1} + + {:c1: class=c1} + +Attribute lists +--------------- + +This is an example attribute list, which shows +everything you can put inside: + + {: key1=val key2="long val" #myid .class1 .class2 ref1 ref2} + +More in particular, an attribute list is a whitespace-separated list +of elements of 4 different kinds: + +1. key/value pairs (quoted if necessary) +2. [references to ALD](#using_tags) (`ref1`,`ref2`) +3. [id specifiers](#class_id) (`#myid`) +4. [class specifiers](#class_id) (`.myclass`) + +### `id` and `class` are special ### {#class_id} + +For ID and classes there are special shortcuts: + +* `#myid` is a shortcut for `id=myid` +* `.myclass` means "add `myclass` to the current `class` attribute". + + So these are equivalent: + + {: .class1 .class2} + {: class="class1 class2"} + + +The following attribute lists are equivalent: + + {: #myid .class1 .class2} + {: id=myid class=class1 .class2} + {: id=myid class="class1 class2"} + {: id=myid class="will be overridden" class=class1 .class2} + +Where to put inline attribute lists +---------------------------------- + +### For block-level elements ### + +For paragraphs and other block-level elements, IAL go +**after** the element: + + This is a paragraph. 
+ Line 2 of the paragraph. + {: #myid .myclass} + + A quote with a citation url: + > Who said that? + {: cite=google.com} + +Note: empty lines between the block and the IAL are not tolerated. +So this is not legal: + + This is a paragraph. + Line 2 of the paragraph. + + {: #myid .myclass} + +Attribute lists may be indented up to 3 spaces: + + Paragraph1 + {:ok} + + Paragraph2 + {:ok} + + Paragraph2 + {:ok} +{:code_show_spaces} + +### For headers ### + +For headers, you can put attribute lists on the same line: + + ### Header ### {: #myid} + + Header {: #myid .myclass} + ------ + +or, as like other block-level elements, on the line below: + + ### Header ### + {: #myid} + + Header + ------ + {: #myid .myclass} + +### For span-level elements ### + +For span-level elements, meta-data goes immediately **after** in the +flow. + +For example, in this: + + This is a *chunky paragraph*{: #id1} + {: #id2} + +the ID of the `em` element is set to `id1` +and the ID of the paragraph is set to `id2`. + +This works also for links, like this: + + This is [a link][ref]{:#myid rel=abc rev=abc} + +For images, this: + + This is ![Alt text](url "fresh carrots") + +is equivalent to: + + This is ![Alt text](url){:title="fresh carrots"} + +Using attributes lists definition {#using_tags} +--------------------------------- + +In an attribute list, you can have: + +1. `key=value` pairs, +2. id attributes (`#myid`) +3. class attributes (`.myclass`) + +Everything else is interpreted as a reference to +an ALD. + + # Header # {:ref} + + Blah blah blah. + + {:ref: #myhead .myclass lang=fr} + +Of course, more than one IAL can reference the same ALD: + + # Header 1 # {:1} + ... + # Header 2 # {:1} + + {:1: .myclass lang=fr} + + +The rules {:#grammar} +--------- + +### The issue of escaping ### + +1. No escaping in code spans/blocks. + +2. Everywhere else, **all** PUNCTUATION characters **can** be escaped, +and **must** be escaped when they could trigger links, tables, etc. + + A punctuation character is anything not a letter, a number, or whitespace + (`[^a-zA-Z0-9\s\n]`). + +3. As a rule, quotes **must** be escaped inside quoted values: + + * Inside `"quoted values"`, you **must** escape `"`. + * Inside `'quoted values'`, you **must** escape `'`. + + * Other examples: + + `"bah 'bah' bah"` = `"bah \'bah\' bah"` = `'bah \'bah\' bah'` + + `'bah "bah" bah'` = `'bah \"bah\" bah'` = `"bah \"bah\" bah"` + + +4. There is an exception for backward compatibility, in links/images titles: + + [text](url "title"with"quotes") + + The exception is not valid for attribute lists and in other + contexts, where you have to use the canonical syntax. + + +### Syntax for attribute lists #### + +Consider the following attribute list: + + {: key=value ref key2="quoted value" } + +In this string, `key`, `value`, and `ref` can be substituted by any +string that does not contain whitespace, or the unescaped characters `}`,`=`,`'`,`"`. + +Inside a quoted value you **must** escape the other kind of quote. + +Also, you **must** escape a closing curly brace `}` inside quoted values. +This rule is for making life easier for interpreter that just want to skip +the meta-data. + +If you don't implement this syntax, you can get rid of the IAL by using this +regular expression (this is written in Ruby): + + r = /\{:(\\\}|[^\}])*\}/ + + s.gsub(r, '') # ignore metadata +{:ruby} + +Basically: match everything contained in a couple of `{:` and `}`, taking care +of escaping of `}`. 
This `\\\}|[^\}]` means: eat either any character which +is not a `}` or an escape sequence `\}`. + +For this example, + + this is + {: skipped="\}" val=\} bar} + + for me + {: also this} + +the result is: + + this is + + + for me + + diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku.rb new file mode 100644 index 00000000..71d5aa4c --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku.rb @@ -0,0 +1,141 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + +require 'rexml/document' + +# :include:MaRuKu.txt +module MaRuKu + + module In + module Markdown + module SpanLevelParser; end + module BlockLevelParser; end + end + # more to come? + end + + module Out + # Functions for exporting to MarkDown. + module Markdown; end + # Functions for exporting to HTML. + module HTML; end + # Functions for exporting to Latex + module Latex; end + end + + # These are strings utilities. + module Strings; end + + module Helpers; end + + module Errors; end + + class MDElement + include REXML + include MaRuKu + include Out::Markdown + include Out::HTML + include Out::Latex + include Strings + include Helpers + include Errors + end + + + class MDDocument < MDElement + include In::Markdown + include In::Markdown::SpanLevelParser + include In::Markdown::BlockLevelParser + end +end + +# This is the public interface +class Maruku < MaRuKu::MDDocument; end + + + +require 'rexml/document' + +# Structures definition +require 'maruku/structures' +require 'maruku/structures_inspect' + +require 'maruku/defaults' +# Less typing +require 'maruku/helpers' + +# Code for parsing whole Markdown documents +require 'maruku/input/parse_doc' + +# Ugly things kept in a closet +require 'maruku/string_utils' +require 'maruku/input/linesource' +require 'maruku/input/type_detection' + +# A class for reading and sanitizing inline HTML +require 'maruku/input/html_helper' + +# Code for parsing Markdown block-level elements +require 'maruku/input/parse_block' + +# Code for parsing Markdown span-level elements +require 'maruku/input/charsource' +require 'maruku/input/parse_span_better' +require 'maruku/input/rubypants' + +require 'maruku/input/extensions' + +require 'maruku/attributes' + +require 'maruku/structures_iterators' + +require 'maruku/errors_management' + +# Code for creating a table of contents +require 'maruku/toc' + +# Support for div Markdown extension +require 'maruku/ext/div' + +# Version and URL +require 'maruku/version' + + +# Exporting to html +require 'maruku/output/to_html' + +# Exporting to latex +require 'maruku/output/to_latex' +require 'maruku/output/to_latex_strings' +require 'maruku/output/to_latex_entities' + +# Pretty print +require 'maruku/output/to_markdown' + +# S5 slides +require 'maruku/output/s5/to_s5' +require 'maruku/output/s5/fancy' + 
+# Exporting to text: strips all formatting (not complete) +require 'maruku/output/to_s' + +# class Maruku is the global interface +require 'maruku/maruku' + diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/attributes.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/attributes.rb new file mode 100644 index 00000000..74d7fe90 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/attributes.rb @@ -0,0 +1,227 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +class String + def quote_if_needed + if /[\s\'\"]/.match self + inspect + else + self + end + end +end + +module MaRuKu; + MagicChar = ':' + + class AttributeList < Array + + # An attribute list becomes + # {#id .cl key="val" ref} + # [ [:id, 'id'], [:class, 'id'], ['key', 'val'], [ :ref, 'ref' ]] + + private :push + + def push_key_val(key, val); + raise "Bad #{key.inspect}=#{val.inspect}" if not key and val + push [key, val] + end + def push_ref(ref_id); + + raise "Bad :ref #{ref_id.inspect}" if not ref_id + push [:ref, ref_id+""] + +# p "Now ", self ######################################## + end + def push_class(val); + raise "Bad :id #{val.inspect}" if not val + push [:class, val] + end + def push_id(val); + raise "Bad :id #{val.inspect}" if not val + push [:id, val] + end + + def to_s + map do |k,v| + case k + when :id; "#" + v.quote_if_needed + when :class; "." + v.quote_if_needed + when :ref; v.quote_if_needed + else k.quote_if_needed + "=" + v.quote_if_needed + end + end . join(' ') + end + alias to_md to_s + end + +end + +module MaRuKu; module In; module Markdown; module SpanLevelParser + + def unit_tests_for_attribute_lists + [ + [ "", [], "Empty lists are allowed" ], + [ "=", :throw, "Bad char to begin a list with." ], + [ "a =b", :throw, "No whitespace before `=`." ], + [ "a= b", :throw, "No whitespace after `=`." ], + + [ "a b", [[:ref, 'a'],[:ref, 'b']], "More than one ref" ], + [ "a b c", [[:ref, 'a'],[:ref, 'b'],[:ref, 'c']], "More than one ref" ], + [ "hello notfound", [[:ref, 'hello'],[:ref, 'notfound']]], + + [ "'a'", [[:ref, 'a']], "Quoted value." ], + [ '"a"' ], + + [ "a=b", [['a','b']], "Simple key/val" ], + [ "'a'=b" ], + [ "'a'='b'" ], + [ "a='b'" ], + + [ 'a="b\'"', [['a',"b\'"]], "Key/val with quotes" ], + [ 'a=b\''], + [ 'a="\\\'b\'"', [['a',"\'b\'"]], "Key/val with quotes" ], + + ['"', :throw, "Unclosed quotes"], + ["'"], + ["'a "], + ['"a '], + + [ "#a", [[:id, 'a']], "Simple ID" ], + [ "#'a'" ], + [ '#"a"' ], + + [ "#", :throw, "Unfinished '#'." ], + [ ".", :throw, "Unfinished '.'." ], + [ "# a", :throw, "No white-space after '#'." ], + [ ". a", :throw, "No white-space after '.' ." ], + + [ "a=b c=d", [['a','b'],['c','d']], "Tabbing" ], + [ " \ta=b \tc='d' "], + [ "\t a=b\t c='d'\t\t"], + + [ ".\"a'", :throw, "Mixing quotes is bad." 
], + + ].map { |s, expected, comment| + @expected = (expected ||= @expected) + @comment = (comment ||= (last=@comment) ) + (comment == last && (comment += (@count+=1).to_s)) || @count = 1 + expected = [md_ial(expected)] if expected.kind_of? Array + ["{#{MagicChar}#{s}}", expected, "Attributes: #{comment}"] + } + end + + def md_al(s=[]); AttributeList.new(s) end + + # returns nil or an AttributeList + def read_attribute_list(src, con, break_on_chars) + + separators = break_on_chars + [?=,?\ ,?\t] + escaped = Maruku::EscapedCharInQuotes + + al = AttributeList.new + while true + src.consume_whitespace + break if break_on_chars.include? src.cur_char + + case src.cur_char + when nil + maruku_error "Attribute list terminated by EOF:\n "+ + "#{al.inspect}" , src, con + tell_user "I try to continue and return partial attribute list:\n"+ + al.inspect + break + when ?= # error + maruku_error "In attribute lists, cannot start identifier with `=`." + tell_user "I try to continue" + src.ignore_char + when ?# # id definition + src.ignore_char + if id = read_quoted_or_unquoted(src, con, escaped, separators) + al.push_id id + else + maruku_error 'Could not read `id` attribute.', src, con + tell_user 'Trying to ignore bad `id` attribute.' + end + when ?. # class definition + src.ignore_char + if klass = read_quoted_or_unquoted(src, con, escaped, separators) + al.push_class klass + else + maruku_error 'Could not read `class` attribute.', src, con + tell_user 'Trying to ignore bad `class` attribute.' + end + else + if key = read_quoted_or_unquoted(src, con, escaped, separators) + if src.cur_char == ?= + src.ignore_char # skip the = + if val = read_quoted_or_unquoted(src, con, escaped, separators) + al.push_key_val(key, val) + else + maruku_error "Could not read value for key #{key.inspect}.", + src, con + tell_user "Ignoring key #{key.inspect}." + end + else + al.push_ref key + end + else + maruku_error 'Could not read key or reference.' + end + end # case + end # while true + al + end + + + # We need a helper + def is_ial(e); e.kind_of? MDElement and e.node_type == :ial end + + def merge_ial(elements, src, con) + + # Apply each IAL to the element before + elements.each_with_index do |e, i| + if is_ial(e) && i>= 1 then + before = elements[i-1] + after = elements[i+1] + if before.kind_of? MDElement + before.al = e.ial + elsif after.kind_of? MDElement + after.al = e.ial + else + maruku_error "It is not clear to me what element this IAL {:#{e.ial.to_md}} \n"+ + "is referring to. The element before is a #{before.class.to_s}, \n"+ + "the element after is a #{after.class.to_s}.\n"+ + "\n before: #{before.inspect}"+ + "\n after: #{after.inspect}", + src, con + # xxx dire se c'è empty vicino + end + end + end + + if not Globals[:debug_keep_ials] + elements.delete_if {|x| is_ial(x) unless x == elements.first} + end + end + +end end end end +#module MaRuKu; module In; module Markdown; module SpanLevelParser diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/defaults.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/defaults.rb new file mode 100644 index 00000000..ea4a3289 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/defaults.rb @@ -0,0 +1,71 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. 
+# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +module MaRuKu + +Globals = { + :unsafe_features => false, + :on_error => :warning, + + + :use_numbered_headers => false, + + :maruku_signature => false, + :code_background_color => '#fef', + :code_show_spaces => false, + + :filter_html => false, + + :html_math_output_mathml => true, # also set :html_math_engine + :html_math_engine => 'none', #ritex, itex2mml + + :html_math_output_png => false, + :html_png_engine => 'none', + :html_png_dir => 'pngs', + :html_png_url => 'pngs/', + :html_png_resolution => 200, + + :html_use_syntax => false, + + :latex_use_listings => false, + :latex_cjk => false, + :latex_cache_file => "blahtex_cache.pstore", # cache file for blahtex filter + + :debug_keep_ials => false, + :doc_prefix => '' +} + +class MDElement + def get_setting(sym) + if self.attributes.has_key?(sym) then + return self.attributes[sym] + elsif self.doc && self.doc.attributes.has_key?(sym) then + return self.doc.attributes[sym] + elsif MaRuKu::Globals.has_key?(sym) + return MaRuKu::Globals[sym] + else + $stderr.puts "Bug: no default for #{sym.inspect}" + nil + end + end +end + +end \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/errors_management.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/errors_management.rb new file mode 100644 index 00000000..9aa8d3bc --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/errors_management.rb @@ -0,0 +1,92 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + + +#m Any method that detects formatting error calls the +#m maruku_error() method. +#m if @meta[:on_error] == +#m +#m - :warning write on the standard err (or @error_stream if defined), +#m then do your best. 
+#m - :ignore be shy and try to continue +#m - :raise raises a MarukuException +#m +#m default is :raise + +module MaRuKu + + class Exception < RuntimeError + end + +module Errors + + def maruku_error(s,src=nil,con=nil) + policy = get_setting(:on_error) + + case policy + when :ignore + when :raise + raise_error create_frame(describe_error(s,src,con)) + when :warning + tell_user create_frame(describe_error(s,src,con)) + else + raise "BugBug: policy = #{policy.inspect}" + end + end + + def maruku_recover(s,src=nil,con=nil) + tell_user create_frame(describe_error(s,src,con)) + end + + alias error maruku_error + + def raise_error(s) + raise MaRuKu::Exception, s, caller + end + + def tell_user(s) + error_stream = self.attributes[:error_stream] || $stderr + error_stream << s + end + + def create_frame(s) + n = 75 + "\n" + + " "+"_"*n + "\n"+ + "| Maruku tells you:\n" + + "+" + ("-"*n) +"\n"+ + add_tabs(s,1,'| ') + "\n" + + "+" + ("-"*n) + "\n" + + add_tabs(caller[0, 5].join("\n"),1,'!') + "\n" + + "\\" + ("_"*n) + "\n" + end + + def describe_error(s,src,con) + t = s + src && (t += "\n#{src.describe}\n") + con && (t += "\n#{con.describe}\n") + t + end + +end # Errors +end # MaRuKu + + diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/div.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/div.rb new file mode 100644 index 00000000..87f4d2f7 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/div.rb @@ -0,0 +1,133 @@ + + +#+-----------------------------------{.warning}------ +#| this is the last warning! +#| +#| please, go away! +#| +#| +------------------------------------- {.menace} -- +#| | or else terrible things will happen +#| +-------------------------------------------------- +#+--------------------------------------------------- + +OpenDiv = /^[ ]{0,3}\+\-\-+\s*(\{([^{}]*|".*"|'.*')*\})?\s*\-*\s*$/ +CloseDiv = /^[ ]{0,3}\=\-\-+\s*(\{([^{}]*|".*"|'.*')*\})?\s*\-*\s*$/ +# note these are not enough for parsing the above example: +#OpenDiv = /^[ ]{0,3}\+\-\-+\s*(.*)$/ +#CloseDiv = /^[ ]{0,3}\=\-\-+\s*(.*)$/ +StartPipe = /^[ ]{0,3}\|(.*)$/ # $1 is rest of line +DecorativeClosing = OpenDiv + +MaRuKu::In::Markdown::register_block_extension( + :regexp => OpenDiv, + :handler => lambda { |doc, src, context| + # return false if not doc.is_math_enabled? + first = src.shift_line + first =~ OpenDiv + ial_at_beginning = $1 + ial_at_end = nil + + lines = [] + # if second line starts with "|" + if src.cur_line =~ StartPipe + # then we read until no more "|" + while src.cur_line && (src.cur_line =~ StartPipe) + content = $1 + lines.push content + src.shift_line + end + if src.cur_line =~ DecorativeClosing + ial_at_end = $1 + src.shift_line + end + else + # else we read until CloseDiv + divs_open = 1 + while src.cur_line && (divs_open>0) + if src.cur_line =~ CloseDiv + divs_open -= 1 + if divs_open == 0 + ial_at_end = $1 + src.shift_line + break + else + lines.push src.shift_line + end + else + if src.cur_line =~ OpenDiv + divs_open += 1 + end + lines.push src.shift_line + end + end + + if divs_open > 0 + e = "At end of input, I still have #{divs_open} DIVs open." 
+ doc.maruku_error(e, src, context) + return true + end + end + + ial_at_beginning = nil unless + (ial_at_beginning&&ial_at_beginning.size > 0) + ial_at_end = nil unless (ial_at_end && ial_at_end.size > 0) + + if ial_at_beginning && ial_at_end + e = "Found two conflicting IALs: #{ial_at_beginning.inspect} and #{ial_at_end.inspect}" + doc.maruku_error(e, src, context) + end + + al_string = ial_at_beginning || ial_at_end + al = nil + + if al_string =~ /^\{(.*)\}\s*$/ + inside = $1 + cs = MaRuKu::In::Markdown::SpanLevelParser::CharSource + al = al_string && + doc.read_attribute_list(cs.new(inside), its_context=nil, break_on=[nil]) + end + + src = MaRuKu::In::Markdown::BlockLevelParser::LineSource.new(lines) + children = doc.parse_blocks(src) + + context.push doc.md_div(children, al) + true + }) + + +module MaRuKu; class MDElement + + def md_div(children, al=nil) + type = label = num = nil + doc.refid2ref ||= {} + if al + al.each do |k, v| + case k + when :class + type = $1 if v =~ /^num_(\w*)/ + when :id + label = v + end + end + end + if type + doc.refid2ref[type] ||= {} + num = doc.refid2ref[type].length + 1 || 1 + end + e = self.md_el(:div, children, meta={:label => label, :type => type, :num => num}, al) + if type && label + doc.refid2ref[type].update({label => e}) + end + e + end + +end end + + +module MaRuKu; module Out; module HTML + + def to_html_div + add_ws wrap_as_element('div') + end + +end end end diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math.rb new file mode 100644 index 00000000..8f65371c --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math.rb @@ -0,0 +1,41 @@ + + +require 'maruku/ext/math/elements' +require 'maruku/ext/math/parsing' +require 'maruku/ext/math/to_latex' +require 'maruku/ext/math/to_html' + +require 'maruku/ext/math/mathml_engines/none' +require 'maruku/ext/math/mathml_engines/ritex' +require 'maruku/ext/math/mathml_engines/itex2mml' +require 'maruku/ext/math/mathml_engines/blahtex' + + +=begin maruku_doc +Attribute: math_enabled +Scope: global, document +Summary: Enables parsing of LaTeX math + +To explicitly disable the math parsing: + + Maruku.new(string, {:math_enabled => false}) + {:ruby} + +=end + +MaRuKu::Globals[:math_enabled] = true + + +=begin maruku_doc +Attribute: math_numbered +Scope: global, document +Summary: Math openings which should be numerated + +Array containing any of `'\\['`, `'\\begin{equation}'`, `'$$'`. 
+ + MaRuKu::Globals[:math_numbered] = ['\\['] + +=end + + +MaRuKu::Globals[:math_numbered] = [] diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/elements.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/elements.rb new file mode 100644 index 00000000..f0ba3c08 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/elements.rb @@ -0,0 +1,27 @@ +module MaRuKu; class MDElement + + def md_inline_math(math) + self.md_el(:inline_math, [], meta={:math=>math}) + end + + def md_equation(math, label, numerate) + reglabel= /\\label\{(\w+)\}/ + if math =~ reglabel + label = $1 + math.gsub!(reglabel,'') + end +# puts "Found label = #{label} math #{math.inspect} " + num = nil + if (label || numerate) && @doc #take number + @doc.eqid2eq ||= {} + num = @doc.eqid2eq.size + 1 + label = "eq#{num}" if not label # FIXME do id for document + end + e = self.md_el(:equation, [], meta={:math=>math, :label=>label,:num=>num}) + if label && @doc #take number + @doc.eqid2eq[label] = e + end + e + end + +end end \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/latex_fix.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/latex_fix.rb new file mode 100644 index 00000000..35f13831 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/latex_fix.rb @@ -0,0 +1,12 @@ +class String + # fix some LaTeX command-name clashes + def fix_latex + if #{html_math_engine} == 'itex2mml' + s = self.gsub("\\mathop{", "\\operatorname{") + s.gsub!(/\\begin\{svg\}.*?\\end\{svg\}/m, " ") + s.gsub("\\space{", "\\itexspace{") + else + self + end + end +end diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/blahtex.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/blahtex.rb new file mode 100644 index 00000000..b6cb30e8 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/blahtex.rb @@ -0,0 +1,107 @@ + +require 'tempfile' +require 'fileutils' +require 'digest/md5' +require 'pstore' + +module MaRuKu; module Out; module HTML + + PNG = Struct.new(:src,:depth,:height) + + def convert_to_png_blahtex(kind, tex) + begin + FileUtils::mkdir_p get_setting(:html_png_dir) + + # first, we check whether this image has already been processed + md5sum = Digest::MD5.hexdigest(tex+" params: ") + result_file = File.join(get_setting(:html_png_dir), md5sum+".txt") + + if not File.exists?(result_file) + tmp_in = Tempfile.new('maruku_blahtex') + f = tmp_in.open + f.write tex + f.close + + resolution = get_setting(:html_png_resolution) + + options = "--png --use-preview-package --shell-dvipng 'dvipng -D #{resolution}' " + options += "--displaymath " if kind == :equation + options += ("--temp-directory '%s' " % get_setting(:html_png_dir)) + options += ("--png-directory '%s'" % get_setting(:html_png_dir)) + + cmd = "blahtex #{options} < #{tmp_in.path} > #{result_file}" + #$stderr.puts "$ #{cmd}" + system cmd + tmp_in.delete + end + + result = File.read(result_file) + if result.nil? || result.empty? 
+ raise "Blahtex error: empty output" + end + + doc = Document.new(result, {:respect_whitespace =>:all}) + png = doc.root.elements[1] + if png.name != 'png' + raise "Blahtex error: \n#{doc}" + end + depth = png.elements['depth'] || (raise "No depth element in:\n #{doc}") + height = png.elements['height'] || (raise "No height element in:\n #{doc}") + md5 = png.elements['md5'] || (raise "No md5 element in:\n #{doc}") + + depth = depth.text.to_f + height = height.text.to_f # XXX check != 0 + md5 = md5.text + + dir_url = get_setting(:html_png_url) + return PNG.new("#{dir_url}#{md5}.png", depth, height) + rescue Exception => e + maruku_error "Error: #{e}" + end + nil + end + + + def convert_to_mathml_blahtex(kind, tex) + @@BlahtexCache = PStore.new(get_setting(:latex_cache_file)) + + begin + @@BlahtexCache.transaction do + if @@BlahtexCache[tex].nil? + tmp_in = Tempfile.new('maruku_blahtex') + f = tmp_in.open + f.write tex + f.close + tmp_out = Tempfile.new('maruku_blahtex') + + options = "--mathml" + cmd = "blahtex #{options} < #{tmp_in.path} > #{tmp_out.path}" + #$stderr.puts "$ #{cmd}" + system cmd + tmp_in.delete + + result = nil + File.open(tmp_out.path) do |f| result=f.read end + puts result + + @@BlahtexCache[tex] = result + end + + blahtex = @@BlahtexCache[tex] + doc = Document.new(blahtex, {:respect_whitespace =>:all}) + mathml = doc.root.elements['mathml'] + if not mathml + maruku_error "Blahtex error: \n#{doc}" + return nil + else + return mathml + end + end + + rescue Exception => e + maruku_error "Error: #{e}" + end + nil + end + +end end end diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/itex2mml.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/itex2mml.rb new file mode 100644 index 00000000..69d273d3 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/itex2mml.rb @@ -0,0 +1,29 @@ + +module MaRuKu; module Out; module HTML + + def convert_to_mathml_itex2mml(kind, tex) + begin + if not $itex2mml_parser + require 'itextomml' + $itex2mml_parser = Itex2MML::Parser.new + end + + itex_method = {:equation=>:block_filter,:inline=>:inline_filter} + + mathml = $itex2mml_parser.send(itex_method[kind], tex) + doc = Document.new(mathml, {:respect_whitespace =>:all}).root + return doc + rescue LoadError => e + maruku_error "Could not load package 'itex2mml'.\n"+ "Please install it." 
unless $already_warned_itex2mml + $already_warned_itex2mml = true + rescue REXML::ParseException => e + maruku_error "Invalid MathML TeX: \n#{add_tabs(tex,1,'tex>')}"+ + "\n\n #{e.inspect}" + rescue + maruku_error "Could not produce MathML TeX: \n#{tex}"+ + "\n\n #{e.inspect}" + end + nil + end + +end end end diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/none.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/none.rb new file mode 100644 index 00000000..42f1df47 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/none.rb @@ -0,0 +1,20 @@ +module MaRuKu; module Out; module HTML + + def convert_to_mathml_none(kind, tex) + # You can: either return a REXML::Element + # return Element.new 'div' + # or return an empty array on error + # return [] + # or have a string parsed by REXML: + tex = tex.gsub('&','&') + mathml = "#{tex}" + return Document.new(mathml).root + end + + def convert_to_png_none(kind, tex) + return nil + end + + +end end end + diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/ritex.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/ritex.rb new file mode 100644 index 00000000..199eff5c --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/mathml_engines/ritex.rb @@ -0,0 +1,24 @@ +module MaRuKu; module Out; module HTML + + def convert_to_mathml_ritex(kind, tex) + begin + if not $ritex_parser + require 'ritex' + $ritex_parser = Ritex::Parser.new + end + + mathml = $ritex_parser.parse(tex.strip) + doc = Document.new(mathml, {:respect_whitespace =>:all}).root + return doc + rescue LoadError => e + maruku_error "Could not load package 'ritex'.\n"+ + "Please install it using:\n"+ + " $ gem install ritex\n\n"+e.inspect + rescue Racc::ParseError => e + maruku_error "Could not parse TeX: \n#{tex}"+ + "\n\n #{e.inspect}" + end + nil + end + +end end end diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/parsing.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/parsing.rb new file mode 100644 index 00000000..ec32ea30 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/parsing.rb @@ -0,0 +1,119 @@ +module MaRuKu + + class MDDocument + # Hash equation id (String) to equation element (MDElement) + safe_attr_accessor :eqid2eq, Hash + + def is_math_enabled? + get_setting :math_enabled + end + end +end + + + # Everything goes; takes care of escaping the "\$" inside the expression + RegInlineMath = /\${1}((?:[^\$]|\\\$)+)\$/ + + MaRuKu::In::Markdown::register_span_extension( + :chars => ?$, + :regexp => RegInlineMath, + :handler => lambda { |doc, src, con| + return false if not doc.is_math_enabled? 
+ + if m = src.read_regexp(RegInlineMath) + math = m.captures.compact.first + con.push doc.md_inline_math(math) + true + else + #puts "not math: #{src.cur_chars 10}" + false + end + } + ) + + + MathOpen1 = Regexp.escape('\\begin{equation}') + MathClose1 = Regexp.escape('\\end{equation}') + MathOpen2 = Regexp.escape('\\[') + MathClose2 = Regexp.escape('\\]') + MathOpen3 = Regexp.escape('$$') + MathClose3 = Regexp.escape('$$') + + EqLabel = /(?:\((\w+)\))/ + EquationOpen = /#{MathOpen1}|#{MathOpen2}|#{MathOpen3}/ + EquationClose = /#{MathClose1}|#{MathClose2}|#{MathClose3}/ + + # $1 is opening, $2 is tex + EquationStart = /^[ ]{0,3}(#{EquationOpen})(.*)$/ + # $1 is tex, $2 is closing, $3 is tex + EquationEnd = /^(.*)(#{EquationClose})\s*#{EqLabel}?\s*$/ + # $1 is opening, $2 is tex, $3 is closing, $4 is label + OneLineEquation = /^[ ]{0,3}(#{EquationOpen})(.*)(#{EquationClose})\s*#{EqLabel}?\s*$/ + + MaRuKu::In::Markdown::register_block_extension( + :regexp => EquationStart, + :handler => lambda { |doc, src, con| + return false if not doc.is_math_enabled? + first = src.shift_line + if first =~ OneLineEquation + opening, tex, closing, label = $1, $2, $3, $4 + numerate = doc.get_setting(:math_numbered).include?(opening) + con.push doc.md_equation(tex, label, numerate) + else + first =~ EquationStart + opening, tex = $1, $2 + + numerate = doc.get_setting(:math_numbered).include?(opening) + label = nil + while true + if not src.cur_line + doc.maruku_error("Stream finished while reading equation\n\n"+ + doc.add_tabs(tex,1,'$> '), src, con) + break + end + line = src.shift_line + if line =~ EquationEnd + tex_line, closing = $1, $2 + label = $3 if $3 + tex += tex_line + "\n" + break + else + tex += line + "\n" + end + end + con.push doc.md_equation(tex, label, numerate) + end + true + }) + + + # This adds support for \eqref + RegEqrefLatex = /\\eqref\{(\w+)\}/ + RegEqPar = /\(eq:(\w+)\)/ + RegEqref = Regexp::union(RegEqrefLatex, RegEqPar) + + MaRuKu::In::Markdown::register_span_extension( + :chars => [?\\, ?(], + :regexp => RegEqref, + :handler => lambda { |doc, src, con| + return false if not doc.is_math_enabled? + eqid = src.read_regexp(RegEqref).captures.compact.first + r = doc.md_el(:eqref, [], meta={:eqid=>eqid}) + con.push r + true + } + ) + + # This adds support for \ref + RegRef = /\\ref\{(\w*)\}/ + MaRuKu::In::Markdown::register_span_extension( + :chars => [?\\, ?(], + :regexp => RegRef, + :handler => lambda { |doc, src, con| + return false if not doc.is_math_enabled? + refid = src.read_regexp(RegRef).captures.compact.first + r = doc.md_el(:divref, [], meta={:refid=>refid}) + con.push r + true + } + ) diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/to_html.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/to_html.rb new file mode 100644 index 00000000..eec3748f --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/to_html.rb @@ -0,0 +1,187 @@ + +=begin maruku_doc +Extension: math +Attribute: html_math_engine +Scope: document, element +Output: html +Summary: Select the rendering engine for MathML. +Default: + +Select the rendering engine for math. + +If you want to use your custom engine `foo`, then set: + + HTML math engine: foo +{:lang=markdown} + +and then implement two functions: + + def convert_to_mathml_foo(kind, tex) + ... + end +=end + +=begin maruku_doc +Extension: math +Attribute: html_png_engine +Scope: document, element +Output: html +Summary: Select the rendering engine for math. 
+Default: + +Same thing as `html_math_engine`, only for PNG output. + + def convert_to_png_foo(kind, tex) + # same thing + ... + end +{:lang=ruby} + +=end + +module MaRuKu; module Out; module HTML + + + + # Creates an xml Mathml document of self.math + def render_mathml(kind, tex) + engine = get_setting(:html_math_engine) + method = "convert_to_mathml_#{engine}".to_sym + if self.respond_to? method + mathml = self.send(method, kind, tex) + return mathml || convert_to_mathml_none(kind, tex) + else + puts "A method called #{method} should be defined." + return convert_to_mathml_none(kind, tex) + end + end + + # Creates an xml Mathml document of self.math + def render_png(kind, tex) + engine = get_setting(:html_png_engine) + method = "convert_to_png_#{engine}".to_sym + if self.respond_to? method + return self.send(method, kind, tex) + else + puts "A method called #{method} should be defined." + return nil + end + end + + def pixels_per_ex + if not $pixels_per_ex + x = render_png(:inline, "x") + $pixels_per_ex = x.height # + x.depth + end + $pixels_per_ex + end + + def adjust_png(png, use_depth) + src = png.src + + height_in_px = png.height + depth_in_px = png.depth + height_in_ex = height_in_px / pixels_per_ex + depth_in_ex = depth_in_px / pixels_per_ex + total_height_in_ex = height_in_ex + depth_in_ex + style = "" + style += "vertical-align: -#{depth_in_ex}ex;" if use_depth + style += "height: #{total_height_in_ex}ex;" + img = Element.new 'img' + img.attributes['src'] = src + img.attributes['style'] = style + img.attributes['alt'] = "$#{self.math.strip}$" + img + end + + def to_html_inline_math + mathml = get_setting(:html_math_output_mathml) && render_mathml(:inline, self.math) + png = get_setting(:html_math_output_png) && render_png(:inline, self.math) + + span = create_html_element 'span' + add_class_to(span, 'maruku-inline') + + if mathml + add_class_to(mathml, 'maruku-mathml') + return mathml + end + + if png + img = adjust_png(png, use_depth=true) + add_class_to(img, 'maruku-png') + span << img + end + span + + end + + def to_html_equation + mathml = get_setting(:html_math_output_mathml) && render_mathml(:equation, self.math) + png = get_setting(:html_math_output_png) && render_png(:equation, self.math) + + div = create_html_element 'div' + add_class_to(div, 'maruku-equation') + if mathml + add_class_to(mathml, 'maruku-mathml') + div << mathml + end + + if png + img = adjust_png(png, use_depth=false) + add_class_to(img, 'maruku-png') + div << img + end + + source_span = Element.new 'span' + add_class_to(source_span, 'maruku-eq-tex') + code = convert_to_mathml_none(:equation, self.math.strip) + code.attributes['style'] = 'display: none' + source_span << code + div << source_span + + if self.label # then numerate + span = Element.new 'span' + span.attributes['class'] = 'maruku-eq-number' + num = self.num + span << Text.new("(#{num})") + div << span + div.attributes['id'] = "eq:#{self.label}" + end + div + end + + def to_html_eqref + if eq = self.doc.eqid2eq[self.eqid] + num = eq.num + a = Element.new 'a' + a.attributes['class'] = 'maruku-eqref' + a.attributes['href'] = "#eq:#{self.eqid}" + a << Text.new("(#{num})") + a + else + maruku_error "Cannot find equation #{self.eqid.inspect}" + Text.new "(eq:#{self.eqid})" + end + end + + def to_html_divref + ref= nil + self.doc.refid2ref.each_value { |h| + ref = h[self.refid] if h.has_key?(self.refid) + } + if ref + num = ref.num + a = Element.new 'a' + a.attributes['class'] = 'maruku-ref' + a.attributes['href'] = "#" + self.refid + a << 
Text.new(num.to_s) + a + else + maruku_error "Cannot find div #{self.refid.inspect}" + Text.new "\\ref{#{self.refid}}" + end + end + +end end end + + diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/to_latex.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/to_latex.rb new file mode 100644 index 00000000..44f3866b --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/ext/math/to_latex.rb @@ -0,0 +1,26 @@ +require 'maruku/ext/math/latex_fix' + +module MaRuKu; module Out; module Latex + + def to_latex_inline_math + "$#{self.math.strip}$".fix_latex + end + + def to_latex_equation + if self.label + l = "\\label{#{self.label}}" + "\\begin{equation}\n#{self.math.strip}\n#{l}\\end{equation}\n".fix_latex + else + "\\begin{displaymath}\n#{self.math.strip}\n\\end{displaymath}\n".fix_latex + end + end + + def to_latex_eqref + "\\eqref{#{self.eqid}}" + end + + def to_latex_divref + "\\ref{#{self.refid}}" + end + +end end end \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/helpers.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/helpers.rb new file mode 100644 index 00000000..5f90c7c8 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/helpers.rb @@ -0,0 +1,260 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + + + +# A series of helper functions for creating elements: they hide the +# particular internal representation. +# +# Please, always use these instead of creating MDElement. 
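# For instance (a sketch only; these helpers are defined below and are mixed
# into the parser/element classes, so they are called from inside Maruku):
#
#   md_par([md_em("emphasised"), " and plain text"])   # a paragraph element
#   md_code("puts 'hi'")                                # an inline code span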
+# + +module MaRuKu +module Helpers + + # if the first is a md_ial, it is used as such + def md_el(node_type, children=[], meta={}, al=nil) + if (e=children.first).kind_of?(MDElement) and + e.node_type == :ial then + if al + al += e.ial + else + al = e.ial + end + children.shift + end + e = MDElement.new(node_type, children, meta, al) + e.doc = @doc + return e + end + + def md_header(level, children, al=nil) + md_el(:header, children, {:level => level}, al) + end + + # Inline code + def md_code(code, al=nil) + md_el(:inline_code, [], {:raw_code => code}, al) + end + + # Code block + def md_codeblock(source, al=nil) + md_el(:code, [], {:raw_code => source}, al) + end + + def md_quote(children, al=nil) + md_el(:quote, children, {}, al) + end + + def md_li(children, want_my_par, al=nil) + md_el(:li, children, {:want_my_paragraph=>want_my_par}, al) + end + + def md_footnote(footnote_id, children, al=nil) + md_el(:footnote, children, {:footnote_id=>footnote_id}, al) + end + + def md_abbr_def(abbr, text, al=nil) + md_el(:abbr_def, [], {:abbr=>abbr, :text=>text}, al) + end + + def md_abbr(abbr, title) + md_el(:abbr, [abbr], {:title=>title}) + end + + def md_html(raw_html, al=nil) + e = md_el(:raw_html, [], {:raw_html=>raw_html}) + begin + # remove newlines and whitespace at begin + # end end of string, or else REXML gets confused + raw_html = raw_html.gsub(/\A\s*[\s\n]*\Z/,'>') + + raw_html = "#{raw_html}" + e.instance_variable_set :@parsed_html, + REXML::Document.new(raw_html) + rescue REXML::ParseException => ex + e.instance_variable_set :@parsed_html, nil + maruku_recover "REXML cannot parse this block of HTML/XML:\n"+ + add_tabs(raw_html,1,'|') + "\n"+ex.inspect +# " #{raw_html.inspect}\n\n"+ex.inspect + end + e + end + + def md_link(children, ref_id, al=nil) + md_el(:link, children, {:ref_id=>ref_id.downcase}, al) + end + + def md_im_link(children, url, title=nil, al=nil) + md_el(:im_link, children, {:url=>url,:title=>title}, al) + end + + def md_image(children, ref_id, al=nil) + md_el(:image, children, {:ref_id=>ref_id}, al) + end + + def md_im_image(children, url, title=nil, al=nil) + md_el(:im_image, children, {:url=>url,:title=>title},al) + end + + def md_em(children, al=nil) + md_el(:emphasis, [children].flatten, {}, al) + end + + def md_br() + md_el(:linebreak, [], {}, nil) + end + + def md_hrule() + md_el(:hrule, [], {}, nil) + end + + def md_strong(children, al=nil) + md_el(:strong, [children].flatten, {}, al) + end + + def md_emstrong(children, al=nil) + md_strong(md_em(children), al) + end + + # + def md_url(url, al=nil) + md_el(:immediate_link, [], {:url=>url}, al) + end + + # + # + def md_email(email, al=nil) + md_el(:email_address, [], {:email=>email}, al) + end + + def md_entity(entity_name, al=nil) + md_el(:entity, [], {:entity_name=>entity_name}, al) + end + + # Markdown extra + def md_foot_ref(ref_id, al=nil) + md_el(:footnote_reference, [], {:footnote_id=>ref_id}, al) + end + + def md_par(children, al=nil) + md_el(:paragraph, children, meta={}, al) + end + + # [1]: http://url [properties] + def md_ref_def(ref_id, url, title=nil, meta={}, al=nil) + meta[:url] = url + meta[:ref_id] = ref_id + meta[:title] = title if title + md_el(:ref_definition, [], meta, al) + end + + # inline attribute list + def md_ial(al) + al = Maruku::AttributeList.new(al) if + not al.kind_of?Maruku::AttributeList + md_el(:ial, [], {:ial=>al}) + end + + # Attribute list definition + def md_ald(id, al) + md_el(:ald, [], {:ald_id=>id,:ald=>al}) + end + + # Server directive + def md_xml_instr(target, 
code) + md_el(:xml_instr, [], {:target=>target, :code=>code}) + end + +end +end + +module MaRuKu + +class MDElement + # outputs abbreviated form (this should be eval()uable to get the document) + def inspect2 + s = + case @node_type + when :paragraph + "md_par(%s)" % children_inspect + when :footnote_reference + "md_foot_ref(%s)" % self.footnote_id.inspect + when :entity + "md_entity(%s)" % self.entity_name.inspect + when :email_address + "md_email(%s)" % self.email.inspect + when :inline_code + "md_code(%s)" % self.raw_code.inspect + when :raw_html + "md_html(%s)" % self.raw_html.inspect + when :emphasis + "md_em(%s)" % children_inspect + when :strong + "md_strong(%s)" % children_inspect + when :immediate_link + "md_url(%s)" % self.url.inspect + when :image + "md_image(%s, %s)" % [ + children_inspect, + self.ref_id.inspect] + when :im_image + "md_im_image(%s, %s, %s)" % [ + children_inspect, + self.url.inspect, + self.title.inspect] + when :link + "md_link(%s,%s)" % [ + children_inspect, self.ref_id.inspect] + when :im_link + "md_im_link(%s, %s, %s)" % [ + children_inspect, + self.url.inspect, + self.title.inspect, + ] + when :ref_definition + "md_ref_def(%s, %s, %s)" % [ + self.ref_id.inspect, + self.url.inspect, + self.title.inspect + ] + when :ial + "md_ial(%s)" % self.ial.inspect + else + return nil + end + if @al and not @al.empty? then + s = s.chop + ", #{@al.inspect})" + end + s + end + +end + +end + + + + + + + diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/charsource.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/charsource.rb new file mode 100644 index 00000000..a3d14796 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/charsource.rb @@ -0,0 +1,326 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +module MaRuKu; module In; module Markdown; module SpanLevelParser + +# a string scanner coded by me +class CharSourceManual; end + +# a wrapper around StringScanner +class CharSourceStrscan; end + +# A debug scanner that checks the correctness of both +# by comparing their output +class CharSourceDebug; end + +# Choose! + +CharSource = CharSourceManual # faster! 58ms vs. 65ms +#CharSource = CharSourceStrscan +#CharSource = CharSourceDebug + + +class CharSourceManual + include MaRuKu::Strings + + def initialize(s, parent=nil) + raise "Passed #{s.class}" if not s.kind_of? String + @buffer = s + @buffer_index = 0 + @parent = parent + end + + # Return current char as a FixNum (or nil). + def cur_char; @buffer[@buffer_index] end + + # Return the next n chars as a String. + def cur_chars(n); @buffer[@buffer_index,n] end + + # Return the char after current char as a FixNum (or nil). 
+ def next_char; @buffer[@buffer_index+1] end + + def shift_char + c = @buffer[@buffer_index] + @buffer_index+=1 + c + end + + def ignore_char + @buffer_index+=1 + nil + end + + def ignore_chars(n) + @buffer_index+=n + nil + end + + def current_remaining_buffer + @buffer[@buffer_index, @buffer.size-@buffer_index] + end + + def cur_chars_are(string) + # There is a bug here + if false + r2 = /^.{#{@buffer_index}}#{Regexp.escape string}/m + @buffer =~ r2 + else + cur_chars(string.size) == string + end + end + + def next_matches(r) + r2 = /^.{#{@buffer_index}}#{r}/m + md = r2.match @buffer + return !!md + end + + def read_regexp3(r) + r2 = /^.{#{@buffer_index}}#{r}/m + m = r2.match @buffer + if m + consumed = m.to_s.size - @buffer_index +# puts "Consumed #{consumed} chars (entire is #{m.to_s.inspect})" + ignore_chars consumed + else +# puts "Could not read regexp #{r2.inspect} from buffer "+ +# " index=#{@buffer_index}" +# puts "Cur chars = #{cur_chars(20).inspect}" +# puts "Matches? = #{cur_chars(20) =~ r}" + end + m + end + + def read_regexp(r) + r2 = /^#{r}/ + rest = current_remaining_buffer + m = r2.match(rest) + if m + @buffer_index += m.to_s.size +# puts "#{r} matched #{rest.inspect}: #{m.to_s.inspect}" + end + return m + end + + def consume_whitespace + while c = cur_char + if (c == ?\s || c == ?\t) +# puts "ignoring #{c}" + ignore_char + else +# puts "#{c} is not ws: "<=?a && c<=?z) || (c>=?A && c<=?Z)) + out << c + @buffer_index += 1 + end + end + + def describe + s = describe_pos(@buffer, @buffer_index) + if @parent + s += "\n\n" + @parent.describe + end + s + end + include SpanLevelParser +end + +def describe_pos(buffer, buffer_index) + len = 75 + num_before = [len/2, buffer_index].min + num_after = [len/2, buffer.size-buffer_index].min + num_before_max = buffer_index + num_after_max = buffer.size-buffer_index + +# puts "num #{num_before} #{num_after}" + num_before = [num_before_max, len-num_after].min + num_after = [num_after_max, len-num_before].min +# puts "num #{num_before} #{num_after}" + + index_start = [buffer_index - num_before, 0].max + index_end = [buffer_index + num_after, buffer.size].min + + size = index_end- index_start + +# puts "- #{index_start} #{size}" + + str = buffer[index_start, size] + str.gsub!("\n",'N') + str.gsub!("\t",'T') + + if index_end == buffer.size + str += "EOF" + end + + pre_s = buffer_index-index_start + pre_s = [pre_s, 0].max + pre_s2 = [len-pre_s,0].max +# puts "pre_S = #{pre_s}" + pre =" "*(pre_s) + + "-"*len+"\n"+ + str + "\n" + + "-"*pre_s + "|" + "-"*(pre_s2)+"\n"+ +# pre + "|\n"+ + pre + "+--- Byte #{buffer_index}\n"+ + + "Shown bytes [#{index_start} to #{size}] of #{buffer.size}:\n"+ + add_tabs(buffer,1,">") + +# "CharSource: At character #{@buffer_index} of block "+ +# " beginning with:\n #{@buffer[0,50].inspect} ...\n"+ +# " before: \n ... #{cur_chars(50).inspect} ... " +end + + +require 'strscan' + +class CharSourceStrscan + include SpanLevelParser + include MaRuKu::Strings + + def initialize(s, parent=nil) + @s = StringScanner.new(s) + @parent = parent + end + + # Return current char as a FixNum (or nil). + def cur_char + @s.peek(1)[0] + end + + # Return the next n chars as a String. + def cur_chars(n); + @s.peek(n) + end + + # Return the char after current char as a FixNum (or nil). 
+ def next_char; + @s.peek(2)[1] + end + + def shift_char + (@s.get_byte)[0] + end + + def ignore_char + @s.get_byte + nil + end + + def ignore_chars(n) + n.times do @s.get_byte end + nil + end + + def current_remaining_buffer + @s.rest #nil #@buffer[@buffer_index, @buffer.size-@buffer_index] + end + + def cur_chars_are(string) + cur_chars(string.size) == string + end + + def next_matches(r) + len = @s.match?(r) + return !!len + end + + def read_regexp(r) + string = @s.scan(r) + if string + return r.match(string) + else + return nil + end + end + + def consume_whitespace + @s.scan(/\s+/) + nil + end + + def describe + describe_pos(@s.string, @s.pos) + end + +end + + +class CharSourceDebug + def initialize(s, parent) + @a = CharSourceManual.new(s, parent) + @b = CharSourceStrscan.new(s, parent) + end + + def method_missing(methodname, *args) + a_bef = @a.describe + b_bef = @b.describe + + a = @a.send(methodname, *args) + b = @b.send(methodname, *args) + +# if methodname == :describe +# return a +# end + + if a.kind_of? MatchData + if a.to_a != b.to_a + puts "called: #{methodname}(#{args})" + puts "Matchdata:\na = #{a.to_a.inspect}\nb = #{b.to_a.inspect}" + puts "AFTER: "+@a.describe + puts "AFTER: "+@b.describe + puts "BEFORE: "+a_bef + puts "BEFORE: "+b_bef + puts caller.join("\n") + exit + end + else + if a!=b + puts "called: #{methodname}(#{args})" + puts "Attenzione!\na = #{a.inspect}\nb = #{b.inspect}" + puts ""+@a.describe + puts ""+@b.describe + puts caller.join("\n") + exit + end + end + + if @a.cur_char != @b.cur_char + puts "Fuori sincronia dopo #{methodname}(#{args})" + puts ""+@a.describe + puts ""+@b.describe + exit + end + + return a + end +end + +end end end end diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/extensions.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/extensions.rb new file mode 100644 index 00000000..fc4cfb4d --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/extensions.rb @@ -0,0 +1,69 @@ +module MaRuKu; module In; module Markdown + + + # Hash Fixnum -> name + SpanExtensionsTrigger = {} + + + class SpanExtension + # trigging chars + attr_accessor :chars + # trigging regexp + attr_accessor :regexp + # lambda + attr_accessor :block + end + + # Hash String -> Extension + SpanExtensions = {} + + def check_span_extensions(src, con) + c = src.cur_char + if extensions = SpanExtensionsTrigger[c] + extensions.each do |e| + if e.regexp && (match = src.next_matches(e.regexp)) + return true if e.block.call(doc, src, con) + end + end + end + return false # not special + end + + def self.register_span_extension(args) + e = SpanExtension.new + e.chars = [*args[:chars]] + e.regexp = args[:regexp] + e.block = args[:handler] || raise("No blocks passed") + e.chars.each do |c| + (SpanExtensionsTrigger[c] ||= []).push e + end + end + + def self.register_block_extension(args) + regexp = args[:regexp] + BlockExtensions[regexp] = (args[:handler] || raise("No blocks passed")) + end + + # Hash Regexp -> Block + BlockExtensions = {} + + def check_block_extensions(src, con, line) + BlockExtensions.each do |reg, block| + if m = reg.match(line) + block = BlockExtensions[reg] + accepted = block.call(doc, src, con) + return true if accepted + end + end + return false # not special + end + + def any_matching_block_extension?(line) + BlockExtensions.each_key do |reg| + m = reg.match(line) + return m if m + end + return false + end + +end end end diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/html_helper.rb 
b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/html_helper.rb new file mode 100644 index 00000000..c2f5a8e0 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/html_helper.rb @@ -0,0 +1,189 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +module MaRuKu; module In; module Markdown; module SpanLevelParser + +# This class helps me read and sanitize HTML blocks + +# I tried to do this with REXML, but wasn't able to. (suggestions?) + + class HTMLHelper + include MaRuKu::Strings + + Tag = %r{^<(/)?(\w+)\s*([^>]*)>}m + PartialTag = %r{^<.*}m + + EverythingElse = %r{^[^<]+}m + CommentStart = %r{^} + TO_SANITIZE = ['img','hr','br'] + + attr_reader :rest + + def my_debug(s) +# puts "---"*10+"\n"+inspect+"\t>>>\t"s + end + + def initialize + @rest = "" + @tag_stack = [] + @m = nil + @already = "" + self.state = :inside_element + end + + attr_accessor :state # = :inside_element, :inside_tag, :inside_comment, + + def eat_this(line) + @rest = line + @rest + things_read = 0 + until @rest.empty? + case self.state + when :inside_comment + if @m = CommentEnd.match(@rest) + @already += @m.pre_match + @m.to_s + @rest = @m.post_match + self.state = :inside_element + else + @already += @rest + @rest = "" + self.state = :inside_comment + end + when :inside_element + if @m = CommentStart.match(@rest) + things_read += 1 + @already += @m.pre_match + @m.to_s + @rest = @m.post_match + self.state = :inside_comment + elsif @m = Tag.match(@rest) then + my_debug "#{@state}: Tag: #{@m.to_s.inspect}" + things_read += 1 + handle_tag + self.state = :inside_element + elsif @m = PartialTag.match(@rest) then + my_debug "#{@state}: PartialTag: #{@m.to_s.inspect}" + @already += @m.pre_match + @rest = @m.post_match + @partial_tag = @m.to_s + self.state = :inside_tag + elsif @m = EverythingElse.match(@rest) + my_debug "#{@state}: Everything: #{@m.to_s.inspect}" + @already += @m.pre_match + @m.to_s + @rest = @m.post_match + self.state = :inside_element + else + error "Malformed HTML: not complete: #{@rest.inspect}" + end + when :inside_tag + if @m = /^[^>]*>/.match(@rest) then + my_debug "#{@state}: inside_tag: matched #{@m.to_s.inspect}" + @partial_tag += @m.to_s + my_debug "#{@state}: inside_tag: matched TOTAL: #{@partial_tag.to_s.inspect}" + @rest = @partial_tag + @m.post_match + @partial_tag = nil + self.state = :inside_element + else + @partial_tag += @rest + @rest = "" + self.state = :inside_tag + end + else + raise "Bug bug: state = #{self.state.inspect}" + end # not inside comment + +# puts inspect +# puts "Read: #{@tag_stack.inspect}" + break if is_finished? 
and things_read>0 + end + end + + def handle_tag() + @already += @m.pre_match + @rest = @m.post_match + + is_closing = !!@m[1] + tag = @m[2] + attributes = @m[3].to_s + + is_single = false + if attributes[-1] == ?/ # =~ /\A(.*)\/\Z/ + attributes = attributes[0, attributes.size-1] + is_single = true + end + + my_debug "Attributes: #{attributes.inspect}" + my_debug "READ TAG #{@m.to_s.inspect} tag = #{tag} closing? #{is_closing} single = #{is_single}" + + if TO_SANITIZE.include? tag + attributes.strip! + # puts "Attributes: #{attributes.inspect}" + if attributes.size > 0 + @already += '<%s %s />' % [tag, attributes] + else + @already += '<%s />' % [tag] + end + elsif is_closing + @already += @m.to_s + if @tag_stack.empty? + error "Malformed: closing tag #{tag.inspect} "+ + "in empty list" + end + if @tag_stack.last != tag + error "Malformed: tag <#{tag}> "+ + "closes <#{@tag_stack.last}>" + end + @tag_stack.pop + else + @already += @m.to_s + + if not is_single + @tag_stack.push(tag) + my_debug "Pushing #{tag.inspect} when read #{@m.to_s.inspect}" + end + end + end + def error(s) + raise Exception, "Error: #{s} \n"+ inspect, caller + end + + def inspect; "HTML READER\n state=#{self.state} "+ + "match=#{@m.to_s.inspect}\n"+ + "Tag stack = #{@tag_stack.inspect} \n"+ + "Before:\n"+ + add_tabs(@already,1,'|')+"\n"+ + "After:\n"+ + add_tabs(@rest,1,'|')+"\n" + + end + + + def stuff_you_read + @already + end + + def rest() @rest end + + def is_finished? + (self.state == :inside_element) and @tag_stack.empty? + end + end # html helper + +end end end end diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/linesource.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/linesource.rb new file mode 100644 index 00000000..e7adf3fc --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/linesource.rb @@ -0,0 +1,111 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +module MaRuKu; module In; module Markdown; module BlockLevelParser + +# This represents a source of lines that can be consumed. +# +# It is the twin of CharSource. +# + +class LineSource + include MaRuKu::Strings + attr_reader :parent + + def initialize(lines, parent=nil, parent_offset=nil) + raise "NIL lines? 
" if not lines + @lines = lines + @lines_index = 0 + @parent = parent + @parent_offset = parent_offset + end + + def cur_line() @lines[@lines_index] end + def next_line() @lines[@lines_index+1] end + + def shift_line() + raise "Over the rainbow" if @lines_index >= @lines.size + l = @lines[@lines_index] + @lines_index += 1 + return l + end + + def ignore_line + raise "Over the rainbow" if @lines_index >= @lines.size + @lines_index += 1 + end + + def describe + s = "At line #{original_line_number(@lines_index)}\n" + + context = 3 # lines + from = [@lines_index-context, 0].max + to = [@lines_index+context, @lines.size-1].min + + for i in from..to + prefix = (i == @lines_index) ? '--> ' : ' '; + l = @lines[i] + s += "%10s %4s|%s" % + [@lines[i].md_type.to_s, prefix, l] + + s += "|\n" + end + +# if @parent +# s << "Parent context is: \n" +# s << add_tabs(@parent.describe,1,'|') +# end + s + end + + def original_line_number(index) + if @parent + return index + @parent.original_line_number(@parent_offset) + else + 1 + index + end + end + + def cur_index + @lines_index + end + + # Returns the type of next line as a string + # breaks at first :definition + def tell_me_the_future + s = ""; num_e = 0; + for i in @lines_index..@lines.size-1 + c = case @lines[i].md_type + when :text; "t" + when :empty; num_e+=1; "e" + when :definition; "d" + else "o" + end + s += c + break if c == "d" or num_e>1 + end + s + end + +end # linesource + +end end end end # block + diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/parse_block.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/parse_block.rb new file mode 100644 index 00000000..c676c296 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/parse_block.rb @@ -0,0 +1,615 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +module MaRuKu; module In; module Markdown; module BlockLevelParser + + include Helpers + include MaRuKu::Strings + include MaRuKu::In::Markdown::SpanLevelParser + + class BlockContext < Array + def describe + n = 5 + desc = size > n ? 
self[-n,n] : self + "Last #{n} elements: "+ + desc.map{|x| "\n -" + x.inspect}.join + end + end + + # Splits the string and calls parse_lines_as_markdown + def parse_text_as_markdown(text) + lines = split_lines(text) + src = LineSource.new(lines) + return parse_blocks(src) + end + + # Input is a LineSource + def parse_blocks(src) + output = BlockContext.new + + # run state machine + while src.cur_line + + next if check_block_extensions(src, output, src.cur_line) + +# Prints detected type (useful for debugging) +# puts "#{src.cur_line.md_type}|#{src.cur_line}" + case src.cur_line.md_type + when :empty; + output.push :empty + src.ignore_line + when :ial + m = InlineAttributeList.match src.shift_line + content = m[1] || "" +# puts "Content: #{content.inspect}" + src2 = CharSource.new(content, src) + interpret_extension(src2, output, [nil]) + when :ald + output.push read_ald(src) + when :text + # paragraph, or table, or definition list + read_text_material(src, output) + when :header2, :hrule + # hrule + src.shift_line + output.push md_hrule() + when :header3 + output.push read_header3(src) + when :ulist, :olist + list_type = src.cur_line.md_type == :ulist ? :ul : :ol + li = read_list_item(src) + # append to current list if we have one + if output.last.kind_of?(MDElement) && + output.last.node_type == list_type then + output.last.children << li + else + output.push md_el(list_type, [li]) + end + when :quote; output.push read_quote(src) + when :code; e = read_code(src); output << e if e + when :raw_html; e = read_raw_html(src); output << e if e + + when :footnote_text; output.push read_footnote_text(src) + when :ref_definition; + if src.parent && (src.cur_index == 0) + read_text_material(src, output) + else + read_ref_definition(src, output) + end + when :abbreviation; output.push read_abbreviation(src) + when :xml_instr; read_xml_instruction(src, output) + when :metadata; + maruku_error "Please use the new meta-data syntax: \n"+ + " http://maruku.rubyforge.org/proposal.html\n", src + src.ignore_line + else # warn if we forgot something + md_type = src.cur_line.md_type + line = src.cur_line + maruku_error "Ignoring line '#{line}' type = #{md_type}", src + src.shift_line + end + end + + merge_ial(output, src, output) + output.delete_if {|x| x.kind_of?(MDElement) && + x.node_type == :ial} + + # get rid of empty line markers + output.delete_if {|x| x == :empty} + # See for each list if we can omit the paragraphs and use li_span + # TODO: do this after + output.each do |c| + # Remove paragraphs that we can get rid of + if [:ul,:ol].include? c.node_type + if c.children.all? {|li| !li.want_my_paragraph} then + c.children.each do |d| + d.node_type = :li_span + d.children = d.children[0].children + end + end + end + if c.node_type == :definition_list + if c.children.all?{|defi| !defi.want_my_paragraph} then + c.children.each do |definition| + definition.definitions.each do |dd| + dd.children = dd.children[0].children + end + end + end + end + end + + output + end + + def read_text_material(src, output) + if src.cur_line =~ MightBeTableHeader and + (src.next_line && src.next_line =~ TableSeparator) + output.push read_table(src) + elsif [:header1,:header2].include? 
src.next_line.md_type + output.push read_header12(src) + elsif eventually_comes_a_def_list(src) + definition = read_definition(src) + if output.last.kind_of?(MDElement) && + output.last.node_type == :definition_list then + output.last.children << definition + else + output.push md_el(:definition_list, [definition]) + end + else # Start of a paragraph + output.push read_paragraph(src) + end + end + + + def read_ald(src) + if (l=src.shift_line) =~ AttributeDefinitionList + id = $1; al=$2; + al = read_attribute_list(CharSource.new(al,src), context=nil, break_on=[nil]) + self.ald[id] = al; + return md_ald(id, al) + else + maruku_error "Bug Bug:\n#{l.inspect}" + return nil + end + end + + # reads a header (with ----- or ========) + def read_header12(src) + line = src.shift_line.strip + al = nil + # Check if there is an IAL + if new_meta_data? and line =~ /^(.*)\{(.*)\}\s*$/ + line = $1.strip + ial = $2 + al = read_attribute_list(CharSource.new(ial,src), context=nil, break_on=[nil]) + end + text = parse_lines_as_span [ line ] + level = src.cur_line.md_type == :header2 ? 2 : 1; + src.shift_line + return md_header(level, text, al) + end + + # reads a header like '#### header ####' + def read_header3(src) + line = src.shift_line.strip + al = nil + # Check if there is an IAL + if new_meta_data? and line =~ /^(.*)\{(.*)\}\s*$/ + line = $1.strip + ial = $2 + al = read_attribute_list(CharSource.new(ial,src), context=nil, break_on=[nil]) + end + level = num_leading_hashes(line) + text = parse_lines_as_span [strip_hashes(line)] + return md_header(level, text, al) + end + + def read_xml_instruction(src, output) + m = /^\s*<\?((\w+)\s*)?(.*)$/.match src.shift_line + raise "BugBug" if not m + target = m[2] || '' + code = m[3] + until code =~ /\?>/ + code += "\n"+src.shift_line + end + if not code =~ (/\?>\s*$/) + garbage = (/\?>(.*)$/.match(code))[1] + maruku_error "Trailing garbage on last line: #{garbage.inspect}:\n"+ + add_tabs(code, 1, '|'), src + end + code.gsub!(/\?>\s*$/, '') + + if target == 'mrk' && MaRuKu::Globals[:unsafe_features] + result = safe_execute_code(self, code) + if result + if result.kind_of? String + raise "Not expected" + else + output.push(*result) + end + end + else + output.push md_xml_instr(target, code) + end + end + + def read_raw_html(src) + h = HTMLHelper.new + begin + h.eat_this(l=src.shift_line) +# puts "\nBLOCK:\nhtml -> #{l.inspect}" + while src.cur_line and not h.is_finished? + l=src.shift_line +# puts "html -> #{l.inspect}" + h.eat_this "\n"+l + end + rescue Exception => e + ex = e.inspect + e.backtrace.join("\n") + maruku_error "Bad block-level HTML:\n#{add_tabs(ex,1,'|')}\n", src + end + if not (h.rest =~ /^\s*$/) + maruku_error "Could you please format this better?\n"+ + "I see that #{h.rest.inspect} is left after the raw HTML.", src + end + raw_html = h.stuff_you_read + + return md_html(raw_html) + end + + def read_paragraph(src) + lines = [src.shift_line] + while src.cur_line + # :olist does not break + case t = src.cur_line.md_type + when :quote,:header3,:empty,:ref_definition,:ial #,:xml_instr,:raw_html + break + when :olist,:ulist + break if src.next_line.md_type == t + end + break if src.cur_line.strip.size == 0 + break if [:header1,:header2].include? src.next_line.md_type + break if any_matching_block_extension?(src.cur_line) + + lines << src.shift_line + end +# dbg_describe_ary(lines, 'PAR') + children = parse_lines_as_span(lines, src) + + return md_par(children) + end + + # Reads one list item, either ordered or unordered. 
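+	# For example (illustrative input, not from the Maruku test suite):
+	#
+	#   * first line of the item
+	#       indented lines are collected as part of the same item
+	#
+	# The collected lines are then re-parsed as block-level content.
+	#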
+ def read_list_item(src) + parent_offset = src.cur_index + + item_type = src.cur_line.md_type + first = src.shift_line + + indentation = spaces_before_first_char(first) + break_list = [:ulist, :olist, :ial] + # Ugly things going on inside `read_indented_content` + lines, want_my_paragraph = + read_indented_content(src,indentation, break_list, item_type) + + # add first line + # Strip first '*', '-', '+' from first line + stripped = first[indentation, first.size-1] + lines.unshift stripped + + # dbg_describe_ary(lines, 'LIST ITEM ') + + src2 = LineSource.new(lines, src, parent_offset) + children = parse_blocks(src2) + with_par = want_my_paragraph || (children.size>1) + + return md_li(children, with_par) + end + + def read_abbreviation(src) + if not (l=src.shift_line) =~ Abbreviation + maruku_error "Bug: it's Andrea's fault. Tell him.\n#{l.inspect}" + end + + abbr = $1 + desc = $2 + + if (not abbr) or (abbr.size==0) + maruku_error "Bad abbrev. abbr=#{abbr.inspect} desc=#{desc.inspect}" + end + + self.abbreviations[abbr] = desc + + return md_abbr_def(abbr, desc) + end + + def read_footnote_text(src) + parent_offset = src.cur_index + + first = src.shift_line + + if not first =~ FootnoteText + maruku_error "Bug (it's Andrea's fault)" + end + + id = $1 + text = $2 + + # Ugly things going on inside `read_indented_content` + indentation = 4 #first.size-text.size + +# puts "id =_#{id}_; text=_#{text}_ indent=#{indentation}" + + break_list = [:footnote_text, :ref_definition, :definition, :abbreviation] + item_type = :footnote_text + lines, want_my_paragraph = + read_indented_content(src,indentation, break_list, item_type) + + # add first line + if text && text.strip != "" then lines.unshift text end + +# dbg_describe_ary(lines, 'FOOTNOTE') + src2 = LineSource.new(lines, src, parent_offset) + children = parse_blocks(src2) + + e = md_footnote(id, children) + self.footnotes[id] = e + return e + end + + + # This is the only ugly function in the code base. + # It is used to read list items, descriptions, footnote text + def read_indented_content(src, indentation, break_list, item_type) + lines =[] + # collect all indented lines + saw_empty = false; saw_anything_after = false + while src.cur_line +# puts "Reading indent = #{indentation} #{src.cur_line.inspect}" + #puts "#{src.cur_line.md_type} #{src.cur_line.inspect}" + if src.cur_line.md_type == :empty + saw_empty = true + lines << src.shift_line + next + end + + # after a white line + if saw_empty + # we expect things to be properly aligned + if (ns=number_of_leading_spaces(src.cur_line)) < indentation + #puts "breaking for spaces, only #{ns}: #{src.cur_line}" + break + end + saw_anything_after = true + else +# if src.cur_line[0] != ?\ + break if break_list.include? src.cur_line.md_type +# end +# break if src.cur_line.md_type != :text + end + + + stripped = strip_indent(src.shift_line, indentation) + lines << stripped + + #puts "Accepted as #{stripped.inspect}" + + # You are only required to indent the first line of + # a child paragraph. 
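+			# e.g. (illustrative):
+			#
+			#   * first line of the item
+			#   this unindented line still continues the same paragraph
+			#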
+ if stripped.md_type == :text + while src.cur_line && (src.cur_line.md_type == :text) + lines << strip_indent(src.shift_line, indentation) + end + end + end + + want_my_paragraph = saw_anything_after || + (saw_empty && (src.cur_line && (src.cur_line.md_type == item_type))) + +# dbg_describe_ary(lines, 'LI') + # create a new context + + while lines.last && (lines.last.md_type == :empty) + lines.pop + end + + return lines, want_my_paragraph + end + + + def read_quote(src) + parent_offset = src.cur_index + + lines = [] + # collect all indented lines + while src.cur_line && src.cur_line.md_type == :quote + lines << unquote(src.shift_line) + end +# dbg_describe_ary(lines, 'QUOTE') + + src2 = LineSource.new(lines, src, parent_offset) + children = parse_blocks(src2) + return md_quote(children) + end + + def read_code(src) + # collect all indented lines + lines = [] + while src.cur_line && ([:code, :empty].include? src.cur_line.md_type) + lines << strip_indent(src.shift_line, 4) + end + + #while lines.last && (lines.last.md_type == :empty ) + while lines.last && lines.last.strip.size == 0 + lines.pop + end + + while lines.first && lines.first.strip.size == 0 + lines.shift + end + + return nil if lines.empty? + + source = lines.join("\n") + +# dbg_describe_ary(lines, 'CODE') + + return md_codeblock(source) + end + + # Reads a series of metadata lines with empty lines in between + def read_metadata(src) + hash = {} + while src.cur_line + case src.cur_line.md_type + when :empty; src.shift_line + when :metadata; hash.merge! parse_metadata(src.shift_line) + else break + end + end + hash + end + + + def read_ref_definition(src, out) + line = src.shift_line + + + # if link is incomplete, shift next line + if src.cur_line && !([:footnote_text, :ref_definition, :definition, :abbreviation].include? src.cur_line.md_type) && + ([1,2,3].include? number_of_leading_spaces(src.cur_line) ) + line += " "+ src.shift_line + end + +# puts "total= #{line}" + + match = LinkRegex.match(line) + if not match + maruku_error "Link does not respect format: '#{line}'" + return + end + + id = match[1]; url = match[2]; title = match[3]; + id = sanitize_ref_id(id) + + hash = self.refs[id] = {:url=>url,:title=>title} + + stuff=match[4] + + if stuff + stuff.split.each do |couple| +# puts "found #{couple}" + k, v = couple.split('=') + v ||= "" + if v[0,1]=='"' then v = v[1, v.size-2] end +# puts "key:_#{k}_ value=_#{v}_" + hash[k.to_sym] = v + end + end +# puts hash.inspect + + out.push md_ref_def(id, url, meta={:title=>title}) + end + + def split_cells(s) +# s.strip.split('|').select{|x|x.strip.size>0}.map{|x|x.strip} +# changed to allow empty cells + s.strip.split('|').select{|x|x.size>0}.map{|x|x.strip} + end + + def read_table(src) + head = split_cells(src.shift_line).map{|s| md_el(:head_cell, parse_lines_as_span([s])) } + + separator=split_cells(src.shift_line) + + align = separator.map { |s| s =~ Sep + if $1 and $2 then :center elsif $2 then :right else :left end } + + num_columns = align.size + + if head.size != num_columns + maruku_error "Table head does not have #{num_columns} columns: \n#{head.inspect}" + tell_user "I will ignore this table." + # XXX try to recover + return md_br() + end + + rows = [] + + while src.cur_line && src.cur_line =~ /\|/ + row = split_cells(src.shift_line).map{|s| + md_el(:cell, parse_lines_as_span([s]))} + if head.size != num_columns + maruku_error "Row does not have #{num_columns} columns: \n#{row.inspect}" + tell_user "I will ignore this table." 
+ # XXX try to recover + return md_br() + end + rows << row + end + + children = (head+rows).flatten + return md_el(:table, children, {:align => align}) + end + + # If current line is text, a definition list is coming + # if 1) text,empty,[text,empty]*,definition + + def eventually_comes_a_def_list(src) + future = src.tell_me_the_future + ok = future =~ %r{^t+e?d}x +# puts "future: #{future} - #{ok}" + ok + end + + + def read_definition(src) + # Read one or more terms + terms = [] + while src.cur_line && src.cur_line.md_type == :text + terms << md_el(:definition_term, parse_lines_as_span([src.shift_line])) + end +# dbg_describe_ary(terms, 'DT') + + want_my_paragraph = false + + raise "Chunky Bacon!" if not src.cur_line + + # one optional empty + if src.cur_line.md_type == :empty + want_my_paragraph = true + src.shift_line + end + + raise "Chunky Bacon!" if src.cur_line.md_type != :definition + + # Read one or more definitions + definitions = [] + while src.cur_line && src.cur_line.md_type == :definition + parent_offset = src.cur_index + + first = src.shift_line + first =~ Definition + first = $1 + + # I know, it's ugly!!! + + lines, w_m_p = + read_indented_content(src,4, [:definition], :definition) + want_my_paragraph ||= w_m_p + + lines.unshift first + +# dbg_describe_ary(lines, 'DD') + src2 = LineSource.new(lines, src, parent_offset) + children = parse_blocks(src2) + definitions << md_el(:definition_data, children) + end + + return md_el(:definition, terms+definitions, { + :terms => terms, + :definitions => definitions, + :want_my_paragraph => want_my_paragraph}) + end +end # BlockLevelParser +end # MaRuKu +end +end \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/parse_doc.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/parse_doc.rb new file mode 100644 index 00000000..f4a4909a --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/parse_doc.rb @@ -0,0 +1,234 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +require 'iconv' + + +module MaRuKu; module In; module Markdown; module BlockLevelParser + + def parse_doc(s) + # FIXME \r\n => \n + meta2 = parse_email_headers(s) + data = meta2[:data] + meta2.delete :data + + self.attributes.merge! meta2 + +=begin maruku_doc +Attribute: encoding +Scope: document +Summary: Encoding for the document. + +If the `encoding` attribute is specified, then the content +will be converted from the specified encoding to UTF-8. + +Conversion happens using the `iconv` library. 
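+
+For example, a document that begins with the header line
+`Encoding: iso-8859-1` is converted from ISO-8859-1 to UTF-8 before
+parsing (illustrative value; any encoding understood by iconv works).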
+=end + + enc = self.attributes[:encoding] + self.attributes.delete :encoding + if enc && enc.downcase != 'utf-8' + converted = Iconv.new('utf-8', enc).iconv(data) + +# puts "Data: #{data.inspect}: #{data}" +# puts "Conv: #{converted.inspect}: #{converted}" + + data = converted + end + + @children = parse_text_as_markdown(data) + + if true #markdown_extra? + self.search_abbreviations + self.substitute_markdown_inside_raw_html + end + + toc = create_toc + + # use title if not set + if not self.attributes[:title] and toc.header_element + title = toc.header_element.to_s + self.attributes[:title] = title +# puts "Set document title to #{title}" + end + + # save for later use + self.toc = toc + + # Now do the attributes magic + each_element do |e| + # default attribute list + if default = self.ald[e.node_type.to_s] + expand_attribute_list(default, e.attributes) + end + expand_attribute_list(e.al, e.attributes) +# puts "#{e.node_type}: #{e.attributes.inspect}" + end + +=begin maruku_doc +Attribute: unsafe_features +Scope: global +Summary: Enables execution of XML instructions. + +Disabled by default because of security concerns. +=end + + if Maruku::Globals[:unsafe_features] + self.execute_code_blocks + # TODO: remove executed code blocks + end + end + + # Expands an attribute list in an Hash + def expand_attribute_list(al, result) + al.each do |k, v| + case k + when :class + if not result[:class] + result[:class] = v + else + result[:class] += " " + v + end + when :id; result[:id] = v + when :ref; + if self.ald[v] + already = (result[:expanded_references] ||= []) + if not already.include?(v) + already.push v + expand_attribute_list(self.ald[v], result) + else + already.push v + maruku_error "Circular reference between labels.\n\n"+ + "Label #{v.inspect} calls itself via recursion.\nThe recursion is "+ + (already.map{|x| x.inspect}.join(' => ')) + end + else + if not result[:unresolved_references] + result[:unresolved_references] = v + else + result[:unresolved_references] << " #{v}" + end + + # $stderr.puts "Unresolved reference #{v.inspect} (avail: #{self.ald.keys.inspect})" + result[v.to_sym] = true + end + else + result[k.to_sym]=v + end + end + end + + def safe_execute_code(object, code) + begin + return object.instance_eval(code) + rescue Exception => e + maruku_error "Exception while executing this:\n"+ + add_tabs(code, 1, ">")+ + "\nThe error was:\n"+ + add_tabs(e.inspect+"\n"+e.caller.join("\n"), 1, "|") + rescue RuntimeError => e + maruku_error "2: Exception while executing this:\n"+ + add_tabs(code, 1, ">")+ + "\nThe error was:\n"+ + add_tabs(e.inspect, 1, "|") + rescue SyntaxError => e + maruku_error "2: Exception while executing this:\n"+ + add_tabs(code, 1, ">")+ + "\nThe error was:\n"+ + add_tabs(e.inspect, 1, "|") + end + nil + end + + def execute_code_blocks + self.each_element(:xml_instr) do |e| + if e.target == 'maruku' + result = safe_execute_code(e, e.code) + if result.kind_of?(String) + puts "Result is : #{result.inspect}" + end + end + end + end + + def search_abbreviations + self.abbreviations.each do |abbrev, title| + reg = Regexp.new(Regexp.escape(abbrev)) + self.replace_each_string do |s| + # bug if many abbreviations are present (agorf) + if m = reg.match(s) + e = md_abbr(abbrev.dup, title ? 
title.dup : nil) + [m.pre_match, e, m.post_match] + else + s + end + end + end + end + + include REXML + # (PHP Markdown extra) Search for elements that have + # markdown=1 or markdown=block defined + def substitute_markdown_inside_raw_html + self.each_element(:raw_html) do |e| + doc = e.instance_variable_get :@parsed_html + if doc # valid html + # parse block-level markdown elements in these HTML tags + block_tags = ['div'] + + # use xpath to find elements with 'markdown' attribute + XPath.match(doc, "//*[attribute::markdown]" ).each do |e| +# puts "Found #{e}" + # should we parse block-level or span-level? + + how = e.attributes['markdown'] + parse_blocks = (how == 'block') || block_tags.include?(e.name) + + # Select all text elements of e + XPath.match(e, "//text()" ).each { |original_text| + s = original_text.value.strip + if s.size > 0 + + # puts "Parsing #{s.inspect} as blocks: #{parse_blocks} (#{e.name}, #{e.attributes['markdown']}) " + + el = md_el(:dummy, + parse_blocks ? parse_text_as_markdown(s) : + parse_lines_as_span([s]) ) + p = original_text.parent + el.children_to_html.each do |x| + p.insert_before(original_text, x) + end + p.delete(original_text) + + end + } + + + # remove 'markdown' attribute + e.delete_attribute 'markdown' + + end + + end + end + end + +end end end end diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/parse_span_better.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/parse_span_better.rb new file mode 100644 index 00000000..6aad7889 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/input/parse_span_better.rb @@ -0,0 +1,746 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +require 'set' + +module MaRuKu; module In; module Markdown; module SpanLevelParser + include MaRuKu::Helpers + + EscapedCharInText = + Set.new [?\\,?`,?*,?_,?{,?},?[,?],?(,?),?#,?.,?!,?|,?:,?+,?-,?>] + + EscapedCharInQuotes = + Set.new [?\\,?`,?*,?_,?{,?},?[,?],?(,?),?#,?.,?!,?|,?:,?+,?-,?>,?',?"] + + EscapedCharInInlineCode = [?\\,?`] + + def parse_lines_as_span(lines, parent=nil) + parse_span_better lines.join("\n"), parent + end + + def parse_span_better(string, parent=nil) + if not string.kind_of? String then + error "Passed #{string.class}." end + + st = (string + "") + st.freeze + src = CharSource.new(st, parent) + read_span(src, EscapedCharInText, [nil]) + end + + # This is the main loop for reading span elements + # + # It's long, but not *complex* or difficult to understand. + # + # + def read_span(src, escaped, exit_on_chars, exit_on_strings=nil) + con = SpanContext.new + c = d = nil + while true + c = src.cur_char + + # This is only an optimization which cuts 50% of the time used. 
+ # (but you can't use a-zA-z in exit_on_chars) + if c && ((c>=?a && c<=?z) || ((c>=?A && c<=?Z))) + con.cur_string << src.shift_char + next + end + + break if exit_on_chars && exit_on_chars.include?(c) + break if exit_on_strings && exit_on_strings.any? {|x| src.cur_chars_are x} + + # check if there are extensions + if check_span_extensions(src, con) + next + end + + case c = src.cur_char + when ?\ # it's space (32) + if src.cur_chars_are " \n" + src.ignore_chars(3) + con.push_element md_br() + next + else + src.ignore_char + con.push_space + end + when ?\n, ?\t + src.ignore_char + con.push_space + when ?` + read_inline_code(src,con) + when ?< + # It could be: + # 1) HTML "
> + + case d = src.next_char + when ?<; # guillemettes + src.ignore_chars(2) + con.push_char ?< + con.push_char ?< + when ?!; + if src.cur_chars_are ' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + " + + +end end end + diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/output/to_latex_strings.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/output/to_latex_strings.rb new file mode 100644 index 00000000..da043a79 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/output/to_latex_strings.rb @@ -0,0 +1,64 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + + +class String + + # These are TeX's special characters + LATEX_ADD_SLASH = [ ?{, ?}, ?$, ?&, ?#, ?_, ?%] + + # These, we transform to {\tt \char} + LATEX_TO_CHARCODE = [ ?^, ?~, ?>,?<] + + def escape_to_latex(s) + s2 = "" + s.each_byte do |b| + if LATEX_TO_CHARCODE.include? b + s2 += "{\\tt \\char#{b}}" + elsif LATEX_ADD_SLASH.include? b + s2 << ?\\ << b + elsif b == ?\\ + # there is no backslash in cmr10 fonts + s2 += "$\\backslash$" + else + s2 << b + end + end + s2 + end + + # escapes special characters + def to_latex + s = escape_to_latex(self) + OtherGoodies.each do |k, v| + s.gsub!(k, v) + end + s + end + + # other things that are good on the eyes + OtherGoodies = { + /(\s)LaTeX/ => '\1\\LaTeX\\xspace ', # XXX not if already \LaTeX +# 'HTML' => '\\textsc{html}\\xspace ', +# 'PDF' => '\\textsc{pdf}\\xspace ' + } + +end \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/output/to_markdown.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/output/to_markdown.rb new file mode 100644 index 00000000..98d9322d --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/output/to_markdown.rb @@ -0,0 +1,164 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +class String + # XXX: markdown escaping + def to_md(c=nil) + to_s + end + + # " andrea censi " => [" andrea ", "censi "] + def mysplit + split.map{|x| x+" "} + end +end + + +module MaRuKu; module Out; module Markdown + + DefaultLineLength = 40 + + def to_md(context={}) + children_to_md(context) + end + + def to_md_paragraph(context) + line_length = context[:line_length] || DefaultLineLength + wrap(@children, line_length, context)+"\n" + end + + def to_md_li_span(context) + len = (context[:line_length] || DefaultLineLength) - 2 + s = add_tabs(wrap(@children, len-2, context), 1, ' ') + s[0] = ?* + s + "\n" + end + + def to_md_abbr_def(context) + "*[#{self.abbr}]: #{self.text}\n" + end + + def to_md_ol(context) + len = (context[:line_length] || DefaultLineLength) - 2 + md = "" + self.children.each_with_index do |li, i| + s = add_tabs(w=wrap(li.children, len-2, context), 1, ' ')+"\n" + s[0,4] = "#{i+1}. "[0,4] +# puts w.inspect + md += s + end + md + "\n" + end + + def to_md_ul(context) + len = (context[:line_length] || DefaultLineLength) - 2 + md = "" + self.children.each_with_index do |li, i| + w = wrap(li.children, len-2, context) +# puts "W: "+ w.inspect + s = add_indent(w) +# puts "S: " +s.inspect + s[0,1] = "-" + md += s + end + md + "\n" + end + + def add_indent(s,char=" ") + t = s.split("\n").map{|x| char+x }.join("\n") + s << ?\n if t[-1] == ?\n + s + end + + # Convert each child to html + def children_to_md(context) + array_to_md(@children, context) + end + + def wrap(array, line_length, context) + out = "" + line = "" + array.each do |c| + if c.kind_of?(MDElement) && c.node_type == :linebreak + out << line.strip << " \n"; line=""; + next + end + + pieces = + if c.kind_of? String + c.to_md.mysplit + else + [c.to_md(context)].flatten + end + + # puts "Pieces: #{pieces.inspect}" + pieces.each do |p| + if p.size + line.size > line_length + out << line.strip << "\n"; + line = "" + end + line << p + end + end + out << line.strip << "\n" if line.size > 0 + out << ?\n if not out[-1] == ?\n + out + end + + + def array_to_md(array, context, join_char='') + e = [] + array.each do |c| + method = c.kind_of?(MDElement) ? + "to_md_#{c.node_type}" : "to_md" + + if not c.respond_to?(method) + #raise "Object does not answer to #{method}: #{c.class} #{c.inspect[0,100]}" +# tell_user "Using default for #{c.node_type}" + method = 'to_md' + end + +# puts "#{c.inspect} created with method #{method}" + h = c.send(method, context) + + if h.nil? + raise "Nil md for #{c.inspect} created with method #{method}" + end + + if h.kind_of?Array + e = e + h + else + e << h + end + end + e.join(join_char) + end + +end end end + +module MaRuKu; class MDDocument + alias old_md to_md + def to_md(context={}) + s = old_md(context) +# puts s + s + end +end end \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/output/to_s.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/output/to_s.rb new file mode 100644 index 00000000..ecdadd93 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/output/to_s.rb @@ -0,0 +1,56 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. 
+# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +module MaRuKu + +class MDElement + + # Strips all formatting from the string + def to_s + children_to_s + end + + def children_to_s + @children.join + end + + # Generate an id for headers. Assumes @children is set. + def generate_id + + title = children_to_s + title.gsub!(/ /,'_') + title.downcase! + title.gsub!(/[^\w_]/,'') + title.strip! + + if title.size == 0 + $uid ||= 0 + $uid += 1 + title = "id#{$uid}" + end + + # random is a very bad idea +# title << "_" + rand(10000).to_s + title + end +end + +end diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/string_utils.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/string_utils.rb new file mode 100644 index 00000000..e041804e --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/string_utils.rb @@ -0,0 +1,191 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +# Boring stuff with strings. +module MaRuKu; module Strings + + def add_tabs(s,n=1,char="\t") + s.split("\n").map{|x| char*n+x }.join("\n") + end + + TabSize = 4; + + def split_lines(s) + s.gsub("\r","").split("\n") + end + + # This parses email headers. Returns an hash. + # + # +hash['data']+ is the message. + # + # Keys are downcased, space becomes underscore, converted to symbols. + # + # My key: true + # + # becomes: + # + # {:my_key => true} + # + def parse_email_headers(s) + keys={} + match = (s =~ /\A((\w[\w\s\_\-]+: .*\n)+)\s*\n/) + if match != 0 + keys[:data] = s + else + keys[:data] = $' + headers = $1 + headers.split("\n").each do |l| +# Fails if there are other ':' characters. +# k, v = l.split(':') + k, v = l.split(':', 2) + k, v = normalize_key_and_value(k, v) + k = k.to_sym +# puts "K = #{k}, V=#{v}" + keys[k] = v + end + end + keys + end + + # Keys are downcased, space becomes underscore, converted to symbols. + def normalize_key_and_value(k,v) + v = v ? 
v.strip : true # no value defaults to true + k = k.strip + + # check synonyms + v = true if ['yes','true'].include?(v.to_s.downcase) + v = false if ['no','false'].include?(v.to_s.downcase) + + k = k.downcase.gsub(' ','_') + return k, v + end + + # Returns the number of leading spaces, considering that + # a tab counts as `TabSize` spaces. + def number_of_leading_spaces(s) + n=0; i=0; + while i < s.size + c = s[i,1] + if c == ' ' + i+=1; n+=1; + elsif c == "\t" + i+=1; n+=TabSize; + else + break + end + end + n + end + + # This returns the position of the first real char in a list item + # + # For example: + # '*Hello' # => 1 + # '* Hello' # => 2 + # ' * Hello' # => 3 + # ' * Hello' # => 5 + # '1.Hello' # => 2 + # ' 1. Hello' # => 5 + + def spaces_before_first_char(s) + case s.md_type + when :ulist + i=0; + # skip whitespace if present + while s[i,1] =~ /\s/; i+=1 end + # skip indicator (+, -, *) + i+=1 + # skip optional whitespace + while s[i,1] =~ /\s/; i+=1 end + return i + when :olist + i=0; + # skip whitespace + while s[i,1] =~ /\s/; i+=1 end + # skip digits + while s[i,1] =~ /\d/; i+=1 end + # skip dot + i+=1 + # skip whitespace + while s[i,1] =~ /\s/; i+=1 end + return i + else + tell_user "BUG (my bad): '#{s}' is not a list" + 0 + end + end + + # Counts the number of leading '#' in the string + def num_leading_hashes(s) + i=0; + while i<(s.size-1) && (s[i,1]=='#'); i+=1 end + i + end + + # Strips initial and final hashes + def strip_hashes(s) + s = s[num_leading_hashes(s), s.size] + i = s.size-1 + while i > 0 && (s[i,1] =~ /(#|\s)/); i-=1; end + s[0, i+1].strip + end + + # change space to "_" and remove any non-word character + def sanitize_ref_id(x) + x.strip.downcase.gsub(' ','_').gsub(/[^\w]/,'') + end + + + # removes initial quote + def unquote(s) + s.gsub(/^>\s?/,'') + end + + # toglie al massimo n caratteri + def strip_indent(s, n) + i = 0 + while i < s.size && n>0 + c = s[i,1] + if c == ' ' + n-=1; + elsif c == "\t" + n-=TabSize; + else + break + end + i+=1 + end + s[i, s.size] + end + + def dbg_describe_ary(a, prefix='') + i = 0 + a.each do |l| + puts "#{prefix} (#{i+=1})# #{l.inspect}" + end + end + + def force_linebreak?(l) + l =~ / $/ + end + +end +end diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/structures.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/structures.rb new file mode 100644 index 00000000..50e2314a --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/structures.rb @@ -0,0 +1,167 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + + +class Module + def safe_attr_accessor1(symbol, klass) + attr_reader symbol + code = <<-EOF + def #{symbol}=(val) + if not val.kind_of? 
#{klass} + s = "\nCould not assign an object of type \#{val.class} to #{symbol}.\n\n" + s += "Tried to assign object of class \#{val.class}:\n"+ + "\#{val.inspect}\n"+ + "to \#{self.class}::#{symbol} constrained to be of class #{klass}.\n" + raise s + end + @#{symbol} = val + end + +EOF + module_eval code + end + + def safe_attr_accessor2(symbol, klass) + attr_accessor symbol + end + + alias safe_attr_accessor safe_attr_accessor2 +end + +module MaRuKu + +# I did not want to have a class for each possible element. +# Instead I opted to have only the class "MDElement" +# that represents eveything in the document (paragraphs, headers, etc). +# +# You can tell what it is by the variable `node_type`. +# +# In the instance-variable `children` there are the children. These +# can be of class 1) String or 2) MDElement. +# +# The @doc variable points to the document to which the MDElement +# belongs (which is an instance of Maruku, subclass of MDElement). +# +# Attributes are contained in the hash `attributes`. +# Keys are symbols (downcased, with spaces substituted by underscores) +# +# For example, if you write in the source document. +# +# Title: test document +# My property: value +# +# content content +# +# You can access `value` by writing: +# +# @doc.attributes[:my_property] # => 'value' +# +# from whichever MDElement in the hierarchy. +# +class MDElement + # See helpers.rb for the list of allowed #node_type values + safe_attr_accessor :node_type, Symbol + + # Children are either Strings or MDElement + safe_attr_accessor :children, Array + + # An attribute list, may not be nil + safe_attr_accessor :al, Array #Maruku::AttributeList + + # These are the processed attributes + safe_attr_accessor :attributes, Hash + + # Reference of the document (which is of class Maruku) + attr_accessor :doc + + def initialize(node_type=:unset, children=[], meta={}, + al=MaRuKu::AttributeList.new ) + super(); + self.children = children + self.node_type = node_type + + @attributes = {} + + meta.each do |symbol, value| + self.instance_eval " + def #{symbol}; @#{symbol}; end + def #{symbol}=(val); @#{symbol}=val; end" + self.send "#{symbol}=", value + end + + self.al = al || AttributeList.new + + self.meta_priv = meta + end + + attr_accessor :meta_priv + + def ==(o) + ok = o.kind_of?(MDElement) && + (self.node_type == o.node_type) && + (self.meta_priv == o.meta_priv) && + (self.children == o.children) + + if not ok +# puts "This:\n"+self.inspect+"\nis different from\n"+o.inspect+"\n\n" + end + ok + end +end + +# This represents the whole document and holds global data. + +class MDDocument + + safe_attr_accessor :refs, Hash + safe_attr_accessor :footnotes, Hash + + # This is an hash. The key might be nil. + safe_attr_accessor :abbreviations, Hash + + # Attribute lists definition + safe_attr_accessor :ald, Hash + + # The order in which footnotes are used. Contains the id. 
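+	# (e.g. ["note1", "note2"] -- illustrative ids, listed in order of first use)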
+ safe_attr_accessor :footnotes_order, Array + + safe_attr_accessor :latex_required_packages, Array + + safe_attr_accessor :refid2ref, Hash + + def initialize(s=nil) + super(:document) + @doc = self + + self.refs = {} + self.footnotes = {} + self.footnotes_order = [] + self.abbreviations = {} + self.ald = {} + self.latex_required_packages = [] + + parse_doc(s) if s + end +end + + +end # MaRuKu + diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/structures_inspect.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/structures_inspect.rb new file mode 100644 index 00000000..c0064f90 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/structures_inspect.rb @@ -0,0 +1,87 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + + +class String + def inspect_more(a=nil,b=nil) + inspect + end +end + +class Object + def inspect_more(a=nil,b=nil) + inspect + end +end + +class Array + def inspect_more(compact, join_string, add_brackets=true) + s = map {|x| + x.kind_of?(String) ? x.inspect : + x.kind_of?(MaRuKu::MDElement) ? x.inspect(compact) : + (raise "WTF #{x.class} #{x.inspect}") + }.join(join_string) + + add_brackets ? "[#{s}]" : s + end +end + +class Hash + def inspect_ordered(a=nil,b=nil) + "{"+keys.map{|x|x.to_s}.sort.map{|x|x.to_sym}. + map{|k| k.inspect + "=>"+self[k].inspect}.join(',')+"}" + end +end + +module MaRuKu +class MDElement + def inspect(compact=true) + if compact + i2 = inspect2 + return i2 if i2 + end + + "md_el(:%s,%s,%s,%s)" % + [ + self.node_type, + children_inspect(compact), + @meta_priv.inspect_ordered, + self.al.inspect + ] + end + + def children_inspect(compact=true) + s = @children.inspect_more(compact,', ') + if @children.empty? + "[]" + elsif s.size < 70 + s + else + "[\n"+ + add_tabs(@children.inspect_more(compact,",\n",false))+ + "\n]" + end + end + +end + +end + diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/structures_iterators.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/structures_iterators.rb new file mode 100644 index 00000000..ff9c6b43 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/structures_iterators.rb @@ -0,0 +1,61 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +module MaRuKu + +class MDElement + + # Yields to each element of specified node_type + # All elements if e_node_type is nil. + def each_element(e_node_type=nil, &block) + @children.each do |c| + if c.kind_of? MDElement + if (not e_node_type) || (e_node_type == c.node_type) + block.call c + end + c.each_element(e_node_type, &block) + end + end + end + + # Apply passed block to each String in the hierarchy. + def replace_each_string(&block) + for c in @children + if c.kind_of? MDElement + c.replace_each_string(&block) + end + end + + processed = [] + until @children.empty? + c = @children.shift + if c.kind_of? String + result = block.call(c) + [*result].each do |e| processed << e end + else + processed << c + end + end + @children = processed + end + +end +end \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/tests/benchmark.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/tests/benchmark.rb new file mode 100644 index 00000000..21d4a935 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/tests/benchmark.rb @@ -0,0 +1,82 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +require 'maruku' +#require 'bluecloth' + + +data = $stdin.read + +num = 10 + +if ARGV.size > 0 && ((n=ARGV[0].to_i) != 0) + num = n +end + +methods = +[ + + [Maruku, :to_html], +# [BlueCloth, :to_html], + [Maruku, :to_latex] + +] + +#methods = [[Maruku, :class]] +#num = 10 + +stats = +methods .map do |c, method| + puts "Computing for #{c}" + + start = Time.now + doc = nil + for i in 1..num + $stdout.write "#{i} "; $stdout.flush + doc = c.new(data) + end + stop = Time.now + parsing = (stop-start)/num + + start = Time.now + for i in 1..num + $stdout.write "#{i} "; $stdout.flush + s = doc.send method + end + stop = Time.now + rendering = (stop-start)/num + + puts ("%s (%s): parsing %0.2f sec + rendering %0.2f sec "+ + "= %0.2f sec ") % [c, method, parsing,rendering,parsing+rendering] + + [c, method, parsing, rendering] +end + +puts "\n\n\n" +stats.each do |x| x.push(x[2]+x[3]) end +max = stats.map{|x|x[4]}.max +stats.sort! { |x,y| x[4] <=> y[4] } . reverse! 
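+# Print a summary, slowest method first; the final "x" factor is max/tot, i.e.
+# how many times faster each method is than the slowest one.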
+for c, method, parsing, rendering, tot in stats + puts ("%20s: parsing %0.2f sec + rendering %0.2f sec "+ + "= %0.2f sec (%0.2fx)") % + ["#{c} (#{method})", parsing,rendering,tot,max/tot] +end + diff --git a/vendor/gems/gems/maruku-0.6.0/lib/maruku/tests/new_parser.rb b/vendor/gems/gems/maruku-0.6.0/lib/maruku/tests/new_parser.rb new file mode 100644 index 00000000..8a014833 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/lib/maruku/tests/new_parser.rb @@ -0,0 +1,373 @@ +#-- +# Copyright (C) 2006 Andrea Censi +# +# This file is part of Maruku. +# +# Maruku is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# Maruku is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Maruku; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +#++ + + +require 'maruku' +require 'maruku/ext/math' + +module MaRuKu; module Tests + # 5 accented letters in italian, encoded as UTF-8 + AccIta8 = "\303\240\303\250\303\254\303\262\303\271" + + # Same letters, written in ISO-8859-1 (one byte per letter) + AccIta1 = "\340\350\354\362\371" + + # The word MA-RU-KU, written in katakana using UTF-8 + Maruku8 = "\343\203\236\343\203\253\343\202\257" + + def test_span_parser(verbose, break_on_first_error, quiet) + good_cases = [ + + ["", [], 'Empty string gives empty list'], + ["a", ["a"], 'Easy char'], + [" a", ["a"], 'First space in the paragraph is ignored'], + ["a\n \n", ["a"], 'Last spaces in the paragraphs are ignored'], + [' ', [], 'One char => nothing'], + [' ', [], 'Two chars => nothing'], + ['a b', ['a b'], 'Spaces are compressed'], + ['a b', ['a b'], 'Newlines are spaces'], + ["a\nb", ['a b'], 'Newlines are spaces'], + ["a\n b", ['a b'], 'Compress newlines 1'], + ["a \nb", ['a b'], 'Compress newlines 2'], + [" \nb", ['b'], 'Compress newlines 3'], + ["\nb", ['b'], 'Compress newlines 4'], + ["b\n", ['b'], 'Compress newlines 5'], + ["\n", [], 'Compress newlines 6'], + ["\n\n\n", [], 'Compress newlines 7'], + + [nil, :throw, "Should throw on nil input"], + + # Code blocks + ["`" , :throw, 'Unclosed single ticks'], + ["``" , :throw, 'Unclosed double ticks'], + ["`a`" , [md_code('a')], 'Simple inline code'], + ["`` ` ``" , [md_code('`')], ], + ["`` \\` ``" , [md_code('\\`')], ], + ["``a``" , [md_code('a')], ], + ["`` a ``" , [md_code('a')], ], + + # Newlines + ["a \n", ['a',md_el(:linebreak)], 'Two spaces give br.'], + ["a \n", ['a'], 'Newlines 2'], + [" \n", [md_el(:linebreak)], 'Newlines 3'], + [" \n \n", [md_el(:linebreak),md_el(:linebreak)],'Newlines 3'], + [" \na \n", [md_el(:linebreak),'a',md_el(:linebreak)],'Newlines 3'], + + # Inline HTML + ["a < b", ['a < b'], '< can be on itself'], + ["
", [md_html('
')], 'HR will be sanitized'], + ["
", [md_html('
')], 'Closed tag is ok'], + ["
", [md_html('
')], 'Closed tag is ok 2'], + ["
a", [md_html('
'),'a'], 'Closed tag is ok 2'], + ["a", [md_html(''),'a'], 'Inline HTML 1'], + ["ea", [md_html('e'),'a'], 'Inline HTML 2'], + ["aeb", ['a',md_html('e'),'b'], 'Inline HTML 3'], + ["eaf", + [md_html('e'),'a',md_html('f')], + 'Inline HTML 4'], + ["efa", + [md_html('e'),md_html('f'),'a'], + 'Inline HTML 5'], + + ["", [md_html("")], 'Attributes'], + [""], + + # emphasis + ["**", :throw, 'Unclosed double **'], + ["\\*", ['*'], 'Escaping of *'], + ["a *b* ", ['a ', md_em('b')], 'Emphasis 1'], + ["a *b*", ['a ', md_em('b')], 'Emphasis 2'], + ["a * b", ['a * b'], 'Emphasis 3'], + ["a * b*", :throw, 'Unclosed emphasis'], + # same with underscore + ["__", :throw, 'Unclosed double __'], + ["\\_", ['_'], 'Escaping of _'], + ["a _b_ ", ['a ', md_em('b')], 'Emphasis 4'], + ["a _b_", ['a ', md_em('b')], 'Emphasis 5'], + ["a _ b", ['a _ b'], 'Emphasis 6'], + ["a _ b_", :throw, 'Unclosed emphasis'], + ["_b_", [md_em('b')], 'Emphasis 7'], + ["_b_ _c_", [md_em('b'),' ',md_em('c')], 'Emphasis 8'], + ["_b__c_", [md_em('b'),md_em('c')], 'Emphasis 9'], + # underscores in word + ["mod_ruby", ['mod_ruby'], 'Word with underscore'], + # strong + ["**a*", :throw, 'Unclosed double ** 2'], + ["\\**a*", ['*', md_em('a')], 'Escaping of *'], + ["a **b** ", ['a ', md_strong('b')], 'Emphasis 1'], + ["a **b**", ['a ', md_strong('b')], 'Emphasis 2'], + ["a ** b", ['a ** b'], 'Emphasis 3'], + ["a ** b**", :throw, 'Unclosed emphasis'], + ["**b****c**", [md_strong('b'),md_strong('c')], 'Emphasis 9'], + # strong (with underscore) + ["__a_", :throw, 'Unclosed double __ 2'], + + # ["\\__a_", ['_', md_em('a')], 'Escaping of _'], + ["a __b__ ", ['a ', md_strong('b')], 'Emphasis 1'], + ["a __b__", ['a ', md_strong('b')], 'Emphasis 2'], + ["a __ b", ['a __ b'], 'Emphasis 3'], + ["a __ b__", :throw, 'Unclosed emphasis'], + ["__b____c__", [md_strong('b'),md_strong('c')], 'Emphasis 9'], + # extra strong + ["***a**", :throw, 'Unclosed triple *** '], + ["\\***a**", ['*', md_strong('a')], 'Escaping of *'], + ["a ***b*** ", ['a ', md_emstrong('b')], 'Strong elements'], + ["a ***b***", ['a ', md_emstrong('b')]], + ["a *** b", ['a *** b']], + ["a ** * b", ['a ** * b']], + ["***b******c***", [md_emstrong('b'),md_emstrong('c')]], + ["a *** b***", :throw, 'Unclosed emphasis'], + # same with underscores + ["___a__", :throw, 'Unclosed triple *** '], +# ["\\___a__", ['_', md_strong('a')], 'Escaping of _'], + ["a ___b___ ", ['a ', md_emstrong('b')], 'Strong elements'], + ["a ___b___", ['a ', md_emstrong('b')]], + ["a ___ b", ['a ___ b']], + ["a __ _ b", ['a __ _ b']], + ["___b______c___", [md_emstrong('b'),md_emstrong('c')]], + ["a ___ b___", :throw, 'Unclosed emphasis'], + # mixing is bad + ["*a_", :throw, 'Mixing is bad'], + ["_a*", :throw], + ["**a__", :throw], + ["__a**", :throw], + ["___a***", :throw], + ["***a___", :throw], + # links of the form [text][ref] + ["\\[a]", ["[a]"], 'Escaping 1'], + ["\\[a\\]", ["[a]"], 'Escaping 2'], +# This is valid in the new Markdown version +# ["[a]", ["a"], 'Not a link'], + ["[a]", [ md_link(["a"],'a')], 'Empty link'], + ["[a][]", ], + ["[a][]b", [ md_link(["a"],'a'),'b'], 'Empty link'], + ["[a\\]][]", [ md_link(["a]"],'a')], 'Escape inside link (throw ?] 
away)'], + + ["[a", :throw, 'Link not closed'], + ["[a][", :throw, 'Ref not closed'], + + # links of the form [text](url) + ["\\[a](b)", ["[a](b)"], 'Links'], + ["[a](url)c", [md_im_link(['a'],'url'),'c'], 'url'], + ["[a]( url )c" ], + ["[a] ( url )c" ], + ["[a] ( url)c" ], + + ["[a](ur:/l/ 'Title')", [md_im_link(['a'],'ur:/l/','Title')], + 'url and title'], + ["[a] ( ur:/l/ \"Title\")" ], + ["[a] ( ur:/l/ \"Title\")" ], + ["[a]( ur:/l/ Title)", :throw, "Must quote title" ], + + ["[a](url 'Tit\\\"l\\\\e')", [md_im_link(['a'],'url','Tit"l\\e')], + 'url and title escaped'], + ["[a] ( url \"Tit\\\"l\\\\e\")" ], + ["[a] ( url \"Tit\\\"l\\\\e\" )" ], + ['[a] ( url "Tit\\"l\\\\e" )' ], + ["[a]()", [md_im_link(['a'],'')], 'No URL is OK'], + + ["[a](\"Title\")", :throw, "No url specified" ], + ["[a](url \"Title)", :throw, "Unclosed quotes" ], + ["[a](url \"Title\\\")", :throw], + ["[a](url \"Title\" ", :throw], + + ["[a](url \'Title\")", :throw, "Mixing is bad" ], + ["[a](url \"Title\')"], + + ["[a](/url)", [md_im_link(['a'],'/url')], 'Funny chars in url'], + ["[a](#url)", [md_im_link(['a'],'#url')]], + ["[a]()", [md_im_link(['a'],'/script?foo=1&bar=2')]], + + + # Images + ["\\![a](url)", ['!', md_im_link(['a'],'url') ], 'Escaping images'], + + ["![a](url)", [md_im_image(['a'],'url')], 'Image no title'], + ["![a]( url )" ], + ["![a] ( url )" ], + ["![a] ( url)" ], + + ["![a](url 'ti\"tle')", [md_im_image(['a'],'url','ti"tle')], 'Image with title'], + ['![a]( url "ti\\"tle")' ], + + ["![a](url", :throw, 'Invalid images'], + ["![a( url )" ], + ["![a] ('url )" ], + + ["![a][imref]", [md_image(['a'],'imref')], 'Image with ref'], + ["![a][ imref]"], + ["![a][ imref ]"], + ["![a][\timref\t]"], + + + ['', + [md_url('http://example.com/?foo=1&bar=2')], 'Immediate link'], + ['ab', + ['a',md_url('http://example.com/?foo=1&bar=2'),'b'] ], + ['', + [md_email('andrea@censi.org')], 'Email address'], + [''], + ["Developmen ", + ["Developmen ", md_url("http://rubyforge.org/projects/maruku/")]], + ["ab", ['a',md_html(''),'b'], + 'HTML Comment'], + + ["a +*** Output of inspect *** +md_el(:document,[md_html("")],{},[]) +*** Output of to_html *** + +*** Output of to_latex *** + +*** Output of to_md *** + +*** Output of to_s *** + +*** EOF *** + + + + OK! + + + +*** Output of Markdown.pl *** +(not used anymore) +*** Output of Markdown.pl (parsed) *** +(not used anymore) \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/tests/unittest/xml3.md b/vendor/gems/gems/maruku-0.6.0/tests/unittest/xml3.md new file mode 100644 index 00000000..b6c8f1ce --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/tests/unittest/xml3.md @@ -0,0 +1,38 @@ +Write a comment here +*** Parameters: *** +{} +*** Markdown input: *** + + Blah + + + +
*em*
+ +*** Output of inspect *** +md_el(:document,[ + md_html("\n\tBlah\n\t\n\t\t\n\t\n
*em*
") +],{},[]) +*** Output of to_html *** +Blah + + +
em
+*** Output of to_latex *** + +*** Output of to_md *** + +*** Output of to_s *** + +*** EOF *** + + + + OK! + + + +*** Output of Markdown.pl *** +(not used anymore) +*** Output of Markdown.pl (parsed) *** +(not used anymore) \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/tests/unittest/xml_instruction.md b/vendor/gems/gems/maruku-0.6.0/tests/unittest/xml_instruction.md new file mode 100644 index 00000000..5f89afdc --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/tests/unittest/xml_instruction.md @@ -0,0 +1,64 @@ +Directives should be preserved. +*** Parameters: *** +{} +*** Markdown input: *** + + + + + + +Targets + +Inside: last + + +*** Output of inspect *** +md_el(:document,[ + md_el(:xml_instr,[],{:code=>" noTarget",:target=>""},[]), + md_el(:xml_instr,[],{:code=>"",:target=>"php"},[]), + md_el(:xml_instr,[],{:code=>"",:target=>"xml"},[]), + md_el(:xml_instr,[],{:code=>"",:target=>"mrk"},[]), + md_par([ + "Targets ", + md_el(:xml_instr,[],{:code=>"noTarget",:target=>""},[]), + " ", + md_el(:xml_instr,[],{:code=>"",:target=>"php"},[]), + " ", + md_el(:xml_instr,[],{:code=>"",:target=>"xml"},[]), + " ", + md_el(:xml_instr,[],{:code=>"",:target=>"mrk"},[]) + ]), + md_par([ + "Inside: ", + md_el(:xml_instr,[],{:code=>"puts \"Inside: Hello\"",:target=>"mrk"},[]), + " last" + ]) +],{},[]) +*** Output of to_html *** + +

Targets

+ +

Inside: last

+*** Output of to_latex *** +Targets + +Inside: last +*** Output of to_md *** +Targets + +Inside: last +*** Output of to_s *** +Targets Inside: last +*** EOF *** + + + + OK! + + + +*** Output of Markdown.pl *** +(not used anymore) +*** Output of Markdown.pl (parsed) *** +(not used anymore) \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/tests/utf8-files/simple.md b/vendor/gems/gems/maruku-0.6.0/tests/utf8-files/simple.md new file mode 100644 index 00000000..775afd51 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/tests/utf8-files/simple.md @@ -0,0 +1 @@ +‡ \ No newline at end of file diff --git a/vendor/gems/gems/maruku-0.6.0/unit_test_block.sh b/vendor/gems/gems/maruku-0.6.0/unit_test_block.sh new file mode 100644 index 00000000..7f2c1080 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/unit_test_block.sh @@ -0,0 +1,5 @@ + +files=`find tests/unittest -name '*.md'` + +ruby -Ilib bin/marutest $files + diff --git a/vendor/gems/gems/maruku-0.6.0/unit_test_span.sh b/vendor/gems/gems/maruku-0.6.0/unit_test_span.sh new file mode 100644 index 00000000..0d98f4b2 --- /dev/null +++ b/vendor/gems/gems/maruku-0.6.0/unit_test_span.sh @@ -0,0 +1,3 @@ +# try ruby -Ilib lib/maruku/tests/new_parser.rb v b +ruby -Ilib lib/maruku/tests/new_parser.rb $* v + diff --git a/vendor/gems/gems/net-scp-1.0.2/CHANGELOG.rdoc b/vendor/gems/gems/net-scp-1.0.2/CHANGELOG.rdoc new file mode 100644 index 00000000..1b5fd66c --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/CHANGELOG.rdoc @@ -0,0 +1,18 @@ +=== 1.0.2 / 4 Feb 2009 + +* Escape spaces in file names on remote server [Jamis Buck] + + +=== 1.0.1 / 29 May 2008 + +* Make sure downloads open the file in binary mode to appease Windows [Jamis Buck] + + +=== 1.0.0 / 1 May 2008 + +* Pass the channel object as the first argument to the progress callback [Jamis Buck] + + +=== 1.0 Preview Release 1 (0.99.0) / 22 Mar 2008 + +* Birthday! diff --git a/vendor/gems/gems/net-scp-1.0.2/Manifest b/vendor/gems/gems/net-scp-1.0.2/Manifest new file mode 100644 index 00000000..9614d571 --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/Manifest @@ -0,0 +1,17 @@ +CHANGELOG.rdoc +lib/net/scp/download.rb +lib/net/scp/errors.rb +lib/net/scp/upload.rb +lib/net/scp/version.rb +lib/net/scp.rb +lib/uri/open-scp.rb +lib/uri/scp.rb +Rakefile +README.rdoc +setup.rb +test/common.rb +test/test_all.rb +test/test_download.rb +test/test_scp.rb +test/test_upload.rb +Manifest diff --git a/vendor/gems/gems/net-scp-1.0.2/README.rdoc b/vendor/gems/gems/net-scp-1.0.2/README.rdoc new file mode 100644 index 00000000..480690ea --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/README.rdoc @@ -0,0 +1,98 @@ += Net::SCP + +* http://net-ssh.rubyforge.org/scp + +== DESCRIPTION: + +Net::SCP is a pure-Ruby implementation of the SCP protocol. This operates over SSH (and requires the Net::SSH library), and allows files and directory trees to copied to and from a remote server. 
+ +== FEATURES/PROBLEMS: + +* Transfer files or entire directory trees to or from a remote host via SCP +* Can preserve file attributes across transfers +* Can download files in-memory, or direct-to-disk +* Support for SCP URI's, and OpenURI + +== SYNOPSIS: + +In a nutshell: + + require 'net/scp' + + # upload a file to a remote server + Net::SCP.upload!("remote.host.com", "username", + "/local/path", "/remote/path", + :password => "password") + + # download a file from a remote server + Net::SCP.download!("remote.host.com", "username", + "/remote/path", "/local/path", + :password => password) + + # download a file to an in-memory buffer + data = Net::SCP::download!("remote.host.com", "username", "/remote/path") + + # use a persistent connection to transfer files + Net::SCP.start("remote.host.com", "username", :password => "password") do |scp| + # upload a file to a remote server + scp.upload! "/local/path", "/remote/path" + + # upload from an in-memory buffer + scp.upload! StringIO.new("some data to upload"), "/remote/path" + + # run multiple downloads in parallel + d1 = scp.download("/remote/path", "/local/path") + d2 = scp.download("/remote/path2", "/local/path2") + [d1, d2].each { |d| d.wait } + end + + # You can also use open-uri to grab data via scp: + require 'uri/open-scp' + data = open("scp://user@host/path/to/file.txt").read + +For more information, see Net::SCP. + +== REQUIREMENTS: + +* Net::SSH 2 + +If you wish to run the tests, you'll also need: + +* Echoe (for Rakefile use) +* Mocha (for tests) + +== INSTALL: + +* gem install net-scp (might need sudo privileges) + +Or, you can do it the hard way (without Rubygems): + +* tar xzf net-scp-*.tgz +* cd net-scp-* +* ruby setup.rb config +* ruby setup.rb install (might need sudo privileges) + +== LICENSE: + +(The MIT License) + +Copyright (c) 2008 Jamis Buck + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/gems/gems/net-scp-1.0.2/Rakefile b/vendor/gems/gems/net-scp-1.0.2/Rakefile new file mode 100644 index 00000000..ac95f271 --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/Rakefile @@ -0,0 +1,30 @@ +$LOAD_PATH.unshift "../net-ssh/lib" +require './lib/net/scp/version' + +begin + require 'echoe' +rescue LoadError + abort "You'll need to have `echoe' installed to use Net::SCP's Rakefile" +end + +version = Net::SCP::Version::STRING.dup +if ENV['SNAPSHOT'].to_i == 1 + version << "." 
<< Time.now.utc.strftime("%Y%m%d%H%M%S") +end + +Echoe.new('net-scp', version) do |p| + p.project = "net-ssh" + p.changelog = "CHANGELOG.rdoc" + + p.author = "Jamis Buck" + p.email = "jamis@jamisbuck.org" + p.summary = "A pure Ruby implementation of the SCP client protocol" + p.url = "http://net-ssh.rubyforge.org/scp" + + p.dependencies = ["net-ssh >=1.99.1"] + + p.need_zip = true + p.include_rakefile = true + + p.rdoc_pattern = /^(lib|README.rdoc|CHANGELOG.rdoc)/ +end \ No newline at end of file diff --git a/vendor/gems/gems/net-scp-1.0.2/lib/net/scp.rb b/vendor/gems/gems/net-scp-1.0.2/lib/net/scp.rb new file mode 100644 index 00000000..99756f7c --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/lib/net/scp.rb @@ -0,0 +1,414 @@ +require 'stringio' + +require 'net/ssh' +require 'net/scp/errors' +require 'net/scp/upload' +require 'net/scp/download' + +module Net + + # Net::SCP implements the SCP (Secure CoPy) client protocol, allowing Ruby + # programs to securely and programmatically transfer individual files or + # entire directory trees to and from remote servers. It provides support for + # multiple simultaneous SCP copies working in parallel over the same + # connection, as well as for synchronous, serial copies. + # + # Basic usage: + # + # require 'net/scp' + # + # Net::SCP.start("remote.host", "username", :password => "passwd") do |scp| + # # synchronous (blocking) upload; call blocks until upload completes + # scp.upload! "/local/path", "/remote/path" + # + # # asynchronous upload; call returns immediately and requires SSH + # # event loop to run + # channel = scp.upload("/local/path", "/remote/path") + # channel.wait + # end + # + # Net::SCP also provides an open-uri tie-in, so you can use the Kernel#open + # method to open and read a remote file: + # + # # if you just want to parse SCP URL's: + # require 'uri/scp' + # url = URI.parse("scp://user@remote.host/path/to/file") + # + # # if you want to read from a URL voa SCP: + # require 'uri/open-scp' + # puts open("scp://user@remote.host/path/to/file").read + # + # Lastly, Net::SCP adds a method to the Net::SSH::Connection::Session class, + # allowing you to easily grab a Net::SCP reference from an existing Net::SSH + # session: + # + # require 'net/ssh' + # require 'net/scp' + # + # Net::SSH.start("remote.host", "username", :password => "passwd") do |ssh| + # ssh.scp.download! "/remote/path", "/local/path" + # end + # + # == Progress Reporting + # + # By default, uploading and downloading proceed silently, without any + # outword indication of their progress. For long running uploads or downloads + # (and especially in interactive environments) it is desirable to report + # to the user the progress of the current operation. + # + # To receive progress reports for the current operation, just pass a block + # to #upload or #download (or one of their variants): + # + # scp.upload!("/path/to/local", "/path/to/remote") do |ch, name, sent, total| + # puts "#{name}: #{sent}/#{total}" + # end + # + # Whenever a new chunk of data is recieved for or sent to a file, the callback + # will be invoked, indicating the name of the file (local for downloads, + # remote for uploads), the number of bytes that have been sent or received + # so far for the file, and the size of the file. + # + #-- + # = Protocol Description + # + # Although this information has zero relevance to consumers of the Net::SCP + # library, I'm documenting it here so that anyone else looking for documentation + # of the SCP protocol won't be left high-and-dry like I was. 
The following is + # reversed engineered from the OpenSSH SCP implementation, and so may + # contain errors. You have been warned! + # + # The first step is to invoke the "scp" command on the server. It accepts + # the following parameters, which must be set correctly to avoid errors: + # + # * "-t" -- tells the remote scp process that data will be sent "to" it, + # e.g., that data will be uploaded and it should initialize itself + # accordingly. + # * "-f" -- tells the remote scp process that data should come "from" it, + # e.g., that data will be downloaded and it should initialize itself + # accordingly. + # * "-v" -- verbose mode; the remote scp process should chatter about what + # it is doing via stderr. + # * "-p" -- preserve timestamps. 'T' directives (see below) should be/will + # be sent to indicate the modification and access times of each file. + # * "-r" -- recursive transfers should be allowed. Without this, it is an + # error to upload or download a directory. + # + # After those flags, the name of the remote file/directory should be passed + # as the sole non-switch argument to scp. + # + # Then the fun begins. If you're doing a download, enter the download_start_state. + # Otherwise, look for upload_start_state. + # + # == Net::SCP::Download#download_start_state + # + # This is the start state for downloads. It simply sends a 0-byte to the + # server. The next state is Net::SCP::Download#read_directive_state. + # + # == Net::SCP::Upload#upload_start_state + # + # Sets up the initial upload scaffolding and waits for a 0-byte from the + # server, and then switches to Net::SCP::Upload#upload_current_state. + # + # == Net::SCP::Download#read_directive_state + # + # Reads a directive line from the input. The following directives are + # recognized: + # + # * T%d %d %d %d -- a "times" packet. Indicates that the next file to be + # downloaded must have mtime/usec/atime/usec attributes preserved. + # * D%o %d %s -- a directory change. The process is changing to a directory + # with the given permissions/size/name, and the recipient should create + # a directory with the same name and permissions. Subsequent files and + # directories will be children of this directory, until a matching 'E' + # directive. + # * C%o %d %s -- a file is being sent next. The file will have the given + # permissions/size/name. Immediately following this line, +size+ bytes + # will be sent, raw. + # * E -- terminator directive. Indicates the end of a directory, and subsequent + # files and directories should be received by the parent of the current + # directory. + # + # If a 'C' directive is received, we switch over to + # Net::SCP::Download#read_data_state. If an 'E' directive is received, and + # there is no parent directory, we switch over to Net::SCP#finish_state. + # + # Regardless of what the next state is, we send a 0-byte to the server + # before moving to the next state. + # + # == Net::SCP::Download#read_data_state + # + # Bytes are read to satisfy the size of the incoming file. When all pending + # data has been read, we wait for the server to send a 0-byte, and then we + # switch to the Net::SCP::Download#finish_read_state. + # + # == Net::SCP::Download#finish_read_state + # + # We sent a 0-byte to the server to indicate that the file was successfully + # received. If there is no parent directory, then we're downloading a single + # file and we switch to Net::SCP#finish_state. Otherwise we jump back to the + # Net::SCP::Download#read_directive state to see what we get to download next. 
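+  #
+  # As a rough sketch (pieced together from the directives described above,
+  # not taken from the OpenSSH sources), uploading a single 1234-byte file
+  # named "foo.txt" with mode 0644 and :preserve enabled would exchange
+  # roughly the following (file name, size and times are made up):
+  #
+  #   server: \0                              (ready)
+  #   client: T1232815688 0 1232815688 0\n    (times; only when :preserve)
+  #   server: \0
+  #   client: C0644 1234 foo.txt\n
+  #   server: \0
+  #   client: ...1234 bytes of raw file data... followed by \0
+  #   server: \0
+  #   client: eof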
+ # + # == Net::SCP::Upload#upload_current_state + # + # If the current item is a file, send a file. Sending a file starts with a + # 'T' directive (if :preserve is true), then a wait for the server to respond, + # and then a 'C' directive, and then a wait for the server to respond, and + # then a jump to Net::SCP::Upload#send_data_state. + # + # If current item is a directory, send a 'D' directive, and wait for the + # server to respond with a 0-byte. Then jump to Net::SCP::Upload#next_item_state. + # + # == Net::SCP::Upload#send_data_state + # + # Reads and sends the next chunk of data to the server. The state machine + # remains in this state until all data has been sent, at which point we + # send a 0-byte to the server, and wait for the server to respond with a + # 0-byte of its own. Then we jump back to Net::SCP::Upload#next_item_state. + # + # == Net::SCP::Upload#next_item_state + # + # If there is nothing left to upload, and there is no parent directory, + # jump to Net::SCP#finish_state. + # + # If there is nothing left to upload from the current directory, send an + # 'E' directive and wait for the server to respond with a 0-byte. Then go + # to Net::SCP::Upload#next_item_state. + # + # Otherwise, set the current upload source and go to + # Net::SCP::Upload#upload_current_state. + # + # == Net::SCP#finish_state + # + # Tells the server that no more data is forthcoming from this end of the + # pipe (via Net::SSH::Connection::Channel#eof!) and leaves the pipe to drain. + # It will be terminated when the remote process closes with an exit status + # of zero. + #++ + class SCP + include Net::SSH::Loggable + include Upload, Download + + # Starts up a new SSH connection and instantiates a new SCP session on + # top of it. If a block is given, the SCP session is yielded, and the + # SSH session is closed automatically when the block terminates. If no + # block is given, the SCP session is returned. + def self.start(host, username, options={}) + session = Net::SSH.start(host, username, options) + scp = new(session) + + if block_given? + begin + yield scp + session.loop + ensure + session.close + end + else + return scp + end + end + + # Starts up a new SSH connection using the +host+ and +username+ parameters, + # instantiates a new SCP session on top of it, and then begins an + # upload from +local+ to +remote+. If the +options+ hash includes an + # :ssh key, the value for that will be passed to the SSH connection as + # options (e.g., to set the password, etc.). All other options are passed + # to the #upload! method. If a block is given, it will be used to report + # progress (see "Progress Reporting", under Net::SCP). + def self.upload!(host, username, local, remote, options={}, &progress) + options = options.dup + start(host, username, options.delete(:ssh) || {}) do |scp| + scp.upload!(local, remote, options, &progress) + end + end + + # Starts up a new SSH connection using the +host+ and +username+ parameters, + # instantiates a new SCP session on top of it, and then begins a + # download from +remote+ to +local+. If the +options+ hash includes an + # :ssh key, the value for that will be passed to the SSH connection as + # options (e.g., to set the password, etc.). All other options are passed + # to the #download! method. If a block is given, it will be used to report + # progress (see "Progress Reporting", under Net::SCP). 
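+    #
+    # For example (host, user, paths and password are placeholders):
+    #
+    #   Net::SCP.download!("remote.host", "username",
+    #     "/remote/path", "/local/path",
+    #     :ssh => { :password => "secret" })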
+ def self.download!(host, username, remote, local=nil, options={}, &progress) + options = options.dup + start(host, username, options.delete(:ssh) || {}) do |scp| + return scp.download!(remote, local, options, &progress) + end + end + + # The underlying Net::SSH session that acts as transport for the SCP + # packets. + attr_reader :session + + # Creates a new Net::SCP session on top of the given Net::SSH +session+ + # object. + def initialize(session) + @session = session + self.logger = session.logger + end + + # Inititiate a synchronous (non-blocking) upload from +local+ to +remote+. + # The following options are recognized: + # + # * :recursive - the +local+ parameter refers to a local directory, which + # should be uploaded to a new directory named +remote+ on the remote + # server. + # * :preserve - the atime and mtime of the file should be preserved. + # * :verbose - the process should result in verbose output on the server + # end (useful for debugging). + # * :chunk_size - the size of each "chunk" that should be sent. Defaults + # to 2048. Changing this value may improve throughput at the expense + # of decreasing interactivity. + # + # This method will return immediately, returning the Net::SSH::Connection::Channel + # object that will support the upload. To wait for the upload to finish, + # you can either call the #wait method on the channel, or otherwise run + # the Net::SSH event loop until the channel's #active? method returns false. + # + # channel = scp.upload("/local/path", "/remote/path") + # channel.wait + def upload(local, remote, options={}, &progress) + start_command(:upload, local, remote, options, &progress) + end + + # Same as #upload, but blocks until the upload finishes. Identical to + # calling #upload and then calling the #wait method on the channel object + # that is returned. The return value is not defined. + def upload!(local, remote, options={}, &progress) + upload(local, remote, options, &progress).wait + end + + # Inititiate a synchronous (non-blocking) download from +remote+ to +local+. + # The following options are recognized: + # + # * :recursive - the +remote+ parameter refers to a remote directory, which + # should be downloaded to a new directory named +local+ on the local + # machine. + # * :preserve - the atime and mtime of the file should be preserved. + # * :verbose - the process should result in verbose output on the server + # end (useful for debugging). + # + # This method will return immediately, returning the Net::SSH::Connection::Channel + # object that will support the download. To wait for the download to finish, + # you can either call the #wait method on the channel, or otherwise run + # the Net::SSH event loop until the channel's #active? method returns false. + # + # channel = scp.download("/remote/path", "/local/path") + # channel.wait + def download(remote, local, options={}, &progress) + start_command(:download, local, remote, options, &progress) + end + + # Same as #download, but blocks until the download finishes. Identical to + # calling #download and then calling the #wait method on the channel + # object that is returned. + # + # scp.download!("/remote/path", "/local/path") + # + # If +local+ is nil, and the download is not recursive (e.g., it is downloading + # only a single file), the file will be downloaded to an in-memory buffer + # and the resulting string returned. + # + # data = download!("/remote/path") + def download!(remote, local=nil, options={}, &progress) + destination = local ? 
local : StringIO.new + download(remote, destination, options, &progress).wait + local ? true : destination.string + end + + private + + # Constructs the scp command line needed to initiate and SCP session + # for the given +mode+ (:upload or :download) and with the given options + # (:verbose, :recursive, :preserve). Returns the command-line as a + # string, ready to execute. + def scp_command(mode, options) + command = "scp " + command << (mode == :upload ? "-t" : "-f") + command << " -v" if options[:verbose] + command << " -r" if options[:recursive] + command << " -p" if options[:preserve] + command + end + + # Opens a new SSH channel and executes the necessary SCP command over + # it (see #scp_command). It then sets up the necessary callbacks, and + # sets up a state machine to use to process the upload or download. + # (See Net::SCP::Upload and Net::SCP::Download). + def start_command(mode, local, remote, options={}, &callback) + session.open_channel do |channel| + command = "#{scp_command(mode, options)} #{sanitize_file_name(remote)}" + channel.exec(command) do |ch, success| + if success + channel[:local ] = local + channel[:remote ] = remote + channel[:options ] = options.dup + channel[:callback] = callback + channel[:buffer ] = Net::SSH::Buffer.new + channel[:state ] = "#{mode}_start" + channel[:stack ] = [] + + channel.on_close { |ch| raise Net::SCP::Error, "SCP did not finish successfully (#{ch[:exit]})" if ch[:exit] != 0 } + channel.on_data { |ch, data| channel[:buffer].append(data) } + channel.on_extended_data { |ch, type, data| debug { data.chomp } } + channel.on_request("exit-status") { |ch, data| channel[:exit] = data.read_long } + channel.on_process { send("#{channel[:state]}_state", channel) } + else + channel.close + raise Net::SCP::Error, "could not exec scp on the remote host" + end + end + end + end + + # Causes the state machine to enter the "await response" state, where + # things just pause until the server replies with a 0 (see + # #await_response_state), at which point the state machine will pick up + # at +next_state+ and continue processing. + def await_response(channel, next_state) + channel[:state] = :await_response + channel[:next ] = next_state.to_sym + # check right away, to see if the response is immediately available + await_response_state(channel) + end + + # The action invoked while the state machine remains in the "await + # response" state. As long as there is no data ready to process, the + # machine will remain in this state. As soon as the server replies with + # an integer 0 as the only byte, the state machine is kicked into the + # next state (see +await_response+). If the response is not a 0, an + # exception is raised. + def await_response_state(channel) + return if channel[:buffer].available == 0 + c = channel[:buffer].read_byte + raise "#{c.chr}#{channel[:buffer].read}" if c != 0 + channel[:next], channel[:state] = nil, channel[:next] + send("#{channel[:state]}_state", channel) + end + + # The action invoked when the state machine is in the "finish" state. + # It just tells the server not to expect any more data from this end + # of the pipe, and allows the pipe to drain until the server closes it. + def finish_state(channel) + channel.eof! + end + + # Invoked to report progress back to the client. If a callback was not + # set, this does nothing. 
+ def progress_callback(channel, name, sent, total) + channel[:callback].call(channel, name, sent, total) if channel[:callback] + end + + def sanitize_file_name(file_name) + file_name.gsub(/[ ]/) { |m| "\\#{m}" } + end + end +end + +class Net::SSH::Connection::Session + # Provides a convenient way to initialize a SCP session given a Net::SSH + # session. Returns the Net::SCP instance, ready to use. + def scp + @scp ||= Net::SCP.new(self) + end +end diff --git a/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/download.rb b/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/download.rb new file mode 100644 index 00000000..3384f456 --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/download.rb @@ -0,0 +1,150 @@ +require 'net/scp/errors' + +module Net; class SCP + + # This module implements the state machine for downloading information from + # a remote server. It exposes no public methods. See Net::SCP#download for + # a discussion of how to use Net::SCP to download data. + module Download + private + + # This is the starting state for the download state machine. The + # #start_command method puts the state machine into this state the first + # time the channel is processed. This state does some basic error checking + # and scaffolding and then sends a 0-byte to the remote server, indicating + # readiness to proceed. Then, the state machine is placed into the + # "read directive" state (see #read_directive_state). + def download_start_state(channel) + if channel[:local].respond_to?(:write) && channel[:options][:recursive] + raise Net::SCP::Error, "cannot recursively download to an in-memory location" + elsif channel[:local].respond_to?(:write) && channel[:options][:preserve] + lwarn { ":preserve option is ignored when downloading to an in-memory buffer" } + channel[:options].delete(:preserve) + elsif channel[:options][:recursive] && !File.exists?(channel[:local]) + Dir.mkdir(channel[:local]) + end + + channel.send_data("\0") + channel[:state] = :read_directive + end + + # This state parses the next full line (up to a new-line) for the next + # directive. (See the SCP protocol documentation in Net::SCP for the + # possible directives). + def read_directive_state(channel) + return unless line = channel[:buffer].read_to("\n") + channel[:buffer].consume! + + directive = parse_directive(line) + case directive[:type] + when :times then + channel[:times] = directive + when :directory + read_directory(channel, directive) + when :file + read_file(channel, directive) + when :end + channel[:local] = File.dirname(channel[:local]) + channel[:stack].pop + channel[:state] = :finish if channel[:stack].empty? + end + + channel.send_data("\0") + end + + # Reads data from the channel for as long as there is data remaining to + # be read. As soon as there is no more data to read for the current file, + # the state machine switches to #finish_read_state. + def read_data_state(channel) + return if channel[:buffer].empty? + data = channel[:buffer].read!(channel[:remaining]) + channel[:io].write(data) + channel[:remaining] -= data.length + progress_callback(channel, channel[:file][:name], channel[:file][:size] - channel[:remaining], channel[:file][:size]) + await_response(channel, :finish_read) if channel[:remaining] <= 0 + end + + # Finishes off the read, sets the times for the file (if any), and then + # jumps to either #finish_state (for single-file downloads) or + # #read_directive_state (for recursive downloads). A 0-byte is sent to the + # server to indicate that the file was recieved successfully. 
+ def finish_read_state(channel) + channel[:io].close unless channel[:io] == channel[:local] + + if channel[:options][:preserve] && channel[:file][:times] + File.utime(channel[:file][:times][:atime], + channel[:file][:times][:mtime], channel[:file][:name]) + end + + channel[:file] = nil + channel[:state] = channel[:stack].empty? ? :finish : :read_directive + channel.send_data("\0") + end + + # Parses the given +text+ to extract which SCP directive it contains. It + # then returns a hash with at least one key, :type, which describes what + # type of directive it is. The hash may also contain other, directive-specific + # data. + def parse_directive(text) + case type = text[0] + when ?T + parts = text[1..-1].split(/ /, 4).map { |i| i.to_i } + { :type => :times, + :mtime => Time.at(parts[0], parts[1]), + :atime => Time.at(parts[2], parts[3]) } + when ?C, ?D + parts = text[1..-1].split(/ /, 3) + { :type => (type == ?C ? :file : :directory), + :mode => parts[0].to_i(8), + :size => parts[1].to_i, + :name => parts[2].chomp } + when ?E + { :type => :end } + else raise ArgumentError, "unknown directive: #{text.inspect}" + end + end + + # Sets the new directory as the current directory, creates the directory + # if it does not exist, and then falls back into #read_directive_state. + def read_directory(channel, directive) + if !channel[:options][:recursive] + raise Net::SCP::Error, ":recursive not specified for directory download" + end + + channel[:local] = File.join(channel[:local], directive[:name]) + + if File.exists?(channel[:local]) && !File.directory?(channel[:local]) + raise "#{channel[:local]} already exists and is not a directory" + elsif !File.exists?(channel[:local]) + Dir.mkdir(channel[:local], directive[:mode] | 0700) + end + + if channel[:options][:preserve] && channel[:times] + File.utime(channel[:times][:atime], channel[:times][:mtime], channel[:local]) + end + + channel[:stack] << directive + channel[:times] = nil + end + + # Opens the given file locally, and switches to #read_data_state to do the + # actual read. + def read_file(channel, directive) + if !channel[:local].respond_to?(:write) + directive[:name] = (channel[:options][:recursive] || File.directory?(channel[:local])) ? + File.join(channel[:local], directive[:name]) : + channel[:local] + end + + channel[:file] = directive.merge(:times => channel[:times]) + channel[:io] = channel[:local].respond_to?(:write) ? channel[:local] : + File.new(directive[:name], "wb", directive[:mode] | 0600) + channel[:times] = nil + channel[:remaining] = channel[:file][:size] + channel[:state] = :read_data + + progress_callback(channel, channel[:file][:name], 0, channel[:file][:size]) + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/errors.rb b/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/errors.rb new file mode 100644 index 00000000..9687decf --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/errors.rb @@ -0,0 +1,5 @@ +module Net; class SCP + + class Error < RuntimeError; end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/upload.rb b/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/upload.rb new file mode 100644 index 00000000..1e62e7d7 --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/upload.rb @@ -0,0 +1,142 @@ +require 'net/scp/errors' + +module Net; class SCP + + # This module implements the state machine for uploading information to + # a remote server. It exposes no public methods. 
See Net::SCP#upload for + # a discussion of how to use Net::SCP to upload data. + module Upload + private + + # The default read chunk size, if an explicit chunk-size is not specified + # by the client. + DEFAULT_CHUNK_SIZE = 2048 + + # The start state for uploads. Simply sets up the upload scaffolding, + # sets the current item to upload, and jumps to #upload_current_state. + def upload_start_state(channel) + if channel[:local].respond_to?(:read) + channel[:options].delete(:recursive) + channel[:options].delete(:preserve) + end + + channel[:chunk_size] = channel[:options][:chunk_size] || DEFAULT_CHUNK_SIZE + set_current(channel, channel[:local]) + await_response(channel, :upload_current) + end + + # Determines what the next thing to upload is, and branches. If the next + # item is a file, goes to #upload_file_state. If it is a directory, goes + # to #upload_directory_state. + def upload_current_state(channel) + if channel[:current].respond_to?(:read) + upload_file_state(channel) + elsif File.directory?(channel[:current]) + raise Net::SCP::Error, "can't upload directories unless :recursive" unless channel[:options][:recursive] + upload_directory_state(channel) + elsif File.file?(channel[:current]) + upload_file_state(channel) + else + raise Net::SCP::Error, "not a directory or a regular file: #{channel[:current].inspect}" + end + end + + # After transferring attributes (if requested), sends a 'D' directive and + # awaites the server's 0-byte response. Then goes to #next_item_state. + def upload_directory_state(channel) + if preserve_attributes_if_requested(channel) + mode = channel[:stat].mode & 07777 + directive = "D%04o %d %s\n" % [mode, 0, File.basename(channel[:current])] + channel.send_data(directive) + channel[:cwd] = channel[:current] + channel[:stack] << Dir.entries(channel[:current]).reject { |i| i == "." || i == ".." } + await_response(channel, :next_item) + end + end + + # After transferring attributes (if requested), sends a 'C' directive and + # awaits the server's 0-byte response. Then goes to #send_data_state. + def upload_file_state(channel) + if preserve_attributes_if_requested(channel) + mode = channel[:stat] ? channel[:stat].mode & 07777 : channel[:options][:mode] + channel[:name] = channel[:current].respond_to?(:read) ? channel[:remote] : channel[:current] + directive = "C%04o %d %s\n" % [mode || 0640, channel[:size], File.basename(channel[:name])] + channel.send_data(directive) + channel[:io] = channel[:current].respond_to?(:read) ? channel[:current] : File.open(channel[:current], "rb") + channel[:sent] = 0 + progress_callback(channel, channel[:name], channel[:sent], channel[:size]) + await_response(channel, :send_data) + end + end + + # If any data remains to be transferred from the current file, sends it. + # Otherwise, sends a 0-byte and transfers to #next_item_state. + def send_data_state(channel) + data = channel[:io].read(channel[:chunk_size]) + if data.nil? + channel[:io].close unless channel[:local].respond_to?(:read) + channel.send_data("\0") + await_response(channel, :next_item) + else + channel[:sent] += data.length + progress_callback(channel, channel[:name], channel[:sent], channel[:size]) + channel.send_data(data) + end + end + + # Checks the work queue to see what needs to be done next. If there is + # nothing to do, calls Net::SCP#finish_state. If we're at the end of a + # directory, sends an 'E' directive and waits for the server to respond + # before moving to #next_item_state. 
Otherwise, sets the next thing to + # upload and moves to #upload_current_state. + def next_item_state(channel) + if channel[:stack].empty? + finish_state(channel) + else + next_item = channel[:stack].last.shift + if next_item.nil? + channel[:stack].pop + channel[:cwd] = File.dirname(channel[:cwd]) + channel.send_data("E\n") + await_response(channel, channel[:stack].empty? ? :finish : :next_item) + else + set_current(channel, next_item) + upload_current_state(channel) + end + end + end + + # Sets the given +path+ as the new current item to upload. + def set_current(channel, path) + path = channel[:cwd] ? File.join(channel[:cwd], path) : path + channel[:current] = path + + if channel[:current].respond_to?(:read) + channel[:stat] = channel[:current].stat if channel[:current].respond_to?(:stat) + else + channel[:stat] = File.stat(channel[:current]) + end + + channel[:size] = channel[:stat] ? channel[:stat].size : channel[:current].size + end + + # If the :preserve option is set, send a 'T' directive and wait for the + # server to respond before proceeding to either #upload_file_state or + # #upload_directory_state, depending on what is being uploaded. + def preserve_attributes_if_requested(channel) + if channel[:options][:preserve] && !channel[:preserved] + channel[:preserved] = true + stat = channel[:stat] + directive = "T%d %d %d %d\n" % [stat.mtime.to_i, stat.mtime.usec, stat.atime.to_i, stat.atime.usec] + channel.send_data(directive) + type = stat.directory? ? :directory : :file + await_response(channel, "upload_#{type}") + return false + else + channel[:preserved] = false + return true + end + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/version.rb b/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/version.rb new file mode 100644 index 00000000..75a7c93f --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/lib/net/scp/version.rb @@ -0,0 +1,18 @@ +require 'net/ssh/version' + +module Net; class SCP + + # Describes the current version of the Net::SCP library. 
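+  # For example, assuming Net::SSH::Version#to_s joins the parts with dots:
+  #
+  #   Net::SCP::Version::STRING   # => "1.0.2" for this vendored copy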
+ class Version < Net::SSH::Version + MAJOR = 1 + MINOR = 0 + TINY = 2 + + # The current version, as a Version instance + CURRENT = new(MAJOR, MINOR, TINY) + + # The current version, as a String instance + STRING = CURRENT.to_s + end + +end; end diff --git a/vendor/gems/gems/net-scp-1.0.2/lib/uri/open-scp.rb b/vendor/gems/gems/net-scp-1.0.2/lib/uri/open-scp.rb new file mode 100644 index 00000000..af919550 --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/lib/uri/open-scp.rb @@ -0,0 +1,18 @@ +require 'open-uri' +require 'uri/scp' +require 'net/scp' + +module URI + + class SCP + def buffer_open(buf, proxy, open_options) + options = open_options.merge(:port => port, :password => password) + progress = options.delete(:progress_proc) + buf << Net::SCP.download!(host, user, path, nil, open_options, &progress) + buf.io.rewind + end + + include OpenURI::OpenRead + end + +end diff --git a/vendor/gems/gems/net-scp-1.0.2/lib/uri/scp.rb b/vendor/gems/gems/net-scp-1.0.2/lib/uri/scp.rb new file mode 100644 index 00000000..5829bc50 --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/lib/uri/scp.rb @@ -0,0 +1,35 @@ +require 'uri/generic' + +module URI + class SCP < Generic + DEFAULT_PORT = 22 + + COMPONENT = [ + :scheme, + :userinfo, + :host, :port, :path, + :query + ].freeze + + attr_reader :options + + def self.new2(user, password, host, port, path, query) + new('scp', [user, password], host, port, nil, path, nil, query) + end + + def initialize(*args) + super(*args) + + @options = Hash.new + (query || "").split(/&/).each do |pair| + name, value = pair.split(/=/, 2) + opt_name = name.to_sym + values = value.split(/,/).map { |v| v.to_i.to_s == v ? v.to_i : v } + values = values.first if values.length == 1 + options[opt_name] = values + end + end + end + + @@schemes['SCP'] = SCP +end \ No newline at end of file diff --git a/vendor/gems/gems/net-scp-1.0.2/net-scp.gemspec b/vendor/gems/gems/net-scp-1.0.2/net-scp.gemspec new file mode 100644 index 00000000..e27db0a6 --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/net-scp.gemspec @@ -0,0 +1,36 @@ +Gem::Specification.new do |s| + s.name = %q{net-scp} + s.version = "1.0.2" + + s.required_rubygems_version = Gem::Requirement.new(">= 1.2") if s.respond_to? :required_rubygems_version= + s.authors = ["Jamis Buck"] + s.date = %q{2009-02-04} + s.description = %q{A pure Ruby implementation of the SCP client protocol} + s.email = %q{jamis@jamisbuck.org} + s.extra_rdoc_files = ["CHANGELOG.rdoc", "lib/net/scp/download.rb", "lib/net/scp/errors.rb", "lib/net/scp/upload.rb", "lib/net/scp/version.rb", "lib/net/scp.rb", "lib/uri/open-scp.rb", "lib/uri/scp.rb", "README.rdoc"] + s.files = ["CHANGELOG.rdoc", "lib/net/scp/download.rb", "lib/net/scp/errors.rb", "lib/net/scp/upload.rb", "lib/net/scp/version.rb", "lib/net/scp.rb", "lib/uri/open-scp.rb", "lib/uri/scp.rb", "Rakefile", "README.rdoc", "setup.rb", "test/common.rb", "test/test_all.rb", "test/test_download.rb", "test/test_scp.rb", "test/test_upload.rb", "Manifest", "net-scp.gemspec"] + s.has_rdoc = true + s.homepage = %q{http://net-ssh.rubyforge.org/scp} + s.rdoc_options = ["--line-numbers", "--inline-source", "--title", "Net-scp", "--main", "README.rdoc"] + s.require_paths = ["lib"] + s.rubyforge_project = %q{net-ssh} + s.rubygems_version = %q{1.2.0} + s.summary = %q{A pure Ruby implementation of the SCP client protocol} + s.test_files = ["test/test_all.rb"] + + if s.respond_to? 
:specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 2 + + if current_version >= 3 then + s.add_runtime_dependency(%q, [">= 1.99.1"]) + s.add_development_dependency(%q, [">= 0"]) + else + s.add_dependency(%q, [">= 1.99.1"]) + s.add_dependency(%q, [">= 0"]) + end + else + s.add_dependency(%q, [">= 1.99.1"]) + s.add_dependency(%q, [">= 0"]) + end +end diff --git a/vendor/gems/gems/daemons-1.0.10/setup.rb b/vendor/gems/gems/net-scp-1.0.2/setup.rb similarity index 60% rename from vendor/gems/gems/daemons-1.0.10/setup.rb rename to vendor/gems/gems/net-scp-1.0.2/setup.rb index 0807023d..1549b3f3 100644 --- a/vendor/gems/gems/daemons-1.0.10/setup.rb +++ b/vendor/gems/gems/net-scp-1.0.2/setup.rb @@ -5,16 +5,66 @@ # # This program is free software. # You can distribute/modify this program under the terms of -# the GNU LGPL, Lesser General Public License version 2.1. +# the GNU Lesser General Public License version 2.1. # -unless Enumerable.method_defined?(:map) # Ruby 1.4.6 +# +# For backward compatibility +# + +unless Enumerable.method_defined?(:map) module Enumerable alias map collect end end -unless File.respond_to?(:read) # Ruby 1.6 +unless Enumerable.method_defined?(:detect) + module Enumerable + alias detect find + end +end + +unless Enumerable.method_defined?(:select) + module Enumerable + alias select find_all + end +end + +unless Enumerable.method_defined?(:reject) + module Enumerable + def reject + result = [] + each do |i| + result.push i unless yield(i) + end + result + end + end +end + +unless Enumerable.method_defined?(:inject) + module Enumerable + def inject(result) + each do |i| + result = yield(result, i) + end + result + end + end +end + +unless Enumerable.method_defined?(:any?) + module Enumerable + def any? + each do |i| + return true if yield(i) + end + false + end + end +end + +unless File.respond_to?(:read) def File.read(fname) open(fname) {|f| return f.read @@ -22,6 +72,10 @@ unless File.respond_to?(:read) # Ruby 1.6 end end +# +# Application independent utilities +# + def File.binread(fname) open(fname, 'rb') {|f| return f.read @@ -33,18 +87,11 @@ def File.dir?(path) File.directory?((path[-1,1] == '/') ? path : path + '/') end - -class SetupError < StandardError; end - -def setup_rb_error(msg) - raise SetupError, msg -end - # # Config # -if arg = ARGV.detect {|arg| /\A--rbconfig=/ =~ arg } +if arg = ARGV.detect{|arg| /\A--rbconfig=/ =~ arg } ARGV.delete(arg) require arg.split(/=/, 2)[1] $".push 'rbconfig.rb' @@ -57,325 +104,219 @@ def multipackage_install? end -class ConfigItem - def initialize(name, template, default, desc) - @name = name.freeze - @template = template - @value = default - @default = default.dup.freeze - @description = desc +class ConfigTable + + c = ::Config::CONFIG + + rubypath = c['bindir'] + '/' + c['ruby_install_name'] + + major = c['MAJOR'].to_i + minor = c['MINOR'].to_i + teeny = c['TEENY'].to_i + version = "#{major}.#{minor}" + + # ruby ver. >= 1.4.4? 
+ newpath_p = ((major >= 2) or + ((major == 1) and + ((minor >= 5) or + ((minor == 4) and (teeny >= 4))))) + + subprefix = lambda {|path| + path.sub(/\A#{Regexp.quote(c['prefix'])}/o, '$prefix') + } + + if c['rubylibdir'] + # V < 1.6.3 + stdruby = subprefix.call(c['rubylibdir']) + siteruby = subprefix.call(c['sitedir']) + versite = subprefix.call(c['sitelibdir']) + sodir = subprefix.call(c['sitearchdir']) + elsif newpath_p + # 1.4.4 <= V <= 1.6.3 + stdruby = "$prefix/lib/ruby/#{version}" + siteruby = subprefix.call(c['sitedir']) + versite = siteruby + '/' + version + sodir = "$site-ruby/#{c['arch']}" + else + # V < 1.4.4 + stdruby = "$prefix/lib/ruby/#{version}" + siteruby = "$prefix/lib/ruby/#{version}/site_ruby" + versite = siteruby + sodir = "$site-ruby/#{c['arch']}" end - attr_reader :name - attr_reader :description - - attr_accessor :default - alias help_default default - - def help_opt - "--#{@name}=#{@template}" + if arg = c['configure_args'].split.detect {|arg| /--with-make-prog=/ =~ arg } + makeprog = arg.sub(/'/, '').split(/=/, 2)[1] + else + makeprog = 'make' end - def value - @value + common_descripters = [ + [ 'prefix', [ c['prefix'], + 'path', + 'path prefix of target environment' ] ], + [ 'std-ruby', [ stdruby, + 'path', + 'the directory for standard ruby libraries' ] ], + [ 'site-ruby-common', [ siteruby, + 'path', + 'the directory for version-independent non-standard ruby libraries' ] ], + [ 'site-ruby', [ versite, + 'path', + 'the directory for non-standard ruby libraries' ] ], + [ 'bin-dir', [ '$prefix/bin', + 'path', + 'the directory for commands' ] ], + [ 'rb-dir', [ '$site-ruby', + 'path', + 'the directory for ruby scripts' ] ], + [ 'so-dir', [ sodir, + 'path', + 'the directory for ruby extentions' ] ], + [ 'data-dir', [ '$prefix/share', + 'path', + 'the directory for shared data' ] ], + [ 'ruby-path', [ rubypath, + 'path', + 'path to set to #! line' ] ], + [ 'ruby-prog', [ rubypath, + 'name', + 'the ruby program using for installation' ] ], + [ 'make-prog', [ makeprog, + 'name', + 'the make program to compile ruby extentions' ] ], + [ 'without-ext', [ 'no', + 'yes/no', + 'does not compile/install ruby extentions' ] ] + ] + multipackage_descripters = [ + [ 'with', [ '', + 'name,name...', + 'package names that you want to install', + 'ALL' ] ], + [ 'without', [ '', + 'name,name...', + 'package names that you do not want to install', + 'NONE' ] ] + ] + if multipackage_install? + DESCRIPTER = common_descripters + multipackage_descripters + else + DESCRIPTER = common_descripters end - def eval(table) - @value.gsub(%r<\$([^/]+)>) { table[$1] } + SAVE_FILE = 'config.save' + + def ConfigTable.each_name(&block) + keys().each(&block) end - def set(val) - @value = check(val) + def ConfigTable.keys + DESCRIPTER.map {|name, *dummy| name } end - private - - def check(val) - setup_rb_error "config: --#{name} requires argument" unless val - val - end -end - -class BoolItem < ConfigItem - def config_type - 'bool' + def ConfigTable.each_definition(&block) + DESCRIPTER.each(&block) end - def help_opt - "--#{@name}" + def ConfigTable.get_entry(name) + name, ent = DESCRIPTER.assoc(name) + ent end - private - - def check(val) - return 'yes' unless val - unless /\A(y(es)?|n(o)?|t(rue)?|f(alse))\z/i =~ val - setup_rb_error "config: --#{@name} accepts only yes/no for argument" - end - (/\Ay(es)?|\At(rue)/i =~ value) ? 
'yes' : 'no' - end -end - -class PathItem < ConfigItem - def config_type - 'path' + def ConfigTable.get_entry!(name) + get_entry(name) or raise ArgumentError, "no such config: #{name}" end - private - - def check(path) - setup_rb_error "config: --#{@name} requires argument" unless path - path[0,1] == '$' ? path : File.expand_path(path) - end -end - -class ProgramItem < ConfigItem - def config_type - 'program' - end -end - -class SelectItem < ConfigItem - def initialize(name, template, default, desc) - super - @ok = template.split('/') + def ConfigTable.add_entry(name, vals) + ConfigTable::DESCRIPTER.push [name,vals] end - def config_type - 'select' + def ConfigTable.remove_entry(name) + get_entry(name) or raise ArgumentError, "no such config: #{name}" + DESCRIPTER.delete_if {|n, arr| n == name } end - private - - def check(val) - unless @ok.include?(val.strip) - setup_rb_error "config: use --#{@name}=#{@template} (#{val})" - end - val.strip - end -end - -class PackageSelectionItem < ConfigItem - def initialize(name, template, default, help_default, desc) - super name, template, default, desc - @help_default = help_default + def ConfigTable.config_key?(name) + get_entry(name) ? true : false end - attr_reader :help_default - - def config_type - 'package' + def ConfigTable.bool_config?(name) + ent = get_entry(name) or return false + ent[1] == 'yes/no' end - private - - def check(val) - unless File.dir?("packages/#{val}") - setup_rb_error "config: no such package: #{val}" - end - val + def ConfigTable.value_config?(name) + ent = get_entry(name) or return false + ent[1] != 'yes/no' end -end -class ConfigTable_class + def ConfigTable.path_config?(name) + ent = get_entry(name) or return false + ent[1] == 'path' + end - def initialize(items) - @items = items + + class << self + alias newobj new + end + + def ConfigTable.new + c = newobj() + c.initialize_from_table + c + end + + def ConfigTable.load + c = newobj() + c.initialize_from_file + c + end + + def initialize_from_table @table = {} - items.each do |i| - @table[i.name] = i - end - ALIASES.each do |ali, name| - @table[ali] = @table[name] + DESCRIPTER.each do |k, (default, vname, desc, default2)| + @table[k] = default end end - include Enumerable - - def each(&block) - @items.each(&block) - end - - def key?(name) - @table.key?(name) - end - - def lookup(name) - @table[name] or raise ArgumentError, "no such config item: #{name}" - end - - def add(item) - @items.push item - @table[item.name] = item - end - - def remove(name) - item = lookup(name) - @items.delete_if {|i| i.name == name } - @table.delete_if {|name, i| i.name == name } - item - end - - def new - dup() - end - - def savefile - '.config' - end - - def load - begin - t = dup() - File.foreach(savefile()) do |line| - k, v = *line.split(/=/, 2) - t[k] = v.strip - end - t - rescue Errno::ENOENT - setup_rb_error $!.message + "#{File.basename($0)} config first" + def initialize_from_file + raise InstallError, "#{File.basename $0} config first"\ + unless File.file?(SAVE_FILE) + @table = {} + File.foreach(SAVE_FILE) do |line| + k, v = line.split(/=/, 2) + @table[k] = v.strip end end def save - @items.each {|i| i.value } - File.open(savefile(), 'w') {|f| - @items.each do |i| - f.printf "%s=%s\n", i.name, i.value if i.value + File.open(SAVE_FILE, 'w') {|f| + @table.each do |k, v| + f.printf "%s=%s\n", k, v if v end } end + def []=(k, v) + raise InstallError, "unknown config option #{k}"\ + unless ConfigTable.config_key?(k) + @table[k] = v + end + def [](key) - lookup(key).eval(self) + return 
nil unless @table[key] + @table[key].gsub(%r<\$([^/]+)>) { self[$1] } end - def []=(key, val) - lookup(key).set val + def set_raw(key, val) + @table[key] = val end -end + def get_raw(key) + @table[key] + end -c = ::Config::CONFIG - -rubypath = c['bindir'] + '/' + c['ruby_install_name'] - -major = c['MAJOR'].to_i -minor = c['MINOR'].to_i -teeny = c['TEENY'].to_i -version = "#{major}.#{minor}" - -# ruby ver. >= 1.4.4? -newpath_p = ((major >= 2) or - ((major == 1) and - ((minor >= 5) or - ((minor == 4) and (teeny >= 4))))) - -if c['rubylibdir'] - # V < 1.6.3 - _stdruby = c['rubylibdir'] - _siteruby = c['sitedir'] - _siterubyver = c['sitelibdir'] - _siterubyverarch = c['sitearchdir'] -elsif newpath_p - # 1.4.4 <= V <= 1.6.3 - _stdruby = "$prefix/lib/ruby/#{version}" - _siteruby = c['sitedir'] - _siterubyver = "$siteruby/#{version}" - _siterubyverarch = "$siterubyver/#{c['arch']}" -else - # V < 1.4.4 - _stdruby = "$prefix/lib/ruby/#{version}" - _siteruby = "$prefix/lib/ruby/#{version}/site_ruby" - _siterubyver = _siteruby - _siterubyverarch = "$siterubyver/#{c['arch']}" -end -libdir = '-* dummy libdir *-' -stdruby = '-* dummy rubylibdir *-' -siteruby = '-* dummy site_ruby *-' -siterubyver = '-* dummy site_ruby version *-' -parameterize = lambda {|path| - path.sub(/\A#{Regexp.quote(c['prefix'])}/, '$prefix')\ - .sub(/\A#{Regexp.quote(libdir)}/, '$libdir')\ - .sub(/\A#{Regexp.quote(stdruby)}/, '$stdruby')\ - .sub(/\A#{Regexp.quote(siteruby)}/, '$siteruby')\ - .sub(/\A#{Regexp.quote(siterubyver)}/, '$siterubyver') -} -libdir = parameterize.call(c['libdir']) -stdruby = parameterize.call(_stdruby) -siteruby = parameterize.call(_siteruby) -siterubyver = parameterize.call(_siterubyver) -siterubyverarch = parameterize.call(_siterubyverarch) - -if arg = c['configure_args'].split.detect {|arg| /--with-make-prog=/ =~ arg } - makeprog = arg.sub(/'/, '').split(/=/, 2)[1] -else - makeprog = 'make' -end - -common_conf = [ - PathItem.new('prefix', 'path', c['prefix'], - 'path prefix of target environment'), - PathItem.new('bindir', 'path', parameterize.call(c['bindir']), - 'the directory for commands'), - PathItem.new('libdir', 'path', libdir, - 'the directory for libraries'), - PathItem.new('datadir', 'path', parameterize.call(c['datadir']), - 'the directory for shared data'), - PathItem.new('mandir', 'path', parameterize.call(c['mandir']), - 'the directory for man pages'), - PathItem.new('sysconfdir', 'path', parameterize.call(c['sysconfdir']), - 'the directory for man pages'), - PathItem.new('stdruby', 'path', stdruby, - 'the directory for standard ruby libraries'), - PathItem.new('siteruby', 'path', siteruby, - 'the directory for version-independent aux ruby libraries'), - PathItem.new('siterubyver', 'path', siterubyver, - 'the directory for aux ruby libraries'), - PathItem.new('siterubyverarch', 'path', siterubyverarch, - 'the directory for aux ruby binaries'), - PathItem.new('rbdir', 'path', '$siterubyver', - 'the directory for ruby scripts'), - PathItem.new('sodir', 'path', '$siterubyverarch', - 'the directory for ruby extentions'), - PathItem.new('rubypath', 'path', rubypath, - 'the path to set to #! line'), - ProgramItem.new('rubyprog', 'name', rubypath, - 'the ruby program using for installation'), - ProgramItem.new('makeprog', 'name', makeprog, - 'the make program to compile ruby extentions'), - SelectItem.new('shebang', 'all/ruby/never', 'ruby', - 'shebang line (#!) 
editing mode'), - BoolItem.new('without-ext', 'yes/no', 'no', - 'does not compile/install ruby extentions') -] -class ConfigTable_class # open again - ALIASES = { - 'std-ruby' => 'stdruby', - 'site-ruby-common' => 'siteruby', # For backward compatibility - 'site-ruby' => 'siterubyver', # For backward compatibility - 'bin-dir' => 'bindir', - 'bin-dir' => 'bindir', - 'rb-dir' => 'rbdir', - 'so-dir' => 'sodir', - 'data-dir' => 'datadir', - 'ruby-path' => 'rubypath', - 'ruby-prog' => 'rubyprog', - 'ruby' => 'rubyprog', - 'make-prog' => 'makeprog', - 'make' => 'makeprog' - } -end -multipackage_conf = [ - PackageSelectionItem.new('with', 'name,name...', '', 'ALL', - 'package names that you want to install'), - PackageSelectionItem.new('without', 'name,name...', '', 'NONE', - 'package names that you do not want to install') -] -if multipackage_install? - ConfigTable = ConfigTable_class.new(common_conf + multipackage_conf) -else - ConfigTable = ConfigTable_class.new(common_conf) end @@ -386,53 +327,53 @@ module MetaConfigAPI end def config_names - ConfigTable.map {|i| i.name } + ConfigTable.keys end def config?(name) - ConfigTable.key?(name) + ConfigTable.config_key?(name) end def bool_config?(name) - ConfigTable.lookup(name).config_type == 'bool' - end - - def path_config?(name) - ConfigTable.lookup(name).config_type == 'path' + ConfigTable.bool_config?(name) end def value_config?(name) - case ConfigTable.lookup(name).config_type - when 'bool', 'path' - true - else - false - end + ConfigTable.value_config?(name) end - def add_config(item) - ConfigTable.add item + def path_config?(name) + ConfigTable.path_config?(name) end - def add_bool_config(name, default, desc) - ConfigTable.add BoolItem.new(name, 'yes/no', default ? 'yes' : 'no', desc) + def add_config(name, argname, default, desc) + ConfigTable.add_entry name,[default,argname,desc] end def add_path_config(name, default, desc) - ConfigTable.add PathItem.new(name, 'path', default, desc) + add_config name, 'path', default, desc + end + + def add_bool_config(name, default, desc) + add_config name, 'yes/no', default ? 'yes' : 'no', desc end def set_config_default(name, default) - ConfigTable.lookup(name).default = default + if bool_config?(name) + ConfigTable.get_entry!(name)[0] = (default ? 'yes' : 'no') + else + ConfigTable.get_entry!(name)[0] = default + end end def remove_config(name) - ConfigTable.remove(name) + ent = ConfigTable.get_entry(name) + ConfigTable.remove_entry name + ent end end - # # File Operations # @@ -440,12 +381,12 @@ end module FileOperations def mkdir_p(dirname, prefix = nil) - dirname = prefix + File.expand_path(dirname) if prefix + dirname = prefix + dirname if prefix $stderr.puts "mkdir -p #{dirname}" if verbose? return if no_harm? # does not check '/'... it's too abnormal case - dirs = File.expand_path(dirname).split(%r<(?=/)>) + dirs = dirname.split(%r<(?=/)>) if /\A[a-z]:\z/i =~ dirs[0] disk = dirs.shift dirs[0] = disk + dirs[0] @@ -503,7 +444,7 @@ module FileOperations $stderr.puts "install #{from} #{dest}" if verbose? return if no_harm? - realdest = prefix ? 
prefix + File.expand_path(dest) : dest + realdest = prefix + dest if prefix realdest = File.join(realdest, File.basename(from)) if File.dir?(realdest) str = File.binread(from) if diff?(str, realdest) @@ -536,11 +477,11 @@ module FileOperations end def ruby(str) - command config('rubyprog') + ' ' + str + command config('ruby-prog') + ' ' + str end def make(task = '') - command config('makeprog') + ' ' + task + command config('make-prog') + ' ' + task end def extdir?(dir) @@ -554,7 +495,7 @@ module FileOperations end REJECT_DIRS = %w( - CVS SCCS RCS CVS.adm .svn + CVS SCCS RCS CVS.adm ) def all_dirs_in(dirname) @@ -565,11 +506,13 @@ module FileOperations end - # # Main Installer # +class InstallError < StandardError; end + + module HookUtils def run_hook(name) @@ -582,7 +525,7 @@ module HookUtils begin instance_eval File.read(fname), fname, 1 rescue - setup_rb_error "hook #{fname} failed:\n" + $!.message + raise InstallError, "hook #{fname} failed:\n" + $!.message end true end @@ -657,11 +600,10 @@ end class ToplevelInstaller - Version = '3.3.1' + Version = '3.2.4' Copyright = 'Copyright (c) 2000-2004 Minero Aoki' TASKS = [ - [ 'all', 'do config, setup, then install' ], [ 'config', 'saves your configurations' ], [ 'show', 'shows current configuration' ], [ 'setup', 'compiles ruby extentions and others' ], @@ -695,22 +637,13 @@ class ToplevelInstaller def invoke run_metaconfigs - case task = parsearg_global() - when nil, 'all' - @config = load_config('config') - parsearg_config - init_installers - exec_config - exec_setup - exec_install - else - @config = load_config(task) - __send__ "parsearg_#{task}" - init_installers - __send__ "exec_#{task}" - end + task = parsearg_global() + @config = load_config(task) + __send__ "parsearg_#{task}" + init_installers + __send__ "exec_#{task}" end - + def run_metaconfigs eval_file_ifexist "#{@ardir}/metaconfig" end @@ -720,7 +653,7 @@ class ToplevelInstaller when 'config' ConfigTable.new when 'clean', 'distclean' - if File.exist?(ConfigTable.savefile) + if File.exist?('config.save') then ConfigTable.load else ConfigTable.new end @@ -759,7 +692,7 @@ class ToplevelInstaller while arg = ARGV.shift case arg when /\A\w+\z/ - setup_rb_error "invalid task: #{arg}" unless valid_task =~ arg + raise InstallError, "invalid task: #{arg}" unless valid_task =~ arg return arg when '-q', '--quiet' @@ -781,18 +714,23 @@ class ToplevelInstaller exit 0 else - setup_rb_error "unknown global option '#{arg}'" + raise InstallError, "unknown global option '#{arg}'" end end - nil + raise InstallError, <" out.puts " ruby #{File.basename $0} [] []" - fmt = " %-24s %s\n" + fmt = " %-20s %s\n" out.puts out.puts 'Global options:' out.printf fmt, '-q,--quiet', 'suppress message outputs' @@ -850,23 +799,31 @@ class ToplevelInstaller out.printf fmt, '-h,--help', 'print this message' out.printf fmt, '-v,--version', 'print version and quit' out.printf fmt, ' --copyright', 'print copyright and quit' + out.puts out.puts 'Tasks:' TASKS.each do |name, desc| - out.printf fmt, name, desc + out.printf " %-10s %s\n", name, desc end - fmt = " %-24s %s [%s]\n" out.puts - out.puts 'Options for CONFIG or ALL:' - ConfigTable.each do |item| - out.printf fmt, item.help_opt, item.description, item.help_default + out.puts 'Options for config:' + ConfigTable.each_definition do |name, (default, arg, desc, default2)| + out.printf " %-20s %s [%s]\n", + '--'+ name + (ConfigTable.bool_config?(name) ? 
'' : '='+arg), + desc, + default2 || default end - out.printf fmt, '--rbconfig=path', 'rbconfig.rb to load',"running ruby's" + out.printf " %-20s %s [%s]\n", + '--rbconfig=path', 'your rbconfig.rb to load', "running ruby's" + out.puts - out.puts 'Options for INSTALL:' - out.printf fmt, '--no-harm', 'only display what to do if given', 'off' - out.printf fmt, '--prefix=path', 'install path prefix', '$prefix' + out.puts 'Options for install:' + out.printf " %-20s %s [%s]\n", + '--no-harm', 'only display what to do if given', 'off' + out.printf " %-20s %s [%s]\n", + '--prefix', 'install path prefix', '$prefix' + out.puts end @@ -888,8 +845,12 @@ class ToplevelInstaller end def exec_show - ConfigTable.each do |i| - printf "%-20s %s\n", i.name, i.value + ConfigTable.each_name do |k| + v = @config.get_raw(k) + if not v or v.empty? + v = '(not specified)' + end + printf "%-10s %s\n", k, v end end @@ -942,7 +903,8 @@ class ToplevelInstallerMulti < ToplevelInstaller def extract_selection(list) a = list.split(/,/) a.each do |name| - setup_rb_error "no such package: #{name}" unless @installers.key?(name) + raise InstallError, "no such package: #{name}" \ + unless @installers.key?(name) end a end @@ -993,14 +955,14 @@ class ToplevelInstallerMulti < ToplevelInstaller end def exec_clean - rm_f ConfigTable.savefile + rm_f 'config.save' run_hook 'pre-clean' each_selected_installers {|inst| inst.exec_clean } run_hook 'post-clean' end def exec_distclean - rm_f ConfigTable.savefile + rm_f 'config.save' run_hook 'pre-distclean' each_selected_installers {|inst| inst.exec_distclean } run_hook 'post-distclean' @@ -1053,7 +1015,7 @@ class Installer end # - # Hook Script API base methods + # Hook Script API bases # def srcdir_root @@ -1109,7 +1071,7 @@ class Installer def extconf opt = @options['config-opt'].join(' ') - command "#{config('rubyprog')} #{curr_srcdir()}/extconf.rb #{opt}" + command "#{config('ruby-prog')} #{curr_srcdir()}/extconf.rb #{opt}" end def config_dir_data(rel) @@ -1129,21 +1091,28 @@ class Installer end end + # modify: #!/usr/bin/ruby + # modify: #! /usr/bin/ruby + # modify: #!ruby + # not modify: #!/usr/bin/env ruby + SHEBANG_RE = /\A\#!\s*\S*ruby\S*/ + def adjust_shebang(path) return if no_harm? + tmpfile = File.basename(path) + '.tmp' begin File.open(path, 'rb') {|r| - first = r.gets - return unless File.basename(config('rubypath')) == 'ruby' - return unless File.basename(first.sub(/\A\#!/, '').split[0]) == 'ruby' - $stderr.puts "adjusting shebang: #{File.basename(path)}" if verbose? File.open(tmpfile, 'wb') {|w| - w.print first.sub(/\A\#!\s*\S+/, '#! ' + config('rubypath')) + first = r.gets + return unless SHEBANG_RE =~ first + + $stderr.puts "adjusting shebang: #{File.basename path}" if verbose? + w.print first.sub(SHEBANG_RE, '#!' 
+ config('ruby-path')) w.write r.read } - move_file tmpfile, File.basename(path) } + move_file tmpfile, File.basename(path) ensure File.unlink tmpfile if File.exist?(tmpfile) end @@ -1164,27 +1133,26 @@ class Installer # def exec_install - rm_f 'InstalledFiles' exec_task_traverse 'install' end def install_dir_bin(rel) - install_files collect_filenames_auto(), "#{config('bindir')}/#{rel}", 0755 + install_files collect_filenames_auto(), "#{config('bin-dir')}/#{rel}", 0755 end def install_dir_lib(rel) - install_files ruby_scripts(), "#{config('rbdir')}/#{rel}", 0644 + install_files ruby_scripts(), "#{config('rb-dir')}/#{rel}", 0644 end def install_dir_ext(rel) return unless extdir?(curr_srcdir()) install_files ruby_extentions('.'), - "#{config('sodir')}/#{File.dirname(rel)}", + "#{config('so-dir')}/#{File.dirname(rel)}", 0555 end def install_dir_data(rel) - install_files collect_filenames_auto(), "#{config('datadir')}/#{rel}", 0644 + install_files collect_filenames_auto(), "#{config('data-dir')}/#{rel}", 0644 end def install_files(list, dest, mode) @@ -1195,7 +1163,7 @@ class Installer end def ruby_scripts - collect_filenames_auto().select {|n| /\.rb\z/ =~ n } + collect_filenames_auto().select {|n| /\.rb\z/ =~ n || "module.yml" == n } end # picked up many entries from cvs-1.11.1/src/ignore.c @@ -1245,12 +1213,15 @@ class Installer end def ruby_extentions(dir) + _ruby_extentions(dir) or + raise InstallError, "no ruby extention exists: 'ruby #{$0} setup' first" + end + + DLEXT = /\.#{ ::Config::CONFIG['DLEXT'] }\z/ + + def _ruby_extentions(dir) Dir.open(dir) {|d| - ents = d.select {|fname| /\.#{::Config::CONFIG['DLEXT']}\z/ =~ fname } - if ents.empty? - setup_rb_error "no ruby extention exists: 'ruby #{$0} setup' first" - end - return ents + return d.select {|fname| DLEXT =~ fname } } end @@ -1260,7 +1231,7 @@ class Installer def exec_clean exec_task_traverse 'clean' - rm_f ConfigTable.savefile + rm_f 'config.save' rm_f 'InstalledFiles' end @@ -1284,7 +1255,7 @@ class Installer def exec_distclean exec_task_traverse 'distclean' - rm_f ConfigTable.savefile + rm_f 'config.save' rm_f 'InstalledFiles' end @@ -1351,7 +1322,7 @@ if $0 == __FILE__ else ToplevelInstaller.invoke end - rescue SetupError + rescue raise if $DEBUG $stderr.puts $!.message $stderr.puts "Try 'ruby #{$0} --help' for detailed usage." diff --git a/vendor/gems/gems/net-scp-1.0.2/test/common.rb b/vendor/gems/gems/net-scp-1.0.2/test/common.rb new file mode 100644 index 00000000..107456d7 --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/test/common.rb @@ -0,0 +1,138 @@ +require 'test/unit' +require 'mocha' + +begin + gem 'net-ssh', ">= 2.0.0" + require 'net/ssh' +rescue LoadError + $LOAD_PATH.unshift "#{File.dirname(__FILE__)}/../../net-ssh/lib" + + begin + require 'net/ssh' + require 'net/ssh/version' + raise LoadError, "wrong version" unless Net::SSH::Version::STRING >= '1.99.0' + rescue LoadError => e + abort "could not load net/ssh v2 (#{e.inspect})" + end +end + +$LOAD_PATH.unshift "#{File.dirname(__FILE__)}/../lib" + +require 'net/scp' +require 'net/ssh/test' + +class Net::SSH::Test::Channel + def gets_ok + gets_data "\0" + end + + def sends_ok + sends_data "\0" + end +end + +class Net::SCP::TestCase < Test::Unit::TestCase + include Net::SSH::Test + + def default_test + # do nothing, this is just a hacky-hack to work around Test::Unit's + # insistence that all TestCase subclasses have at least one test + # method defined. 
+ end + + protected + + def prepare_file(path, contents="", mode=0666, mtime=Time.now, atime=Time.now) + entry = FileEntry.new(path, contents, mode, mtime, atime) + entry.stub! + entry + end + + def prepare_directory(path, mode=0777, mtime=Time.now, atime=Time.now) + directory = DirectoryEntry.new(path, mode, mtime, atime) + yield directory if block_given? + directory.stub! + end + + class FileEntry + attr_reader :path, :contents, :mode, :mtime, :atime, :io + + def initialize(path, contents, mode=0666, mtime=Time.now, atime=Time.now) + @path, @contents, @mode = path, contents, mode + @mtime, @atime = mtime, atime + end + + def name + @name ||= File.basename(path) + end + + def stub! + stat = Mocha::Mock.new("file::stat") + stat.stubs(:size => contents.length, :mode => mode, :mtime => mtime, :atime => atime, :directory? => false) + + File.stubs(:stat).with(path).returns(stat) + File.stubs(:directory?).with(path).returns(false) + File.stubs(:file?).with(path).returns(true) + File.stubs(:open).with(path, "rb").returns(StringIO.new(contents)) + + @io = StringIO.new + File.stubs(:new).with(path, "wb", mode).returns(io) + end + end + + class DirectoryEntry + attr_reader :path, :mode, :mtime, :atime + attr_reader :entries + + def initialize(path, mode=0777, mtime=Time.now, atime=Time.now) + @path, @mode = path, mode + @mtime, @atime = mtime, atime + @entries = [] + end + + def name + @name ||= File.basename(path) + end + + def file(name, *args) + (entries << FileEntry.new(File.join(path, name), *args)).last + end + + def directory(name, *args) + entry = DirectoryEntry.new(File.join(path, name), *args) + yield entry if block_given? + (entries << entry).last + end + + def stub! + Dir.stubs(:mkdir).with { |*a| a.first == path } + + stat = Mocha::Mock.new("file::stat") + stat.stubs(:size => 1024, :mode => mode, :mtime => mtime, :atime => atime, :directory? => true) + + File.stubs(:stat).with(path).returns(stat) + File.stubs(:directory?).with(path).returns(true) + File.stubs(:file?).with(path).returns(false) + Dir.stubs(:entries).with(path).returns(%w(. ..) + entries.map { |e| e.name }.sort) + + entries.each { |e| e.stub! } + end + end + + def expect_scp_session(arguments) + story do |session| + channel = session.opens_channel + channel.sends_exec "scp #{arguments}" + yield channel if block_given? 
+ channel.sends_eof + channel.gets_exit_status + channel.gets_eof + channel.gets_close + channel.sends_close + end + end + + def scp(options={}) + @scp ||= Net::SCP.new(connection(options)) + end +end diff --git a/vendor/gems/gems/net-scp-1.0.2/test/test_all.rb b/vendor/gems/gems/net-scp-1.0.2/test/test_all.rb new file mode 100644 index 00000000..08485bc9 --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/test/test_all.rb @@ -0,0 +1,3 @@ +Dir.chdir(File.dirname(__FILE__)) do + Dir['**/test_*.rb'].each { |file| require(file) } +end \ No newline at end of file diff --git a/vendor/gems/gems/net-scp-1.0.2/test/test_download.rb b/vendor/gems/gems/net-scp-1.0.2/test/test_download.rb new file mode 100644 index 00000000..b909313b --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/test/test_download.rb @@ -0,0 +1,156 @@ +require 'common' + +class TestDownload < Net::SCP::TestCase + def test_download_file_should_transfer_file + file = prepare_file("/path/to/local.txt", "a" * 1234) + + expect_scp_session "-f /path/to/remote.txt" do |channel| + simple_download(channel) + end + + assert_scripted { scp.download!("/path/to/remote.txt", "/path/to/local.txt") } + assert_equal "a" * 1234, file.io.string + end + + def test_download_file_with_spaces_in_name_should_escape_remote_file_name + file = prepare_file("/path/to/local file.txt", "") + + expect_scp_session "-f /path/to/remote\\ file.txt" do |channel| + channel.sends_ok + channel.gets_data "C0666 0 local file.txt\n" + channel.sends_ok + channel.gets_ok + channel.sends_ok + end + + assert_scripted { scp.download!("/path/to/remote file.txt", "/path/to/local file.txt") } + end + + def test_download_with_preserve_should_send_times + file = prepare_file("/path/to/local.txt", "a" * 1234, 0644, Time.at(1234567890, 123456), Time.at(12121212, 232323)) + + expect_scp_session "-f -p /path/to/remote.txt" do |channel| + channel.sends_ok + channel.gets_data "T1234567890 123456 12121212 232323\n" + simple_download(channel, 0644) + end + + File.expects(:utime).with(Time.at(12121212, 232323), Time.at(1234567890, 123456), "/path/to/local.txt") + assert_scripted { scp.download!("/path/to/remote.txt", "/path/to/local.txt", :preserve => true) } + assert_equal "a" * 1234, file.io.string + end + + def test_download_with_progress_callback_should_invoke_callback + prepare_file("/path/to/local.txt", "a" * 3000 + "b" * 3000 + "c" * 3000 + "d" * 3000) + + expect_scp_session "-f /path/to/remote.txt" do |channel| + channel.sends_ok + channel.gets_data "C0666 12000 remote.txt\n" + channel.sends_ok + channel.gets_data "a" * 3000 + channel.inject_remote_delay! + channel.gets_data "b" * 3000 + channel.inject_remote_delay! + channel.gets_data "c" * 3000 + channel.inject_remote_delay! + channel.gets_data "d" * 3000 + channel.gets_ok + channel.sends_ok + end + + calls = [] + progress = Proc.new { |ch, *args| calls << args } + + assert_scripted do + scp.download!("/path/to/remote.txt", "/path/to/local.txt", &progress) + end + + assert_equal ["/path/to/local.txt", 0, 12000], calls.shift + assert_equal ["/path/to/local.txt", 3000, 12000], calls.shift + assert_equal ["/path/to/local.txt", 6000, 12000], calls.shift + assert_equal ["/path/to/local.txt", 9000, 12000], calls.shift + assert_equal ["/path/to/local.txt", 12000, 12000], calls.shift + assert calls.empty? 
+ end + + def test_download_io_with_recursive_should_raise_error + expect_scp_session "-f -r /path/to/remote.txt" + assert_raises(Net::SCP::Error) { scp.download!("/path/to/remote.txt", StringIO.new, :recursive => true) } + end + + def test_download_io_with_preserve_should_ignore_preserve + expect_scp_session "-f -p /path/to/remote.txt" do |channel| + simple_download(channel) + end + + io = StringIO.new + assert_scripted { scp.download!("/path/to/remote.txt", io, :preserve => true) } + assert_equal "a" * 1234, io.string + end + + def test_download_io_should_transfer_data + expect_scp_session "-f /path/to/remote.txt" do |channel| + simple_download(channel) + end + + io = StringIO.new + assert_scripted { scp.download!("/path/to/remote.txt", io) } + assert_equal "a" * 1234, io.string + end + + def test_download_bang_without_target_should_return_string + expect_scp_session "-f /path/to/remote.txt" do |channel| + simple_download(channel) + end + + assert_scripted do + assert_equal "a" * 1234, scp.download!("/path/to/remote.txt") + end + end + + def test_download_directory_without_recursive_should_raise_error + expect_scp_session "-f /path/to/remote" do |channel| + channel.sends_ok + channel.gets_data "D0755 0 remote\n" + end + + assert_raises(Net::SCP::Error) { scp.download!("/path/to/remote") } + end + + def test_download_directory_should_create_directory_and_files_locally + file = nil + prepare_directory "/path/to/local" do |dir| + dir.directory "remote" do |dir2| + dir2.directory "sub" do |dir3| + file = dir3.file "remote.txt", "" + end + end + end + + expect_scp_session "-f -r /path/to/remote" do |channel| + channel.sends_ok + channel.gets_data "D0755 0 remote\n" + channel.sends_ok + channel.gets_data "D0755 0 sub\n" + simple_download(channel) + channel.gets_data "E\n" + channel.sends_ok + channel.gets_data "E\n" + channel.sends_ok + end + + scp.download!("/path/to/remote", "/path/to/local", :recursive => true, :ssh => { :verbose => :debug }) + assert_equal "a" * 1234, file.io.string + end + + private + + def simple_download(channel, mode=0666) + channel.sends_ok + channel.gets_data "C%04o 1234 remote.txt\n" % mode + channel.sends_ok + channel.gets_data "a" * 1234 + channel.gets_ok + channel.sends_ok + end +end diff --git a/vendor/gems/gems/net-scp-1.0.2/test/test_scp.rb b/vendor/gems/gems/net-scp-1.0.2/test/test_scp.rb new file mode 100644 index 00000000..b2809346 --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/test/test_scp.rb @@ -0,0 +1,60 @@ +require 'common' + +class TestSCP < Net::SCP::TestCase + def test_start_without_block_should_return_scp_instance + ssh = stub('session', :logger => nil) + Net::SSH.expects(:start). + with("remote.host", "username", :password => "foo"). + returns(ssh) + + ssh.expects(:close).never + scp = Net::SCP.start("remote.host", "username", :password => "foo") + assert_instance_of Net::SCP, scp + assert_equal ssh, scp.session + end + + def test_start_with_block_should_yield_scp_and_close_ssh_session + ssh = stub('session', :logger => nil) + Net::SSH.expects(:start). + with("remote.host", "username", :password => "foo"). 
+ returns(ssh) + + ssh.expects(:loop) + ssh.expects(:close) + + yielded = false + Net::SCP.start("remote.host", "username", :password => "foo") do |scp| + yielded = true + assert_instance_of Net::SCP, scp + assert_equal ssh, scp.session + end + + assert yielded + end + + def test_self_upload_should_instatiate_scp_and_invoke_synchronous_upload + scp = stub('scp') + scp.expects(:upload!).with("/path/to/local", "/path/to/remote", :recursive => true) + + Net::SCP.expects(:start). + with("remote.host", "username", :password => "foo"). + yields(scp) + + Net::SCP.upload!("remote.host", "username", "/path/to/local", "/path/to/remote", + :ssh => { :password => "foo" }, :recursive => true) + end + + def test_self_download_should_instatiate_scp_and_invoke_synchronous_download + scp = stub('scp') + scp.expects(:download!).with("/path/to/remote", "/path/to/local", :recursive => true).returns(:result) + + Net::SCP.expects(:start). + with("remote.host", "username", :password => "foo"). + yields(scp) + + result = Net::SCP.download!("remote.host", "username", "/path/to/remote", "/path/to/local", + :ssh => { :password => "foo" }, :recursive => true) + + assert_equal :result, result + end +end diff --git a/vendor/gems/gems/net-scp-1.0.2/test/test_upload.rb b/vendor/gems/gems/net-scp-1.0.2/test/test_upload.rb new file mode 100644 index 00000000..00eb30b9 --- /dev/null +++ b/vendor/gems/gems/net-scp-1.0.2/test/test_upload.rb @@ -0,0 +1,255 @@ +require 'common' + +class TestUpload < Net::SCP::TestCase + def test_upload_file_should_transfer_file + prepare_file("/path/to/local.txt", "a" * 1234) + + expect_scp_session "-t /path/to/remote.txt" do |channel| + channel.gets_ok + channel.sends_data "C0666 1234 local.txt\n" + channel.gets_ok + channel.sends_data "a" * 1234 + channel.sends_ok + channel.gets_ok + end + + assert_scripted { scp.upload!("/path/to/local.txt", "/path/to/remote.txt") } + end + + def test_upload_file_with_spaces_in_name_should_escape_remote_file_name + prepare_file("/path/to/local file.txt", "") + + expect_scp_session "-t /path/to/remote\\ file.txt" do |channel| + channel.gets_ok + channel.sends_data "C0666 0 local file.txt\n" + channel.gets_ok + channel.sends_ok + channel.gets_ok + end + + assert_scripted { scp.upload!("/path/to/local file.txt", "/path/to/remote file.txt") } + end + + def test_upload_file_with_preserve_should_send_times + prepare_file("/path/to/local.txt", "a" * 1234, 0666, Time.at(1234567890, 123456), Time.at(1234543210, 345678)) + + expect_scp_session "-t -p /path/to/remote.txt" do |channel| + channel.gets_ok + channel.sends_data "T1234567890 123456 1234543210 345678\n" + channel.gets_ok + channel.sends_data "C0666 1234 local.txt\n" + channel.gets_ok + channel.sends_data "a" * 1234 + channel.sends_ok + channel.gets_ok + end + + assert_scripted { scp.upload!("/path/to/local.txt", "/path/to/remote.txt", :preserve => true) } + end + + def test_upload_file_with_progress_callback_should_invoke_callback + prepare_file("/path/to/local.txt", "a" * 3000 + "b" * 3000 + "c" * 3000 + "d" * 3000) + + expect_scp_session "-t /path/to/remote.txt" do |channel| + channel.gets_ok + channel.sends_data "C0666 12000 local.txt\n" + channel.gets_ok + channel.sends_data "a" * 3000 + channel.sends_data "b" * 3000 + channel.sends_data "c" * 3000 + channel.sends_data "d" * 3000 + channel.sends_ok + channel.gets_ok + end + + calls = [] + progress = Proc.new do |ch, name, sent, total| + calls << [name, sent, total] + end + + assert_scripted do + scp.upload!("/path/to/local.txt", "/path/to/remote.txt", 
:chunk_size => 3000, &progress) + end + + assert_equal ["/path/to/local.txt", 0, 12000], calls.shift + assert_equal ["/path/to/local.txt", 3000, 12000], calls.shift + assert_equal ["/path/to/local.txt", 6000, 12000], calls.shift + assert_equal ["/path/to/local.txt", 9000, 12000], calls.shift + assert_equal ["/path/to/local.txt", 12000, 12000], calls.shift + assert calls.empty? + end + + def test_upload_io_with_recursive_should_ignore_recursive + expect_scp_session "-t -r /path/to/remote.txt" do |channel| + channel.gets_ok + channel.sends_data "C0640 1234 remote.txt\n" + channel.gets_ok + channel.sends_data "a" * 1234 + channel.sends_ok + channel.gets_ok + end + + io = StringIO.new("a" * 1234) + assert_scripted { scp.upload!(io, "/path/to/remote.txt", :recursive => true) } + end + + def test_upload_io_with_preserve_should_ignore_preserve + expect_scp_session "-t -p /path/to/remote.txt" do |channel| + channel.gets_ok + channel.sends_data "C0640 1234 remote.txt\n" + channel.gets_ok + channel.sends_data "a" * 1234 + channel.sends_ok + channel.gets_ok + end + + io = StringIO.new("a" * 1234) + assert_scripted { scp.upload!(io, "/path/to/remote.txt", :preserve => true) } + end + + def test_upload_io_should_transfer_data + expect_scp_session "-t /path/to/remote.txt" do |channel| + channel.gets_ok + channel.sends_data "C0640 1234 remote.txt\n" + channel.gets_ok + channel.sends_data "a" * 1234 + channel.sends_ok + channel.gets_ok + end + + io = StringIO.new("a" * 1234) + assert_scripted { scp.upload!(io, "/path/to/remote.txt") } + end + + def test_upload_io_with_mode_should_honor_mode_as_permissions + expect_scp_session "-t /path/to/remote.txt" do |channel| + channel.gets_ok + channel.sends_data "C0666 1234 remote.txt\n" + channel.gets_ok + channel.sends_data "a" * 1234 + channel.sends_ok + channel.gets_ok + end + + io = StringIO.new("a" * 1234) + assert_scripted { scp.upload!(io, "/path/to/remote.txt", :mode => 0666) } + end + + def test_upload_directory_without_recursive_should_error + prepare_directory("/path/to/local") + + expect_scp_session("-t /path/to/remote") do |channel| + channel.gets_ok + end + + assert_raises(Net::SCP::Error) { scp.upload!("/path/to/local", "/path/to/remote") } + end + + def test_upload_empty_directory_should_create_directory_and_finish + prepare_directory("/path/to/local") + + expect_scp_session("-t -r /path/to/remote") do |channel| + channel.gets_ok + channel.sends_data "D0777 0 local\n" + channel.gets_ok + channel.sends_data "E\n" + channel.gets_ok + end + + assert_scripted { scp.upload!("/path/to/local", "/path/to/remote", :recursive => true) } + end + + def test_upload_directory_should_recursively_create_and_upload_items + prepare_directory("/path/to/local") do |d| + d.file "hello.txt", "hello world\n" + d.directory "others" do |d2| + d2.file "data.dat", "abcdefghijklmnopqrstuvwxyz" + end + d.file "zoo.doc", "going to the zoo\n" + end + + expect_scp_session("-t -r /path/to/remote") do |channel| + channel.gets_ok + channel.sends_data "D0777 0 local\n" + channel.gets_ok + channel.sends_data "C0666 12 hello.txt\n" + channel.gets_ok + channel.sends_data "hello world\n" + channel.sends_ok + channel.gets_ok + channel.sends_data "D0777 0 others\n" + channel.gets_ok + channel.sends_data "C0666 26 data.dat\n" + channel.gets_ok + channel.sends_data "abcdefghijklmnopqrstuvwxyz" + channel.sends_ok + channel.gets_ok + channel.sends_data "E\n" + channel.gets_ok + channel.sends_data "C0666 17 zoo.doc\n" + channel.gets_ok + channel.sends_data "going to the zoo\n" + channel.sends_ok 
+ channel.gets_ok + channel.sends_data "E\n" + channel.gets_ok + end + + assert_scripted { scp.upload!("/path/to/local", "/path/to/remote", :recursive => true) } + end + + def test_upload_directory_with_preserve_should_send_times_for_all_items + prepare_directory("/path/to/local", 0755, Time.at(17171717, 191919), Time.at(18181818, 101010)) do |d| + d.file "hello.txt", "hello world\n", 0640, Time.at(12345, 67890), Time.at(234567, 890) + d.directory "others", 0770, Time.at(112233, 4455), Time.at(22334455, 667788) do |d2| + d2.file "data.dat", "abcdefghijklmnopqrstuvwxyz", 0600, Time.at(13579135, 13131), Time.at(7654321, 654321) + end + d.file "zoo.doc", "going to the zoo\n", 0444, Time.at(12121212, 131313), Time.at(23232323, 242424) + end + + expect_scp_session("-t -r -p /path/to/remote") do |channel| + channel.gets_ok + channel.sends_data "T17171717 191919 18181818 101010\n" + channel.gets_ok + channel.sends_data "D0755 0 local\n" + channel.gets_ok + channel.sends_data "T12345 67890 234567 890\n" + channel.gets_ok + channel.sends_data "C0640 12 hello.txt\n" + channel.gets_ok + channel.sends_data "hello world\n" + channel.sends_ok + channel.gets_ok + channel.sends_data "T112233 4455 22334455 667788\n" + channel.gets_ok + channel.sends_data "D0770 0 others\n" + channel.gets_ok + channel.sends_data "T13579135 13131 7654321 654321\n" + channel.gets_ok + channel.sends_data "C0600 26 data.dat\n" + channel.gets_ok + channel.sends_data "abcdefghijklmnopqrstuvwxyz" + channel.sends_ok + channel.gets_ok + channel.sends_data "E\n" + channel.gets_ok + channel.sends_data "T12121212 131313 23232323 242424\n" + channel.gets_ok + channel.sends_data "C0444 17 zoo.doc\n" + channel.gets_ok + channel.sends_data "going to the zoo\n" + channel.sends_ok + channel.gets_ok + channel.sends_data "E\n" + channel.gets_ok + end + + assert_scripted { scp.upload!("/path/to/local", "/path/to/remote", :preserve => true, :recursive => true) } + end + + def test_upload_should_not_block + prepare_file("/path/to/local.txt", "data") + story { |s| s.opens_channel(false) } + assert_scripted { scp.upload("/path/to/local.txt", "/path/to/remote.txt") } + end +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/CHANGELOG.rdoc b/vendor/gems/gems/net-ssh-2.0.15/CHANGELOG.rdoc new file mode 100644 index 00000000..5b905780 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/CHANGELOG.rdoc @@ -0,0 +1,161 @@ + + +=== 2.0.15 / 03 Sep 2009 + +* Scale back IO#select patch so it mutexes only zero-timeout calls [Daniel Azuma, Will Bryant] + + +=== 2.0.14 / 28 Aug 2009 + +* Fix for IO#select threading bug in Ruby 1.8 (LH-1) [Daniel Azuma] + +* Fix for "uninitialized constant OpenSSL::Digest::MD5" exception in Net::SFTP [DL Redden] + + +=== 2.0.13 / 17 Aug 2009 + +* Added fix for hanging in ServerVersion#negotiate! when using SOCKS5 proxy (GH-9) [Gerald Talton] + +* Added support for specifying a list of hosts in .ssh/config, with tests (GH-6) [ckoehler, Delano Mandelbaum] + +* Added tests for arcfour128/256/512 lengths, encryption, and decryption [Delano Mandelbaum] + +* Skip packet stream tests for arcfour128/256/512 [Delano Mandelbaum] + +* Fix for OpenSSL cipher key length because it always returns 16, even when 32 byte keys are required, e.g. 
for arcfour256 and arcfour512 ciphers [Karl Varga] + + +=== 2.0.12 / 08 Jun 2009 + +* Applied patch for arcfour128 and arcfour256 support [Denis Bernard] + +* Use unbuffered reads when negotiating the protocol version [Steven Hazel] + + +=== 2.0.11 / 24 Feb 2009 + +* Add :key_data option for specifying raw private keys in PEM format [Alex Holems, Andrew Babkin] + + +=== 2.0.10 / 4 Feb 2009 + +* Added Net::SSH.configuration_for to make it easier to query the SSH configuration file(s) [Jamis Buck] + + +=== 2.0.9 / 1 Feb 2009 + +* Specifying non-nil user argument overrides user in .ssh/config [Jamis Buck] + +* Ignore requests for non-existent channels (workaround ssh server bug) [Jamis Buck] + +* Add terminate! method for hard shutdown scenarios [Jamis Buck] + +* Revert to pre-2.0.7 key-loading behavior by default, but load private-key if public-key doesn't exist [Jamis Buck] + +* Make sure :passphrase option gets passed to key manager [Bob Cotton] + + +=== 2.0.8 / 29 December 2008 + +* Fix private key change from 2.0.7 so that keys are loaded just-in-time, avoiding unecessary prompts from encrypted keys. [Jamis Buck] + + +=== 2.0.7 / 29 December 2008 + +* Make key manager use private keys instead of requiring public key to exist [arilerner@mac.com] + +* Fix failing tests [arilerner@mac.com] + +* Don't include pageant when running under JRuby [Angel N. Sciortino] + + +=== 2.0.6 / 6 December 2008 + +* Update the Manifest file so that the gem includes all necessary files [Jamis Buck] + + +=== 2.0.5 / 6 December 2008 + +* Make the Pageant interface comply with more of the Socket interface to avoid related errors [Jamis Buck] + +* Don't busy-wait on session close for remaining channels to close [Will Bryant] + +* Ruby 1.9 compatibility [Jamis Buck] + +* Fix Cipher#final to correctly flag a need for a cipher reset [Jamis Buck] + + +=== 2.0.4 / 27 Aug 2008 + +* Added Connection::Session#closed? and Transport::Session#closed? [Jamis Buck] + +* Numeric host names in .ssh/config are now parsed correct [Yanko Ivanov] + +* Make sure the error raised when a public key file is malformed is more informative than a MethodMissing error [Jamis Buck] + +* Cipher#reset is now called after Cipher#final, with the last n bytes used as the next initialization vector [Jamis Buck] + + +=== 2.0.3 / 27 Jun 2008 + +* Make Net::SSH::Version comparable [Brian Candler] + +* Fix errors in port forwarding when a channel could not be opened due to a typo in the exception name [Matthew Todd] + +* Use #chomp instead of #strip when cleaning the version string reported by the remote host, so that trailing whitespace is preserved (this is to play nice with servers like Mocana SSH) [Timo Gatsonides] + +* Correctly parse ssh_config entries with eq-sign delimiters [Jamis Buck] + +* Ignore malformed ssh_config entries [Jamis Buck] + +=== 2.0.2 / 29 May 2008 + +* Make sure the agent client understands both RSA "identities answers" [Jamis Buck] + +* Fixed key truncation bug that caused hmacs other than SHA1 to fail with "corrupt hmac" errors [Jamis Buck] + +* Fix detection and loading of public keys when the keys don't actually exist [David Dollar] + + +=== 2.0.1 / 5 May 2008 + +* Teach Net::SSH about a handful of default key names [Jamis Buck] + + +=== 2.0.0 / 1 May 2008 + +* Allow the :verbose argument to accept symbols (:debug, etc.) as well as Logger level constants (Logger::DEBUG, etc.) 
[Jamis Buck] + + +=== 2.0 Preview Release 4 (1.99.3) / 19 Apr 2008 + +* Make sure HOME is set to something sane, even on OS's that don't set it by default [Jamis Buck] + +* Add a :passphrase option to specify the passphrase to use with private keys [Francis Sullivan] + +* Open a new auth agent connection for every auth-agent channel request [Jamis Buck] + + +=== 2.0 Preview Release 3 (1.99.2) / 10 Apr 2008 + +* Session properties [Jamis Buck] + +* Make channel open failure work with a callback so that failures can be handled similarly to successes [Jamis Buck] + + +=== 2.0 Preview Release 2 (1.99.1) / 22 Mar 2008 + +* Partial support for ~/.ssh/config (and related) SSH configuration files [Daniel J. Berger, Jamis Buck] + +* Added Net::SSH::Test to facilitate testing complex SSH state machines [Jamis Buck] + +* Reworked Net::SSH::Prompt to use conditionally-selected modules [Jamis Buck, suggested by James Rosen] + +* Added Channel#eof? and Channel#eof! [Jamis Buck] + +* Fixed bug in strict host key verifier on cache miss [Mike Timm] + + +=== 2.0 Preview Release 1 (1.99.0) / 21 Aug 2007 + +* First preview release of Net::SSH v2 diff --git a/vendor/gems/gems/net-ssh-2.0.15/Manifest b/vendor/gems/gems/net-ssh-2.0.15/Manifest new file mode 100644 index 00000000..27018a4a --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/Manifest @@ -0,0 +1,107 @@ +CHANGELOG.rdoc +Manifest +README.rdoc +Rakefile +Rudyfile +THANKS.rdoc +lib/net/ssh.rb +lib/net/ssh/authentication/agent.rb +lib/net/ssh/authentication/constants.rb +lib/net/ssh/authentication/key_manager.rb +lib/net/ssh/authentication/methods/abstract.rb +lib/net/ssh/authentication/methods/hostbased.rb +lib/net/ssh/authentication/methods/keyboard_interactive.rb +lib/net/ssh/authentication/methods/password.rb +lib/net/ssh/authentication/methods/publickey.rb +lib/net/ssh/authentication/pageant.rb +lib/net/ssh/authentication/session.rb +lib/net/ssh/buffer.rb +lib/net/ssh/buffered_io.rb +lib/net/ssh/config.rb +lib/net/ssh/connection/channel.rb +lib/net/ssh/connection/constants.rb +lib/net/ssh/connection/session.rb +lib/net/ssh/connection/term.rb +lib/net/ssh/errors.rb +lib/net/ssh/key_factory.rb +lib/net/ssh/known_hosts.rb +lib/net/ssh/loggable.rb +lib/net/ssh/packet.rb +lib/net/ssh/prompt.rb +lib/net/ssh/proxy/errors.rb +lib/net/ssh/proxy/http.rb +lib/net/ssh/proxy/socks4.rb +lib/net/ssh/proxy/socks5.rb +lib/net/ssh/ruby_compat.rb +lib/net/ssh/service/forward.rb +lib/net/ssh/test.rb +lib/net/ssh/test/channel.rb +lib/net/ssh/test/extensions.rb +lib/net/ssh/test/kex.rb +lib/net/ssh/test/local_packet.rb +lib/net/ssh/test/packet.rb +lib/net/ssh/test/remote_packet.rb +lib/net/ssh/test/script.rb +lib/net/ssh/test/socket.rb +lib/net/ssh/transport/algorithms.rb +lib/net/ssh/transport/cipher_factory.rb +lib/net/ssh/transport/constants.rb +lib/net/ssh/transport/hmac.rb +lib/net/ssh/transport/hmac/abstract.rb +lib/net/ssh/transport/hmac/md5.rb +lib/net/ssh/transport/hmac/md5_96.rb +lib/net/ssh/transport/hmac/none.rb +lib/net/ssh/transport/hmac/sha1.rb +lib/net/ssh/transport/hmac/sha1_96.rb +lib/net/ssh/transport/identity_cipher.rb +lib/net/ssh/transport/kex.rb +lib/net/ssh/transport/kex/diffie_hellman_group1_sha1.rb +lib/net/ssh/transport/kex/diffie_hellman_group_exchange_sha1.rb +lib/net/ssh/transport/openssl.rb +lib/net/ssh/transport/packet_stream.rb +lib/net/ssh/transport/server_version.rb +lib/net/ssh/transport/session.rb +lib/net/ssh/transport/state.rb +lib/net/ssh/verifiers/lenient.rb +lib/net/ssh/verifiers/null.rb +lib/net/ssh/verifiers/strict.rb 
+lib/net/ssh/version.rb +net-ssh.gemspec +setup.rb +support/arcfour_check.rb +test/authentication/methods/common.rb +test/authentication/methods/test_abstract.rb +test/authentication/methods/test_hostbased.rb +test/authentication/methods/test_keyboard_interactive.rb +test/authentication/methods/test_password.rb +test/authentication/methods/test_publickey.rb +test/authentication/test_agent.rb +test/authentication/test_key_manager.rb +test/authentication/test_session.rb +test/common.rb +test/configs/eqsign +test/configs/exact_match +test/configs/multihost +test/configs/wild_cards +test/connection/test_channel.rb +test/connection/test_session.rb +test/test_all.rb +test/test_buffer.rb +test/test_buffered_io.rb +test/test_config.rb +test/test_key_factory.rb +test/transport/hmac/test_md5.rb +test/transport/hmac/test_md5_96.rb +test/transport/hmac/test_none.rb +test/transport/hmac/test_sha1.rb +test/transport/hmac/test_sha1_96.rb +test/transport/kex/test_diffie_hellman_group1_sha1.rb +test/transport/kex/test_diffie_hellman_group_exchange_sha1.rb +test/transport/test_algorithms.rb +test/transport/test_cipher_factory.rb +test/transport/test_hmac.rb +test/transport/test_identity_cipher.rb +test/transport/test_packet_stream.rb +test/transport/test_server_version.rb +test/transport/test_session.rb +test/transport/test_state.rb diff --git a/vendor/gems/gems/net-ssh-2.0.15/README.rdoc b/vendor/gems/gems/net-ssh-2.0.15/README.rdoc new file mode 100644 index 00000000..ff49d783 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/README.rdoc @@ -0,0 +1,140 @@ += Net::SSH + +* http://net-ssh.rubyforge.org/ssh + +== DESCRIPTION: + +Net::SSH is a pure-Ruby implementation of the SSH2 client protocol. It allows you to write programs that invoke and interact with processes on remote servers, via SSH2. + +== FEATURES: + +* Execute processes on remote servers and capture their output +* Run multiple processes in parallel over a single SSH connection +* Support for SSH subsystems +* Forward local and remote ports via an SSH connection + +== SYNOPSIS: + +In a nutshell: + + require 'net/ssh' + + Net::SSH.start('host', 'user', :password => "password") do |ssh| + # capture all stderr and stdout output from a remote process + output = ssh.exec!("hostname") + + # capture only stdout matching a particular pattern + stdout = "" + ssh.exec!("ls -l /home/jamis") do |channel, stream, data| + stdout << data if stream == :stdout + end + puts stdout + + # run multiple processes in parallel to completion + ssh.exec "sed ..." + ssh.exec "awk ..." + ssh.exec "rm -rf ..." + ssh.loop + + # open a new channel and configure a minimal set of callbacks, then run + # the event loop until the channel finishes (closes) + channel = ssh.open_channel do |ch| + ch.exec "/usr/local/bin/ruby /path/to/file.rb" do |ch, success| + raise "could not execute command" unless success + + # "on_data" is called when the process writes something to stdout + ch.on_data do |c, data| + $STDOUT.print data + end + + # "on_extended_data" is called when the process writes something to stderr + ch.on_extended_data do |c, type, data| + $STDERR.print data + end + + ch.on_close { puts "done!" } + end + end + + channel.wait + + # forward connections on local port 1234 to port 80 of www.capify.org + ssh.forward.local(1234, "www.capify.org", 80) + ssh.loop { true } + end + +See Net::SSH for more documentation, and links to further information. + +== REQUIREMENTS: + +The only requirement you might be missing is the OpenSSL bindings for Ruby. 
These are built by default on most platforms, but you can verify that they're built and installed on your system by running the following command line: + + ruby -ropenssl -e 'puts OpenSSL::OPENSSL_VERSION' + +If that spits out something like "OpenSSL 0.9.8g 19 Oct 2007", then you're set. If you get an error, then you'll need to see about rebuilding ruby with OpenSSL support, or (if your platform supports it) installing the OpenSSL bindings separately. + +Additionally: if you are going to be having Net::SSH prompt you for things like passwords or certificate passphrases, you'll want to have either the Highline (recommended) or Termios (unix systems only) gem installed, so that the passwords don't echo in clear text. + +Lastly, if you want to run the tests or use any of the Rake tasks, you'll need: + +* Echoe (for the Rakefile) +* Mocha (for the tests) + + +== INSTALL: + +* gem install net-ssh (might need sudo privileges) + + +== ARCFOUR SUPPORT: + +from Karl Varga: + +Ruby's OpenSSL bindings always return a key length of 16 for RC4 ciphers, which means that when we try to use ARCFOUR256 or higher, Net::SSH generates keys which are consistently too short - 16 bytes as opposed to 32 bytes - resulting in the following error: + + OpenSSL::CipherError: key length too short + +My patch simply instructs Net::SSH to build keys of the the proper length, regardless of the required key length reported by OpenSSL. + +You should also be aware that your OpenSSL C libraries may also contain this bug. I've updated to 0.9.8k, but according to this thread[https://bugzilla.mindrot.org/show_bug.cgi?id=1291], the bug existed as recently as 0.9.8e! I've manually taken a look at my header files and they look ok, which is what makes me think it's a bug in the Ruby implementation. + +To see your OpenSSL version: + + $ openssl version + OpenSSL 0.9.8k 25 Mar 2009 + +After installing this gem, verify that Net::SSH is generating keys of the correct length by running the script support/arcfour_check.rb: + + $ ruby arcfour_support.rb + +which should produce the following: + + arcfour128: [16, 8] OpenSSL::Cipher::Cipher + arcfour256: [32, 8] OpenSSL::Cipher::Cipher + arcfour512: [64, 8] OpenSSL::Cipher::Cipher + + +== LICENSE: + +(The MIT License) + +Copyright (c) 2008 Jamis Buck + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
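The ARCFOUR notes in the README above describe the key-length mismatch, but the bundled support/arcfour_check.rb script is not reproduced in this patch. The following is a minimal stand-alone sketch (not the bundled script) of the same kind of check, assuming only Ruby's stock OpenSSL bindings plus the net-ssh gem vendored by this change; the host, user and password passed to Net::SSH.start are placeholders, and :encryption is the option documented in lib/net/ssh.rb later in this diff.

  require 'openssl'
  require 'net/ssh'   # the patched gem vendored by this change

  # Stock OpenSSL bindings report a 16-byte key for RC4, even though
  # arcfour256/arcfour512 need 32/64-byte keys.
  rc4 = OpenSSL::Cipher.new("rc4")
  puts "default rc4 key length: #{rc4.key_len}"    # typically 16

  # RC4 accepts variable-length keys, so a longer key length can be forced;
  # building keys of the proper length is what the patch described above does.
  rc4.key_len = 32
  puts "forced rc4 key length:  #{rc4.key_len}"    # 32

  # With the patched gem installed, request the arcfour256 cipher via the
  # :encryption option (connection details here are placeholders).
  Net::SSH.start("example.com", "user", :password => "secret",
                 :encryption => "arcfour256") do |ssh|
    puts ssh.exec!("hostname")
  end

If the first line prints 16 and the second prints 32, the interpreter's OpenSSL bindings can supply the longer keys that the patched Net::SSH builds for the arcfour256 and arcfour512 ciphers.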
diff --git a/vendor/gems/gems/net-ssh-2.0.15/Rakefile b/vendor/gems/gems/net-ssh-2.0.15/Rakefile new file mode 100644 index 00000000..478db120 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/Rakefile @@ -0,0 +1,79 @@ +require 'rubygems' +require 'rake/clean' +require 'rake/gempackagetask' +require 'hanna/rdoctask' +require 'fileutils' +include FileUtils + +task :default => :package + +# CONFIG ============================================================= + +# Change the following according to your needs +README = "README.rdoc" +CHANGES = "CHANGELOG.rdoc" +THANKS = 'THANKS.rdoc' + +# Files and directories to be deleted when you run "rake clean" +CLEAN.include [ 'pkg', '*.gem', '.config', 'doc'] + +# Virginia assumes your project and gemspec have the same name +name = 'net-ssh' +load "#{name}.gemspec" +version = @spec.version + +# That's it! The following defaults should allow you to get started +# on other things. + + +# TESTS/SPECS ========================================================= + + + +# INSTALL ============================================================= + +Rake::GemPackageTask.new(@spec) do |p| + p.need_tar = true if RUBY_PLATFORM !~ /mswin/ +end + +task :release => [ :rdoc, :package ] +task :install => [ :rdoc, :package ] do + sh %{sudo gem install pkg/#{name}-#{version}.gem} +end +task :uninstall => [ :clean ] do + sh %{sudo gem uninstall #{name}} +end + + +# RUBYFORGE RELEASE / PUBLISH TASKS ================================== + +if @spec.rubyforge_project + desc 'Publish website to rubyforge' + task 'publish:rdoc' => 'doc/index.html' do + sh "scp -rp doc/* rubyforge.org:/var/www/gforge-projects/#{name}/ssh/v2/api/" + end + + desc 'Public release to rubyforge' + task 'publish:gem' => [:package] do |t| + sh <<-end + rubyforge add_release -o Any -a #{CHANGES} -f -n #{README} #{name} #{name} #{@spec.version} pkg/#{name}-#{@spec.version}.gem && + rubyforge add_file -o Any -a #{CHANGES} -f -n #{README} #{name} #{name} #{@spec.version} pkg/#{name}-#{@spec.version}.tgz + end + end +end + + + +# RUBY DOCS TASK ================================== + +Rake::RDocTask.new do |t| + t.rdoc_dir = 'doc' + t.title = @spec.summary + t.options << '--line-numbers' << '-A cattr_accessor=object' + t.options << '--charset' << 'utf-8' + t.rdoc_files.include(README) + t.rdoc_files.include(CHANGES) + t.rdoc_files.include(THANKS) + t.rdoc_files.include('lib/**/*.rb') +end + diff --git a/vendor/gems/gems/net-ssh-2.0.15/Rudyfile b/vendor/gems/gems/net-ssh-2.0.15/Rudyfile new file mode 100644 index 00000000..2028574e --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/Rudyfile @@ -0,0 +1,110 @@ +# Rudyfile +# +# This configuration is used to test installing +# and running net-ssh on a clean machine. 
+# +# Usage: +# +# $ rudy -vv startup +# $ rudy -vv testsuite +# $ rudy -vv shutdown +# +# Requires: Rudy 0.9 (http://code.google.com/p/rudy/) +# + +defaults do + color true + environment :test + role :netssh +end + +machines do + region :'us-east-1' do + ami 'ami-e348af8a' # Alestic Debian 5.0, 32-bit (US) + end + env :test do + role :netssh do + user :root + end + end +end + +commands do + allow :apt_get, "apt-get", :y, :q + allow :gem_install, "/usr/bin/gem", "install", :n, '/usr/bin', :y, :V, "--no-rdoc", "--no-ri" + allow :gem_sources, "/usr/bin/gem", "sources" + allow :gem_uninstall, "/usr/bin/gem", "uninstall", :V + allow :update_rubygems + allow :rm +end + +routines do + + testsuite do + before :sysupdate, :installdeps, :install_gem + + remote :root do + directory_upload 'test', '/tmp/' + cd '/tmp' + ruby :I, 'lib/', :I, 'test/', :r, 'rubygems', 'test/test_all.rb' + end + + after :install_rubyforge, :install_github + end + + install_rubyforge do + remote :root do + gem_install 'net-ssh', '--version', '2.0.7' + gem_install 'net-ssh' + end + end + + install_github do + remote :root do + gem_sources :a, "http://gems.github.com" + gem_install 'net-ssh-net-ssh' + end + end + + install_gem do + before :package_gem + remote :root do + disable_safe_mode + file_upload "pkg/net-ssh-*.gem", "/tmp/" + gem_install "/tmp/net-ssh-*.gem" + end + end + + package_gem do + local do + rm :r, :f, 'pkg' + rake 'package' + end + end + + remove do + remote :root do + gem_uninstall 'net-ssh' + end + end + + installdeps do + remote :root do + gem_install "rye", "test-unit", "mocha" + rye 'authorize-local' + end + end + + sysupdate do + remote :root do + apt_get "update" + apt_get "install", "build-essential", "git-core" + apt_get "install", "ruby1.8-dev", "rdoc", "libzlib-ruby", "rubygems" + mkdir :p, "/var/lib/gems/1.8/bin" # Doesn't get created, but causes Rubygems to fail + gem_install "builder", "session" + gem_install 'rubygems-update', "-v=1.3.4" # circular issue with 1.3.5 and hoe + update_rubygems + end + end +end + diff --git a/vendor/gems/gems/net-ssh-2.0.15/THANKS.rdoc b/vendor/gems/gems/net-ssh-2.0.15/THANKS.rdoc new file mode 100644 index 00000000..d060dce6 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/THANKS.rdoc @@ -0,0 +1,16 @@ +Net::SSH was originally written by Jamis Buck . In +addition, the following individuals are gratefully acknowledged for their +contributions: + +GOTOU Yuuzou + * help and code related to OpenSSL + +Guillaume Marçais + * support for communicating with the the PuTTY "pageant" process + +Daniel Berger + * help getting unit tests in earlier Net::SSH versions to pass in Windows + * initial version of Net::SSH::Config provided inspiration and encouragement + +Chris Andrews and Lee Jensen + * support for ssh agent forwarding diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh.rb new file mode 100644 index 00000000..c752c71b --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh.rb @@ -0,0 +1,215 @@ +# Make sure HOME is set, regardless of OS, so that File.expand_path works +# as expected with tilde characters. +ENV['HOME'] ||= ENV['HOMEPATH'] ? "#{ENV['HOMEDRIVE']}#{ENV['HOMEPATH']}" : "." 
+ +require 'logger' + +require 'net/ssh/config' +require 'net/ssh/errors' +require 'net/ssh/loggable' +require 'net/ssh/transport/session' +require 'net/ssh/authentication/session' +require 'net/ssh/connection/session' + +module Net + + # Net::SSH is a library for interacting, programmatically, with remote + # processes via the SSH2 protocol. Sessions are always initiated via + # Net::SSH.start. From there, a program interacts with the new SSH session + # via the convenience methods on Net::SSH::Connection::Session, by opening + # and interacting with new channels (Net::SSH::Connection:Session#open_channel + # and Net::SSH::Connection::Channel), or by forwarding local and/or + # remote ports through the connection (Net::SSH::Service::Forward). + # + # The SSH protocol is very event-oriented. Requests are sent from the client + # to the server, and are answered asynchronously. This gives great flexibility + # (since clients can have multiple requests pending at a time), but it also + # adds complexity. Net::SSH tries to manage this complexity by providing + # some simpler methods of synchronous communication (see Net::SSH::Connection::Session#exec!). + # + # In general, though, and if you want to do anything more complicated than + # simply executing commands and capturing their output, you'll need to use + # channels (Net::SSH::Connection::Channel) to build state machines that are + # executed while the event loop runs (Net::SSH::Connection::Session#loop). + # + # Net::SSH::Connection::Session and Net::SSH::Connection::Channel have more + # information about this technique. + # + # = "Um, all I want to do is X, just show me how!" + # + # == X == "execute a command and capture the output" + # + # Net::SSH.start("host", "user", :password => "password") do |ssh| + # result = ssh.exec!("ls -l") + # puts result + # end + # + # == X == "forward connections on a local port to a remote host" + # + # Net::SSH.start("host", "user", :password => "password") do |ssh| + # ssh.forward.local(1234, "www.google.com", 80) + # ssh.loop { true } + # end + # + # == X == "forward connections on a remote port to the local host" + # + # Net::SSH.start("host", "user", :password => "password") do |ssh| + # ssh.forward.remote(80, "www.google.com", 1234) + # ssh.loop { true } + # end + module SSH + # This is the set of options that Net::SSH.start recognizes. See + # Net::SSH.start for a description of each option. + VALID_OPTIONS = [ + :auth_methods, :compression, :compression_level, :config, :encryption, + :forward_agent, :hmac, :host_key, :kex, :keys, :key_data, :languages, + :logger, :paranoid, :password, :port, :proxy, :rekey_blocks_limit, + :rekey_limit, :rekey_packet_limit, :timeout, :verbose, + :global_known_hosts_file, :user_known_hosts_file, :host_key_alias, + :host_name, :user, :properties, :passphrase + ] + + # The standard means of starting a new SSH connection. When used with a + # block, the connection will be closed when the block terminates, otherwise + # the connection will just be returned. The yielded (or returned) value + # will be an instance of Net::SSH::Connection::Session (q.v.). (See also + # Net::SSH::Connection::Channel and Net::SSH::Service::Forward.) + # + # Net::SSH.start("host", "user") do |ssh| + # ssh.exec! 
"cp /some/file /another/location" + # hostname = ssh.exec!("hostname") + # + # ssh.open_channel do |ch| + # ch.exec "sudo -p 'sudo password: ' ls" do |ch, success| + # abort "could not execute sudo ls" unless success + # + # ch.on_data do |ch, data| + # print data + # if data =~ /sudo password: / + # ch.send_data("password\n") + # end + # end + # end + # end + # + # ssh.loop + # end + # + # This method accepts the following options (all are optional): + # + # * :auth_methods => an array of authentication methods to try + # * :compression => the compression algorithm to use, or +true+ to use + # whatever is supported. + # * :compression_level => the compression level to use when sending data + # * :config => set to +true+ to load the default OpenSSH config files + # (~/.ssh/config, /etc/ssh_config), or to +false+ to not load them, or to + # a file-name (or array of file-names) to load those specific configuration + # files. Defaults to +true+. + # * :encryption => the encryption cipher (or ciphers) to use + # * :forward_agent => set to true if you want the SSH agent connection to + # be forwarded + # * :global_known_hosts_file => the location of the global known hosts + # file. Set to an array if you want to specify multiple global known + # hosts files. Defaults to %w(/etc/ssh/known_hosts /etc/ssh/known_hosts2). + # * :hmac => the hmac algorithm (or algorithms) to use + # * :host_key => the host key algorithm (or algorithms) to use + # * :host_key_alias => the host name to use when looking up or adding a + # host to a known_hosts dictionary file + # * :host_name => the real host name or IP to log into. This is used + # instead of the +host+ parameter, and is primarily only useful when + # specified in an SSH configuration file. It lets you specify an + # "alias", similarly to adding an entry in /etc/hosts but without needing + # to modify /etc/hosts. + # * :kex => the key exchange algorithm (or algorithms) to use + # * :keys => an array of file names of private keys to use for publickey + # and hostbased authentication + # * :key_data => an array of strings, with each element of the array being + # a raw private key in PEM format. + # * :logger => the logger instance to use when logging + # * :paranoid => either true, false, or :very, specifying how strict + # host-key verification should be + # * :passphrase => the passphrase to use when loading a private key (default + # is +nil+, for no passphrase) + # * :password => the password to use to login + # * :port => the port to use when connecting to the remote host + # * :properties => a hash of key/value pairs to add to the new connection's + # properties (see Net::SSH::Connection::Session#properties) + # * :proxy => a proxy instance (see Proxy) to use when connecting + # * :rekey_blocks_limit => the max number of blocks to process before rekeying + # * :rekey_limit => the max number of bytes to process before rekeying + # * :rekey_packet_limit => the max number of packets to process before rekeying + # * :timeout => how long to wait for the initial connection to be made + # * :user => the user name to log in as; this overrides the +user+ + # parameter, and is primarily only useful when provided via an SSH + # configuration file. + # * :user_known_hosts_file => the location of the user known hosts file. + # Set to an array to specify multiple user known hosts files. + # Defaults to %w(~/.ssh/known_hosts ~/.ssh/known_hosts2). 
+ # * :verbose => how verbose to be (Logger verbosity constants, Logger::DEBUG + # is very verbose, Logger::FATAL is all but silent). Logger::FATAL is the + # default. The symbols :debug, :info, :warn, :error, and :fatal are also + # supported and are translated to the corresponding Logger constant. + def self.start(host, user, options={}, &block) + invalid_options = options.keys - VALID_OPTIONS + if invalid_options.any? + raise ArgumentError, "invalid option(s): #{invalid_options.join(', ')}" + end + + options[:user] = user if user + options = configuration_for(host, options.fetch(:config, true)).merge(options) + host = options.fetch(:host_name, host) + + if !options.key?(:logger) + options[:logger] = Logger.new(STDERR) + options[:logger].level = Logger::FATAL + end + + if options[:verbose] + options[:logger].level = case options[:verbose] + when Fixnum then options[:verbose] + when :debug then Logger::DEBUG + when :info then Logger::INFO + when :warn then Logger::WARN + when :error then Logger::ERROR + when :fatal then Logger::FATAL + else raise ArgumentError, "can't convert #{options[:verbose].inspect} to any of the Logger level constants" + end + end + + transport = Transport::Session.new(host, options) + auth = Authentication::Session.new(transport, options) + + user = options.fetch(:user, user) + if auth.authenticate("ssh-connection", user, options[:password]) + connection = Connection::Session.new(transport, options) + if block_given? + yield connection + connection.close + else + return connection + end + else + raise AuthenticationFailed, user + end + end + + # Returns a hash of the configuration options for the given host, as read + # from the SSH configuration file(s). If +use_ssh_config+ is true (the + # default), this will load configuration from both ~/.ssh/config and + # /etc/ssh_config. If +use_ssh_config+ is nil or false, nothing will be + # loaded (and an empty hash returned). Otherwise, +use_ssh_config+ may + # be a file name (or array of file names) of SSH configuration file(s) + # to read. + # + # See Net::SSH::Config for the full description of all supported options. + def self.configuration_for(host, use_ssh_config=true) + files = case use_ssh_config + when true then Net::SSH::Config.default_files + when false, nil then return {} + else Array(use_ssh_config) + end + + Net::SSH::Config.for(host, files) + end + end +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/agent.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/agent.rb new file mode 100644 index 00000000..2915cce9 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/agent.rb @@ -0,0 +1,176 @@ +require 'net/ssh/buffer' +require 'net/ssh/errors' +require 'net/ssh/loggable' +require 'net/ssh/transport/server_version' + +require 'net/ssh/authentication/pageant' if File::ALT_SEPARATOR && !(RUBY_PLATFORM =~ /java/) + +module Net; module SSH; module Authentication + + # A trivial exception class for representing agent-specific errors. + class AgentError < Net::SSH::Exception; end + + # An exception for indicating that the SSH agent is not available. + class AgentNotAvailable < AgentError; end + + # This class implements a simple client for the ssh-agent protocol. It + # does not implement any specific protocol, but instead copies the + # behavior of the ssh-agent functions in the OpenSSH library (3.8). + # + # This means that although it behaves like a SSH1 client, it also has + # some SSH2 functionality (like signing data). 
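Tying the Net::SSH.start documentation above together, a short sketch that exercises a few of the listed options (host, user and key path are placeholders; note how the :verbose symbol is translated to a Logger constant by the case statement in Net::SSH.start):

    require 'rubygems'
    require 'net/ssh'

    Net::SSH.start('example.com', 'deploy',
                   :port    => 2222,
                   :keys    => ['~/.ssh/id_rsa'],
                   :config  => false,            # skip ~/.ssh/config and /etc/ssh_config
                   :timeout => 10,
                   :verbose => :warn) do |ssh|   # mapped to Logger::WARN above
      puts ssh.exec!('uptime')
    end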
+ class Agent + include Loggable + + # A simple module for extending keys, to allow comments to be specified + # for them. + module Comment + attr_accessor :comment + end + + SSH2_AGENT_REQUEST_VERSION = 1 + SSH2_AGENT_REQUEST_IDENTITIES = 11 + SSH2_AGENT_IDENTITIES_ANSWER = 12 + SSH2_AGENT_SIGN_REQUEST = 13 + SSH2_AGENT_SIGN_RESPONSE = 14 + SSH2_AGENT_FAILURE = 30 + SSH2_AGENT_VERSION_RESPONSE = 103 + + SSH_COM_AGENT2_FAILURE = 102 + + SSH_AGENT_REQUEST_RSA_IDENTITIES = 1 + SSH_AGENT_RSA_IDENTITIES_ANSWER1 = 2 + SSH_AGENT_RSA_IDENTITIES_ANSWER2 = 5 + SSH_AGENT_FAILURE = 5 + + # The underlying socket being used to communicate with the SSH agent. + attr_reader :socket + + # Instantiates a new agent object, connects to a running SSH agent, + # negotiates the agent protocol version, and returns the agent object. + def self.connect(logger=nil) + agent = new(logger) + agent.connect! + agent.negotiate! + agent + end + + # Creates a new Agent object, using the optional logger instance to + # report status. + def initialize(logger=nil) + self.logger = logger + end + + # Connect to the agent process using the socket factory and socket name + # given by the attribute writers. If the agent on the other end of the + # socket reports that it is an SSH2-compatible agent, this will fail + # (it only supports the ssh-agent distributed by OpenSSH). + def connect! + begin + debug { "connecting to ssh-agent" } + @socket = agent_socket_factory.open(ENV['SSH_AUTH_SOCK']) + rescue + error { "could not connect to ssh-agent" } + raise AgentNotAvailable, $!.message + end + end + + # Attempts to negotiate the SSH agent protocol version. Raises an error + # if the version could not be negotiated successfully. + def negotiate! + # determine what type of agent we're communicating with + type, body = send_and_wait(SSH2_AGENT_REQUEST_VERSION, :string, Transport::ServerVersion::PROTO_VERSION) + + if type == SSH2_AGENT_VERSION_RESPONSE + raise NotImplementedError, "SSH2 agents are not yet supported" + elsif type != SSH_AGENT_RSA_IDENTITIES_ANSWER1 && type != SSH_AGENT_RSA_IDENTITIES_ANSWER2 + raise AgentError, "unknown response from agent: #{type}, #{body.to_s.inspect}" + end + end + + # Return an array of all identities (public keys) known to the agent. + # Each key returned is augmented with a +comment+ property which is set + # to the comment returned by the agent for that key. + def identities + type, body = send_and_wait(SSH2_AGENT_REQUEST_IDENTITIES) + raise AgentError, "could not get identity count" if agent_failed(type) + raise AgentError, "bad authentication reply: #{type}" if type != SSH2_AGENT_IDENTITIES_ANSWER + + identities = [] + body.read_long.times do + key = Buffer.new(body.read_string).read_key + key.extend(Comment) + key.comment = body.read_string + identities.push key + end + + return identities + end + + # Closes this socket. This agent reference is no longer able to + # query the agent. + def close + @socket.close + end + + # Using the agent and the given public key, sign the given data. The + # signature is returned in SSH2 format. + def sign(key, data) + type, reply = send_and_wait(SSH2_AGENT_SIGN_REQUEST, :string, Buffer.from(:key, key), :string, data, :long, 0) + + if agent_failed(type) + raise AgentError, "agent could not sign data with requested identity" + elsif type != SSH2_AGENT_SIGN_RESPONSE + raise AgentError, "bad authentication response #{type}" + end + + return reply.read_string + end + + private + + # Returns the agent socket factory to use. 
+ def agent_socket_factory + if File::ALT_SEPARATOR + Pageant::Socket + else + UNIXSocket + end + end + + # Send a new packet of the given type, with the associated data. + def send_packet(type, *args) + buffer = Buffer.from(*args) + data = [buffer.length + 1, type.to_i, buffer.to_s].pack("NCA*") + debug { "sending agent request #{type} len #{buffer.length}" } + @socket.send data, 0 + end + + # Read the next packet from the agent. This will return a two-part + # tuple consisting of the packet type, and the packet's body (which + # is returned as a Net::SSH::Buffer). + def read_packet + buffer = Net::SSH::Buffer.new(@socket.read(4)) + buffer.append(@socket.read(buffer.read_long)) + type = buffer.read_byte + debug { "received agent packet #{type} len #{buffer.length-4}" } + return type, buffer + end + + # Send the given packet and return the subsequent reply from the agent. + # (See #send_packet and #read_packet). + def send_and_wait(type, *args) + send_packet(type, *args) + read_packet + end + + # Returns +true+ if the parameter indicates a "failure" response from + # the agent, and +false+ otherwise. + def agent_failed(type) + type == SSH_AGENT_FAILURE || + type == SSH2_AGENT_FAILURE || + type == SSH_COM_AGENT2_FAILURE + end + end + +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/constants.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/constants.rb new file mode 100644 index 00000000..911b9ca4 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/constants.rb @@ -0,0 +1,18 @@ +module Net; module SSH; module Authentication + + # Describes the constants used by the Net::SSH::Authentication components + # of the Net::SSH library. Individual authentication method implemenations + # may define yet more constants that are specific to their implementation. + module Constants + USERAUTH_REQUEST = 50 + USERAUTH_FAILURE = 51 + USERAUTH_SUCCESS = 52 + USERAUTH_BANNER = 53 + + USERAUTH_PASSWD_CHANGEREQ = 60 + USERAUTH_PK_OK = 60 + + USERAUTH_METHOD_RANGE = 60..79 + end + +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/key_manager.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/key_manager.rb new file mode 100644 index 00000000..1a2f386e --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/key_manager.rb @@ -0,0 +1,193 @@ +require 'net/ssh/errors' +require 'net/ssh/key_factory' +require 'net/ssh/loggable' +require 'net/ssh/authentication/agent' + +module Net + module SSH + module Authentication + + # A trivial exception class used to report errors in the key manager. + class KeyManagerError < Net::SSH::Exception; end + + # This class encapsulates all operations done by clients on a user's + # private keys. In practice, the client should never need a reference + # to a private key; instead, they grab a list of "identities" (public + # keys) that are available from the KeyManager, and then use + # the KeyManager to do various private key operations using those + # identities. + # + # The KeyManager also uses the Agent class to encapsulate the + # ssh-agent. Thus, from a client's perspective it is completely + # hidden whether an identity comes from the ssh-agent or from a file + # on disk. 
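Before the KeyManager below wraps it, the Agent client above can also be exercised directly. This sketch assumes an OpenSSH ssh-agent is running and reachable through ENV['SSH_AUTH_SOCK']; otherwise Agent.connect raises AgentNotAvailable:

    require 'net/ssh'

    agent = Net::SSH::Authentication::Agent.connect
    keys  = agent.identities
    keys.each do |key|
      # each key is extended with the Comment module defined above
      puts "#{key.ssh_type} #{key.fingerprint} (#{key.comment})"
    end
    blob = agent.sign(keys.first, "data to sign") unless keys.empty?  # SSH2 signature blob
    agent.close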
+ class KeyManager + include Loggable + + # The list of user key files that will be examined + attr_reader :key_files + + # The list of user key data that will be examined + attr_reader :key_data + + # The map of loaded identities + attr_reader :known_identities + + # The map of options that were passed to the key-manager + attr_reader :options + + # Create a new KeyManager. By default, the manager will + # use the ssh-agent (if it is running). + def initialize(logger, options={}) + self.logger = logger + @key_files = [] + @key_data = [] + @use_agent = true + @known_identities = {} + @agent = nil + @options = options + end + + # Clear all knowledge of any loaded user keys. This also clears the list + # of default identity files that are to be loaded, thus making it + # appropriate to use if a client wishes to NOT use the default identity + # files. + def clear! + key_files.clear + key_data.clear + known_identities.clear + self + end + + # Add the given key_file to the list of key files that will be used. + def add(key_file) + key_files.push(File.expand_path(key_file)).uniq! + self + end + + # Add the given key_file to the list of keys that will be used. + def add_key_data(key_data_) + key_data.push(key_data_).uniq! + self + end + + # This is used as a hint to the KeyManager indicating that the agent + # connection is no longer needed. Any other open resources may be closed + # at this time. + # + # Calling this does NOT indicate that the KeyManager will no longer + # be used. Identities may still be requested and operations done on + # loaded identities, in which case, the agent will be automatically + # reconnected. This method simply allows the client connection to be + # closed when it will not be used in the immediate future. + def finish + @agent.close if @agent + @agent = nil + end + + # Iterates over all available identities (public keys) known to this + # manager. As it finds one, it will then yield it to the caller. + # The origin of the identities may be from files on disk or from an + # ssh-agent. Note that identities from an ssh-agent are always listed + # first in the array, with other identities coming after. + def each_identity + if agent + agent.identities.each do |key| + known_identities[key] = { :from => :agent } + yield key + end + end + + key_files.each do |file| + public_key_file = file + ".pub" + if File.readable?(public_key_file) + begin + key = KeyFactory.load_public_key(public_key_file) + known_identities[key] = { :from => :file, :file => file } + yield key + rescue Exception => e + error { "could not load public key file `#{public_key_file}': #{e.class} (#{e.message})" } + end + elsif File.readable?(file) + begin + private_key = KeyFactory.load_private_key(file, options[:passphrase]) + key = private_key.send(:public_key) + known_identities[key] = { :from => :file, :file => file, :key => private_key } + yield key + rescue Exception => e + error { "could not load private key file `#{file}': #{e.class} (#{e.message})" } + end + end + end + + key_data.each do |data| + private_key = KeyFactory.load_data_private_key(data) + key = private_key.send(:public_key) + known_identities[key] = { :from => :key_data, :data => data, :key => private_key } + yield key + end + + self + end + + # Sign the given data, using the corresponding private key of the given + # identity. 
If the identity was originally obtained from an ssh-agent, + # then the ssh-agent will be used to sign the data, otherwise the + # private key for the identity will be loaded from disk (if it hasn't + # been loaded already) and will then be used to sign the data. + # + # Regardless of the identity's origin or who does the signing, this + # will always return the signature in an SSH2-specified "signature + # blob" format. + def sign(identity, data) + info = known_identities[identity] or raise KeyManagerError, "the given identity is unknown to the key manager" + + if info[:key].nil? && info[:from] == :file + begin + info[:key] = KeyFactory.load_private_key(info[:file], options[:passphrase]) + rescue Exception => e + raise KeyManagerError, "the given identity is known, but the private key could not be loaded: #{e.class} (#{e.message})" + end + end + + if info[:key] + return Net::SSH::Buffer.from(:string, identity.ssh_type, + :string, info[:key].ssh_do_sign(data.to_s)).to_s + end + + if info[:from] == :agent + raise KeyManagerError, "the agent is no longer available" unless agent + return agent.sign(identity, data.to_s) + end + + raise KeyManagerError, "[BUG] can't determine identity origin (#{info.inspect})" + end + + # Identifies whether the ssh-agent will be used or not. + def use_agent? + @use_agent + end + + # Toggles whether the ssh-agent will be used or not. If true, an + # attempt will be made to use the ssh-agent. If false, any existing + # connection to an agent is closed and the agent will not be used. + def use_agent=(use_agent) + finish if !use_agent + @use_agent = use_agent + end + + # Returns an Agent instance to use for communicating with an SSH + # agent process. Returns nil if use of an SSH agent has been disabled, + # or if the agent is otherwise not available. + def agent + return unless use_agent? + @agent ||= Agent.connect(logger) + rescue AgentNotAvailable + @use_agent = false + nil + end + end + + end + end +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/abstract.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/abstract.rb new file mode 100644 index 00000000..339c53c7 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/abstract.rb @@ -0,0 +1,60 @@ +require 'net/ssh/buffer' +require 'net/ssh/errors' +require 'net/ssh/loggable' +require 'net/ssh/authentication/constants' + +module Net; module SSH; module Authentication; module Methods + + # The base class of all user authentication methods. It provides a few + # bits of common functionality. + class Abstract + include Constants, Loggable + + # The authentication session object + attr_reader :session + + # The key manager object. Not all authentication methods will require + # this. + attr_reader :key_manager + + # Instantiates a new authentication method. + def initialize(session, options={}) + @session = session + @key_manager = options[:key_manager] + @options = options + self.logger = session.logger + end + + # Returns the session-id, as generated during the first key exchange of + # an SSH connection. + def session_id + session.transport.algorithms.session_id + end + + # Sends a message via the underlying transport layer abstraction. This + # will block until the message is completely sent. + def send_message(msg) + session.transport.send_message(msg) + end + + # Creates a new USERAUTH_REQUEST packet. The extra arguments on the end + # must be either boolean values or strings, and are tacked onto the end + # of the packet. 
The new packet is returned, ready for sending. + def userauth_request(username, next_service, auth_method, *others) + buffer = Net::SSH::Buffer.from(:byte, USERAUTH_REQUEST, + :string, username, :string, next_service, :string, auth_method) + + others.each do |value| + case value + when true, false then buffer.write_bool(value) + when String then buffer.write_string(value) + else raise ArgumentError, "don't know how to write #{value.inspect}" + end + end + + buffer + end + + end + +end; end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/hostbased.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/hostbased.rb new file mode 100644 index 00000000..43c3eac8 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/hostbased.rb @@ -0,0 +1,71 @@ +require 'net/ssh/authentication/methods/abstract' + +module Net + module SSH + module Authentication + module Methods + + # Implements the host-based SSH authentication method. + class Hostbased < Abstract + include Constants + + # Attempts to perform host-based authorization of the user by trying + # all known keys. + def authenticate(next_service, username, password=nil) + return false unless key_manager + + key_manager.each_identity do |identity| + return true if authenticate_with(identity, next_service, + username, key_manager) + end + + return false + end + + private + + # Returns the hostname as reported by the underlying socket. + def hostname + session.transport.socket.client_name + end + + # Attempts to perform host-based authentication of the user, using + # the given host identity (key). + def authenticate_with(identity, next_service, username, key_manager) + debug { "trying hostbased (#{identity.fingerprint})" } + client_username = ENV['USER'] || username + + req = build_request(identity, next_service, username, "#{hostname}.", client_username) + sig_data = Buffer.from(:string, session_id, :raw, req) + + sig = key_manager.sign(identity, sig_data.to_s) + + message = Buffer.from(:raw, req, :string, sig) + + send_message(message) + message = session.next_message + + case message.type + when USERAUTH_SUCCESS + info { "hostbased succeeded (#{identity.fingerprint})" } + return true + when USERAUTH_FAILURE + info { "hostbased failed (#{identity.fingerprint})" } + return false + else + raise Net::SSH::Exception, "unexpected server response to USERAUTH_REQUEST: #{message.type} (#{message.inspect})" + end + end + + # Build the "core" hostbased request string. + def build_request(identity, next_service, username, hostname, client_username) + userauth_request(username, next_service, "hostbased", identity.ssh_type, + Buffer.from(:key, identity).to_s, hostname, client_username).to_s + end + + end + + end + end + end +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/keyboard_interactive.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/keyboard_interactive.rb new file mode 100644 index 00000000..1ab24590 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/keyboard_interactive.rb @@ -0,0 +1,66 @@ +require 'net/ssh/prompt' +require 'net/ssh/authentication/methods/abstract' + +module Net + module SSH + module Authentication + module Methods + + # Implements the "keyboard-interactive" SSH authentication method. 
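The method classes in this directory (hostbased above, keyboard-interactive and password below, publickey after that) are never instantiated directly; Authentication::Session selects them from the :auth_methods option documented earlier. A placeholder sketch restricting the negotiation to two of them:

    Net::SSH.start('example.com', 'deploy',
                   :password     => 's3cr3t',
                   :auth_methods => %w(password keyboard-interactive)) do |ssh|
      puts ssh.exec!('hostname')
    end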
+ class KeyboardInteractive < Abstract + include Prompt + + USERAUTH_INFO_REQUEST = 60 + USERAUTH_INFO_RESPONSE = 61 + + # Attempt to authenticate the given user for the given service. + def authenticate(next_service, username, password=nil) + debug { "trying keyboard-interactive" } + send_message(userauth_request(username, next_service, "keyboard-interactive", "", "")) + + loop do + message = session.next_message + + case message.type + when USERAUTH_SUCCESS + debug { "keyboard-interactive succeeded" } + return true + when USERAUTH_FAILURE + debug { "keyboard-interactive failed" } + return false + when USERAUTH_INFO_REQUEST + name = message.read_string + instruction = message.read_string + debug { "keyboard-interactive info request" } + + unless password + puts(name) unless name.empty? + puts(instruction) unless instruction.empty? + end + + lang_tag = message.read_string + responses =[] + + message.read_long.times do + text = message.read_string + echo = message.read_bool + responses << (password || prompt(text, echo)) + end + + # if the password failed the first time around, don't try + # and use it on subsequent requests. + password = nil + + msg = Buffer.from(:byte, USERAUTH_INFO_RESPONSE, :long, responses.length, :string, responses) + send_message(msg) + else + raise Net::SSH::Exception, "unexpected reply in keyboard interactive: #{message.type} (#{message.inspect})" + end + end + end + end + + end + end + end +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/password.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/password.rb new file mode 100644 index 00000000..9b1c0958 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/password.rb @@ -0,0 +1,39 @@ +require 'net/ssh/errors' +require 'net/ssh/authentication/methods/abstract' + +module Net + module SSH + module Authentication + module Methods + + # Implements the "password" SSH authentication method. + class Password < Abstract + # Attempt to authenticate the given user for the given service. If + # the password parameter is nil, this will never do anything except + # return false. + def authenticate(next_service, username, password=nil) + return false unless password + + send_message(userauth_request(username, next_service, "password", false, password)) + message = session.next_message + + case message.type + when USERAUTH_SUCCESS + debug { "password succeeded" } + return true + when USERAUTH_FAILURE + debug { "password failed" } + return false + when USERAUTH_PASSWD_CHANGEREQ + debug { "password change request received, failing" } + return false + else + raise Net::SSH::Exception, "unexpected reply to USERAUTH_REQUEST: #{message.type} (#{message.inspect})" + end + end + end + + end + end + end +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/publickey.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/publickey.rb new file mode 100644 index 00000000..b453def5 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/methods/publickey.rb @@ -0,0 +1,92 @@ +require 'net/ssh/buffer' +require 'net/ssh/errors' +require 'net/ssh/authentication/methods/abstract' + +module Net + module SSH + module Authentication + module Methods + + # Implements the "publickey" SSH authentication method. + class Publickey < Abstract + # Attempts to perform public-key authentication for the given + # username, trying each identity known to the key manager. 
If any of + # them succeed, returns +true+, otherwise returns +false+. This + # requires the presence of a key manager. + def authenticate(next_service, username, password=nil) + return false unless key_manager + + key_manager.each_identity do |identity| + return true if authenticate_with(identity, next_service, username) + end + + return false + end + + private + + # Builds a packet that contains the request formatted for sending + # a public-key request to the server. + def build_request(pub_key, username, next_service, has_sig) + blob = Net::SSH::Buffer.new + blob.write_key pub_key + + userauth_request(username, next_service, "publickey", has_sig, + pub_key.ssh_type, blob.to_s) + end + + # Builds and sends a request formatted for a public-key + # authentication request. + def send_request(pub_key, username, next_service, signature=nil) + msg = build_request(pub_key, username, next_service, !signature.nil?) + msg.write_string(signature) if signature + send_message(msg) + end + + # Attempts to perform public-key authentication for the given + # username, with the given identity (public key). Returns +true+ if + # successful, or +false+ otherwise. + def authenticate_with(identity, next_service, username) + debug { "trying publickey (#{identity.fingerprint})" } + send_request(identity, username, next_service) + + message = session.next_message + + case message.type + when USERAUTH_PK_OK + buffer = build_request(identity, username, next_service, true) + sig_data = Net::SSH::Buffer.new + sig_data.write_string(session_id) + sig_data.append(buffer.to_s) + + sig_blob = key_manager.sign(identity, sig_data) + + send_request(identity, username, next_service, sig_blob.to_s) + message = session.next_message + + case message.type + when USERAUTH_SUCCESS + debug { "publickey succeeded (#{identity.fingerprint})" } + return true + when USERAUTH_FAILURE + debug { "publickey failed (#{identity.fingerprint})" } + return false + else + raise Net::SSH::Exception, + "unexpected server response to USERAUTH_REQUEST: #{message.type} (#{message.inspect})" + end + + when USERAUTH_FAILURE + return false + + else + raise Net::SSH::Exception, "unexpected reply to USERAUTH_REQUEST: #{message.type} (#{message.inspect})" + end + end + + end + + end + end + end +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/pageant.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/pageant.rb new file mode 100644 index 00000000..871e86b1 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/pageant.rb @@ -0,0 +1,183 @@ +require 'dl/import' +require 'dl/struct' + +require 'net/ssh/errors' + +module Net; module SSH; module Authentication + + # This module encapsulates the implementation of a socket factory that + # uses the PuTTY "pageant" utility to obtain information about SSH + # identities. + # + # This code is a slightly modified version of the original implementation + # by Guillaume Marçais (guillaume.marcais@free.fr). It is used and + # relicensed by permission. + module Pageant + + # From Putty pageant.c + AGENT_MAX_MSGLEN = 8192 + AGENT_COPYDATA_ID = 0x804e50ba + + # The definition of the Windows methods and data structures used in + # communicating with the pageant process. 
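Rounding out the publickey method above: the :key_data option feeds raw PEM key material into the KeyManager via add_key_data, so no key file needs to exist on disk at connect time. Host and path below are placeholders, and the key is assumed to be unencrypted (otherwise add the documented :passphrase option); :keys => [] keeps the default identity files out of the picture:

    pem = File.read(File.expand_path('~/.ssh/id_rsa'))  # any PEM-format private key string
    Net::SSH.start('example.com', 'deploy',
                   :key_data => [pem],
                   :keys     => []) do |ssh|
      puts ssh.exec!('id')
    end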
+ module Win + extend DL::Importable + + dlload 'user32' + dlload 'kernel32' + + typealias("LPCTSTR", "char *") # From winnt.h + typealias("LPVOID", "void *") # From winnt.h + typealias("LPCVOID", "const void *") # From windef.h + typealias("LRESULT", "long") # From windef.h + typealias("WPARAM", "unsigned int *") # From windef.h + typealias("LPARAM", "long *") # From windef.h + typealias("PDWORD_PTR", "long *") # From basetsd.h + + # From winbase.h, winnt.h + INVALID_HANDLE_VALUE = -1 + NULL = nil + PAGE_READWRITE = 0x0004 + FILE_MAP_WRITE = 2 + WM_COPYDATA = 74 + + SMTO_NORMAL = 0 # From winuser.h + + # args: lpClassName, lpWindowName + extern 'HWND FindWindow(LPCTSTR, LPCTSTR)' + + # args: none + extern 'DWORD GetCurrentThreadId()' + + # args: hFile, (ignored), flProtect, dwMaximumSizeHigh, + # dwMaximumSizeLow, lpName + extern 'HANDLE CreateFileMapping(HANDLE, void *, DWORD, DWORD, ' + + 'DWORD, LPCTSTR)' + + # args: hFileMappingObject, dwDesiredAccess, dwFileOffsetHigh, + # dwfileOffsetLow, dwNumberOfBytesToMap + extern 'LPVOID MapViewOfFile(HANDLE, DWORD, DWORD, DWORD, DWORD)' + + # args: lpBaseAddress + extern 'BOOL UnmapViewOfFile(LPCVOID)' + + # args: hObject + extern 'BOOL CloseHandle(HANDLE)' + + # args: hWnd, Msg, wParam, lParam, fuFlags, uTimeout, lpdwResult + extern 'LRESULT SendMessageTimeout(HWND, UINT, WPARAM, LPARAM, ' + + 'UINT, UINT, PDWORD_PTR)' + end + + # This is the pseudo-socket implementation that mimics the interface of + # a socket, translating each request into a Windows messaging call to + # the pageant daemon. This allows pageant support to be implemented + # simply by replacing the socket factory used by the Agent class. + class Socket + + private_class_method :new + + # The factory method for creating a new Socket instance. The location + # parameter is ignored, and is only needed for compatibility with + # the general Socket interface. + def self.open(location=nil) + new + end + + # Create a new instance that communicates with the running pageant + # instance. If no such instance is running, this will cause an error. + def initialize + @win = Win.findWindow("Pageant", "Pageant") + + if @win == 0 + raise Net::SSH::Exception, + "pageant process not running" + end + + @res = nil + @pos = 0 + end + + # Forwards the data to #send_query, ignoring any arguments after + # the first. Returns 0. + def send(data, *args) + @res = send_query(data) + @pos = 0 + end + + # Packages the given query string and sends it to the pageant + # process via the Windows messaging subsystem. The result is + # cached, to be returned piece-wise when #read is called. + def send_query(query) + res = nil + filemap = 0 + ptr = nil + id = DL::PtrData.malloc(DL.sizeof("L")) + + mapname = "PageantRequest%08x\000" % Win.getCurrentThreadId() + filemap = Win.createFileMapping(Win::INVALID_HANDLE_VALUE, + Win::NULL, + Win::PAGE_READWRITE, 0, + AGENT_MAX_MSGLEN, mapname) + if filemap == 0 + raise Net::SSH::Exception, + "Creation of file mapping failed" + end + + ptr = Win.mapViewOfFile(filemap, Win::FILE_MAP_WRITE, 0, 0, + AGENT_MAX_MSGLEN) + + if ptr.nil? || ptr.null? + raise Net::SSH::Exception, "Mapping of file failed" + end + + ptr[0] = query + + cds = [AGENT_COPYDATA_ID, mapname.size + 1, mapname]. + pack("LLp").to_ptr + succ = Win.sendMessageTimeout(@win, Win::WM_COPYDATA, Win::NULL, + cds, Win::SMTO_NORMAL, 5000, id) + + if succ > 0 + retlen = 4 + ptr.to_s(4).unpack("N")[0] + res = ptr.to_s(retlen) + end + + return res + ensure + Win.unmapViewOfFile(ptr) unless ptr.nil? || ptr.null? 
+ Win.closeHandle(filemap) if filemap != 0 + end + + # Conceptually close the socket. This doesn't really do anthing + # significant, but merely complies with the Socket interface. + def close + @res = nil + @pos = 0 + end + + # Conceptually asks if the socket is closed. As with #close, + # this doesn't really do anything significant, but merely + # complies with the Socket interface. + def closed? + @res.nil? && @pos.zero? + end + + # Reads +n+ bytes from the cached result of the last query. If +n+ + # is +nil+, returns all remaining data from the last query. + def read(n = nil) + return nil unless @res + if n.nil? + start, @pos = @pos, @res.size + return @res[start..-1] + else + start, @pos = @pos, @pos + n + return @res[start, n] + end + end + + end + + end + +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/session.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/session.rb new file mode 100644 index 00000000..8e7d3df8 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/authentication/session.rb @@ -0,0 +1,134 @@ +require 'net/ssh/loggable' +require 'net/ssh/transport/constants' +require 'net/ssh/authentication/constants' +require 'net/ssh/authentication/key_manager' +require 'net/ssh/authentication/methods/publickey' +require 'net/ssh/authentication/methods/hostbased' +require 'net/ssh/authentication/methods/password' +require 'net/ssh/authentication/methods/keyboard_interactive' + +module Net; module SSH; module Authentication + + # Represents an authentication session. It manages the authentication of + # a user over an established connection (the "transport" object, see + # Net::SSH::Transport::Session). + # + # The use of an authentication session to manage user authentication is + # internal to Net::SSH (specifically Net::SSH.start). Consumers of the + # Net::SSH library will never need to access this class directly. + class Session + include Transport::Constants, Constants, Loggable + + # transport layer abstraction + attr_reader :transport + + # the list of authentication methods to try + attr_reader :auth_methods + + # the list of authentication methods that are allowed + attr_reader :allowed_auth_methods + + # a hash of options, given at construction time + attr_reader :options + + # Instantiates a new Authentication::Session object over the given + # transport layer abstraction. + def initialize(transport, options={}) + self.logger = transport.logger + @transport = transport + + @auth_methods = options[:auth_methods] || %w(publickey hostbased password keyboard-interactive) + @options = options + + @allowed_auth_methods = @auth_methods + end + + # Attempts to authenticate the given user, in preparation for the next + # service request. Returns true if an authentication method succeeds in + # authenticating the user, and false otherwise. + def authenticate(next_service, username, password=nil) + debug { "beginning authentication of `#{username}'" } + + transport.send_message(transport.service_request("ssh-userauth")) + message = expect_message(SERVICE_ACCEPT) + + key_manager = KeyManager.new(logger, options) + keys.each { |key| key_manager.add(key) } unless keys.empty? + key_data.each { |key2| key_manager.add_key_data(key2) } unless key_data.empty? 
+ + attempted = [] + + @auth_methods.each do |name| + next unless @allowed_auth_methods.include?(name) + attempted << name + + debug { "trying #{name}" } + method = Methods.const_get(name.split(/\W+/).map { |p| p.capitalize }.join).new(self, :key_manager => key_manager) + + return true if method.authenticate(next_service, username, password) + end + + error { "all authorization methods failed (tried #{attempted.join(', ')})" } + return false + ensure + key_manager.finish if key_manager + end + + # Blocks until a packet is received. It silently handles USERAUTH_BANNER + # packets, and will raise an error if any packet is received that is not + # valid during user authentication. + def next_message + loop do + packet = transport.next_message + + case packet.type + when USERAUTH_BANNER + info { packet[:message] } + # TODO add a hook for people to retrieve the banner when it is sent + + when USERAUTH_FAILURE + @allowed_auth_methods = packet[:authentications].split(/,/) + debug { "allowed methods: #{packet[:authentications]}" } + return packet + + when USERAUTH_METHOD_RANGE, SERVICE_ACCEPT + return packet + + when USERAUTH_SUCCESS + transport.hint :authenticated + return packet + + else + raise Net::SSH::Exception, "unexpected message #{packet.type} (#{packet})" + end + end + end + + # Blocks until a packet is received, and returns it if it is of the given + # type. If it is not, an exception is raised. + def expect_message(type) + message = next_message + unless message.type == type + raise Net::SSH::Exception, "expected #{type}, got #{message.type} (#{message})" + end + message + end + + private + + # Returns an array of paths to the key files that should be used when + # attempting any key-based authentication mechanism. + def keys + Array( + options[:keys] || + %w(~/.ssh/id_dsa ~/.ssh/id_rsa ~/.ssh2/id_dsa ~/.ssh2/id_rsa) + ) + end + + # Returns an array of the key data that should be used when + # attempting any key-based authentication mechanism. + def key_data + Array(options[:key_data]) + end + end +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/buffer.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/buffer.rb new file mode 100644 index 00000000..51e1c4e8 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/buffer.rb @@ -0,0 +1,340 @@ +require 'net/ssh/ruby_compat' +require 'net/ssh/transport/openssl' + +module Net; module SSH + + # Net::SSH::Buffer is a flexible class for building and parsing binary + # data packets. It provides a stream-like interface for sequentially + # reading data items from the buffer, as well as a useful helper method + # for building binary packets given a signature. + # + # Writing to a buffer always appends to the end, regardless of where the + # read cursor is. Reading, on the other hand, always begins at the first + # byte of the buffer and increments the read cursor, with subsequent reads + # taking up where the last left off. + # + # As a consumer of the Net::SSH library, you will rarely come into contact + # with these buffer objects directly, but it could happen. Also, if you + # are ever implementing a protocol on top of SSH (e.g. SFTP), this buffer + # class can be quite handy. + class Buffer + # This is a convenience method for creating and populating a new buffer + # from a single command. The arguments must be even in length, with the + # first of each pair of arguments being a symbol naming the type of the + # data that follows. 
If the type is :raw, the value is written directly + # to the hash. + # + # b = Buffer.from(:byte, 1, :string, "hello", :raw, "\1\2\3\4") + # #-> "\1\0\0\0\5hello\1\2\3\4" + # + # The supported data types are: + # + # * :raw => write the next value verbatim (#write) + # * :int64 => write an 8-byte integer (#write_int64) + # * :long => write a 4-byte integer (#write_long) + # * :byte => write a single byte (#write_byte) + # * :string => write a 4-byte length followed by character data (#write_string) + # * :bool => write a single byte, interpreted as a boolean (#write_bool) + # * :bignum => write an SSH-encoded bignum (#write_bignum) + # * :key => write an SSH-encoded key value (#write_key) + # + # Any of these, except for :raw, accepts an Array argument, to make it + # easier to write multiple values of the same type in a briefer manner. + def self.from(*args) + raise ArgumentError, "odd number of arguments given" unless args.length % 2 == 0 + + buffer = new + 0.step(args.length-1, 2) do |index| + type = args[index] + value = args[index+1] + if type == :raw + buffer.append(value.to_s) + elsif Array === value + buffer.send("write_#{type}", *value) + else + buffer.send("write_#{type}", value) + end + end + + buffer + end + + # exposes the raw content of the buffer + attr_reader :content + + # the current position of the pointer in the buffer + attr_accessor :position + + # Creates a new buffer, initialized to the given content. The position + # is initialized to the beginning of the buffer. + def initialize(content="") + @content = content.to_s + @position = 0 + end + + # Returns the length of the buffer's content. + def length + @content.length + end + + # Returns the number of bytes available to be read (e.g., how many bytes + # remain between the current position and the end of the buffer). + def available + length - position + end + + # Returns a copy of the buffer's content. + def to_s + (@content || "").dup + end + + # Compares the contents of the two buffers, returning +true+ only if they + # are identical in size and content. + def ==(buffer) + to_s == buffer.to_s + end + + # Returns +true+ if the buffer contains no data (e.g., it is of zero length). + def empty? + @content.empty? + end + + # Resets the pointer to the start of the buffer. Subsequent reads will + # begin at position 0. + def reset! + @position = 0 + end + + # Returns true if the pointer is at the end of the buffer. Subsequent + # reads will return nil, in this case. + def eof? + @position >= length + end + + # Resets the buffer, making it empty. Also, resets the read position to + # 0. + def clear! + @content = "" + @position = 0 + end + + # Consumes n bytes from the buffer, where n is the current position + # unless otherwise specified. This is useful for removing data from the + # buffer that has previously been read, when you are expecting more data + # to be appended. It helps to keep the size of buffers down when they + # would otherwise tend to grow without bound. + # + # Returns the buffer object itself. + def consume!(n=position) + if n >= length + # optimize for a fairly common case + clear! + elsif n > 0 + @content = @content[n..-1] || "" + @position -= n + @position = 0 if @position < 0 + end + self + end + + # Appends the given text to the end of the buffer. Does not alter the + # read position. Returns the buffer object itself. + def append(text) + @content << text + self + end + + # Returns all text from the current pointer to the end of the buffer as + # a new Net::SSH::Buffer object. 
+ def remainder_as_buffer + Buffer.new(@content[@position..-1]) + end + + # Reads all data up to and including the given pattern, which may be a + # String, Fixnum, or Regexp and is interpreted exactly as String#index + # does. Returns nil if nothing matches. Increments the position to point + # immediately after the pattern, if it does match. Returns all data up to + # and including the text that matched the pattern. + def read_to(pattern) + index = @content.index(pattern, @position) or return nil + length = case pattern + when String then pattern.length + when Fixnum then 1 + when Regexp then $&.length + end + index && read(index+length) + end + + # Reads and returns the next +count+ bytes from the buffer, starting from + # the read position. If +count+ is +nil+, this will return all remaining + # text in the buffer. This method will increment the pointer. + def read(count=nil) + count ||= length + count = length - @position if @position + count > length + @position += count + @content[@position-count, count] + end + + # Reads (as #read) and returns the given number of bytes from the buffer, + # and then consumes (as #consume!) all data up to the new read position. + def read!(count=nil) + data = read(count) + consume! + data + end + + # Return the next 8 bytes as a 64-bit integer (in network byte order). + # Returns nil if there are less than 8 bytes remaining to be read in the + # buffer. + def read_int64 + hi = read_long or return nil + lo = read_long or return nil + return (hi << 32) + lo + end + + # Return the next four bytes as a long integer (in network byte order). + # Returns nil if there are less than 4 bytes remaining to be read in the + # buffer. + def read_long + b = read(4) or return nil + b.unpack("N").first + end + + # Read and return the next byte in the buffer. Returns nil if called at + # the end of the buffer. + def read_byte + b = read(1) or return nil + b.getbyte(0) + end + + # Read and return an SSH2-encoded string. The string starts with a long + # integer that describes the number of bytes remaining in the string. + # Returns nil if there are not enough bytes to satisfy the request. + def read_string + length = read_long or return nil + read(length) + end + + # Read a single byte and convert it into a boolean, using 'C' rules + # (i.e., zero is false, non-zero is true). + def read_bool + b = read_byte or return nil + b != 0 + end + + # Read a bignum (OpenSSL::BN) from the buffer, in SSH2 format. It is + # essentially just a string, which is reinterpreted to be a bignum in + # binary format. + def read_bignum + data = read_string + return unless data + OpenSSL::BN.new(data, 2) + end + + # Read a key from the buffer. The key will start with a string + # describing its type. The remainder of the key is defined by the + # type that was read. + def read_key + type = read_string + return (type ? read_keyblob(type) : nil) + end + + # Read a keyblob of the given type from the buffer, and return it as + # a key. Only RSA and DSA keys are supported. + def read_keyblob(type) + case type + when "ssh-dss" + key = OpenSSL::PKey::DSA.new + key.p = read_bignum + key.q = read_bignum + key.g = read_bignum + key.pub_key = read_bignum + + when "ssh-rsa" + key = OpenSSL::PKey::RSA.new + key.e = read_bignum + key.n = read_bignum + + else + raise NotImplementedError, "unsupported key type `#{type}'" + end + + return key + end + + # Reads the next string from the buffer, and returns a new Buffer + # object that wraps it. 
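A quick round trip through the Buffer class, pairing Buffer.from with the reader methods above (the values are arbitrary examples):

    require 'net/ssh/buffer'

    buf = Net::SSH::Buffer.from(:byte, 1, :long, 42, :string, "hello", :bool, true)
    buf.read_byte    #=> 1
    buf.read_long    #=> 42
    buf.read_string  #=> "hello"
    buf.read_bool    #=> true
    buf.eof?         #=> true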
+ def read_buffer + Buffer.new(read_string) + end + + # Writes the given data literally into the string. Does not alter the + # read position. Returns the buffer object. + def write(*data) + data.each { |datum| @content << datum } + self + end + + # Writes each argument to the buffer as a network-byte-order-encoded + # 64-bit integer (8 bytes). Does not alter the read position. Returns the + # buffer object. + def write_int64(*n) + n.each do |i| + hi = (i >> 32) & 0xFFFFFFFF + lo = i & 0xFFFFFFFF + @content << [hi, lo].pack("N2") + end + self + end + + # Writes each argument to the buffer as a network-byte-order-encoded + # long (4-byte) integer. Does not alter the read position. Returns the + # buffer object. + def write_long(*n) + @content << n.pack("N*") + self + end + + # Writes each argument to the buffer as a byte. Does not alter the read + # position. Returns the buffer object. + def write_byte(*n) + n.each { |b| @content << b.chr } + self + end + + # Writes each argument to the buffer as an SSH2-encoded string. Each + # string is prefixed by its length, encoded as a 4-byte long integer. + # Does not alter the read position. Returns the buffer object. + def write_string(*text) + text.each do |string| + s = string.to_s + write_long(s.length) + write(s) + end + self + end + + # Writes each argument to the buffer as a (C-style) boolean, with 1 + # meaning true, and 0 meaning false. Does not alter the read position. + # Returns the buffer object. + def write_bool(*b) + b.each { |v| @content << (v ? "\1" : "\0") } + self + end + + # Writes each argument to the buffer as a bignum (SSH2-style). No + # checking is done to ensure that the arguments are, in fact, bignums. + # Does not alter the read position. Returns the buffer object. + def write_bignum(*n) + @content << n.map { |b| b.to_ssh }.join + self + end + + # Writes the given arguments to the buffer as SSH2-encoded keys. Does not + # alter the read position. Returns the buffer object. + def write_key(*key) + key.each { |k| append(k.to_blob) } + self + end + end +end; end; \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/buffered_io.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/buffered_io.rb new file mode 100644 index 00000000..e48a8dcf --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/buffered_io.rb @@ -0,0 +1,150 @@ +require 'net/ssh/buffer' +require 'net/ssh/loggable' +require 'net/ssh/ruby_compat' + +module Net; module SSH + + # This module is used to extend sockets and other IO objects, to allow + # them to be buffered for both read and write. This abstraction makes it + # quite easy to write a select-based event loop + # (see Net::SSH::Connection::Session#listen_to). + # + # The general idea is that instead of calling #read directly on an IO that + # has been extended with this module, you call #fill (to add pending input + # to the internal read buffer), and then #read_available (to read from that + # buffer). Likewise, you don't call #write directly, you call #enqueue to + # add data to the write buffer, and then #send_pending or #wait_for_pending_sends + # to actually send the data across the wire. + # + # In this way you can easily use the object as an argument to IO.select, + # calling #fill when it is available for read, or #send_pending when it is + # available for write, and then call #enqueue and #read_available during + # the idle times. 
+ # + # socket = TCPSocket.new(address, port) + # socket.extend(Net::SSH::BufferedIo) + # + # ssh.listen_to(socket) + # + # ssh.loop do + # if socket.available > 0 + # puts socket.read_available + # socket.enqueue("response\n") + # end + # end + # + # Note that this module must be used to extend an instance, and should not + # be included in a class. If you do want to use it via an include, then you + # must make sure to invoke the private #initialize_buffered_io method in + # your class' #initialize method: + # + # class Foo < IO + # include Net::SSH::BufferedIo + # + # def initialize + # initialize_buffered_io + # # ... + # end + # end + module BufferedIo + include Loggable + + # Called when the #extend is called on an object, with this module as the + # argument. It ensures that the modules instance variables are all properly + # initialized. + def self.extended(object) #:nodoc: + # need to use __send__ because #send is overridden in Socket + object.__send__(:initialize_buffered_io) + end + + # Tries to read up to +n+ bytes of data from the remote end, and appends + # the data to the input buffer. It returns the number of bytes read, or 0 + # if no data was available to be read. + def fill(n=8192) + input.consume! + data = recv(n) + debug { "read #{data.length} bytes" } + input.append(data) + return data.length + end + + # Read up to +length+ bytes from the input buffer. If +length+ is nil, + # all available data is read from the buffer. (See #available.) + def read_available(length=nil) + input.read(length || available) + end + + # Returns the number of bytes available to be read from the input buffer. + # (See #read_available.) + def available + input.available + end + + # Enqueues data in the output buffer, to be written when #send_pending + # is called. Note that the data is _not_ sent immediately by this method! + def enqueue(data) + output.append(data) + end + + # Returns +true+ if there is data waiting in the output buffer, and + # +false+ otherwise. + def pending_write? + output.length > 0 + end + + # Sends as much of the pending output as possible. Returns +true+ if any + # data was sent, and +false+ otherwise. + def send_pending + if output.length > 0 + sent = send(output.to_s, 0) + debug { "sent #{sent} bytes" } + output.consume!(sent) + return sent > 0 + else + return false + end + end + + # Calls #send_pending repeatedly, if necessary, blocking until the output + # buffer is empty. + def wait_for_pending_sends + send_pending + while output.length > 0 + result = Net::SSH::Compat.io_select(nil, [self]) or next + next unless result[1].any? + send_pending + end + end + + public # these methods are primarily for use in tests + + def write_buffer #:nodoc: + output.to_s + end + + def read_buffer #:nodoc: + input.to_s + end + + private + + #-- + # Can't use attr_reader here (after +private+) without incurring the + # wrath of "ruby -w". We hates it. + #++ + + def input; @input; end + def output; @output; end + + # Initializes the intput and output buffers for this object. This method + # is called automatically when the module is mixed into an object via + # Object#extend (see Net::SSH::BufferedIo.extended), but must be called + # explicitly in the +initialize+ method of any class that uses + # Module#include to add this module. 
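Outside of Net::SSH's own event loop, a socket extended with BufferedIo can be driven by hand roughly as follows. Host and port are placeholders, and the sketch assumes the peer echoes what it receives so that #fill has something to read:

    require 'socket'
    require 'net/ssh/buffered_io'

    socket = TCPSocket.new('example.com', 7)
    socket.extend(Net::SSH::BufferedIo)

    socket.enqueue("ping\n")
    socket.wait_for_pending_sends          # flush the write buffer
    socket.fill                            # pull pending input into the read buffer
    puts socket.read_available if socket.available > 0
    socket.close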
+ def initialize_buffered_io + @input = Net::SSH::Buffer.new + @output = Net::SSH::Buffer.new + end + end + +end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/config.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/config.rb new file mode 100644 index 00000000..7dc922ea --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/config.rb @@ -0,0 +1,185 @@ +module Net; module SSH + + # The Net::SSH::Config class is used to parse OpenSSH configuration files, + # and translates that syntax into the configuration syntax that Net::SSH + # understands. This lets Net::SSH scripts read their configuration (to + # some extent) from OpenSSH configuration files (~/.ssh/config, /etc/ssh_config, + # and so forth). + # + # Only a subset of OpenSSH configuration options are understood: + # + # * Ciphers => maps to the :encryption option + # * Compression => :compression + # * CompressionLevel => :compression_level + # * ConnectTimeout => maps to the :timeout option + # * ForwardAgent => :forward_agent + # * GlobalKnownHostsFile => :global_known_hosts_file + # * HostBasedAuthentication => maps to the :auth_methods option + # * HostKeyAlgorithms => maps to :host_key option + # * HostKeyAlias => :host_key_alias + # * HostName => :host_name + # * IdentityFile => maps to the :keys option + # * Macs => maps to the :hmac option + # * PasswordAuthentication => maps to the :auth_methods option + # * Port => :port + # * PreferredAuthentications => maps to the :auth_methods option + # * RekeyLimit => :rekey_limit + # * User => :user + # * UserKnownHostsFile => :user_known_hosts_file + # + # Note that you will never need to use this class directly--you can control + # whether the OpenSSH configuration files are read by passing the :config + # option to Net::SSH.start. (They are, by default.) + class Config + class < "xterm", + :chars_wide => 80, + :chars_high => 24, + :pixels_wide => 640, + :pixels_high => 480, + :modes => {} } + + # Requests that a pseudo-tty (or "pty") be made available for this channel. + # This is useful when you want to invoke and interact with some kind of + # screen-based program (e.g., vim, or some menuing system). + # + # Note, that without a pty some programs (e.g. sudo, or subversion) on + # some systems, will not be able to run interactively, and will error + # instead of prompt if they ever need some user interaction. + # + # Note, too, that when a pty is requested, user's shell configuration + # scripts (.bashrc and such) are not run by default, whereas they are + # run when a pty is not present. + # + # channel.request_pty do |ch, success| + # if success + # puts "pty successfully obtained" + # else + # puts "could not obtain pty" + # end + # end + def request_pty(opts={}, &block) + extra = opts.keys - VALID_PTY_OPTIONS.keys + raise ArgumentError, "invalid option(s) to request_pty: #{extra.inspect}" if extra.any? + + opts = VALID_PTY_OPTIONS.merge(opts) + + modes = opts[:modes].inject(Buffer.new) do |memo, (mode, data)| + memo.write_byte(mode).write_long(data) + end + # mark the end of the mode opcode list with a 0 byte + modes.write_byte(0) + + send_channel_request("pty-req", :string, opts[:term], + :long, opts[:chars_wide], :long, opts[:chars_high], + :long, opts[:pixels_wide], :long, opts[:pixels_high], + :string, modes.to_s, &block) + end + + # Sends data to the channel's remote endpoint. This usually has the + # effect of sending the given string to the remote process' stdin stream. 
+ # Note that it does not immediately send the data across the channel, + # but instead merely appends the given data to the channel's output buffer, + # preparatory to being packaged up and sent out the next time the connection + # is accepting data. (A connection might not be accepting data if, for + # instance, it has filled its data window and has not yet been resized by + # the remote end-point.) + # + # This will raise an exception if the channel has previously declared + # that no more data will be sent (see #eof!). + # + # channel.send_data("the password\n") + def send_data(data) + raise EOFError, "cannot send data if channel has declared eof" if eof? + output.append(data.to_s) + end + + # Returns true if the channel exists in the channel list of the session, + # and false otherwise. This can be used to determine whether a channel has + # been closed or not. + # + # ssh.loop { channel.active? } + def active? + connection.channels.key?(local_id) + end + + # Runs the SSH event loop until the channel is no longer active. This is + # handy for blocking while you wait for some channel to finish. + # + # channel.exec("grep ...") { ... } + # channel.wait + def wait + connection.loop { active? } + end + + # Returns true if the channel is currently closing, but not actually + # closed. A channel is closing when, for instance, #close has been + # invoked, but the server has not yet responded with a CHANNEL_CLOSE + # packet of its own. + def closing? + @closing + end + + # Requests that the channel be closed. If the channel is already closing, + # this does nothing, nor does it do anything if the channel has not yet + # been confirmed open (see #do_open_confirmation). Otherwise, it sends a + # CHANNEL_CLOSE message and marks the channel as closing. + def close + return if @closing + if remote_id + @closing = true + connection.send_message(Buffer.from(:byte, CHANNEL_CLOSE, :long, remote_id)) + end + end + + # Returns true if the local end of the channel has declared that no more + # data is forthcoming (see #eof!). Trying to send data via #send_data when + # this is true will result in an exception being raised. + def eof? + @eof + end + + # Tells the remote end of the channel that no more data is forthcoming + # from this end of the channel. The remote end may still send data. + def eof! + return if eof? + @eof = true + connection.send_message(Buffer.from(:byte, CHANNEL_EOF, :long, remote_id)) + end + + # If an #on_process handler has been set up, this will cause it to be + # invoked (passing the channel itself as an argument). It also causes all + # pending output to be enqueued as CHANNEL_DATA packets (see #enqueue_pending_output). + def process + @on_process.call(self) if @on_process + enqueue_pending_output + end + + # Registers a callback to be invoked when data packets are received by the + # channel. The callback is called with the channel as the first argument, + # and the data as the second. + # + # channel.on_data do |ch, data| + # puts "got data: #{data.inspect}" + # end + # + # Data received this way is typically the data written by the remote + # process to its +stdout+ stream. + def on_data(&block) + old, @on_data = @on_data, block + old + end + + # Registers a callback to be invoked when extended data packets are received + # by the channel. The callback is called with the channel as the first + # argument, the data type (as an integer) as the second, and the data as + # the third. Extended data is almost exclusively used to send +stderr+ data + # (+type+ == 1). 
Other extended data types are not defined by the SSH + # protocol. + # + # channel.on_extended_data do |ch, type, data| + # puts "got stderr: #{data.inspect}" + # end + def on_extended_data(&block) + old, @on_extended_data = @on_extended_data, block + old + end + + # Registers a callback to be invoked for each pass of the event loop for + # this channel. There are no guarantees on timeliness in the event loop, + # but it will be called roughly once for each packet received by the + # connection (not the channel). This callback is invoked with the channel + # as the sole argument. + # + # Here's an example that accumulates the channel data into a variable on + # the channel itself, and displays individual lines in the input one + # at a time when the channel is processed: + # + # channel[:data] = "" + # + # channel.on_data do |ch, data| + # channel[:data] << data + # end + # + # channel.on_process do |ch| + # if channel[:data] =~ /^.*?\n/ + # puts $& + # channel[:data] = $' + # end + # end + def on_process(&block) + old, @on_process = @on_process, block + old + end + + # Registers a callback to be invoked when the server acknowledges that a + # channel is closed. This is invoked with the channel as the sole argument. + # + # channel.on_close do |ch| + # puts "remote end is closing!" + # end + def on_close(&block) + old, @on_close = @on_close, block + old + end + + # Registers a callback to be invoked when the server indicates that no more + # data will be sent to the channel (although the channel can still send + # data to the server). The channel is the sole argument to the callback. + # + # channel.on_eof do |ch| + # puts "remote end is done sending data" + # end + def on_eof(&block) + old, @on_eof = @on_eof, block + old + end + + # Registers a callback to be invoked when the server was unable to open + # the requested channel. The channel itself will be passed to the block, + # along with the integer "reason code" for the failure, and a textual + # description of the failure from the server. + # + # channel = session.open_channel do |ch| + # # .. + # end + # + # channel.on_open_failed { |ch, code, desc| ... } + def on_open_failed(&block) + old, @on_open_failed = @on_open_failed, block + old + end + + # Registers a callback to be invoked when a channel request of the given + # type is received. The callback will receive the channel as the first + # argument, and the associated (unparsed) data as the second. The data + # will be a Net::SSH::Buffer that you will need to parse, yourself, + # according to the kind of request you are watching. + # + # By default, if the request wants a reply, Net::SSH will send a + # CHANNEL_SUCCESS response for any request that was handled by a registered + # callback, and CHANNEL_FAILURE for any that wasn't, but if you want your + # registered callback to result in a CHANNEL_FAILURE response, just raise + # Net::SSH::ChannelRequestFailed. + # + # Some common channel requests that your programs might want to listen + # for are: + # + # * "exit-status" : the exit status of the remote process will be reported + # as a long integer in the data buffer, which you can grab via + # data.read_long. + # * "exit-signal" : if the remote process died as a result of a signal + # being sent to it, the signal will be reported as a string in the + # data, via data.read_string. (Not all SSH servers support this channel + # request type.) 
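+ #
+ # For instance, an "exit-signal" listener could be written along these
+ # lines (an illustrative sketch only; any fields beyond the signal name
+ # are left unread here):
+ #
+ #   channel.on_request "exit-signal" do |ch, data|
+ #     puts "process was terminated by SIG#{data.read_string}"
+ #   end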
+ # + # channel.on_request "exit-status" do |ch, data| + # puts "process terminated with exit status: #{data.read_long}" + # end + def on_request(type, &block) + old, @on_request[type] = @on_request[type], block + old + end + + # Sends a new channel request with the given name. The extra +data+ + # parameter must either be empty, or consist of an even number of + # arguments. See Net::SSH::Buffer.from for a description of their format. + # If a block is given, it is registered as a callback for a pending + # request, and the packet will be flagged so that the server knows a + # reply is required. If no block is given, the server will send no + # response to this request. Responses, where required, will cause the + # callback to be invoked with the channel as the first argument, and + # either true or false as the second, depending on whether the request + # succeeded or not. The meaning of "success" and "failure" in this context + # is dependent on the specific request that was sent. + # + # channel.send_channel_request "shell" do |ch, success| + # if success + # puts "user shell started successfully" + # else + # puts "could not start user shell" + # end + # end + # + # Most channel requests you'll want to send are already wrapped in more + # convenient helper methods (see #exec and #subsystem). + def send_channel_request(request_name, *data, &callback) + info { "sending channel request #{request_name.inspect}" } + msg = Buffer.from(:byte, CHANNEL_REQUEST, + :long, remote_id, :string, request_name, + :bool, !callback.nil?, *data) + connection.send_message(msg) + pending_requests << callback if callback + end + + public # these methods are public, but for Net::SSH internal use only + + # Enqueues pending output at the connection as CHANNEL_DATA packets. This + # does nothing if the channel has not yet been confirmed open (see + # #do_open_confirmation). This is called automatically by #process, which + # is called from the event loop (Connection::Session#process). You will + # generally not need to invoke it directly. + def enqueue_pending_output #:nodoc: + return unless remote_id + + while output.length > 0 + length = output.length + length = remote_window_size if length > remote_window_size + length = remote_maximum_packet_size if length > remote_maximum_packet_size + + if length > 0 + connection.send_message(Buffer.from(:byte, CHANNEL_DATA, :long, remote_id, :string, output.read(length))) + output.consume! + @remote_window_size -= length + else + break + end + end + end + + # Invoked when the server confirms that a channel has been opened. + # The remote_id is the id of the channel as assigned by the remote host, + # and max_window and max_packet are the maximum window and maximum + # packet sizes, respectively. If an open-confirmation callback was + # given when the channel was created, it is invoked at this time with + # the channel itself as the sole argument. + def do_open_confirmation(remote_id, max_window, max_packet) #:nodoc: + @remote_id = remote_id + @remote_window_size = @remote_maximum_window_size = max_window + @remote_maximum_packet_size = max_packet + connection.forward.agent(self) if connection.options[:forward_agent] && type == "session" + @on_confirm_open.call(self) if @on_confirm_open + end + + # Invoked when the server failed to open the channel. If an #on_open_failed + # callback was specified, it will be invoked with the channel, reason code, + # and description as arguments. Otherwise, a ChannelOpenFailed exception + # will be raised. 
+ def do_open_failed(reason_code, description) + if @on_open_failed + @on_open_failed.call(self, reason_code, description) + else + raise ChannelOpenFailed.new(reason_code, description) + end + end + + # Invoked when the server sends a CHANNEL_WINDOW_ADJUST packet, and + # causes the remote window size to be adjusted upwards by the given + # number of bytes. This has the effect of allowing more data to be sent + # from the local end to the remote end of the channel. + def do_window_adjust(bytes) #:nodoc: + @remote_maximum_window_size += bytes + @remote_window_size += bytes + end + + # Invoked when the server sends a channel request. If any #on_request + # callback has been registered for the specific type of this request, + # it is invoked. If +want_reply+ is true, a packet will be sent of + # either CHANNEL_SUCCESS or CHANNEL_FAILURE type. If there was no callback + # to handle the request, CHANNEL_FAILURE will be sent. Otherwise, + # CHANNEL_SUCCESS, unless the callback raised ChannelRequestFailed. The + # callback should accept the channel as the first argument, and the + # request-specific data as the second. + def do_request(request, want_reply, data) #:nodoc: + result = true + + begin + callback = @on_request[request] or raise ChannelRequestFailed + callback.call(self, data) + rescue ChannelRequestFailed + result = false + end + + if want_reply + msg = Buffer.from(:byte, result ? CHANNEL_SUCCESS : CHANNEL_FAILURE, :long, remote_id) + connection.send_message(msg) + end + end + + # Invokes the #on_data callback when the server sends data to the + # channel. This will reduce the available window size on the local end, + # but does not actually throttle requests that come in illegally when + # the window size is too small. The callback is invoked with the channel + # as the first argument, and the data as the second. + def do_data(data) #:nodoc: + update_local_window_size(data.length) + @on_data.call(self, data) if @on_data + end + + # Invokes the #on_extended_data callback when the server sends + # extended data to the channel. This will reduce the available window + # size on the local end. The callback is invoked with the channel, + # type, and data. + def do_extended_data(type, data) + update_local_window_size(data.length) + @on_extended_data.call(self, type, data) if @on_extended_data + end + + # Invokes the #on_eof callback when the server indicates that no + # further data is forthcoming. The callback is invoked with the channel + # as the argument. + def do_eof + @on_eof.call(self) if @on_eof + end + + # Invokes the #on_close callback when the server closes a channel. + # The channel is the only argument. + def do_close + @on_close.call(self) if @on_close + end + + # Invokes the next pending request callback with +false+ as the second + # argument. + def do_failure + if callback = pending_requests.shift + callback.call(self, false) + else + error { "channel failure recieved with no pending request to handle it (bug?)" } + end + end + + # Invokes the next pending request callback with +true+ as the second + # argument. + def do_success + if callback = pending_requests.shift + callback.call(self, true) + else + error { "channel success recieved with no pending request to handle it (bug?)" } + end + end + + private + + # Updates the local window size by the given amount. If the window + # size drops to less than half of the local maximum (an arbitrary + # threshold), a CHANNEL_WINDOW_ADJUST message will be sent to the + # server telling it that the window size has grown. 
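+ #
+ # As a rough worked example (illustrative numbers only): with a local
+ # maximum window of 0x20000 bytes, once more than 0x10000 bytes have been
+ # received the adjustment below fires, the server is told it may send
+ # another 0x20000 bytes, and both the current and maximum window counters
+ # grow by 0x20000.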
+ def update_local_window_size(size) + @local_window_size -= size + if local_window_size < local_maximum_window_size/2 + connection.send_message(Buffer.from(:byte, CHANNEL_WINDOW_ADJUST, + :long, remote_id, :long, 0x20000)) + @local_window_size += 0x20000 + @local_maximum_window_size += 0x20000 + end + end + end + +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/connection/constants.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/connection/constants.rb new file mode 100644 index 00000000..2c3df5af --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/connection/constants.rb @@ -0,0 +1,33 @@ +module Net; module SSH; module Connection + + # Definitions of constants that are specific to the connection layer of the + # SSH protocol. + module Constants + + #-- + # Connection protocol generic messages + #++ + + GLOBAL_REQUEST = 80 + REQUEST_SUCCESS = 81 + REQUEST_FAILURE = 82 + + #-- + # Channel related messages + #++ + + CHANNEL_OPEN = 90 + CHANNEL_OPEN_CONFIRMATION = 91 + CHANNEL_OPEN_FAILURE = 92 + CHANNEL_WINDOW_ADJUST = 93 + CHANNEL_DATA = 94 + CHANNEL_EXTENDED_DATA = 95 + CHANNEL_EOF = 96 + CHANNEL_CLOSE = 97 + CHANNEL_REQUEST = 98 + CHANNEL_SUCCESS = 99 + CHANNEL_FAILURE = 100 + + end + +end; end end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/connection/session.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/connection/session.rb new file mode 100644 index 00000000..13270daa --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/connection/session.rb @@ -0,0 +1,597 @@ +require 'net/ssh/loggable' +require 'net/ssh/ruby_compat' +require 'net/ssh/connection/channel' +require 'net/ssh/connection/constants' +require 'net/ssh/service/forward' + +module Net; module SSH; module Connection + + # A session class representing the connection service running on top of + # the SSH transport layer. It manages the creation of channels (see + # #open_channel), and the dispatching of messages to the various channels. + # It also encapsulates the SSH event loop (via #loop and #process), + # and serves as a central point-of-reference for all SSH-related services (e.g. + # port forwarding, SFTP, SCP, etc.). + # + # You will rarely (if ever) need to instantiate this class directly; rather, + # you'll almost always use Net::SSH.start to initialize a new network + # connection, authenticate a user, and return a new connection session, + # all in one call. + # + # Net::SSH.start("localhost", "user") do |ssh| + # # 'ssh' is an instance of Net::SSH::Connection::Session + # ssh.exec! "/etc/init.d/some_process start" + # end + class Session + include Constants, Loggable + + # The underlying transport layer abstraction (see Net::SSH::Transport::Session). + attr_reader :transport + + # The map of options that were used to initialize this instance. + attr_reader :options + + # The collection of custom properties for this instance. (See #[] and #[]=). + attr_reader :properties + + # The map of channels, each key being the local-id for the channel. + attr_reader :channels #:nodoc: + + # The map of listeners that the event loop knows about. See #listen_to. + attr_reader :listeners #:nodoc: + + # The map of specialized handlers for opening specific channel types. See + # #on_open_channel. + attr_reader :channel_open_handlers #:nodoc: + + # The list of callbacks for pending requests. See #send_global_request. 
+ attr_reader :pending_requests #:nodoc: + + class NilChannel + def initialize(session) + @session = session + end + + def method_missing(sym, *args) + @session.lwarn { "ignoring request #{sym.inspect} for non-existent (closed?) channel; probably ssh server bug" } + end + end + + # Create a new connection service instance atop the given transport + # layer. Initializes the listeners to be only the underlying socket object. + def initialize(transport, options={}) + self.logger = transport.logger + + @transport = transport + @options = options + + @channel_id_counter = -1 + @channels = Hash.new(NilChannel.new(self)) + @listeners = { transport.socket => nil } + @pending_requests = [] + @channel_open_handlers = {} + @on_global_request = {} + @properties = (options[:properties] || {}).dup + end + + # Retrieves a custom property from this instance. This can be used to + # store additional state in applications that must manage multiple + # SSH connections. + def [](key) + @properties[key] + end + + # Sets a custom property for this instance. + def []=(key, value) + @properties[key] = value + end + + # Returns the name of the host that was given to the transport layer to + # connect to. + def host + transport.host + end + + # Returns true if the underlying transport has been closed. Note that + # this can be a little misleading, since if the remote server has + # closed the connection, the local end will still think it is open + # until the next operation on the socket. Nevertheless, this method can + # be useful if you just want to know if _you_ have closed the connection. + def closed? + transport.closed? + end + + # Closes the session gracefully, blocking until all channels have + # successfully closed, and then closes the underlying transport layer + # connection. + def close + info { "closing remaining channels (#{channels.length} open)" } + channels.each { |id, channel| channel.close } + loop { channels.any? } + transport.close + end + + # Performs a "hard" shutdown of the connection. In general, this should + # never be done, but it might be necessary (in a rescue clause, for instance, + # when the connection needs to close but you don't know the status of the + # underlying protocol's state). + def shutdown! + transport.shutdown! + end + + # preserve a reference to Kernel#loop + alias :loop_forever :loop + + # Returns +true+ if there are any channels currently active on this + # session. By default, this will not include "invisible" channels + # (such as those created by forwarding ports and such), but if you pass + # a +true+ value for +include_invisible+, then those will be counted. + # + # This can be useful for determining whether the event loop should continue + # to be run. + # + # ssh.loop { ssh.busy? } + def busy?(include_invisible=false) + if include_invisible + channels.any? + else + channels.any? { |id, ch| !ch[:invisible] } + end + end + + # The main event loop. Calls #process until #process returns false. If a + # block is given, it is passed to #process, otherwise a default proc is + # used that just returns true if there are any channels active (see #busy?). + # The # +wait+ parameter is also passed through to #process (where it is + # interpreted as the maximum number of seconds to wait for IO.select to return). 
+ # + # # loop for as long as there are any channels active + # ssh.loop + # + # # loop for as long as there are any channels active, but make sure + # # the event loop runs at least once per 0.1 second + # ssh.loop(0.1) + # + # # loop until ctrl-C is pressed + # int_pressed = false + # trap("INT") { int_pressed = true } + # ssh.loop(0.1) { not int_pressed } + def loop(wait=nil, &block) + running = block || Proc.new { busy? } + loop_forever { break unless process(wait, &running) } + end + + # The core of the event loop. It processes a single iteration of the event + # loop. If a block is given, it should return false when the processing + # should abort, which causes #process to return false. Otherwise, + # #process returns true. The session itself is yielded to the block as its + # only argument. + # + # If +wait+ is nil (the default), this method will block until any of the + # monitored IO objects are ready to be read from or written to. If you want + # it to not block, you can pass 0, or you can pass any other numeric value + # to indicate that it should block for no more than that many seconds. + # Passing 0 is a good way to poll the connection, but if you do it too + # frequently it can make your CPU quite busy! + # + # This will also cause all active channels to be processed once each (see + # Net::SSH::Connection::Channel#on_process). + # + # # process multiple Net::SSH connections in parallel + # connections = [ + # Net::SSH.start("host1", ...), + # Net::SSH.start("host2", ...) + # ] + # + # connections.each do |ssh| + # ssh.exec "grep something /in/some/files" + # end + # + # condition = Proc.new { |s| s.busy? } + # + # loop do + # connections.delete_if { |ssh| !ssh.process(0.1, &condition) } + # break if connections.empty? + # end + def process(wait=nil, &block) + return false unless preprocess(&block) + + r = listeners.keys + w = r.select { |w2| w2.respond_to?(:pending_write?) && w2.pending_write? } + readers, writers, = Net::SSH::Compat.io_select(r, w, nil, wait) + + postprocess(readers, writers) + end + + # This is called internally as part of #process. It dispatches any + # available incoming packets, and then runs Net::SSH::Connection::Channel#process + # for any active channels. If a block is given, it is invoked at the + # start of the method and again at the end, and if the block ever returns + # false, this method returns false. Otherwise, it returns true. + def preprocess + return false if block_given? && !yield(self) + dispatch_incoming_packets + channels.each { |id, channel| channel.process unless channel.closing? } + return false if block_given? && !yield(self) + return true + end + + # This is called internally as part of #process. It loops over the given + # arrays of reader IO's and writer IO's, processing them as needed, and + # then calls Net::SSH::Transport::Session#rekey_as_needed to allow the + # transport layer to rekey. Then returns true. + def postprocess(readers, writers) + Array(readers).each do |reader| + if listeners[reader] + listeners[reader].call(reader) + else + if reader.fill.zero? + reader.close + stop_listening_to(reader) + end + end + end + + Array(writers).each do |writer| + writer.send_pending + end + + transport.rekey_as_needed + + return true + end + + # Send a global request of the given type. The +extra+ parameters must + # be even in number, and conform to the same format as described for + # Net::SSH::Buffer.from. 
If a callback is not specified, the request will + # not require a response from the server, otherwise the server is required + # to respond and indicate whether the request was successful or not. This + # success or failure is indicated by the callback being invoked, with the + # first parameter being true or false (success, or failure), and the second + # being the packet itself. + # + # Generally, Net::SSH will manage global requests that need to be sent + # (e.g. port forward requests and such are handled in the Net::SSH::Service::Forward + # class, for instance). However, there may be times when you need to + # send a global request that isn't explicitly handled by Net::SSH, and so + # this method is available to you. + # + # ssh.send_global_request("keep-alive@openssh.com") + def send_global_request(type, *extra, &callback) + info { "sending global request #{type}" } + msg = Buffer.from(:byte, GLOBAL_REQUEST, :string, type.to_s, :bool, !callback.nil?, *extra) + send_message(msg) + pending_requests << callback if callback + self + end + + # Requests that a new channel be opened. By default, the channel will be + # of type "session", but if you know what you're doing you can select any + # of the channel types supported by the SSH protocol. The +extra+ parameters + # must be even in number and conform to the same format as described for + # Net::SSH::Buffer.from. If a callback is given, it will be invoked when + # the server confirms that the channel opened successfully. The sole parameter + # for the callback is the channel object itself. + # + # In general, you'll use #open_channel without any arguments; the only + # time you'd want to set the channel type or pass additional initialization + # data is if you were implementing an SSH extension. + # + # channel = ssh.open_channel do |ch| + # ch.exec "grep something /some/files" do |ch, success| + # ... + # end + # end + # + # channel.wait + def open_channel(type="session", *extra, &on_confirm) + local_id = get_next_channel_id + channel = Channel.new(self, type, local_id, &on_confirm) + + msg = Buffer.from(:byte, CHANNEL_OPEN, :string, type, :long, local_id, + :long, channel.local_maximum_window_size, + :long, channel.local_maximum_packet_size, *extra) + send_message(msg) + + channels[local_id] = channel + end + + # A convenience method for executing a command and interacting with it. If + # no block is given, all output is printed via $stdout and $stderr. Otherwise, + # the block is called for each data and extended data packet, with three + # arguments: the channel object, a symbol indicating the data type + # (:stdout or :stderr), and the data (as a string). + # + # Note that this method returns immediately, and requires an event loop + # (see Session#loop) in order for the command to actually execute. + # + # This is effectively identical to calling #open_channel, and then + # Net::SSH::Connection::Channel#exec, and then setting up the channel + # callbacks. However, for most uses, this will be sufficient. 
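+ #
+ # For the curious, a hand-rolled equivalent of the default behaviour might
+ # look roughly like this (an illustrative sketch, not the actual
+ # implementation):
+ #
+ #   ssh.open_channel do |ch|
+ #     ch.exec("grep something /some/files") do |c, success|
+ #       raise "command failed" unless success
+ #       c.on_data          { |_, data|       $stdout.print(data) }
+ #       c.on_extended_data { |_, type, data| $stderr.print(data) }
+ #     end
+ #   end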
+ # + # ssh.exec "grep something /some/files" do |ch, stream, data| + # if stream == :stderr + # puts "ERROR: #{data}" + # else + # puts data + # end + # end + def exec(command, &block) + open_channel do |channel| + channel.exec(command) do |ch, success| + raise "could not execute command: #{command.inspect}" unless success + + channel.on_data do |ch2, data| + if block + block.call(ch2, :stdout, data) + else + $stdout.print(data) + end + end + + channel.on_extended_data do |ch2, type, data| + if block + block.call(ch2, :stderr, data) + else + $stderr.print(data) + end + end + end + end + end + + # Same as #exec, except this will block until the command finishes. Also, + # if a block is not given, this will return all output (stdout and stderr) + # as a single string. + # + # matches = ssh.exec!("grep something /some/files") + def exec!(command, &block) + block ||= Proc.new do |ch, type, data| + ch[:result] ||= "" + ch[:result] << data + end + + channel = exec(command, &block) + channel.wait + + return channel[:result] + end + + # Enqueues a message to be sent to the server as soon as the socket is + # available for writing. Most programs will never need to call this, but + # if you are implementing an extension to the SSH protocol, or if you + # need to send a packet that Net::SSH does not directly support, you can + # use this to send it. + # + # ssh.send_message(Buffer.from(:byte, REQUEST_SUCCESS).to_s) + def send_message(message) + transport.enqueue_message(message) + end + + # Adds an IO object for the event loop to listen to. If a callback + # is given, it will be invoked when the io is ready to be read, otherwise, + # the io will merely have its #fill method invoked. + # + # Any +io+ value passed to this method _must_ have mixed into it the + # Net::SSH::BufferedIo functionality, typically by calling #extend on the + # object. + # + # The following example executes a process on the remote server, opens + # a socket to somewhere, and then pipes data from that socket to the + # remote process' stdin stream: + # + # channel = ssh.open_channel do |ch| + # ch.exec "/some/process/that/wants/input" do |ch, success| + # abort "can't execute!" unless success + # + # io = TCPSocket.new(somewhere, port) + # io.extend(Net::SSH::BufferedIo) + # ssh.listen_to(io) + # + # ch.on_process do + # if io.available > 0 + # ch.send_data(io.read_available) + # end + # end + # + # ch.on_close do + # ssh.stop_listening_to(io) + # io.close + # end + # end + # end + # + # channel.wait + def listen_to(io, &callback) + listeners[io] = callback + end + + # Removes the given io object from the listeners collection, so that the + # event loop will no longer monitor it. + def stop_listening_to(io) + listeners.delete(io) + end + + # Returns a reference to the Net::SSH::Service::Forward service, which can + # be used for forwarding ports over SSH. + def forward + @forward ||= Service::Forward.new(self) + end + + # Registers a handler to be invoked when the server wants to open a + # channel on the client. The callback receives the connection object, + # the new channel object, and the packet itself as arguments, and should + # raise ChannelOpenFailed if it is unable to open the channel for some + # reason. Otherwise, the channel will be opened and a confirmation message + # sent to the server. + # + # This is used by the Net::SSH::Service::Forward service to open a channel + # when a remote forwarded port receives a connection. However, you are + # welcome to register handlers for other channel types, as needed. 
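+ #
+ # A registration for a made-up channel type might look like this
+ # (illustrative sketch; the type name is hypothetical):
+ #
+ #   ssh.on_open_channel("example@net.ssh") do |session, channel, packet|
+ #     raise Net::SSH::ChannelOpenFailed.new(1, "administratively prohibited")
+ #   end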
+ def on_open_channel(type, &block) + channel_open_handlers[type] = block + end + + # Registers a handler to be invoked when the server sends a global request + # of the given type. The callback receives the request data as the first + # parameter, and true/false as the second (indicating whether a response + # is required). If the callback sends the response, it should return + # :sent. Otherwise, if it returns true, REQUEST_SUCCESS will be sent, and + # if it returns false, REQUEST_FAILURE will be sent. + def on_global_request(type, &block) + old, @on_global_request[type] = @on_global_request[type], block + old + end + + private + + # Read all pending packets from the connection and dispatch them as + # appropriate. Returns as soon as there are no more pending packets. + def dispatch_incoming_packets + while packet = transport.poll_message + unless MAP.key?(packet.type) + raise Net::SSH::Exception, "unexpected response #{packet.type} (#{packet.inspect})" + end + + send(MAP[packet.type], packet) + end + end + + # Returns the next available channel id to be assigned, and increments + # the counter. + def get_next_channel_id + @channel_id_counter += 1 + end + + # Invoked when a global request is received. The registered global + # request callback will be invoked, if one exists, and the necessary + # reply returned. + def global_request(packet) + info { "global request received: #{packet[:request_type]} #{packet[:want_reply]}" } + callback = @on_global_request[packet[:request_type]] + result = callback ? callback.call(packet[:request_data], packet[:want_reply]) : false + + if result != :sent && result != true && result != false + raise "expected global request handler for `#{packet[:request_type]}' to return true, false, or :sent, but got #{result.inspect}" + end + + if packet[:want_reply] && result != :sent + msg = Buffer.from(:byte, result ? REQUEST_SUCCESS : REQUEST_FAILURE) + send_message(msg) + end + end + + # Invokes the next pending request callback with +true+. + def request_success(packet) + info { "global request success" } + callback = pending_requests.shift + callback.call(true, packet) if callback + end + + # Invokes the next pending request callback with +false+. + def request_failure(packet) + info { "global request failure" } + callback = pending_requests.shift + callback.call(false, packet) if callback + end + + # Called when the server wants to open a channel. If no registered + # channel handler exists for the given channel type, CHANNEL_OPEN_FAILURE + # is returned, otherwise the callback is invoked and everything proceeds + # accordingly. 
+ def channel_open(packet) + info { "channel open #{packet[:channel_type]}" } + + local_id = get_next_channel_id + channel = Channel.new(self, packet[:channel_type], local_id) + channel.do_open_confirmation(packet[:remote_id], packet[:window_size], packet[:packet_size]) + + callback = channel_open_handlers[packet[:channel_type]] + + if callback + begin + callback[self, channel, packet] + rescue ChannelOpenFailed => err + failure = [err.code, err.reason] + else + channels[local_id] = channel + msg = Buffer.from(:byte, CHANNEL_OPEN_CONFIRMATION, :long, channel.remote_id, :long, channel.local_id, :long, channel.local_maximum_window_size, :long, channel.local_maximum_packet_size) + end + else + failure = [3, "unknown channel type #{channel.type}"] + end + + if failure + error { failure.inspect } + msg = Buffer.from(:byte, CHANNEL_OPEN_FAILURE, :long, channel.remote_id, :long, failure[0], :string, failure[1], :string, "") + end + + send_message(msg) + end + + def channel_open_confirmation(packet) + info { "channel_open_confirmation: #{packet[:local_id]} #{packet[:remote_id]} #{packet[:window_size]} #{packet[:packet_size]}" } + channel = channels[packet[:local_id]] + channel.do_open_confirmation(packet[:remote_id], packet[:window_size], packet[:packet_size]) + end + + def channel_open_failure(packet) + error { "channel_open_failed: #{packet[:local_id]} #{packet[:reason_code]} #{packet[:description]}" } + channel = channels.delete(packet[:local_id]) + channel.do_open_failed(packet[:reason_code], packet[:description]) + end + + def channel_window_adjust(packet) + info { "channel_window_adjust: #{packet[:local_id]} +#{packet[:extra_bytes]}" } + channels[packet[:local_id]].do_window_adjust(packet[:extra_bytes]) + end + + def channel_request(packet) + info { "channel_request: #{packet[:local_id]} #{packet[:request]} #{packet[:want_reply]}" } + channels[packet[:local_id]].do_request(packet[:request], packet[:want_reply], packet[:request_data]) + end + + def channel_data(packet) + info { "channel_data: #{packet[:local_id]} #{packet[:data].length}b" } + channels[packet[:local_id]].do_data(packet[:data]) + end + + def channel_extended_data(packet) + info { "channel_extended_data: #{packet[:local_id]} #{packet[:data_type]} #{packet[:data].length}b" } + channels[packet[:local_id]].do_extended_data(packet[:data_type], packet[:data]) + end + + def channel_eof(packet) + info { "channel_eof: #{packet[:local_id]}" } + channels[packet[:local_id]].do_eof + end + + def channel_close(packet) + info { "channel_close: #{packet[:local_id]}" } + + channel = channels[packet[:local_id]] + channel.close + + channels.delete(packet[:local_id]) + channel.do_close + end + + def channel_success(packet) + info { "channel_success: #{packet[:local_id]}" } + channels[packet[:local_id]].do_success + end + + def channel_failure(packet) + info { "channel_failure: #{packet[:local_id]}" } + channels[packet[:local_id]].do_failure + end + + MAP = Constants.constants.inject({}) do |memo, name| + value = const_get(name) + next unless Integer === value + memo[value] = name.downcase.to_sym + memo + end + end + +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/connection/term.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/connection/term.rb new file mode 100644 index 00000000..3e1caa5d --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/connection/term.rb @@ -0,0 +1,178 @@ +module Net; module SSH; module Connection + + # These constants are used when requesting a pseudo-terminal (via + # 
Net::SSH::Connection::Channel#request_pty). The descriptions for each are + # taken directly from RFC 4254 ("The Secure Shell (SSH) Connection Protocol"), + # http://tools.ietf.org/html/rfc4254. + module Term + # Interrupt character; 255 if none. Similarly for the other characters. + # Not all of these characters are supported on all systems. + VINTR = 1 + + # The quit character (sends SIGQUIT signal on POSIX systems). + VQUIT = 2 + + # Erase the character to left of the cursor. + VERASE = 3 + + # Kill the current input line. + VKILL = 4 + + # End-of-file character (sends EOF from the terminal). + VEOF = 5 + + # End-of-line character in addition to carriage return and/or linefeed. + VEOL = 6 + + # Additional end-of-line character. + VEOL2 = 7 + + # Continues paused output (normally control-Q). + VSTART = 8 + + # Pauses output (normally control-S). + VSTOP = 9 + + # Suspends the current program. + VSUSP = 10 + + # Another suspend character. + VDSUSP = 11 + + # Reprints the current input line. + VREPRINT = 12 + + # Erases a word left of cursor. + VWERASE = 13 + + # Enter the next character typed literally, even if it is a special + # character. + VLNEXT = 14 + + # Character to flush output. + VFLUSH = 15 + + # Switch to a different shell layer. + VSWITCH = 16 + + # Prints system status line (load, command, pid, etc). + VSTATUS = 17 + + # Toggles the flushing of terminal output. + VDISCARD = 18 + + # The ignore parity flag. The parameter SHOULD be 0 if this flag is FALSE, + # and 1 if it is TRUE. + IGNPAR = 30 + + # Mark parity and framing errors. + PARMRK = 31 + + # Enable checking of parity errors. + INPCK = 32 + + # Strip 8th bit off characters. + ISTRIP = 33 + + # Map NL into CR on input. + INCLR = 34 + + # Ignore CR on input. + IGNCR = 35 + + # Map CR to NL on input. + ICRNL = 36 + + # Translate uppercase characters to lowercase. + IUCLC = 37 + + # Enable output flow control. + IXON = 38 + + # Any char will restart after stop. + IXANY = 39 + + # Enable input flow control. + IXOFF = 40 + + # Ring bell on input queue full. + IMAXBEL = 41 + + # Enable signals INTR, QUIT, [D]SUSP. + ISIG = 50 + + # Canonicalize input lines. + ICANON = 51 + + # Enable input and output of uppercase characters by preceding their + # lowercase equivalents with "\". + XCASE = 52 + + # Enable echoing. + ECHO = 53 + + # Visually erase chars. + ECHOE = 54 + + # Kill character discards current line. + ECHOK = 55 + + # Echo NL even if ECHO is off. + ECHONL = 56 + + # Don't flush after interrupt. + NOFLSH = 57 + + # Stop background jobs from output. + TOSTOP= 58 + + # Enable extensions. + IEXTEN = 59 + + # Echo control characters as ^(Char). + ECHOCTL = 60 + + # Visual erase for line kill. + ECHOKE = 61 + + # Retype pending input. + PENDIN = 62 + + # Enable output processing. + OPOST = 70 + + # Convert lowercase to uppercase. + OLCUC = 71 + + # Map NL to CR-NL. + ONLCR = 72 + + # Translate carriage return to newline (output). + OCRNL = 73 + + # Translate newline to carriage return-newline (output). + ONOCR = 74 + + # Newline performs a carriage return (output). + ONLRET = 75 + + # 7 bit mode. + CS7 = 90 + + # 8 bit mode. + CS8 = 91 + + # Parity enable. + PARENB = 92 + + # Odd parity, else even. + PARODD = 93 + + # Specifies the input baud rate in bits per second. + TTY_OP_ISPEED = 128 + + # Specifies the output baud rate in bits per second. 
+ TTY_OP_OSPEED = 129 + end + +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/errors.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/errors.rb new file mode 100644 index 00000000..e6532780 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/errors.rb @@ -0,0 +1,85 @@ +module Net; module SSH + # A general exception class, to act as the ancestor of all other Net::SSH + # exception classes. + class Exception < ::RuntimeError; end + + # This exception is raised when authentication fails (whether it be + # public key authentication, password authentication, or whatever). + class AuthenticationFailed < Exception; end + + # This exception is raised when the remote host has disconnected + # unexpectedly. + class Disconnect < Exception; end + + # This exception is primarily used internally, but if you have a channel + # request handler (see Net::SSH::Connection::Channel#on_request) that you + # want to fail in such a way that the server knows it failed, you can + # raise this exception in the handler and Net::SSH will translate that into + # a "channel failure" message. + class ChannelRequestFailed < Exception; end + + # This is exception is primarily used internally, but if you have a channel + # open handler (see Net::SSH::Connection::Session#on_open_channel) and you + # want to fail in such a way that the server knows it failed, you can + # raise this exception in the handler and Net::SSH will translate that into + # a "channel open failed" message. + class ChannelOpenFailed < Exception + attr_reader :code, :reason + + def initialize(code, reason) + @code, @reason = code, reason + super "#{reason} (#{code})" + end + end + + # Raised when the cached key for a particular host does not match the + # key given by the host, which can be indicative of a man-in-the-middle + # attack. When rescuing this exception, you can inspect the key fingerprint + # and, if you want to proceed anyway, simply call the remember_host! + # method on the exception, and then retry. + class HostKeyMismatch < Exception + # the callback to use when #remember_host! is called + attr_writer :callback #:nodoc: + + # situation-specific data describing the host (see #host, #port, etc.) + attr_writer :data #:nodoc: + + # An accessor for getting at the data that was used to look up the host + # (see also #fingerprint, #host, #port, #ip, and #key). + def [](key) + @data && @data[key] + end + + # Returns the fingerprint of the key for the host, which either was not + # found or did not match. + def fingerprint + @data && @data[:fingerprint] + end + + # Returns the host name for the remote host, as reported by the socket. + def host + @data && @data[:peer] && @data[:peer][:host] + end + + # Returns the port number for the remote host, as reported by the socket. + def port + @data && @data[:peer] && @data[:peer][:port] + end + + # Returns the IP address of the remote host, as reported by the socket. + def ip + @data && @data[:peer] && @data[:peer][:ip] + end + + # Returns the key itself, as reported by the remote host. + def key + @data && @data[:key] + end + + # Tell Net::SSH to record this host and key in the known hosts file, so + # that subsequent connections will remember them. + def remember_host! 
+ @callback.call + end + end +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/key_factory.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/key_factory.rb new file mode 100644 index 00000000..cec1d3b9 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/key_factory.rb @@ -0,0 +1,102 @@ +require 'net/ssh/transport/openssl' +require 'net/ssh/prompt' + +module Net; module SSH + + # A factory class for returning new Key classes. It is used for obtaining + # OpenSSL key instances via their SSH names, and for loading both public and + # private keys. It used used primarily by Net::SSH itself, internally, and + # will rarely (if ever) be directly used by consumers of the library. + # + # klass = Net::SSH::KeyFactory.get("rsa") + # assert klass.is_a?(OpenSSL::PKey::RSA) + # + # key = Net::SSH::KeyFacory.load_public_key("~/.ssh/id_dsa.pub") + class KeyFactory + # Specifies the mapping of SSH names to OpenSSL key classes. + MAP = { + "dh" => OpenSSL::PKey::DH, + "rsa" => OpenSSL::PKey::RSA, + "dsa" => OpenSSL::PKey::DSA + } + + class < e + if encrypted_key + tries += 1 + if tries <= 3 + passphrase = prompt("Enter passphrase for #{filename}:", false) + retry + else + raise + end + else + raise + end + end + end + + # Loads a public key from a file. It will correctly determine whether + # the file describes an RSA or DSA key, and will load it + # appropriately. The new public key is returned. + def load_public_key(filename) + data = File.read(File.expand_path(filename)) + load_data_public_key(data, filename) + end + + # Loads a public key. It will correctly determine whether + # the file describes an RSA or DSA key, and will load it + # appropriately. The new public key is returned. + def load_data_public_key(data, filename="") + type, blob = data.split(/ /) + + raise Net::SSH::Exception, "public key at #{filename} is not valid" if blob.nil? + + blob = blob.unpack("m*").first + reader = Net::SSH::Buffer.new(blob) + reader.read_key or raise OpenSSL::PKey::PKeyError, "not a public key #{filename.inspect}" + end + end + + end + +end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/known_hosts.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/known_hosts.rb new file mode 100644 index 00000000..7b153d49 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/known_hosts.rb @@ -0,0 +1,129 @@ +require 'strscan' +require 'net/ssh/buffer' + +module Net; module SSH + + # Searches an OpenSSH-style known-host file for a given host, and returns all + # matching keys. This is used to implement host-key verification, as well as + # to determine what key a user prefers to use for a given host. + # + # This is used internally by Net::SSH, and will never need to be used directly + # by consumers of the library. + class KnownHosts + class < 98 (CHANNEL_REQUEST) + # p packet[:request] + # p packet[:want_reply] + # + # This is used exclusively internally by Net::SSH, and unless you're doing + # protocol-level manipulation or are extending Net::SSH in some way, you'll + # never need to use this class directly. + class Packet < Buffer + @@types = {} + + # Register a new packet type that should be recognized and auto-parsed by + # Net::SSH::Packet. Note that any packet type that is not preregistered + # will not be autoparsed. + # + # The +pairs+ parameter must be either empty, or an array of two-element + # tuples, where the first element of each tuple is the name of the field, + # and the second is the type. 
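+ #
+ # A hypothetical two-field extension message could be declared in the
+ # same way (the constant name here is made up for illustration):
+ #
+ #   register SOME_EXTENSION_MSG, [:cookie, :string], [:enabled, :bool]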
+ # + # register DISCONNECT, [:reason_code, :long], [:description, :string], [:language, :string] + def self.register(type, *pairs) + @@types[type] = pairs + end + + include Transport::Constants, Authentication::Constants, Connection::Constants + + #-- + # These are the recognized packet types. All other packet types will be + # accepted, but not auto-parsed, requiring the client to parse the + # fields using the methods provided by Net::SSH::Buffer. + #++ + + register DISCONNECT, [:reason_code, :long], [:description, :string], [:language, :string] + register IGNORE, [:data, :string] + register UNIMPLEMENTED, [:number, :long] + register DEBUG, [:always_display, :bool], [:message, :string], [:language, :string] + register SERVICE_ACCEPT, [:service_name, :string] + register USERAUTH_BANNER, [:message, :string], [:language, :string] + register USERAUTH_FAILURE, [:authentications, :string], [:partial_success, :bool] + register GLOBAL_REQUEST, [:request_type, :string], [:want_reply, :bool], [:request_data, :buffer] + register CHANNEL_OPEN, [:channel_type, :string], [:remote_id, :long], [:window_size, :long], [:packet_size, :long] + register CHANNEL_OPEN_CONFIRMATION, [:local_id, :long], [:remote_id, :long], [:window_size, :long], [:packet_size, :long] + register CHANNEL_OPEN_FAILURE, [:local_id, :long], [:reason_code, :long], [:description, :string], [:language, :string] + register CHANNEL_WINDOW_ADJUST, [:local_id, :long], [:extra_bytes, :long] + register CHANNEL_DATA, [:local_id, :long], [:data, :string] + register CHANNEL_EXTENDED_DATA, [:local_id, :long], [:data_type, :long], [:data, :string] + register CHANNEL_EOF, [:local_id, :long] + register CHANNEL_CLOSE, [:local_id, :long] + register CHANNEL_REQUEST, [:local_id, :long], [:request, :string], [:want_reply, :bool], [:request_data, :buffer] + register CHANNEL_SUCCESS, [:local_id, :long] + register CHANNEL_FAILURE, [:local_id, :long] + + # The (integer) type of this packet. + attr_reader :type + + # Create a new packet from the given payload. This will automatically + # parse the packet if it is one that has been previously registered with + # Packet.register; otherwise, the packet will need to be manually parsed + # using the methods provided in the Net::SSH::Buffer superclass. + def initialize(payload) + @named_elements = {} + super + @type = read_byte + instantiate! + end + + # Access one of the auto-parsed fields by name. Raises an error if no + # element by the given name exists. + def [](name) + name = name.to_sym + raise ArgumentError, "no such element #{name}" unless @named_elements.key?(name) + @named_elements[name] + end + + private + + # Parse the packet's contents and assign the named elements, as described + # by the registered format for the packet. + def instantiate! + (@@types[type] || []).each do |name, datatype| + @named_elements[name.to_sym] = if datatype == :buffer + remainder_as_buffer + else + send("read_#{datatype}") + end + end + end + end +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/prompt.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/prompt.rb new file mode 100644 index 00000000..505e0b3d --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/prompt.rb @@ -0,0 +1,93 @@ +module Net; module SSH + + # A basic prompt module that can be mixed into other objects. If HighLine is + # installed, it will be used to display prompts and read input from the + # user. Otherwise, the termios library will be used. 
If neither HighLine + # nor termios is installed, a simple prompt that echos text in the clear + # will be used. + + module PromptMethods + + # Defines the prompt method to use if the Highline library is installed. + module Highline + # Uses Highline#ask to present a prompt and accept input. If +echo+ is + # +false+, the characters entered by the user will not be echoed to the + # screen. + def prompt(prompt, echo=true) + @highline ||= ::HighLine.new + @highline.ask(prompt + " ") { |q| q.echo = echo } + end + end + + # Defines the prompt method to use if the Termios library is installed. + module Termios + # Displays the prompt to $stdout. If +echo+ is false, the Termios + # library will be used to disable keystroke echoing for the duration of + # this method. + def prompt(prompt, echo=true) + $stdout.print(prompt) + $stdout.flush + + set_echo(false) unless echo + $stdin.gets.chomp + ensure + if !echo + set_echo(true) + $stdout.puts + end + end + + private + + # Enables or disables keystroke echoing using the Termios library. + def set_echo(enable) + term = ::Termios.getattr($stdin) + + if enable + term.c_lflag |= (::Termios::ECHO | ::Termios::ICANON) + else + term.c_lflag &= ~::Termios::ECHO + end + + ::Termios.setattr($stdin, ::Termios::TCSANOW, term) + end + end + + # Defines the prompt method to use when neither Highline nor Termios are + # installed. + module Clear + # Displays the prompt to $stdout and pulls the response from $stdin. + # Text is always echoed in the clear, regardless of the +echo+ setting. + # The first time a prompt is given and +echo+ is false, a warning will + # be written to $stderr recommending that either Highline or Termios + # be installed. + def prompt(prompt, echo=true) + @seen_warning ||= false + if !echo && !@seen_warning + $stderr.puts "Text will be echoed in the clear. Please install the HighLine or Termios libraries to suppress echoed text." + @seen_warning = true + end + + $stdout.print(prompt) + $stdout.flush + $stdin.gets.chomp + end + end + end + + # Try to load Highline and Termios in turn, selecting the corresponding + # PromptMethods module to use. If neither are available, choose PromptMethods::Clear. + Prompt = begin + require 'highline' + HighLine.track_eof = false + PromptMethods::Highline + rescue LoadError + begin + require 'termios' + PromptMethods::Termios + rescue LoadError + PromptMethods::Clear + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/errors.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/errors.rb new file mode 100644 index 00000000..6eb3501a --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/errors.rb @@ -0,0 +1,14 @@ +require 'net/ssh/errors' + +module Net; module SSH; module Proxy + + # A general exception class for all Proxy errors. + class Error < Net::SSH::Exception; end + + # Used for reporting proxy connection errors. + class ConnectError < Error; end + + # Used when the server doesn't recognize the user's credentials. + class UnauthorizedError < Error; end + +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/http.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/http.rb new file mode 100644 index 00000000..4f86dcfc --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/http.rb @@ -0,0 +1,94 @@ +require 'socket' +require 'net/ssh/proxy/errors' + +module Net; module SSH; module Proxy + + # An implementation of an HTTP proxy. 
To use it, instantiate it, then + # pass the instantiated object via the :proxy key to Net::SSH.start: + # + # require 'net/ssh/proxy/http' + # + # proxy = Net::SSH::Proxy::HTTP.new('proxy.host', proxy_port) + # Net::SSH.start('host', 'user', :proxy => proxy) do |ssh| + # ... + # end + # + # If the proxy requires authentication, you can pass :user and :password + # to the proxy's constructor: + # + # proxy = Net::SSH::Proxy::HTTP.new('proxy.host', proxy_port, + # :user => "user", :password => "password") + # + # Note that HTTP digest authentication is not supported; Basic only at + # this point. + class HTTP + + # The hostname or IP address of the HTTP proxy. + attr_reader :proxy_host + + # The port number of the proxy. + attr_reader :proxy_port + + # The map of additional options that were given to the object at + # initialization. + attr_reader :options + + # Create a new socket factory that tunnels via the given host and + # port. The +options+ parameter is a hash of additional settings that + # can be used to tweak this proxy connection. Specifically, the following + # options are supported: + # + # * :user => the user name to use when authenticating to the proxy + # * :password => the password to use when authenticating + def initialize(proxy_host, proxy_port=80, options={}) + @proxy_host = proxy_host + @proxy_port = proxy_port + @options = options + end + + # Return a new socket connected to the given host and port via the + # proxy that was requested when the socket factory was instantiated. + def open(host, port) + socket = TCPSocket.new(proxy_host, proxy_port) + socket.write "CONNECT #{host}:#{port} HTTP/1.0\r\n" + + if options[:user] + credentials = ["#{options[:user]}:#{options[:password]}"].pack("m*").gsub(/\s/, "") + socket.write "Proxy-Authorization: Basic #{credentials}\r\n" + end + + socket.write "\r\n" + + resp = parse_response(socket) + + return socket if resp[:code] == 200 + + socket.close + raise ConnectError, resp.inspect + end + + private + + def parse_response(socket) + version, code, reason = socket.gets.chomp.split(/ /, 3) + headers = {} + + while (line = socket.gets.chomp) != "" + name, value = line.split(/:/, 2) + headers[name.strip] = value.strip + end + + if headers["Content-Length"] + body = socket.read(headers["Content-Length"].to_i) + end + + return { :version => version, + :code => code.to_i, + :reason => reason, + :headers => headers, + :body => body } + end + + end + +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/socks4.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/socks4.rb new file mode 100644 index 00000000..fdb1e329 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/socks4.rb @@ -0,0 +1,70 @@ +require 'socket' +require 'resolv' +require 'ipaddr' +require 'net/ssh/proxy/errors' + +module Net + module SSH + module Proxy + + # An implementation of a SOCKS4 proxy. To use it, instantiate it, then + # pass the instantiated object via the :proxy key to Net::SSH.start: + # + # require 'net/ssh/proxy/socks4' + # + # proxy = Net::SSH::Proxy::SOCKS4.new('proxy.host', proxy_port, :user => 'user') + # Net::SSH.start('host', 'user', :proxy => proxy) do |ssh| + # ... + # end + class SOCKS4 + + # The SOCKS protocol version used by this class + VERSION = 4 + + # The packet type for connection requests + CONNECT = 1 + + # The status code for a successful connection + GRANTED = 90 + + # The proxy's host name or IP address, as given to the constructor. + attr_reader :proxy_host + + # The proxy's port number. 
+ attr_reader :proxy_port + + # The additional options that were given to the proxy's constructor. + attr_reader :options + + # Create a new proxy connection to the given proxy host and port. + # Optionally, a :user key may be given to identify the username + # with which to authenticate. + def initialize(proxy_host, proxy_port=1080, options={}) + @proxy_host = proxy_host + @proxy_port = proxy_port + @options = options + end + + # Return a new socket connected to the given host and port via the + # proxy that was requested when the socket factory was instantiated. + def open(host, port) + socket = TCPSocket.new(proxy_host, proxy_port) + ip_addr = IPAddr.new(Resolv.getaddress(host)) + + packet = [VERSION, CONNECT, port.to_i, ip_addr.to_i, options[:user]].pack("CCnNZ*") + socket.send packet, 0 + + version, status, port, ip = socket.recv(8).unpack("CCnN") + if status != GRANTED + socket.close + raise ConnectError, "error connecting to proxy (#{status})" + end + + return socket + end + + end + + end + end +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/socks5.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/socks5.rb new file mode 100644 index 00000000..7fc06003 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/proxy/socks5.rb @@ -0,0 +1,142 @@ +require 'socket' +require 'net/ssh/ruby_compat' +require 'net/ssh/proxy/errors' + +module Net + module SSH + module Proxy + + # An implementation of a SOCKS5 proxy. To use it, instantiate it, then + # pass the instantiated object via the :proxy key to Net::SSH.start: + # + # require 'net/ssh/proxy/socks5' + # + # proxy = Net::SSH::Proxy::SOCKS5.new('proxy.host', proxy_port, + # :user => 'user', :password => "password") + # Net::SSH.start('host', 'user', :proxy => proxy) do |ssh| + # ... + # end + class SOCKS5 + # The SOCKS protocol version used by this class + VERSION = 5 + + # The SOCKS authentication type for requests without authentication + METHOD_NO_AUTH = 0 + + # The SOCKS authentication type for requests via username/password + METHOD_PASSWD = 2 + + # The SOCKS authentication type for when there are no supported + # authentication methods. + METHOD_NONE = 0xFF + + # The SOCKS packet type for requesting a proxy connection. + CMD_CONNECT = 1 + + # The SOCKS address type for connections via IP address. + ATYP_IPV4 = 1 + + # The SOCKS address type for connections via domain name. + ATYP_DOMAIN = 3 + + # The SOCKS response code for a successful operation. + SUCCESS = 0 + + # The proxy's host name or IP address + attr_reader :proxy_host + + # The proxy's port number + attr_reader :proxy_port + + # The map of options given at initialization + attr_reader :options + + # Create a new proxy connection to the given proxy host and port. + # Optionally, :user and :password options may be given to + # identify the username and password with which to authenticate. + def initialize(proxy_host, proxy_port=1080, options={}) + @proxy_host = proxy_host + @proxy_port = proxy_port + @options = options + end + + # Return a new socket connected to the given host and port via the + # proxy that was requested when the socket factory was instantiated. 
+ def open(host, port) + socket = TCPSocket.new(proxy_host, proxy_port) + + methods = [METHOD_NO_AUTH] + methods << METHOD_PASSWD if options[:user] + + packet = [VERSION, methods.size, *methods].pack("C*") + socket.send packet, 0 + + version, method = socket.recv(2).unpack("CC") + if version != VERSION + socket.close + raise Net::SSH::Proxy::Error, "invalid SOCKS version (#{version})" + end + + if method == METHOD_NONE + socket.close + raise Net::SSH::Proxy::Error, "no supported authorization methods" + end + + negotiate_password(socket) if method == METHOD_PASSWD + + packet = [VERSION, CMD_CONNECT, 0].pack("C*") + + if host =~ /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/ + packet << [ATYP_IPV4, $1.to_i, $2.to_i, $3.to_i, $4.to_i].pack("C*") + else + packet << [ATYP_DOMAIN, host.length, host].pack("CCA*") + end + + packet << [port].pack("n") + socket.send packet, 0 + + version, reply, = socket.recv(2).unpack("C*") + socket.recv(1) + address_type = socket.recv(1).getbyte(0) + case address_type + when 1 + socket.recv(4) # get four bytes for IPv4 address + when 3 + len = socket.recv(1).getbyte(0) + hostname = socket.recv(len) + when 4 + ipv6addr hostname = socket.recv(16) + else + socket.close + raise ConnectionError, "Illegal response type" + end + portnum = socket.recv(2) + + unless reply == SUCCESS + socket.close + raise ConnectError, "#{reply}" + end + + return socket + end + + private + + # Simple username/password negotiation with the SOCKS5 server. + def negotiate_password(socket) + packet = [0x01, options[:user].length, options[:user], + options[:password].length, options[:password]].pack("CCA*CA*") + socket.send packet, 0 + + version, status = socket.recv(2).unpack("CC") + + if status != SUCCESS + socket.close + raise UnauthorizedError, "could not authorize user" + end + end + end + + end + end +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/ruby_compat.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/ruby_compat.rb new file mode 100644 index 00000000..8a1c8f22 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/ruby_compat.rb @@ -0,0 +1,43 @@ +require 'thread' + +class String + if RUBY_VERSION < "1.9" + def getbyte(index) + self[index] + end + end +end + +module Net; module SSH + + # This class contains miscellaneous patches and workarounds + # for different ruby implementations. + class Compat + + # A workaround for an IO#select threading bug in certain versions of MRI 1.8. + # See: http://net-ssh.lighthouseapp.com/projects/36253/tickets/1-ioselect-threading-bug-in-ruby-18 + # The root issue is documented here: http://redmine.ruby-lang.org/issues/show/1993 + if RUBY_VERSION >= '1.9' || RUBY_PLATFORM == 'java' + def self.io_select(*params) + IO.select(*params) + end + else + SELECT_MUTEX = Mutex.new + def self.io_select(*params) + # It should be safe to wrap calls in a mutex when the timeout is 0 + # (that is, the call is not supposed to block). + # We leave blocking calls unprotected to avoid causing deadlocks. + # This should still catch the main case for Capistrano users. 
+ if params[3] == 0 + SELECT_MUTEX.synchronize do + IO.select(*params) + end + else + IO.select(*params) + end + end + end + + end + +end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/service/forward.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/service/forward.rb new file mode 100644 index 00000000..6df93ea4 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/service/forward.rb @@ -0,0 +1,267 @@ +require 'net/ssh/loggable' + +module Net; module SSH; module Service + + # This class implements various port forwarding services for use by + # Net::SSH clients. The Forward class should never need to be instantiated + # directly; instead, it should be accessed via the singleton instance + # returned by Connection::Session#forward: + # + # ssh.forward.local(1234, "www.capify.org", 80) + class Forward + include Loggable + + # The underlying connection service instance that the port-forwarding + # services employ. + attr_reader :session + + # A simple class for representing a requested remote forwarded port. + Remote = Struct.new(:host, :port) #:nodoc: + + # Instantiates a new Forward service instance atop the given connection + # service session. This will register new channel open handlers to handle + # the specialized channels that the SSH port forwarding protocols employ. + def initialize(session) + @session = session + self.logger = session.logger + @remote_forwarded_ports = {} + @local_forwarded_ports = {} + @agent_forwarded = false + + session.on_open_channel('forwarded-tcpip', &method(:forwarded_tcpip)) + session.on_open_channel('auth-agent', &method(:auth_agent_channel)) + session.on_open_channel('auth-agent@openssh.com', &method(:auth_agent_channel)) + end + + # Starts listening for connections on the local host, and forwards them + # to the specified remote host/port via the SSH connection. This method + # accepts either three or four arguments. When four arguments are given, + # they are: + # + # * the local address to bind to + # * the local port to listen on + # * the remote host to forward connections to + # * the port on the remote host to connect to + # + # If three arguments are given, it is as if the local bind address is + # "127.0.0.1", and the rest are applied as above. + # + # ssh.forward.local(1234, "www.capify.org", 80) + # ssh.forward.local("0.0.0.0", 1234, "www.capify.org", 80) + def local(*args) + if args.length < 3 || args.length > 4 + raise ArgumentError, "expected 3 or 4 parameters, got #{args.length}" + end + + bind_address = "127.0.0.1" + bind_address = args.shift if args.first.is_a?(String) && args.first =~ /\D/ + + local_port = args.shift.to_i + remote_host = args.shift + remote_port = args.shift.to_i + + socket = TCPServer.new(bind_address, local_port) + + @local_forwarded_ports[[local_port, bind_address]] = socket + + session.listen_to(socket) do |server| + client = server.accept + debug { "received connection on #{bind_address}:#{local_port}" } + + channel = session.open_channel("direct-tcpip", :string, remote_host, :long, remote_port, :string, bind_address, :long, local_port) do |achannel| + achannel.info { "direct channel established" } + end + + prepare_client(client, channel, :local) + + channel.on_open_failed do |ch, code, description| + channel.error { "could not establish direct channel: #{description} (#{code})" } + channel[:socket].close + end + end + end + + # Terminates an active local forwarded port. If no such forwarded port + # exists, this will raise an exception. 
Otherwise, the forwarded connection + # is terminated. + # + # ssh.forward.cancel_local(1234) + # ssh.forward.cancel_local(1234, "0.0.0.0") + def cancel_local(port, bind_address="127.0.0.1") + socket = @local_forwarded_ports.delete([port, bind_address]) + socket.shutdown rescue nil + socket.close rescue nil + session.stop_listening_to(socket) + end + + # Returns a list of all active locally forwarded ports. The returned value + # is an array of arrays, where each element is a two-element tuple + # consisting of the local port and bind address corresponding to the + # forwarding port. + def active_locals + @local_forwarded_ports.keys + end + + # Requests that all connections on the given remote-port be forwarded via + # the local host to the given port/host. The last argument describes the + # bind address on the remote host, and defaults to 127.0.0.1. + # + # This method will return immediately, but the port will not actually be + # forwarded immediately. If the remote server is not able to begin the + # listener for this request, an exception will be raised asynchronously. + # + # If you want to know when the connection is active, it will show up in the + # #active_remotes list. If you want to block until the port is active, you + # could do something like this: + # + # ssh.forward.remote(80, "www.google.com", 1234, "0.0.0.0") + # ssh.loop { !ssh.forward.active_remotes.include?([1234, "0.0.0.0"]) } + def remote(port, host, remote_port, remote_host="127.0.0.1") + session.send_global_request("tcpip-forward", :string, remote_host, :long, remote_port) do |success, response| + if success + debug { "remote forward from remote #{remote_host}:#{remote_port} to #{host}:#{port} established" } + @remote_forwarded_ports[[remote_port, remote_host]] = Remote.new(host, port) + else + error { "remote forwarding request failed" } + raise Net::SSH::Exception, "remote forwarding request failed" + end + end + end + + # an alias, for token backwards compatibility with the 1.x API + alias :remote_to :remote + + # Requests that a remote forwarded port be cancelled. The remote forwarded + # port on the remote host, bound to the given address on the remote host, + # will be terminated, but not immediately. This method returns immediately + # after queueing the request to be sent to the server. If for some reason + # the port cannot be cancelled, an exception will be raised (asynchronously). + # + # If you want to know when the connection has been cancelled, it will no + # longer be present in the #active_remotes list. If you want to block until + # the port is no longer active, you could do something like this: + # + # ssh.forward.cancel_remote(1234, "0.0.0.0") + # ssh.loop { ssh.forward.active_remotes.include?([1234, "0.0.0.0"]) } + def cancel_remote(port, host="127.0.0.1") + session.send_global_request("cancel-tcpip-forward", :string, host, :long, port) do |success, response| + if success + @remote_forwarded_ports.delete([port, host]) + else + raise Net::SSH::Exception, "could not cancel remote forward request on #{host}:#{port}" + end + end + end + + # Returns all active forwarded remote ports. The returned value is an + # array of two-element tuples, where the first element is the port on the + # remote host and the second is the bind address. + def active_remotes + @remote_forwarded_ports.keys + end + + # Enables SSH agent forwarding on the given channel. 
The forwarded agent + # will remain active even after the channel closes--the channel is only + # used as the transport for enabling the forwarded connection. You should + # never need to call this directly--it is called automatically the first + # time a session channel is opened, when the connection was created with + # :forward_agent set to true: + # + # Net::SSH.start("remote.host", "me", :forwrd_agent => true) do |ssh| + # ssh.open_channel do |ch| + # # agent will be automatically forwarded by this point + # end + # ssh.loop + # end + def agent(channel) + return if @agent_forwarded + @agent_forwarded = true + + channel.send_channel_request("auth-agent-req@openssh.com") do |achannel, success| + if success + debug { "authentication agent forwarding is active" } + else + achannel.send_channel_request("auth-agent-req") do |a2channel, success2| + if success2 + debug { "authentication agent forwarding is active" } + else + error { "could not establish forwarding of authentication agent" } + end + end + end + end + end + + private + + # Perform setup operations that are common to all forwarded channels. + # +client+ is a socket, +channel+ is the channel that was just created, + # and +type+ is an arbitrary string describing the type of the channel. + def prepare_client(client, channel, type) + client.extend(Net::SSH::BufferedIo) + client.logger = logger + + session.listen_to(client) + channel[:socket] = client + + channel.on_data do |ch, data| + ch[:socket].enqueue(data) + end + + channel.on_close do |ch| + debug { "closing #{type} forwarded channel" } + ch[:socket].close if !client.closed? + session.stop_listening_to(ch[:socket]) + end + + channel.on_process do |ch| + if ch[:socket].closed? + ch.info { "#{type} forwarded connection closed" } + ch.close + elsif ch[:socket].available > 0 + data = ch[:socket].read_available(8192) + ch.debug { "read #{data.length} bytes from client, sending over #{type} forwarded connection" } + ch.send_data(data) + end + end + end + + # The callback used when a new "forwarded-tcpip" channel is requested + # by the server. This will open a new socket to the host/port specified + # when the forwarded connection was first requested. + def forwarded_tcpip(session, channel, packet) + connected_address = packet.read_string + connected_port = packet.read_long + originator_address = packet.read_string + originator_port = packet.read_long + + remote = @remote_forwarded_ports[[connected_port, connected_address]] + + if remote.nil? + raise Net::SSH::ChannelOpenFailed.new(1, "unknown request from remote forwarded connection on #{connected_address}:#{connected_port}") + end + + client = TCPSocket.new(remote.host, remote.port) + info { "connected #{connected_address}:#{connected_port} originator #{originator_address}:#{originator_port}" } + + prepare_client(client, channel, :remote) + rescue SocketError => err + raise Net::SSH::ChannelOpenFailed.new(2, "could not connect to remote host (#{remote.host}:#{remote.port}): #{err.message}") + end + + # The callback used when an auth-agent channel is requested by the server. 
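For orientation, here is a minimal sketch of how the Forward service above is typically driven from client code; the host names and port numbers are placeholders, not part of the vendored gem:

require 'net/ssh'

Net::SSH.start('remote.host', 'user') do |ssh|
  # tunnel local port 8080 to internal.example:80 through the SSH connection
  ssh.forward.local(8080, 'internal.example', 80)

  # ask the server to listen on its port 9000 and hand connections back to
  # localhost:3000 on the client side
  ssh.forward.remote(3000, 'localhost', 9000)

  # block until the remote listener is confirmed active
  ssh.loop { !ssh.forward.active_remotes.include?([9000, "127.0.0.1"]) }

  # keep the event loop running so both tunnels stay up
  ssh.loop { true }
end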
+ def auth_agent_channel(session, channel, packet) + info { "opening auth-agent channel" } + channel[:invisible] = true + + begin + agent = Authentication::Agent.connect(logger) + prepare_client(agent.socket, channel, :agent) + rescue Exception => e + error { "attempted to connect to agent but failed: #{e.class.name} (#{e.message})" } + raise Net::SSH::ChannelOpenFailed.new(2, "could not connect to authentication agent") + end + end + end + +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test.rb new file mode 100644 index 00000000..5ecb7d5a --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test.rb @@ -0,0 +1,89 @@ +require 'net/ssh/transport/session' +require 'net/ssh/connection/session' +require 'net/ssh/test/kex' +require 'net/ssh/test/socket' + +module Net; module SSH + + # This module may be used in unit tests, for when you want to test that your + # SSH state machines are really doing what you expect they are doing. You will + # typically include this module in your unit test class, and then build a + # "story" of expected sends and receives: + # + # require 'test/unit' + # require 'net/ssh/test' + # + # class MyTest < Test::Unit::TestCase + # include Net::SSH::Test + # + # def test_exec_via_channel_works + # story do |session| + # channel = session.opens_channel + # channel.sends_exec "ls" + # channel.gets_data "result of ls" + # channel.gets_close + # channel.sends_close + # end + # + # assert_scripted do + # result = nil + # + # connection.open_channel do |ch| + # ch.exec("ls") do |success| + # ch.on_data { |c, data| result = data } + # ch.on_close { |c| c.close } + # end + # end + # + # connection.loop + # assert_equal "result of ls", result + # end + # end + # end + # + # See Net::SSH::Test::Channel and Net::SSH::Test::Script for more options. + # + # Note that the Net::SSH::Test system is rather finicky yet, and can be kind + # of frustrating to get working. Any suggestions for improvement will be + # welcome! + module Test + # If a block is given, yields the script for the test socket (#socket). + # Otherwise, simply returns the socket's script. See Net::SSH::Test::Script. + def story + yield socket.script if block_given? + return socket.script + end + + # Returns the test socket instance to use for these tests (see + # Net::SSH::Test::Socket). + def socket(options={}) + @socket ||= Net::SSH::Test::Socket.new + end + + # Returns the connection session (Net::SSH::Connection::Session) for use + # in these tests. It is a fully functional SSH session, operating over + # a mock socket (#socket). + def connection(options={}) + @connection ||= Net::SSH::Connection::Session.new(transport(options), options) + end + + # Returns the transport session (Net::SSH::Transport::Session) for use + # in these tests. It is a fully functional SSH transport session, operating + # over a mock socket (#socket). + def transport(options={}) + @transport ||= Net::SSH::Transport::Session.new(options[:host] || "localhost", options.merge(:kex => "test", :host_key => "ssh-rsa", :paranoid => false, :proxy => socket(options))) + end + + # First asserts that a story has been described (see #story). Then yields, + # and then asserts that all items described in the script have been + # processed. Typically, this is called immediately after a story has + # been built, and the SSH commands being tested are then executed within + # the block passed to this assertion. 
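As a further illustration (an assumed variation on the example in the module comment above, reusing the same Test::Unit setup), the scripting helpers can also cover the exit-status request a real server would send:

story do |session|
  channel = session.opens_channel
  channel.sends_exec "ls"
  channel.gets_data "result of ls"
  channel.gets_exit_status(0)
  channel.gets_close
  channel.sends_close
end

assert_scripted do
  output = nil
  status = nil

  connection.open_channel do |ch|
    ch.exec("ls") do |c, success|
      c.on_data { |c2, data| output = data }
      c.on_request("exit-status") { |c2, data| status = data.read_long }
      c.on_close { |c2| c2.close }
    end
  end

  connection.loop
  assert_equal "result of ls", output
  assert_equal 0, status
end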
+ def assert_scripted + raise "there is no script to be processed" if socket.script.events.empty? + yield + assert socket.script.events.empty?, "there should not be any remaining scripted events, but there are still #{socket.script.events.length} pending" + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/channel.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/channel.rb new file mode 100644 index 00000000..261d8f38 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/channel.rb @@ -0,0 +1,129 @@ +module Net; module SSH; module Test + + # A mock channel, used for scripting actions in tests. It wraps a + # Net::SSH::Test::Script instance, and delegates to it for the most part. + # This class has little real functionality on its own, but rather acts as + # a convenience for scripting channel-related activity for later comparison + # in a unit test. + # + # story do |session| + # channel = session.opens_channel + # channel.sends_exec "ls" + # channel.gets_data "result of ls" + # channel.gets_close + # channel.sends_close + # end + class Channel + # The Net::SSH::Test::Script instance employed by this mock channel. + attr_reader :script + + # Sets the local-id of this channel object (the id assigned by the client). + attr_writer :local_id + + # Sets the remote-id of this channel object (the id assigned by the mock-server). + attr_writer :remote_id + + # Creates a new Test::Channel instance on top of the given +script+ (which + # must be a Net::SSH::Test::Script instance). + def initialize(script) + @script = script + @local_id = @remote_id = nil + end + + # Returns the local (client-assigned) id for this channel, or a Proc object + # that will return the local-id later if the local id has not yet been set. + # (See Net::SSH::Test::Packet#instantiate!.) + def local_id + @local_id || Proc.new { @local_id or raise "local-id has not been set yet!" } + end + + # Returns the remote (server-assigned) id for this channel, or a Proc object + # that will return the remote-id later if the remote id has not yet been set. + # (See Net::SSH::Test::Packet#instantiate!.) + def remote_id + @remote_id || Proc.new { @remote_id or raise "remote-id has not been set yet!" } + end + + # Because adjacent calls to #gets_data will sometimes cause the data packets + # to be concatenated (causing expectations in tests to fail), you may + # need to separate those calls with calls to #inject_remote_delay! (which + # essentially just mimics receiving an empty data packet): + # + # channel.gets_data "abcdefg" + # channel.inject_remote_delay! + # channel.gets_data "hijklmn" + def inject_remote_delay! + gets_data("") + end + + # Scripts the sending of an "exec" channel request packet to the mock + # server. If +reply+ is true, then the server is expected to reply to the + # request, otherwise no response to this request will be sent. If +success+ + # is +true+, then the request will be successful, otherwise a failure will + # be scripted. + # + # channel.sends_exec "ls -l" + def sends_exec(command, reply=true, success=true) + script.sends_channel_request(self, "exec", reply, command, success) + end + + # Scripts the sending of a "subsystem" channel request packet to the mock + # server. See #sends_exec for a discussion of the meaning of the +reply+ + # and +success+ arguments. 
+ # + # channel.sends_subsystem "sftp" + def sends_subsystem(subsystem, reply=true, success=true) + script.sends_channel_request(self, "subsystem", reply, subsystem, success) + end + + # Scripts the sending of a data packet across the channel. + # + # channel.sends_data "foo" + def sends_data(data) + script.sends_channel_data(self, data) + end + + # Scripts the sending of an EOF packet across the channel. + # + # channel.sends_eof + def sends_eof + script.sends_channel_eof(self) + end + + # Scripts the sending of a "channel close" packet across the channel. + # + # channel.sends_close + def sends_close + script.sends_channel_close(self) + end + + # Scripts the reception of a channel data packet from the remote end. + # + # channel.gets_data "bar" + def gets_data(data) + script.gets_channel_data(self, data) + end + + # Scripts the reception of an "exit-status" channel request packet. + # + # channel.gets_exit_status(127) + def gets_exit_status(status=0) + script.gets_channel_request(self, "exit-status", false, status) + end + + # Scripts the reception of an EOF packet from the remote end. + # + # channel.gets_eof + def gets_eof + script.gets_channel_eof(self) + end + + # Scripts the reception of a "channel close" packet from the remote end. + # + # channel.gets_close + def gets_close + script.gets_channel_close(self) + end + end + +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/extensions.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/extensions.rb new file mode 100644 index 00000000..e19a9863 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/extensions.rb @@ -0,0 +1,152 @@ +require 'net/ssh/buffer' +require 'net/ssh/packet' +require 'net/ssh/buffered_io' +require 'net/ssh/connection/channel' +require 'net/ssh/connection/constants' +require 'net/ssh/transport/constants' +require 'net/ssh/transport/packet_stream' + +module Net; module SSH; module Test + + # A collection of modules used to extend/override the default behavior of + # Net::SSH internals for ease of testing. As a consumer of Net::SSH, you'll + # never need to use this directly--they're all used under the covers by + # the Net::SSH::Test system. + module Extensions + + # An extension to Net::SSH::BufferedIo (assumes that the underlying IO + # is actually a StringIO). Facilitates unit testing. + module BufferedIo + # Returns +true+ if the position in the stream is less than the total + # length of the stream. + def select_for_read? + pos < size + end + + # Set this to +true+ if you want the IO to pretend to be available for writing + attr_accessor :select_for_write + + # Set this to +true+ if you want the IO to pretend to be in an error state + attr_accessor :select_for_error + + alias select_for_write? select_for_write + alias select_for_error? select_for_error + end + + # An extension to Net::SSH::Transport::PacketStream (assumes that the + # underlying IO is actually a StringIO). Facilitates unit testing. + module PacketStream + include BufferedIo # make sure we get the extensions here, too + + def self.included(base) #:nodoc: + base.send :alias_method, :real_available_for_read?, :available_for_read? + base.send :alias_method, :available_for_read?, :test_available_for_read? 
+ + base.send :alias_method, :real_enqueue_packet, :enqueue_packet + base.send :alias_method, :enqueue_packet, :test_enqueue_packet + + base.send :alias_method, :real_poll_next_packet, :poll_next_packet + base.send :alias_method, :poll_next_packet, :test_poll_next_packet + end + + # Called when another packet should be inspected from the current + # script. If the next packet is a remote packet, it pops it off the + # script and shoves it onto this IO object, making it available to + # be read. + def idle! + return false unless script.next(:first) + + if script.next(:first).remote? + self.string << script.next.to_s + self.pos = pos + end + + return true + end + + # The testing version of Net::SSH::Transport::PacketStream#available_for_read?. + # Returns true if there is data pending to be read. Otherwise calls #idle!. + def test_available_for_read? + return true if select_for_read? + idle! + false + end + + # The testing version of Net::SSH::Transport::PacketStream#enqueued_packet. + # Simply calls Net::SSH::Test::Script#process on the packet. + def test_enqueue_packet(payload) + packet = Net::SSH::Buffer.new(payload.to_s) + script.process(packet) + end + + # The testing version of Net::SSH::Transport::PacketStream#poll_next_packet. + # Reads the next available packet from the IO object and returns it. + def test_poll_next_packet + return nil if available <= 0 + packet = Net::SSH::Buffer.new(read_available(4)) + length = packet.read_long + Net::SSH::Packet.new(read_available(length)) + end + end + + # An extension to Net::SSH::Connection::Channel. Facilitates unit testing. + module Channel + def self.included(base) #:nodoc: + base.send :alias_method, :send_data_for_real, :send_data + base.send :alias_method, :send_data, :send_data_for_test + end + + # The testing version of Net::SSH::Connection::Channel#send_data. Calls + # the original implementation, and then immediately enqueues the data for + # output so that scripted sends are properly interpreted as discrete + # (rather than concatenated) data packets. + def send_data_for_test(data) + send_data_for_real(data) + enqueue_pending_output + end + end + + # An extension to the built-in ::IO class. Simply redefines IO.select + # so that it can be scripted in Net::SSH unit tests. + module IO + def self.included(base) #:nodoc: + base.extend(ClassMethods) + end + + module ClassMethods + def self.extended(obj) #:nodoc: + class < "abc-xyz", + :server_key => OpenSSL::PKey::RSA.new(32), + :shared_secret => OpenSSL::BN.new("1234567890", 10), + :hashing_algorithm => OpenSSL::Digest::SHA1 } + end + end + +end; end; end + +Net::SSH::Transport::Algorithms::ALGORITHMS[:kex] << "test" +Net::SSH::Transport::Kex::MAP["test"] = Net::SSH::Test::Kex diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/local_packet.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/local_packet.rb new file mode 100644 index 00000000..3909d211 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/local_packet.rb @@ -0,0 +1,51 @@ +require 'net/ssh/packet' +require 'net/ssh/test/packet' + +module Net; module SSH; module Test + + # This is a specialization of Net::SSH::Test::Packet for representing mock + # packets that are sent from the local (client) host. These are created + # automatically by Net::SSH::Test::Script and Net::SSH::Test::Channel by any + # of the sends_* methods. 
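For orientation, a hedged sketch of what the class below does when the client's packet is compared against the script; the 94 is the standard SSH_MSG_CHANNEL_DATA type byte and the other values are arbitrary:

require 'net/ssh/buffer'
require 'net/ssh/test/local_packet'

# Script#sends_channel_data(channel, "hello") boils down to a packet like this:
expected = Net::SSH::Test::LocalPacket.new(:channel_data, 5555, "hello")

# When the client later writes its CHANNEL_DATA packet, #process re-parses it
# field by field ([:long, :string] for channel_data) and compares each value:
sent = Net::SSH::Buffer.from(:byte, 94, :long, 5555, :string, "hello")
expected.process(sent)   # silent on a match; any mismatch raises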
+ class LocalPacket < Packet + attr_reader :init + + # Extend the default Net::SSH::Test::Packet constructor to also accept an + # optional block, which is used to finalize the initialization of the + # packet when #process is first called. + def initialize(type, *args, &block) + super(type, *args) + @init = block + end + + # Returns +true+; this is a local packet. + def local? + true + end + + # Called by Net::SSH::Test::Extensions::PacketStream#test_enqueue_packet + # to mimic remote processing of a locally-sent packet. It compares the + # packet it was given with the contents of this LocalPacket's data, to see + # if what was sent matches what was scripted. If it differs in any way, + # an exception is raised. + def process(packet) + @init.call(Net::SSH::Packet.new(packet.to_s)) if @init + type = packet.read_byte + raise "expected #{@type}, but got #{type}" if @type != type + + @data.zip(types).each do |expected, type| + type ||= case expected + when nil then break + when Numeric then :long + when String then :string + when TrueClass, FalseClass then :bool + end + + actual = packet.send("read_#{type}") + next if expected.nil? + raise "expected #{type} #{expected.inspect} but got #{actual.inspect}" unless expected == actual + end + end + end + +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/packet.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/packet.rb new file mode 100644 index 00000000..0853003e --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/packet.rb @@ -0,0 +1,81 @@ +require 'net/ssh/connection/constants' +require 'net/ssh/transport/constants' + +module Net; module SSH; module Test + + # This is an abstract class, not to be instantiated directly, subclassed by + # Net::SSH::Test::LocalPacket and Net::SSH::Test::RemotePacket. It implements + # functionality common to those subclasses. + # + # These packets are not true packets, in that they don't represent what was + # actually sent between the hosst; rather, they represent what was expected + # to be sent, as dictated by the script (Net::SSH::Test::Script). Thus, + # though they are defined with data elements, these data elements are used + # to either validate data that was sent by the local host (Net::SSH::Test::LocalPacket) + # or to mimic the sending of data by the remote host (Net::SSH::Test::RemotePacket). + class Packet + include Net::SSH::Transport::Constants + include Net::SSH::Connection::Constants + + # Ceate a new packet of the given +type+, and with +args+ being a list of + # data elements in the order expected for packets of the given +type+ + # (see #types). + def initialize(type, *args) + @type = self.class.const_get(type.to_s.upcase) + @data = args + end + + # The default for +remote?+ is false. Subclasses should override as necessary. + def remote? + false + end + + # The default for +local?+ is false. Subclasses should override as necessary. + def local? + false + end + + # Instantiates the packets data elements. When the packet was first defined, + # some elements may not have been fully realized, and were described as + # Proc objects rather than atomic types. This invokes those Proc objects + # and replaces them with their returned values. This allows for values + # like Net::SSH::Test::Channel#remote_id to be used in scripts before + # the remote_id is known (since it is only known after a channel has been + # confirmed open). + def instantiate! + @data.map! { |i| i.respond_to?(:call) ? 
i.call : i } + end + + # Returns an array of symbols describing the data elements for packets of + # the same type as this packet. These types are used to either validate + # sent packets (Net::SSH::Test::LocalPacket) or build received packets + # (Net::SSH::Test::RemotePacket). + # + # Not all packet types are defined here. As new packet types are required + # (e.g., a unit test needs to test that the remote host sent a packet that + # is not implemented here), the description of that packet should be + # added. Unsupported packet types will otherwise raise an exception. + def types + @types ||= case @type + when KEXINIT then + [:long, :long, :long, :long, + :string, :string, :string, :string, :string, :string, :string, :string, :string, :string, + :bool] + when NEWKEYS then [] + when CHANNEL_OPEN then [:string, :long, :long, :long] + when CHANNEL_OPEN_CONFIRMATION then [:long, :long, :long, :long] + when CHANNEL_DATA then [:long, :string] + when CHANNEL_EOF, CHANNEL_CLOSE, CHANNEL_SUCCESS, CHANNEL_FAILURE then [:long] + when CHANNEL_REQUEST + parts = [:long, :string, :bool] + case @data[1] + when "exec", "subsystem" then parts << :string + when "exit-status" then parts << :long + else raise "don't know what to do about #{@data[1]} channel request" + end + else raise "don't know how to parse packet type #{@type}" + end + end + end + +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/remote_packet.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/remote_packet.rb new file mode 100644 index 00000000..c09d750a --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/remote_packet.rb @@ -0,0 +1,38 @@ +require 'net/ssh/buffer' +require 'net/ssh/test/packet' + +module Net; module SSH; module Test + + # This is a specialization of Net::SSH::Test::Packet for representing mock + # packets that are received by the local (client) host. These are created + # automatically by Net::SSH::Test::Script and Net::SSH::Test::Channel by any + # of the gets_* methods. + class RemotePacket < Packet + # Returns +true+; this is a remote packet. + def remote? + true + end + + # The #process method should only be called on Net::SSH::Test::LocalPacket + # packets; if it is attempted on a remote packet, then it is an expectation + # mismatch (a remote packet was received when a local packet was expected + # to be sent). This will happen when either your test script + # (Net::SSH::Test::Script) or your program are wrong. + def process(packet) + raise "received packet type #{packet.read_byte} and was not expecting any packet" + end + + # Returns this remote packet as a string, suitable for parsing by + # Net::SSH::Transport::PacketStream and friends. When a remote packet is + # received, this method is called and the result concatenated onto the + # input buffer for the packet stream. + def to_s + @to_s ||= begin + instantiate! 
+ string = Net::SSH::Buffer.from(:byte, @type, *types.zip(@data).flatten).to_s + [string.length, string].pack("NA*") + end + end + end + +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/script.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/script.rb new file mode 100644 index 00000000..89ee064a --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/script.rb @@ -0,0 +1,157 @@ +require 'net/ssh/test/channel' +require 'net/ssh/test/local_packet' +require 'net/ssh/test/remote_packet' + +module Net; module SSH; module Test + + # Represents a sequence of scripted events that identify the behavior that + # a test expects. Methods named "sends_*" create events for packets being + # sent from the local to the remote host, and methods named "gets_*" create + # events for packets being received by the local from the remote host. + # + # A reference to a script. is generally obtained in a unit test via the + # Net::SSH::Test#story helper method: + # + # story do |script| + # channel = script.opens_channel + # ... + # end + class Script + # The list of scripted events. These will be Net::SSH::Test::LocalPacket + # and Net::SSH::Test::RemotePacket instances. + attr_reader :events + + # Create a new, empty script. + def initialize + @events = [] + end + + # Scripts the opening of a channel by adding a local packet sending the + # channel open request, and if +confirm+ is true (the default), also + # adding a remote packet confirming the new channel. + # + # A new Net::SSH::Test::Channel instance is returned, which can be used + # to script additional channel operations. + def opens_channel(confirm=true) + channel = Channel.new(self) + channel.remote_id = 5555 + + events << LocalPacket.new(:channel_open) { |p| channel.local_id = p[:remote_id] } + + if confirm + events << RemotePacket.new(:channel_open_confirmation, channel.local_id, channel.remote_id, 0x20000, 0x10000) + end + + channel + end + + # A convenience method for adding an arbitrary local packet to the events + # list. + def sends(type, *args, &block) + events << LocalPacket.new(type, *args, &block) + end + + # A convenience method for adding an arbitrary remote packet to the events + # list. + def gets(type, *args) + events << RemotePacket.new(type, *args) + end + + # Scripts the sending of a new channel request packet to the remote host. + # +channel+ should be an instance of Net::SSH::Test::Channel. +request+ + # is a string naming the request type to send, +reply+ is a boolean + # indicating whether a response to this packet is required , and +data+ + # is any additional request-specific data that this packet should send. + # +success+ indicates whether the response (if one is required) should be + # success or failure. + # + # If a reply is desired, a remote packet will also be queued, :channel_success + # if +success+ is true, or :channel_failure if +success+ is false. + # + # This will typically be called via Net::SSH::Test::Channel#sends_exec or + # Net::SSH::Test::Channel#sends_subsystem. + def sends_channel_request(channel, request, reply, data, success=true) + events << LocalPacket.new(:channel_request, channel.remote_id, request, reply, data) + if reply + if success + events << RemotePacket.new(:channel_success, channel.local_id) + else + events << RemotePacket.new(:channel_failure, channel.local_id) + end + end + end + + # Scripts the sending of a channel data packet. 
+channel+ must be a + # Net::SSH::Test::Channel object, and +data+ is the (string) data to + # expect will be sent. + # + # This will typically be called via Net::SSH::Test::Channel#sends_data. + def sends_channel_data(channel, data) + events << LocalPacket.new(:channel_data, channel.remote_id, data) + end + + # Scripts the sending of a channel EOF packet from the given + # Net::SSH::Test::Channel +channel+. This will typically be called via + # Net::SSH::Test::Channel#sends_eof. + def sends_channel_eof(channel) + events << LocalPacket.new(:channel_eof, channel.remote_id) + end + + # Scripts the sending of a channel close packet from the given + # Net::SSH::Test::Channel +channel+. This will typically be called via + # Net::SSH::Test::Channel#sends_close. + def sends_channel_close(channel) + events << LocalPacket.new(:channel_close, channel.remote_id) + end + + # Scripts the reception of a channel data packet from the remote host by + # the given Net::SSH::Test::Channel +channel+. This will typically be + # called via Net::SSH::Test::Channel#gets_data. + def gets_channel_data(channel, data) + events << RemotePacket.new(:channel_data, channel.local_id, data) + end + + # Scripts the reception of a channel request packet from the remote host by + # the given Net::SSH::Test::Channel +channel+. This will typically be + # called via Net::SSH::Test::Channel#gets_exit_status. + def gets_channel_request(channel, request, reply, data) + events << RemotePacket.new(:channel_request, channel.local_id, request, reply, data) + end + + # Scripts the reception of a channel EOF packet from the remote host by + # the given Net::SSH::Test::Channel +channel+. This will typically be + # called via Net::SSH::Test::Channel#gets_eof. + def gets_channel_eof(channel) + events << RemotePacket.new(:channel_eof, channel.local_id) + end + + # Scripts the reception of a channel close packet from the remote host by + # the given Net::SSH::Test::Channel +channel+. This will typically be + # called via Net::SSH::Test::Channel#gets_close. + def gets_channel_close(channel) + events << RemotePacket.new(:channel_close, channel.local_id) + end + + # By default, removes the next event in the list and returns it. However, + # this can also be used to non-destructively peek at the next event in the + # list, by passing :first as the argument. + # + # # remove the next event and return it + # event = script.next + # + # # peek at the next event + # event = script.next(:first) + def next(mode=:shift) + events.send(mode) + end + + # Compare the given packet against the next event in the list. If there is + # no next event, an exception will be raised. This is called by + # Net::SSH::Test::Extensions::PacketStream#test_enqueue_packet. + def process(packet) + event = events.shift or raise "end of script reached, but got a packet type #{packet.read_byte}" + event.process(packet) + end + end + +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/socket.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/socket.rb new file mode 100644 index 00000000..4741255a --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/test/socket.rb @@ -0,0 +1,59 @@ +require 'socket' +require 'stringio' +require 'net/ssh/test/extensions' +require 'net/ssh/test/script' + +module Net; module SSH; module Test + + # A mock socket implementation for use in testing. It implements the minimum + # necessary interface for interacting with the rest of the Net::SSH::Test + # system. 
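A rough sketch of how this mock socket gets wired into a transport session outside the Test mixin, mirroring what Net::SSH::Test#transport does (options trimmed to the essentials; treat this as an assumption-laden illustration rather than a supported entry point):

require 'net/ssh/test'

sock = Net::SSH::Test::Socket.new   # seeds sock.script with the kexinit/newkeys events
transport = Net::SSH::Transport::Session.new(
  "localhost",
  :kex => "test", :host_key => "ssh-rsa", :paranoid => false,
  :proxy => sock                    # the socket doubles as its own socket factory via #open
)
# the handshake events seeded above are consumed while the transport connects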
+ class Socket < StringIO + attr_reader :host, :port + + # The Net::SSH::Test::Script object in use by this socket. This is the + # canonical script instance that should be used for any test depending on + # this socket instance. + attr_reader :script + + # Create a new test socket. This will also instantiate a new Net::SSH::Test::Script + # and seed it with the necessary events to power the initialization of the + # connection. + def initialize + extend(Net::SSH::Transport::PacketStream) + super "SSH-2.0-Test\r\n" + + @script = Script.new + + script.gets(:kexinit, 1, 2, 3, 4, "test", "ssh-rsa", "none", "none", "none", "none", "none", "none", "", "", false) + script.sends(:kexinit) + script.sends(:newkeys) + script.gets(:newkeys) + end + + # This doesn't actually do anything, since we don't really care what gets + # written. + def write(data) + # black hole, because we don't actually care about what gets written + end + + # Allows the socket to also mimic a socket factory, simply returning + # +self+. + def open(host, port) + @host, @port = host, port + self + end + + # Returns a sockaddr struct for the port and host that were used when the + # socket was instantiated. + def getpeername + ::Socket.sockaddr_in(port, host) + end + + # Alias to #read, but never returns nil (returns an empty string instead). + def recv(n) + read(n) || "" + end + end + +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/algorithms.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/algorithms.rb new file mode 100644 index 00000000..9c7f8d0a --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/algorithms.rb @@ -0,0 +1,384 @@ +require 'net/ssh/buffer' +require 'net/ssh/known_hosts' +require 'net/ssh/loggable' +require 'net/ssh/transport/cipher_factory' +require 'net/ssh/transport/constants' +require 'net/ssh/transport/hmac' +require 'net/ssh/transport/kex' +require 'net/ssh/transport/server_version' + +module Net; module SSH; module Transport + + # Implements the higher-level logic behind an SSH key-exchange. It handles + # both the initial exchange, as well as subsequent re-exchanges (as needed). + # It also encapsulates the negotiation of the algorithms, and provides a + # single point of access to the negotiated algorithms. + # + # You will never instantiate or reference this directly. It is used + # internally by the transport layer. + class Algorithms + include Constants, Loggable + + # Define the default algorithms, in order of preference, supported by + # Net::SSH. + ALGORITHMS = { + :host_key => %w(ssh-rsa ssh-dss), + :kex => %w(diffie-hellman-group-exchange-sha1 + diffie-hellman-group1-sha1), + :encryption => %w(aes128-cbc 3des-cbc blowfish-cbc cast128-cbc + aes192-cbc aes256-cbc rijndael-cbc@lysator.liu.se + idea-cbc none arcfour128 arcfour256), + :hmac => %w(hmac-sha1 hmac-md5 hmac-sha1-96 hmac-md5-96 none), + :compression => %w(none zlib@openssh.com zlib), + :language => %w() + } + + # The underlying transport layer session that supports this object + attr_reader :session + + # The hash of options used to initialize this object + attr_reader :options + + # The kex algorithm to use settled on between the client and server. + attr_reader :kex + + # The type of host key that will be used for this session. + attr_reader :host_key + + # The type of the cipher to use to encrypt packets sent from the client to + # the server. + attr_reader :encryption_client + + # The type of the cipher to use to decrypt packets arriving from the server. 
+ attr_reader :encryption_server + + # The type of HMAC to use to sign packets sent by the client. + attr_reader :hmac_client + + # The type of HMAC to use to validate packets arriving from the server. + attr_reader :hmac_server + + # The type of compression to use to compress packets being sent by the client. + attr_reader :compression_client + + # The type of compression to use to decompress packets arriving from the server. + attr_reader :compression_server + + # The language that will be used in messages sent by the client. + attr_reader :language_client + + # The language that will be used in messages sent from the server. + attr_reader :language_server + + # The hash of algorithms preferred by the client, which will be told to + # the server during algorithm negotiation. + attr_reader :algorithms + + # The session-id for this session, as decided during the initial key exchange. + attr_reader :session_id + + # Returns true if the given packet can be processed during a key-exchange. + def self.allowed_packet?(packet) + ( 1.. 4).include?(packet.type) || + ( 6..19).include?(packet.type) || + (21..49).include?(packet.type) + end + + # Instantiates a new Algorithms object, and prepares the hash of preferred + # algorithms based on the options parameter and the ALGORITHMS constant. + def initialize(session, options={}) + @session = session + @logger = session.logger + @options = options + @algorithms = {} + @pending = @initialized = false + @client_packet = @server_packet = nil + prepare_preferred_algorithms! + end + + # Request a rekey operation. This will return immediately, and does not + # actually perform the rekey operation. It does cause the session to change + # state, however--until the key exchange finishes, no new packets will be + # processed. + def rekey! + @client_packet = @server_packet = nil + @initialized = false + send_kexinit + end + + # Called by the transport layer when a KEXINIT packet is recieved, indicating + # that the server wants to exchange keys. This can be spontaneous, or it + # can be in response to a client-initiated rekey request (see #rekey!). Either + # way, this will block until the key exchange completes. + def accept_kexinit(packet) + info { "got KEXINIT from server" } + @server_data = parse_server_algorithm_packet(packet) + @server_packet = @server_data[:raw] + if !pending? + send_kexinit + else + proceed! + end + end + + # A convenience method for accessing the list of preferred types for a + # specific algorithm (see #algorithms). + def [](key) + algorithms[key] + end + + # Returns +true+ if a key-exchange is pending. This will be true from the + # moment either the client or server requests the key exchange, until the + # exchange completes. While an exchange is pending, only a limited number + # of packets are allowed, so event processing essentially stops during this + # period. + def pending? + @pending + end + + # Returns true if no exchange is pending, and otherwise returns true or + # false depending on whether the given packet is of a type that is allowed + # during a key exchange. + def allow?(packet) + !pending? || Algorithms.allowed_packet?(packet) + end + + # Returns true if the algorithms have been negotiated at all. + def initialized? + @initialized + end + + private + + # Sends a KEXINIT packet to the server. If a server KEXINIT has already + # been received, this will then invoke #proceed! to proceed with the key + # exchange, otherwise it returns immediately (but sets the object to the + # pending state). 
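For context, the preference lists handled by prepare_preferred_algorithms! below are usually narrowed from client code by passing the matching option keys through Net::SSH.start; a hedged sketch, with host and credentials as placeholders:

require 'net/ssh'

Net::SSH.start('host.example', 'user',
               :encryption  => %w(aes256-cbc aes192-cbc),
               :hmac        => %w(hmac-sha1),
               :compression => true) do |ssh|   # true expands to zlib@openssh.com, zlib
  ssh.exec!('true')
end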
+ def send_kexinit + info { "sending KEXINIT" } + @pending = true + packet = build_client_algorithm_packet + @client_packet = packet.to_s + session.send_message(packet) + proceed! if @server_packet + end + + # After both client and server have sent their KEXINIT packets, this + # will do the algorithm negotiation and key exchange. Once both finish, + # the object leaves the pending state and the method returns. + def proceed! + info { "negotiating algorithms" } + negotiate_algorithms + exchange_keys + @pending = false + end + + # Prepares the list of preferred algorithms, based on the options hash + # that was given when the object was constructed, and the ALGORITHMS + # constant. Also, when determining the host_key type to use, the known + # hosts files are examined to see if the host has ever sent a host_key + # before, and if so, that key type is used as the preferred type for + # communicating with this server. + def prepare_preferred_algorithms! + options[:compression] = %w(zlib@openssh.com zlib) if options[:compression] == true + + ALGORITHMS.each do |algorithm, list| + algorithms[algorithm] = list.dup + + # apply the preferred algorithm order, if any + if options[algorithm] + algorithms[algorithm] = Array(options[algorithm]).compact.uniq + invalid = algorithms[algorithm].detect { |name| !ALGORITHMS[algorithm].include?(name) } + raise NotImplementedError, "unsupported #{algorithm} algorithm: `#{invalid}'" if invalid + + # make sure all of our supported algorithms are tacked onto the + # end, so that if the user tries to give a list of which none are + # supported, we can still proceed. + list.each { |name| algorithms[algorithm] << name unless algorithms[algorithm].include?(name) } + end + end + + # for convention, make sure our list has the same keys as the server + # list + + algorithms[:encryption_client ] = algorithms[:encryption_server ] = algorithms[:encryption] + algorithms[:hmac_client ] = algorithms[:hmac_server ] = algorithms[:hmac] + algorithms[:compression_client] = algorithms[:compression_server] = algorithms[:compression] + algorithms[:language_client ] = algorithms[:language_server ] = algorithms[:language] + + if !options.key?(:host_key) + # make sure the host keys are specified in preference order, where any + # existing known key for the host has preference. + + existing_keys = KnownHosts.search_for(options[:host_key_alias] || session.host_as_string, options) + host_keys = existing_keys.map { |key| key.ssh_type }.uniq + algorithms[:host_key].each do |name| + host_keys << name unless host_keys.include?(name) + end + algorithms[:host_key] = host_keys + end + end + + # Parses a KEXINIT packet from the server. + def parse_server_algorithm_packet(packet) + data = { :raw => packet.content } + + packet.read(16) # skip the cookie value + + data[:kex] = packet.read_string.split(/,/) + data[:host_key] = packet.read_string.split(/,/) + data[:encryption_client] = packet.read_string.split(/,/) + data[:encryption_server] = packet.read_string.split(/,/) + data[:hmac_client] = packet.read_string.split(/,/) + data[:hmac_server] = packet.read_string.split(/,/) + data[:compression_client] = packet.read_string.split(/,/) + data[:compression_server] = packet.read_string.split(/,/) + data[:language_client] = packet.read_string.split(/,/) + data[:language_server] = packet.read_string.split(/,/) + + # TODO: if first_kex_packet_follows, we need to try to skip the + # actual kexinit stuff and try to guess what the server is doing... + # need to read more about this scenario. 
+ first_kex_packet_follows = packet.read_bool + + return data + end + + # Given the #algorithms map of preferred algorithm types, this constructs + # a KEXINIT packet to send to the server. It does not actually send it, + # it simply builds the packet and returns it. + def build_client_algorithm_packet + kex = algorithms[:kex ].join(",") + host_key = algorithms[:host_key ].join(",") + encryption = algorithms[:encryption ].join(",") + hmac = algorithms[:hmac ].join(",") + compression = algorithms[:compression].join(",") + language = algorithms[:language ].join(",") + + Net::SSH::Buffer.from(:byte, KEXINIT, + :long, [rand(0xFFFFFFFF), rand(0xFFFFFFFF), rand(0xFFFFFFFF), rand(0xFFFFFFFF)], + :string, [kex, host_key, encryption, encryption, hmac, hmac], + :string, [compression, compression, language, language], + :bool, false, :long, 0) + end + + # Given the parsed server KEX packet, and the client's preferred algorithm + # lists in #algorithms, determine which preferred algorithms each has + # in common and set those as the selected algorithms. If, for any algorithm, + # no type can be settled on, an exception is raised. + def negotiate_algorithms + @kex = negotiate(:kex) + @host_key = negotiate(:host_key) + @encryption_client = negotiate(:encryption_client) + @encryption_server = negotiate(:encryption_server) + @hmac_client = negotiate(:hmac_client) + @hmac_server = negotiate(:hmac_server) + @compression_client = negotiate(:compression_client) + @compression_server = negotiate(:compression_server) + @language_client = negotiate(:language_client) rescue "" + @language_server = negotiate(:language_server) rescue "" + + debug do + "negotiated:\n" + + [:kex, :host_key, :encryption_server, :encryption_client, :hmac_client, :hmac_server, :compression_client, :compression_server, :language_client, :language_server].map do |key| + "* #{key}: #{instance_variable_get("@#{key}")}" + end.join("\n") + end + end + + # Negotiates a single algorithm based on the preferences reported by the + # server and those set by the client. This is called by + # #negotiate_algorithms. + def negotiate(algorithm) + match = self[algorithm].find { |item| @server_data[algorithm].include?(item) } + + if match.nil? + raise Net::SSH::Exception, "could not settle on #{algorithm} algorithm" + end + + return match + end + + # Considers the sizes of the keys and block-sizes for the selected ciphers, + # and the lengths of the hmacs, and returns the largest as the byte requirement + # for the key-exchange algorithm. + def kex_byte_requirement + sizes = [8] # require at least 8 bytes + + sizes.concat(CipherFactory.get_lengths(encryption_client)) + sizes.concat(CipherFactory.get_lengths(encryption_server)) + + sizes << HMAC.key_length(hmac_client) + sizes << HMAC.key_length(hmac_server) + + sizes.max + end + + # Instantiates one of the Transport::Kex classes (based on the negotiated + # kex algorithm), and uses it to exchange keys. Then, the ciphers and + # HMACs are initialized and fed to the transport layer, to be used in + # further communication with the server. 
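The selection rule implemented by #negotiate above is simply "the first client preference the server also lists". A standalone sketch with made-up preference lists (the names are illustrative, not taken from the ALGORITHMS constant):

# First client preference that the server also offers wins; if nothing
# matches, negotiation fails outright.
def pick_algorithm(client_prefs, server_prefs)
  match = client_prefs.find { |name| server_prefs.include?(name) }
  raise "could not settle on an algorithm" if match.nil?
  match
end

client = %w(aes128-cbc blowfish-cbc 3des-cbc)   # illustrative preference lists
server = %w(3des-cbc blowfish-cbc)
puts pick_algorithm(client, server)             # => "blowfish-cbc"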
+ def exchange_keys + debug { "exchanging keys" } + + algorithm = Kex::MAP[kex].new(self, session, + :client_version_string => Net::SSH::Transport::ServerVersion::PROTO_VERSION, + :server_version_string => session.server_version.version, + :server_algorithm_packet => @server_packet, + :client_algorithm_packet => @client_packet, + :need_bytes => kex_byte_requirement, + :logger => logger) + result = algorithm.exchange_keys + + secret = result[:shared_secret].to_ssh + hash = result[:session_id] + digester = result[:hashing_algorithm] + + @session_id ||= hash + + key = Proc.new { |salt| digester.digest(secret + hash + salt + @session_id) } + + iv_client = key["A"] + iv_server = key["B"] + key_client = key["C"] + key_server = key["D"] + mac_key_client = key["E"] + mac_key_server = key["F"] + + parameters = { :iv => iv_client, :key => key_client, :shared => secret, + :hash => hash, :digester => digester } + + cipher_client = CipherFactory.get(encryption_client, parameters.merge(:encrypt => true)) + cipher_server = CipherFactory.get(encryption_server, parameters.merge(:iv => iv_server, :key => key_server, :decrypt => true)) + + mac_client = HMAC.get(hmac_client, mac_key_client) + mac_server = HMAC.get(hmac_server, mac_key_server) + + session.configure_client :cipher => cipher_client, :hmac => mac_client, + :compression => normalize_compression_name(compression_client), + :compression_level => options[:compression_level], + :rekey_limit => options[:rekey_limit], + :max_packets => options[:rekey_packet_limit], + :max_blocks => options[:rekey_blocks_limit] + + session.configure_server :cipher => cipher_server, :hmac => mac_server, + :compression => normalize_compression_name(compression_server), + :rekey_limit => options[:rekey_limit], + :max_packets => options[:rekey_packet_limit], + :max_blocks => options[:rekey_blocks_limit] + + @initialized = true + end + + # Given the SSH name for some compression algorithm, return a normalized + # name as a symbol. + def normalize_compression_name(name) + case name + when "none" then false + when "zlib" then :standard + when "zlib@openssh.com" then :delayed + else raise ArgumentError, "unknown compression type `#{name}'" + end + end + end +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/cipher_factory.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/cipher_factory.rb new file mode 100644 index 00000000..ae57f2c2 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/cipher_factory.rb @@ -0,0 +1,97 @@ +require 'openssl' +require 'net/ssh/transport/identity_cipher' + +module Net; module SSH; module Transport + + # Implements a factory of OpenSSL cipher algorithms. + class CipherFactory + # Maps the SSH name of a cipher to it's corresponding OpenSSL name + SSH_TO_OSSL = { + "3des-cbc" => "des-ede3-cbc", + "blowfish-cbc" => "bf-cbc", + "aes256-cbc" => "aes-256-cbc", + "aes192-cbc" => "aes-192-cbc", + "aes128-cbc" => "aes-128-cbc", + "idea-cbc" => "idea-cbc", + "cast128-cbc" => "cast-cbc", + "rijndael-cbc@lysator.liu.se" => "aes-256-cbc", + "arcfour128" => "rc4", + "arcfour256" => "rc4", + "arcfour512" => "rc4", + "none" => "none" + } + + # Ruby's OpenSSL bindings always return a key length of 16 for RC4 ciphers + # resulting in the error: OpenSSL::CipherError: key length too short. + # The following ciphers will override this key length. 
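Independently of the RC4 override that follows, the SSH_TO_OSSL map above can be probed directly to see which ciphers the local OpenSSL build actually provides. A small sketch over a sample of the map (output depends on the local OpenSSL):

require 'openssl'

# Probe a subset of the SSH-to-OpenSSL name map against the ciphers the
# local OpenSSL build reports.
sample = {
  "3des-cbc"     => "des-ede3-cbc",
  "blowfish-cbc" => "bf-cbc",
  "aes128-cbc"   => "aes-128-cbc"
}

sample.each do |ssh_name, ossl_name|
  available = OpenSSL::Cipher.ciphers.include?(ossl_name)
  puts "#{ssh_name} (#{ossl_name}): #{available ? 'available' : 'missing'}"
end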
+ KEY_LEN_OVERRIDE = { + "arcfour256" => 32, + "arcfour512" => 64 + } + + # Returns true if the underlying OpenSSL library supports the given cipher, + # and false otherwise. + def self.supported?(name) + ossl_name = SSH_TO_OSSL[name] or raise NotImplementedError, "unimplemented cipher `#{name}'" + return true if ossl_name == "none" + return OpenSSL::Cipher.ciphers.include?(ossl_name) + end + + # Retrieves a new instance of the named algorithm. The new instance + # will be initialized using an iv and key generated from the given + # iv, key, shared, hash and digester values. Additionally, the + # cipher will be put into encryption or decryption mode, based on the + # value of the +encrypt+ parameter. + def self.get(name, options={}) + ossl_name = SSH_TO_OSSL[name] or raise NotImplementedError, "unimplemented cipher `#{name}'" + return IdentityCipher if ossl_name == "none" + + cipher = OpenSSL::Cipher::Cipher.new(ossl_name) + cipher.send(options[:encrypt] ? :encrypt : :decrypt) + + cipher.padding = 0 + cipher.iv = make_key(cipher.iv_len, options[:iv], options) if ossl_name != "rc4" + key_len = KEY_LEN_OVERRIDE[name] || cipher.key_len + cipher.key_len = key_len + cipher.key = make_key(key_len, options[:key], options) + cipher.update(" " * 1536) if ossl_name == "rc4" + + return cipher + end + + # Returns a two-element array containing the [ key-length, + # block-size ] for the named cipher algorithm. If the cipher + # algorithm is unknown, or is "none", 0 is returned for both elements + # of the tuple. + def self.get_lengths(name) + ossl_name = SSH_TO_OSSL[name] + return [0, 0] if ossl_name.nil? || ossl_name == "none" + + cipher = OpenSSL::Cipher::Cipher.new(ossl_name) + key_len = KEY_LEN_OVERRIDE[name] || cipher.key_len + cipher.key_len = key_len + + return [key_len, ossl_name=="rc4" ? 8 : cipher.block_size] + end + + private + + # Generate a key value in accordance with the SSH2 specification. 
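make_key below only extends an initial key value to the required length; combined with the salt-based digests built in Algorithms#exchange_keys earlier, the whole SSH2 derivation can be sketched standalone. All inputs here are placeholder strings and SHA-1 is assumed as the hashing algorithm:

require 'openssl'

# SSH2 key stretching: start with HASH(K || H || salt || session_id), then
# keep appending HASH(K || H || key_so_far) until enough bytes are available.
def stretch_key(bytes, secret, hash, salt, session_id, digester = OpenSSL::Digest::SHA1)
  k = digester.digest(secret + hash + salt + session_id)[0, bytes]
  k << digester.digest(secret + hash + k)[0, bytes - k.length] while k.length < bytes
  k
end

secret     = "shared-secret"    # placeholder for the DH shared secret
hash       = "exchange-hash"    # placeholder for the exchange hash H
session_id = hash               # first exchange: session id equals H
puts stretch_key(32, secret, hash, "C", session_id).unpack("H*").first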
+ def self.make_key(bytes, start, options={}) + k = start[0, bytes] + + digester = options[:digester] or raise 'No digester supplied' + shared = options[:shared] or raise 'No shared secret supplied' + hash = options[:hash] or raise 'No hash supplied' + + while k.length < bytes + step = digester.digest(shared + hash + k) + bytes_needed = bytes - k.length + k << step[0, bytes_needed] + end + + return k + end + end + +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/constants.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/constants.rb new file mode 100644 index 00000000..c87ce4cc --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/constants.rb @@ -0,0 +1,30 @@ +module Net; module SSH; module Transport + module Constants + + #-- + # Transport layer generic messages + #++ + + DISCONNECT = 1 + IGNORE = 2 + UNIMPLEMENTED = 3 + DEBUG = 4 + SERVICE_REQUEST = 5 + SERVICE_ACCEPT = 6 + + #-- + # Algorithm negotiation messages + #++ + + KEXINIT = 20 + NEWKEYS = 21 + + #-- + # Key exchange method specific messages + #++ + + KEXDH_INIT = 30 + KEXDH_REPLY = 31 + + end +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/hmac.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/hmac.rb new file mode 100644 index 00000000..262db733 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/hmac.rb @@ -0,0 +1,31 @@ +require 'net/ssh/transport/hmac/md5' +require 'net/ssh/transport/hmac/md5_96' +require 'net/ssh/transport/hmac/sha1' +require 'net/ssh/transport/hmac/sha1_96' +require 'net/ssh/transport/hmac/none' + +# Implements a simple factory interface for fetching hmac implementations, or +# for finding the key lengths for hmac implementations.s +module Net::SSH::Transport::HMAC + # The mapping of SSH hmac algorithms to their implementations + MAP = { + 'hmac-md5' => MD5, + 'hmac-md5-96' => MD5_96, + 'hmac-sha1' => SHA1, + 'hmac-sha1-96' => SHA1_96, + 'none' => None + } + + # Retrieves a new hmac instance of the given SSH type (+name+). If +key+ is + # given, the new instance will be initialized with that key. + def self.get(name, key="") + impl = MAP[name] or raise ArgumentError, "hmac not found: #{name.inspect}" + impl.new(key) + end + + # Retrieves the key length for the hmac of the given SSH type (+name+). + def self.key_length(name) + impl = MAP[name] or raise ArgumentError, "hmac not found: #{name.inspect}" + impl.key_length + end +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/hmac/abstract.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/hmac/abstract.rb new file mode 100644 index 00000000..b3e3eaab --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/hmac/abstract.rb @@ -0,0 +1,79 @@ +require 'openssl' +require 'openssl/digest' + +module Net; module SSH; module Transport; module HMAC + + # The base class of all OpenSSL-based HMAC algorithm wrappers. 
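Each of these wrappers ultimately delegates to OpenSSL's HMAC. What an 'hmac-sha1' signer computes per packet can be reproduced directly; key, sequence number and packet bytes below are placeholders:

require 'openssl'

# Per-packet MAC: HMAC-SHA1(key, sequence_number || unencrypted_packet).
# The -96 variants simply keep the first 12 bytes of the result.
key    = "k" * 20
seqno  = 0
packet = "example unencrypted packet"

mac = OpenSSL::HMAC.digest(OpenSSL::Digest.new("SHA1"), key, [seqno, packet].pack("NA*"))
puts mac.bytesize        # => 20
puts mac[0, 12].bytesize # => 12  (hmac-sha1-96)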
+ class Abstract + + class < DiffieHellmanGroupExchangeSHA1, + 'diffie-hellman-group1-sha1' => DiffieHellmanGroup1SHA1 + } + end +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/kex/diffie_hellman_group1_sha1.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/kex/diffie_hellman_group1_sha1.rb new file mode 100644 index 00000000..a9875ac4 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/kex/diffie_hellman_group1_sha1.rb @@ -0,0 +1,208 @@ +require 'net/ssh/buffer' +require 'net/ssh/errors' +require 'net/ssh/loggable' +require 'net/ssh/transport/openssl' +require 'net/ssh/transport/constants' + +module Net; module SSH; module Transport; module Kex + + # A key-exchange service implementing the "diffie-hellman-group1-sha1" + # key-exchange algorithm. + class DiffieHellmanGroup1SHA1 + include Constants, Loggable + + # The value of 'P', as a string, in hexadecimal + P_s = "FFFFFFFF" "FFFFFFFF" "C90FDAA2" "2168C234" + + "C4C6628B" "80DC1CD1" "29024E08" "8A67CC74" + + "020BBEA6" "3B139B22" "514A0879" "8E3404DD" + + "EF9519B3" "CD3A431B" "302B0A6D" "F25F1437" + + "4FE1356D" "6D51C245" "E485B576" "625E7EC6" + + "F44C42E9" "A637ED6B" "0BFF5CB6" "F406B7ED" + + "EE386BFB" "5A899FA5" "AE9F2411" "7C4B1FE6" + + "49286651" "ECE65381" "FFFFFFFF" "FFFFFFFF" + + # The radix in which P_s represents the value of P + P_r = 16 + + # The group constant + G = 2 + + attr_reader :p + attr_reader :g + attr_reader :digester + attr_reader :algorithms + attr_reader :connection + attr_reader :data + attr_reader :dh + + # Create a new instance of the DiffieHellmanGroup1SHA1 algorithm. + # The data is a Hash of symbols representing information + # required by this algorithm, which was acquired during earlier + # processing. + def initialize(algorithms, connection, data) + @p = OpenSSL::BN.new(P_s, P_r) + @g = G + + @digester = OpenSSL::Digest::SHA1 + @algorithms = algorithms + @connection = connection + + @data = data.dup + @dh = generate_key + @logger = @data.delete(:logger) + end + + # Perform the key-exchange for the given session, with the given + # data. This method will return a hash consisting of the + # following keys: + # + # * :session_id + # * :server_key + # * :shared_secret + # * :hashing_algorithm + # + # The caller is expected to be able to understand how to use these + # deliverables. + def exchange_keys + result = send_kexinit + verify_server_key(result[:server_key]) + session_id = verify_signature(result) + confirm_newkeys + + return { :session_id => session_id, + :server_key => result[:server_key], + :shared_secret => result[:shared_secret], + :hashing_algorithm => digester } + end + + private + + # Returns the DH key parameters for the current connection. + def get_parameters + [p, g] + end + + # Returns the INIT/REPLY constants used by this algorithm. + def get_message_types + [KEXDH_INIT, KEXDH_REPLY] + end + + # Build the signature buffer to use when verifying a signature from + # the server. + def build_signature_buffer(result) + response = Net::SSH::Buffer.new + response.write_string data[:client_version_string], + data[:server_version_string], + data[:client_algorithm_packet], + data[:server_algorithm_packet], + result[:key_blob] + response.write_bignum dh.pub_key, + result[:server_dh_pubkey], + result[:shared_secret] + response + end + + # Generate a DH key with a private key consisting of the given + # number of bytes. 
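The arithmetic the generated key feeds into is ordinary modular exponentiation. A toy-sized sketch with OpenSSL::BN, using a tiny illustrative prime and exponents rather than the 1024-bit group1 prime P_s above:

require 'openssl'

# Both sides raise g to their private exponent, swap the results, and raise
# the received value to their own exponent; the two outcomes agree.
p_bn = OpenSSL::BN.new("23")
g    = OpenSSL::BN.new("5")

a = OpenSSL::BN.new("6")        # client private exponent (placeholder)
b = OpenSSL::BN.new("15")       # server private exponent (placeholder)

client_pub = g.mod_exp(a, p_bn) # e, sent in KEXDH_INIT
server_pub = g.mod_exp(b, p_bn) # f, returned in KEXDH_REPLY

puts server_pub.mod_exp(a, p_bn) == client_pub.mod_exp(b, p_bn)   # => true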
+ def generate_key #:nodoc: + dh = OpenSSL::PKey::DH.new + + dh.p, dh.g = get_parameters + dh.priv_key = OpenSSL::BN.rand(data[:need_bytes] * 8) + + dh.generate_key! until dh.valid? + + dh + end + + # Send the KEXDH_INIT message, and expect the KEXDH_REPLY. Return the + # resulting buffer. + # + # Parse the buffer from a KEXDH_REPLY message, returning a hash of + # the extracted values. + def send_kexinit #:nodoc: + init, reply = get_message_types + + # send the KEXDH_INIT message + buffer = Net::SSH::Buffer.from(:byte, init, :bignum, dh.pub_key) + connection.send_message(buffer) + + # expect the KEXDH_REPLY message + buffer = connection.next_message + raise Net::SSH::Exception, "expected REPLY" unless buffer.type == reply + + result = Hash.new + + result[:key_blob] = buffer.read_string + result[:server_key] = Net::SSH::Buffer.new(result[:key_blob]).read_key + result[:server_dh_pubkey] = buffer.read_bignum + result[:shared_secret] = OpenSSL::BN.new(dh.compute_key(result[:server_dh_pubkey]), 2) + + sig_buffer = Net::SSH::Buffer.new(buffer.read_string) + sig_type = sig_buffer.read_string + if sig_type != algorithms.host_key + raise Net::SSH::Exception, + "host key algorithm mismatch for signature " + + "'#{sig_type}' != '#{algorithms.host_key}'" + end + result[:server_sig] = sig_buffer.read_string + + return result + end + + # Verify that the given key is of the expected type, and that it + # really is the key for the session's host. Raise Net::SSH::Exception + # if it is not. + def verify_server_key(key) #:nodoc: + if key.ssh_type != algorithms.host_key + raise Net::SSH::Exception, + "host key algorithm mismatch " + + "'#{key.ssh_type}' != '#{algorithms.host_key}'" + end + + blob, fingerprint = generate_key_fingerprint(key) + + unless connection.host_key_verifier.verify(:key => key, :key_blob => blob, :fingerprint => fingerprint, :session => connection) + raise Net::SSH::Exception, "host key verification failed" + end + end + + def generate_key_fingerprint(key) + blob = Net::SSH::Buffer.from(:key, key).to_s + fingerprint = OpenSSL::Digest::MD5.hexdigest(blob).scan(/../).join(":") + + [blob, fingerprint] + rescue ::Exception => e + [nil, "(could not generate fingerprint: #{e.message})"] + end + + # Verify the signature that was received. Raise Net::SSH::Exception + # if the signature could not be verified. Otherwise, return the new + # session-id. + def verify_signature(result) #:nodoc: + response = build_signature_buffer(result) + + hash = @digester.digest(response.to_s) + + unless result[:server_key].ssh_do_verify(result[:server_sig], hash) + raise Net::SSH::Exception, "could not verify server signature" + end + + return hash + end + + # Send the NEWKEYS message, and expect the NEWKEYS message in + # reply. 
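As an aside before the NEWKEYS exchange below, the colon-separated fingerprint format built by generate_key_fingerprint above is easy to reproduce on its own. The blob here is placeholder bytes, not a real key:

require 'openssl'

# MD5 of the key blob, rendered as 16 colon-separated hex pairs.
blob = "\x00\x00\x00\x07ssh-rsa".b + ("\x01".b * 64)
fingerprint = OpenSSL::Digest::MD5.hexdigest(blob).scan(/../).join(":")
puts fingerprint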
+ def confirm_newkeys #:nodoc: + # send own NEWKEYS message first (the wodSSHServer won't send first) + response = Net::SSH::Buffer.new + response.write_byte(NEWKEYS) + connection.send_message(response) + + # wait for the server's NEWKEYS message + buffer = connection.next_message + raise Net::SSH::Exception, "expected NEWKEYS" unless buffer.type == NEWKEYS + end + end + +end; end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/kex/diffie_hellman_group_exchange_sha1.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/kex/diffie_hellman_group_exchange_sha1.rb new file mode 100644 index 00000000..da47ec2f --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/kex/diffie_hellman_group_exchange_sha1.rb @@ -0,0 +1,77 @@ +require 'net/ssh/errors' +require 'net/ssh/transport/constants' +require 'net/ssh/transport/kex/diffie_hellman_group1_sha1' + +module Net::SSH::Transport::Kex + + # A key-exchange service implementing the + # "diffie-hellman-group-exchange-sha1" key-exchange algorithm. + class DiffieHellmanGroupExchangeSHA1 < DiffieHellmanGroup1SHA1 + MINIMUM_BITS = 1024 + MAXIMUM_BITS = 8192 + + KEXDH_GEX_GROUP = 31 + KEXDH_GEX_INIT = 32 + KEXDH_GEX_REPLY = 33 + KEXDH_GEX_REQUEST = 34 + + private + + # Compute the number of bits needed for the given number of bytes. + def compute_need_bits + need_bits = data[:need_bytes] * 8 + if need_bits < MINIMUM_BITS + need_bits = MINIMUM_BITS + elsif need_bits > MAXIMUM_BITS + need_bits = MAXIMUM_BITS + end + + data[:need_bits ] = need_bits + data[:need_bytes] = need_bits / 8 + end + + # Returns the DH key parameters for the given session. + def get_parameters + compute_need_bits + + # request the DH key parameters for the given number of bits. + buffer = Net::SSH::Buffer.from(:byte, KEXDH_GEX_REQUEST, :long, MINIMUM_BITS, + :long, data[:need_bits], :long, MAXIMUM_BITS) + connection.send_message(buffer) + + buffer = connection.next_message + unless buffer.type == KEXDH_GEX_GROUP + raise Net::SSH::Exception, "expected KEXDH_GEX_GROUP, got #{buffer.type}" + end + + p = buffer.read_bignum + g = buffer.read_bignum + + [p, g] + end + + # Returns the INIT/REPLY constants used by this algorithm. + def get_message_types + [KEXDH_GEX_INIT, KEXDH_GEX_REPLY] + end + + # Build the signature buffer to use when verifying a signature from + # the server. + def build_signature_buffer(result) + response = Net::SSH::Buffer.new + response.write_string data[:client_version_string], + data[:server_version_string], + data[:client_algorithm_packet], + data[:server_algorithm_packet], + result[:key_blob] + response.write_long MINIMUM_BITS, + data[:need_bits], + MAXIMUM_BITS + response.write_bignum dh.p, dh.g, dh.pub_key, + result[:server_dh_pubkey], + result[:shared_secret] + response + end + end + +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/openssl.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/openssl.rb new file mode 100644 index 00000000..03036ec9 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/openssl.rb @@ -0,0 +1,128 @@ +require 'openssl' +require 'net/ssh/buffer' + +module OpenSSL + + # This class is originally defined in the OpenSSL module. As needed, methods + # have been added to it by the Net::SSH module for convenience in dealing with + # SSH functionality. + class BN + + # Converts a BN object to a string. The format used is that which is + # required by the SSH2 protocol. + def to_ssh + if zero? 
+ return [0].pack("N") + else + buf = to_s(2) + if buf.getbyte(0)[7] == 1 + return [buf.length+1, 0, buf].pack("NCA*") + else + return [buf.length, buf].pack("NA*") + end + end + end + + end + + module PKey + + class PKey + def fingerprint + @fingerprint ||= OpenSSL::Digest::MD5.hexdigest(to_blob).scan(/../).join(":") + end + end + + # This class is originally defined in the OpenSSL module. As needed, methods + # have been added to it by the Net::SSH module for convenience in dealing + # with SSH functionality. + class DH + + # Determines whether the pub_key for this key is valid. (This algorithm + # lifted more-or-less directly from OpenSSH, dh.c, dh_pub_is_valid.) + def valid? + return false if pub_key.nil? || pub_key < 0 + bits_set = 0 + pub_key.num_bits.times { |i| bits_set += 1 if pub_key.bit_set?(i) } + return ( bits_set > 1 && pub_key < p ) + end + + end + + # This class is originally defined in the OpenSSL module. As needed, methods + # have been added to it by the Net::SSH module for convenience in dealing + # with SSH functionality. + class RSA + + # Returns "ssh-rsa", which is the description of this key type used by the + # SSH2 protocol. + def ssh_type + "ssh-rsa" + end + + # Converts the key to a blob, according to the SSH2 protocol. + def to_blob + @blob ||= Net::SSH::Buffer.from(:string, ssh_type, :bignum, e, :bignum, n).to_s + end + + # Verifies the given signature matches the given data. + def ssh_do_verify(sig, data) + verify(OpenSSL::Digest::SHA1.new, sig, data) + end + + # Returns the signature for the given data. + def ssh_do_sign(data) + sign(OpenSSL::Digest::SHA1.new, data) + end + end + + # This class is originally defined in the OpenSSL module. As needed, methods + # have been added to it by the Net::SSH module for convenience in dealing + # with SSH functionality. + class DSA + + # Returns "ssh-dss", which is the description of this key type used by the + # SSH2 protocol. + def ssh_type + "ssh-dss" + end + + # Converts the key to a blob, according to the SSH2 protocol. + def to_blob + @blob ||= Net::SSH::Buffer.from(:string, ssh_type, + :bignum, p, :bignum, q, :bignum, g, :bignum, pub_key).to_s + end + + # Verifies the given signature matches the given data. + def ssh_do_verify(sig, data) + sig_r = sig[0,20].unpack("H*")[0].to_i(16) + sig_s = sig[20,20].unpack("H*")[0].to_i(16) + a1sig = OpenSSL::ASN1::Sequence([ + OpenSSL::ASN1::Integer(sig_r), + OpenSSL::ASN1::Integer(sig_s) + ]) + return verify(OpenSSL::Digest::DSS1.new, a1sig.to_der, data) + end + + # Signs the given data. 
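For reference, the mpint wire format produced by BN#to_ssh above can be exercised on raw byte strings. A minimal sketch with placeholder bytes:

# SSH2 "mpint": a four-byte big-endian length, then the magnitude, with an
# extra leading zero byte whenever the high bit is set (so the value is not
# read back as negative).
def mpint(bytes)
  if bytes.empty?
    [0].pack("N")
  elsif bytes.getbyte(0) >= 0x80
    [bytes.length + 1, 0, bytes].pack("NCA*")
  else
    [bytes.length, bytes].pack("NA*")
  end
end

p mpint("\x7f".b).unpack("H*").first   # => "000000017f"
p mpint("\x80".b).unpack("H*").first   # => "000000020080"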
+ def ssh_do_sign(data) + sig = sign( OpenSSL::Digest::DSS1.new, data) + a1sig = OpenSSL::ASN1.decode( sig ) + + sig_r = a1sig.value[0].value.to_s(2) + sig_s = a1sig.value[1].value.to_s(2) + + if sig_r.length > 20 || sig_s.length > 20 + raise OpenSSL::PKey::DSAError, "bad sig size" + end + + sig_r = "\0" * ( 20 - sig_r.length ) + sig_r if sig_r.length < 20 + sig_s = "\0" * ( 20 - sig_s.length ) + sig_s if sig_s.length < 20 + + return sig_r + sig_s + end + end + + end + +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/packet_stream.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/packet_stream.rb new file mode 100644 index 00000000..8404c56c --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/packet_stream.rb @@ -0,0 +1,232 @@ +require 'net/ssh/buffered_io' +require 'net/ssh/errors' +require 'net/ssh/packet' +require 'net/ssh/ruby_compat' +require 'net/ssh/transport/cipher_factory' +require 'net/ssh/transport/hmac' +require 'net/ssh/transport/state' + + +module Net; module SSH; module Transport + + # A module that builds additional functionality onto the Net::SSH::BufferedIo + # module. It adds SSH encryption, compression, and packet validation, as + # per the SSH2 protocol. It also adds an abstraction for polling packets, + # to allow for both blocking and non-blocking reads. + module PacketStream + include BufferedIo + + def self.extended(object) + object.__send__(:initialize_ssh) + end + + # The map of "hints" that can be used to modify the behavior of the packet + # stream. For instance, when authentication succeeds, an "authenticated" + # hint is set, which is used to determine whether or not to compress the + # data when using the "delayed" compression algorithm. + attr_reader :hints + + # The server state object, which encapsulates the algorithms used to interpret + # packets coming from the server. + attr_reader :server + + # The client state object, which encapsulates the algorithms used to build + # packets to send to the server. + attr_reader :client + + # The name of the client (local) end of the socket, as reported by the + # socket. + def client_name + @client_name ||= begin + sockaddr = getsockname + begin + Socket.getnameinfo(sockaddr, Socket::NI_NAMEREQD).first + rescue + begin + Socket.getnameinfo(sockaddr).first + rescue + begin + Socket.gethostbyname(Socket.gethostname).first + rescue + lwarn { "the client ipaddr/name could not be determined" } + "unknown" + end + end + end + end + end + + # The IP address of the peer (remote) end of the socket, as reported by + # the socket. + def peer_ip + @peer_ip ||= begin + addr = getpeername + Socket.getnameinfo(addr, Socket::NI_NUMERICHOST | Socket::NI_NUMERICSERV).first + end + end + + # Returns true if the IO is available for reading, and false otherwise. + def available_for_read? + result = Net::SSH::Compat.io_select([self], nil, nil, 0) + result && result.first.any? + end + + # Returns the next full packet. If the mode parameter is :nonblock (the + # default), then this will return immediately, whether a packet is + # available or not, and will return nil if there is no packet ready to be + # returned. If the mode parameter is :block, then this method will block + # until a packet is available. + def next_packet(mode=:nonblock) + case mode + when :nonblock then + fill if available_for_read? 
+ poll_next_packet + + when :block then + loop do + packet = poll_next_packet + return packet if packet + + loop do + result = Net::SSH::Compat.io_select([self]) or next + break if result.first.any? + end + + if fill <= 0 + raise Net::SSH::Disconnect, "connection closed by remote host" + end + end + + else + raise ArgumentError, "expected :block or :nonblock, got #{mode.inspect}" + end + end + + # Enqueues a packet to be sent, and blocks until the entire packet is + # sent. + def send_packet(payload) + enqueue_packet(payload) + wait_for_pending_sends + end + + # Enqueues a packet to be sent, but does not immediately send the packet. + # The given payload is pre-processed according to the algorithms specified + # in the client state (compression, cipher, and hmac). + def enqueue_packet(payload) + # try to compress the packet + payload = client.compress(payload) + + # the length of the packet, minus the padding + actual_length = 4 + payload.length + 1 + + # compute the padding length + padding_length = client.block_size - (actual_length % client.block_size) + padding_length += client.block_size if padding_length < 4 + + # compute the packet length (sans the length field itself) + packet_length = payload.length + padding_length + 1 + + if packet_length < 16 + padding_length += client.block_size + packet_length = payload.length + padding_length + 1 + end + + padding = Array.new(padding_length) { rand(256) }.pack("C*") + + unencrypted_data = [packet_length, padding_length, payload, padding].pack("NCA*A*") + mac = client.hmac.digest([client.sequence_number, unencrypted_data].pack("NA*")) + + encrypted_data = client.update_cipher(unencrypted_data) << client.final_cipher + message = encrypted_data + mac + + debug { "queueing packet nr #{client.sequence_number} type #{payload.getbyte(0)} len #{packet_length}" } + enqueue(message) + + client.increment(packet_length) + + self + end + + # Performs any pending cleanup necessary on the IO and its associated + # state objects. (See State#cleanup). + def cleanup + client.cleanup + server.cleanup + end + + # If the IO object requires a rekey operation (as indicated by either its + # client or server state objects, see State#needs_rekey?), this will + # yield. Otherwise, this does nothing. + def if_needs_rekey? + if client.needs_rekey? || server.needs_rekey? + yield + client.reset! if client.needs_rekey? + server.reset! if server.needs_rekey? + end + end + + protected + + # Called when this module is used to extend an object. It initializes + # the states and generally prepares the object for use as a packet stream. + def initialize_ssh + @hints = {} + @server = State.new(self, :server) + @client = State.new(self, :client) + @packet = nil + initialize_buffered_io + end + + # Tries to read the next packet. If there is insufficient data to read + # an entire packet, this returns immediately, otherwise the packet is + # read, post-processed according to the cipher, hmac, and compression + # algorithms specified in the server state object, and returned as a + # new Packet object. + def poll_next_packet + if @packet.nil? + minimum = server.block_size < 4 ? 
4 : server.block_size + return nil if available < minimum + data = read_available(minimum) + + # decipher it + @packet = Net::SSH::Buffer.new(server.update_cipher(data)) + @packet_length = @packet.read_long + end + + need = @packet_length + 4 - server.block_size + raise Net::SSH::Exception, "padding error, need #{need} block #{server.block_size}" if need % server.block_size != 0 + + return nil if available < need + server.hmac.mac_length + + if need > 0 + # read the remainder of the packet and decrypt it. + data = read_available(need) + @packet.append(server.update_cipher(data)) + end + + # get the hmac from the tail of the packet (if one exists), and + # then validate it. + real_hmac = read_available(server.hmac.mac_length) || "" + + @packet.append(server.final_cipher) + padding_length = @packet.read_byte + + payload = @packet.read(@packet_length - padding_length - 1) + padding = @packet.read(padding_length) if padding_length > 0 + + my_computed_hmac = server.hmac.digest([server.sequence_number, @packet.content].pack("NA*")) + raise Net::SSH::Exception, "corrupted mac detected" if real_hmac != my_computed_hmac + + # try to decompress the payload, in case compression is active + payload = server.decompress(payload) + + debug { "received packet nr #{server.sequence_number} type #{payload.getbyte(0)} len #{@packet_length}" } + + server.increment(@packet_length) + @packet = nil + + return Packet.new(payload) + end + end + +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/server_version.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/server_version.rb new file mode 100644 index 00000000..9a0fb605 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/server_version.rb @@ -0,0 +1,70 @@ +require 'net/ssh/errors' +require 'net/ssh/loggable' +require 'net/ssh/version' + +module Net; module SSH; module Transport + + # Negotiates the SSH protocol version and trades information about server + # and client. This is never used directly--it is always called by the + # transport layer as part of the initialization process of the transport + # layer. + # + # Note that this class also encapsulates the negotiated version, and acts as + # the authoritative reference for any queries regarding the version in effect. + class ServerVersion + include Loggable + + # The SSH version string as reported by Net::SSH + PROTO_VERSION = "SSH-2.0-Ruby/Net::SSH_#{Net::SSH::Version::CURRENT} #{RUBY_PLATFORM}" + + # Any header text sent by the server prior to sending the version. + attr_reader :header + + # The version string reported by the server. + attr_reader :version + + # Instantiates a new ServerVersion and immediately (and synchronously) + # negotiates the SSH protocol in effect, using the given socket. + def initialize(socket, logger) + @header = "" + @version = nil + @logger = logger + negotiate!(socket) + end + + private + + # Negotiates the SSH protocol to use, via the given socket. If the server + # reports an incompatible SSH version (e.g., SSH1), this will raise an + # exception. + def negotiate!(socket) + info { "negotiating protocol version" } + + loop do + @version = "" + loop do + b = socket.recv(1) + + if b.nil? + raise Net::SSH::Disconnect, "connection closed by remote host" + end + @version << b + break if b == "\n" + end + break if @version.match(/^SSH-/) + @header << @version + end + + @version.chomp! 
+ debug { "remote is `#{@version}'" } + + unless @version.match(/^SSH-(1\.99|2\.0)-/) + raise Net::SSH::Exception, "incompatible SSH version `#{@version}'" + end + + debug { "local is `#{PROTO_VERSION}'" } + socket.write "#{PROTO_VERSION}\r\n" + socket.flush + end + end +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/session.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/session.rb new file mode 100644 index 00000000..519777f2 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/session.rb @@ -0,0 +1,276 @@ +require 'socket' +require 'timeout' + +require 'net/ssh/errors' +require 'net/ssh/loggable' +require 'net/ssh/version' +require 'net/ssh/transport/algorithms' +require 'net/ssh/transport/constants' +require 'net/ssh/transport/packet_stream' +require 'net/ssh/transport/server_version' +require 'net/ssh/verifiers/null' +require 'net/ssh/verifiers/strict' +require 'net/ssh/verifiers/lenient' + +module Net; module SSH; module Transport + + # The transport layer represents the lowest level of the SSH protocol, and + # implements basic message exchanging and protocol initialization. It will + # never be instantiated directly (unless you really know what you're about), + # but will instead be created for you automatically when you create a new + # SSH session via Net::SSH.start. + class Session + include Constants, Loggable + + # The standard port for the SSH protocol. + DEFAULT_PORT = 22 + + # The host to connect to, as given to the constructor. + attr_reader :host + + # The port number to connect to, as given in the options to the constructor. + # If no port number was given, this will default to DEFAULT_PORT. + attr_reader :port + + # The underlying socket object being used to communicate with the remote + # host. + attr_reader :socket + + # The ServerVersion instance that encapsulates the negotiated protocol + # version. + attr_reader :server_version + + # The Algorithms instance used to perform key exchanges. + attr_reader :algorithms + + # The host-key verifier object used to verify host keys, to ensure that + # the connection is not being spoofed. + attr_reader :host_key_verifier + + # The hash of options that were given to the object at initialization. + attr_reader :options + + # Instantiates a new transport layer abstraction. This will block until + # the initial key exchange completes, leaving you with a ready-to-use + # transport session. + def initialize(host, options={}) + self.logger = options[:logger] + + @host = host + @port = options[:port] || DEFAULT_PORT + @options = options + + debug { "establishing connection to #{@host}:#{@port}" } + factory = options[:proxy] || TCPSocket + @socket = timeout(options[:timeout] || 0) { factory.open(@host, @port) } + @socket.extend(PacketStream) + @socket.logger = @logger + + debug { "connection established" } + + @queue = [] + + @host_key_verifier = select_host_key_verifier(options[:paranoid]) + + @server_version = ServerVersion.new(socket, logger) + + @algorithms = Algorithms.new(self, options) + wait { algorithms.initialized? } + end + + # Returns the host (and possibly IP address) in a format compatible with + # SSH known-host files. 
+ def host_as_string + @host_as_string ||= begin + string = "#{host}" + string = "[#{string}]:#{port}" if port != DEFAULT_PORT + if socket.peer_ip != host + string2 = socket.peer_ip + string2 = "[#{string2}]:#{port}" if port != DEFAULT_PORT + string << "," << string2 + end + string + end + end + + # Returns true if the underlying socket has been closed. + def closed? + socket.closed? + end + + # Cleans up (see PacketStream#cleanup) and closes the underlying socket. + def close + socket.cleanup + socket.close + end + + # Performs a "hard" shutdown of the connection. In general, this should + # never be done, but it might be necessary (in a rescue clause, for instance, + # when the connection needs to close but you don't know the status of the + # underlying protocol's state). + def shutdown! + error { "forcing connection closed" } + socket.close + end + + # Returns a new service_request packet for the given service name, ready + # for sending to the server. + def service_request(service) + Net::SSH::Buffer.from(:byte, SERVICE_REQUEST, :string, service) + end + + # Requests a rekey operation, and blocks until the operation completes. + # If a rekey is already pending, this returns immediately, having no + # effect. + def rekey! + if !algorithms.pending? + algorithms.rekey! + wait { algorithms.initialized? } + end + end + + # Returns immediately if a rekey is already in process. Otherwise, if a + # rekey is needed (as indicated by the socket, see PacketStream#if_needs_rekey?) + # one is performed, causing this method to block until it completes. + def rekey_as_needed + return if algorithms.pending? + socket.if_needs_rekey? { rekey! } + end + + # Returns a hash of information about the peer (remote) side of the socket, + # including :ip, :port, :host, and :canonized (see #host_as_string). + def peer + @peer ||= { :ip => socket.peer_ip, :port => @port.to_i, :host => @host, :canonized => host_as_string } + end + + # Blocks until a new packet is available to be read, and returns that + # packet. See #poll_message. + def next_message + poll_message(:block) + end + + # Tries to read the next packet from the socket. If mode is :nonblock (the + # default), this will not block and will return nil if there are no packets + # waiting to be read. Otherwise, this will block until a packet is + # available. Note that some packet types (DISCONNECT, IGNORE, UNIMPLEMENTED, + # DEBUG, and KEXINIT) are handled silently by this method, and will never + # be returned. + # + # If a key-exchange is in process and a disallowed packet type is + # received, it will be enqueued and otherwise ignored. When a key-exchange + # is not in process, and consume_queue is true, packets will be first + # read from the queue before the socket is queried. + def poll_message(mode=:nonblock, consume_queue=true) + loop do + if consume_queue && @queue.any? && algorithms.allow?(@queue.first) + return @queue.shift + end + + packet = socket.next_packet(mode) + return nil if packet.nil? + + case packet.type + when DISCONNECT + raise Net::SSH::Disconnect, "disconnected: #{packet[:description]} (#{packet[:reason_code]})" + + when IGNORE + debug { "IGNORE packet recieved: #{packet[:data].inspect}" } + + when UNIMPLEMENTED + lwarn { "UNIMPLEMENTED: #{packet[:number]}" } + + when DEBUG + send(packet[:always_display] ? 
:fatal : :debug) { packet[:message] } + + when KEXINIT + algorithms.accept_kexinit(packet) + + else + return packet if algorithms.allow?(packet) + push(packet) + end + end + end + + # Waits (blocks) until the given block returns true. If no block is given, + # this just waits long enough to see if there are any pending packets. Any + # packets read are enqueued (see #push). + def wait + loop do + break if block_given? && yield + message = poll_message(:nonblock, false) + push(message) if message + break if !block_given? + end + end + + # Adds the given packet to the packet queue. If the queue is non-empty, + # #poll_message will return packets from the queue in the order they + # were received. + def push(packet) + @queue.push(packet) + end + + # Sends the given message via the packet stream, blocking until the + # entire message has been sent. + def send_message(message) + socket.send_packet(message) + end + + # Enqueues the given message, such that it will be sent at the earliest + # opportunity. This does not block, but returns immediately. + def enqueue_message(message) + socket.enqueue_packet(message) + end + + # Configure's the packet stream's client state with the given set of + # options. This is typically used to define the cipher, compression, and + # hmac algorithms to use when sending packets to the server. + def configure_client(options={}) + socket.client.set(options) + end + + # Configure's the packet stream's server state with the given set of + # options. This is typically used to define the cipher, compression, and + # hmac algorithms to use when reading packets from the server. + def configure_server(options={}) + socket.server.set(options) + end + + # Sets a new hint for the packet stream, which the packet stream may use + # to change its behavior. (See PacketStream#hints). + def hint(which, value=true) + socket.hints[which] = value + end + + public + + # this method is primarily for use in tests + attr_reader :queue #:nodoc: + + private + + # Instantiates a new host-key verification class, based on the value of + # the parameter. When true or nil, the default Lenient verifier is + # returned. If it is false, the Null verifier is returned, and if it is + # :very, the Strict verifier is returned. If the argument happens to + # respond to :verify, it is returned directly. Otherwise, an exception + # is raised. + def select_host_key_verifier(paranoid) + case paranoid + when true, nil then + Net::SSH::Verifiers::Lenient.new + when false then + Net::SSH::Verifiers::Null.new + when :very then + Net::SSH::Verifiers::Strict.new + else + if paranoid.respond_to?(:verify) + paranoid + else + raise ArgumentError, "argument to :paranoid is not valid: #{paranoid.inspect}" + end + end + end + end +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/state.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/state.rb new file mode 100644 index 00000000..2d8a3dda --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/transport/state.rb @@ -0,0 +1,206 @@ +require 'zlib' +require 'net/ssh/transport/cipher_factory' +require 'net/ssh/transport/hmac' + +module Net; module SSH; module Transport + + # Encapsulates state information about one end of an SSH connection. Such + # state includes the packet sequence number, the algorithms in use, how + # many packets and blocks have been processed since the last reset, and so + # forth. This class will never be instantiated directly, but is used as + # part of the internal state of the PacketStream module. 
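The counters described here are what drive the rekey decision later in the class. A stripped-down sketch of the bookkeeping done by State#increment, with an illustrative block size:

# Sequence numbers wrap at 2^32; packet and block counts accumulate until
# reset! is called after a rekey.
class MiniState
  attr_reader :sequence_number, :packets, :blocks

  def initialize(block_size = 8)
    @sequence_number = @packets = @blocks = 0
    @block_size = block_size
  end

  def increment(packet_length)
    @sequence_number = (@sequence_number + 1) & 0xFFFFFFFF
    @packets += 1
    @blocks  += (packet_length + 4) / @block_size
  end
end

s = MiniState.new(16)
3.times { s.increment(60) }
p [s.sequence_number, s.packets, s.blocks]   # => [3, 3, 12]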
+ class State + # The socket object that owns this state object. + attr_reader :socket + + # The next packet sequence number for this socket endpoint. + attr_reader :sequence_number + + # The hmac algorithm in use for this endpoint. + attr_reader :hmac + + # The compression algorithm in use for this endpoint. + attr_reader :compression + + # The compression level to use when compressing data (or nil, for the default). + attr_reader :compression_level + + # The number of packets processed since the last call to #reset! + attr_reader :packets + + # The number of data blocks processed since the last call to #reset! + attr_reader :blocks + + # The cipher algorithm in use for this socket endpoint. + attr_reader :cipher + + # The block size for the cipher + attr_reader :block_size + + # The role that this state plays (either :client or :server) + attr_reader :role + + # The maximum number of packets that this endpoint wants to process before + # needing a rekey. + attr_accessor :max_packets + + # The maximum number of blocks that this endpoint wants to process before + # needing a rekey. + attr_accessor :max_blocks + + # The user-specified maximum number of bytes that this endpoint ought to + # process before needing a rekey. + attr_accessor :rekey_limit + + # Creates a new state object, belonging to the given socket. Initializes + # the algorithms to "none". + def initialize(socket, role) + @socket = socket + @role = role + @sequence_number = @packets = @blocks = 0 + @cipher = CipherFactory.get("none") + @block_size = 8 + @hmac = HMAC.get("none") + @compression = nil + @compressor = @decompressor = nil + @next_iv = "" + end + + # A convenience method for quickly setting multiple values in a single + # command. + def set(values) + values.each do |key, value| + instance_variable_set("@#{key}", value) + end + reset! + end + + def update_cipher(data) + result = cipher.update(data) + update_next_iv(role == :client ? result : data) + return result + end + + def final_cipher + result = cipher.final + update_next_iv(role == :client ? result : "", true) + return result + end + + # Increments the counters. The sequence number is incremented (and remapped + # so it always fits in a 32-bit integer). The number of packets and blocks + # are also incremented. + def increment(packet_length) + @sequence_number = (@sequence_number + 1) & 0xFFFFFFFF + @packets += 1 + @blocks += (packet_length + 4) / @block_size + end + + # The compressor object to use when compressing data. This takes into account + # the desired compression level. + def compressor + @compressor ||= Zlib::Deflate.new(compression_level || Zlib::DEFAULT_COMPRESSION) + end + + # The decompressor object to use when decompressing data. + def decompressor + @decompressor ||= Zlib::Inflate.new(nil) + end + + # Returns true if data compression/decompression is enabled. This will + # return true if :standard compression is selected, or if :delayed + # compression is selected and the :authenticated hint has been received + # by the socket. + def compression? + compression == :standard || (compression == :delayed && socket.hints[:authenticated]) + end + + # Compresses the data. If no compression is in effect, this will just return + # the data unmodified, otherwise it uses #compressor to compress the data. + def compress(data) + data = data.to_s + return data unless compression? + compressor.deflate(data, Zlib::SYNC_FLUSH) + end + + # Deompresses the data. 
If no compression is in effect, this will just return + # the data unmodified, otherwise it uses #decompressor to decompress the data. + def decompress(data) + data = data.to_s + return data unless compression? + decompressor.inflate(data) + end + + # Resets the counters on the state object, but leaves the sequence_number + # unchanged. It also sets defaults for and recomputes the max_packets and + # max_blocks values. + def reset! + @packets = @blocks = 0 + + @max_packets ||= 1 << 31 + + @block_size = cipher.name == "RC4" ? 8 : cipher.block_size + + if max_blocks.nil? + # cargo-culted from openssh. the idea is that "the 2^(blocksize*2) + # limit is too expensive for 3DES, blowfish, etc., so enforce a 1GB + # limit for small blocksizes." + if @block_size >= 16 + @max_blocks = 1 << (@block_size * 2) + else + @max_blocks = (1 << 30) / @block_size + end + + # if a limit on the # of bytes has been given, convert that into a + # minimum number of blocks processed. + + if rekey_limit + @max_blocks = [@max_blocks, rekey_limit / @block_size].min + end + end + + cleanup + end + + # Closes any the compressor and/or decompressor objects that have been + # instantiated. + def cleanup + if @compressor + @compressor.finish if !@compressor.finished? + @compressor.close + end + + if @decompressor + # we call reset here so that we don't get warnings when we try to + # close the decompressor + @decompressor.reset + @decompressor.close + end + + @compressor = @decompressor = nil + end + + # Returns true if the number of packets processed exceeds the maximum + # number of packets, or if the number of blocks processed exceeds the + # maximum number of blocks. + def needs_rekey? + max_packets && packets > max_packets || + max_blocks && blocks > max_blocks + end + + private + + def update_next_iv(data, reset=false) + @next_iv << data + @next_iv = @next_iv[-cipher.iv_len..-1] + + if reset + cipher.reset + cipher.iv = @next_iv + end + + return data + end + end + +end; end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/verifiers/lenient.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/verifiers/lenient.rb new file mode 100644 index 00000000..1fcdd583 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/verifiers/lenient.rb @@ -0,0 +1,30 @@ +require 'net/ssh/verifiers/strict' + +module Net; module SSH; module Verifiers + + # Basically the same as the Strict verifier, but does not try to actually + # verify a connection if the server is the localhost and the port is a + # nonstandard port number. Those two conditions will typically mean the + # connection is being tunnelled through a forwarded port, so the known-hosts + # file will not be helpful (in general). + class Lenient < Strict + # Tries to determine if the connection is being tunnelled, and if so, + # returns true. Otherwise, performs the standard strict verification. + def verify(arguments) + return true if tunnelled?(arguments) + super + end + + private + + # A connection is potentially being tunnelled if the port is not 22, + # and the ip refers to the localhost. 
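The same heuristic, pulled out as a standalone predicate with example inputs (the IPs and ports are illustrative):

# A loopback peer on a non-standard port is assumed to be a forwarded tunnel,
# so strict known-hosts checking is skipped.
def probably_tunnelled?(ip, port, default_port = 22)
  return false if port == default_port
  ip == "127.0.0.1" || ip == "::1"
end

puts probably_tunnelled?("127.0.0.1", 2222)    # => true
puts probably_tunnelled?("127.0.0.1", 22)      # => false
puts probably_tunnelled?("203.0.113.5", 2222)  # => false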
+ def tunnelled?(args) + return false if args[:session].port == Net::SSH::Transport::Session::DEFAULT_PORT + + ip = args[:session].peer[:ip] + return ip == "127.0.0.1" || ip == "::1" + end + end + +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/verifiers/null.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/verifiers/null.rb new file mode 100644 index 00000000..c2bda3a0 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/verifiers/null.rb @@ -0,0 +1,12 @@ +module Net; module SSH; module Verifiers + + # The Null host key verifier simply allows every key it sees, without + # bothering to verify. This is simple, but is not particularly secure. + class Null + # Returns true. + def verify(arguments) + true + end + end + +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/verifiers/strict.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/verifiers/strict.rb new file mode 100644 index 00000000..ef8edfde --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/verifiers/strict.rb @@ -0,0 +1,53 @@ +require 'net/ssh/errors' +require 'net/ssh/known_hosts' + +module Net; module SSH; module Verifiers + + # Does a strict host verification, looking the server up in the known + # host files to see if a key has already been seen for this server. If this + # server does not appear in any host file, this will silently add the + # server. If the server does appear at least once, but the key given does + # not match any known for the server, an exception will be raised (HostKeyMismatch). + # Otherwise, this returns true. + class Strict + def verify(arguments) + options = arguments[:session].options + host = options[:host_key_alias] || arguments[:session].host_as_string + matches = Net::SSH::KnownHosts.search_for(host, arguments[:session].options) + + # we've never seen this host before, so just automatically add the key. + # not the most secure option (since the first hit might be the one that + # is hacked), but since almost nobody actually compares the key + # fingerprint, this is a reasonable compromise between usability and + # security. + if matches.empty? + ip = arguments[:session].peer[:ip] + Net::SSH::KnownHosts.add(host, arguments[:key], arguments[:session].options) + return true + end + + # If we found any matches, check to see that the key type and + # blob also match. + found = matches.any? do |key| + key.ssh_type == arguments[:key].ssh_type && + key.to_blob == arguments[:key].to_blob + end + + # If a match was found, return true. Otherwise, raise an exception + # indicating that the key was not recognized. + found || process_cache_miss(host, arguments) + end + + private + + def process_cache_miss(host, args) + exception = HostKeyMismatch.new("fingerprint #{args[:fingerprint]} does not match for #{host.inspect}") + exception.data = args + exception.callback = Proc.new do + Net::SSH::KnownHosts.add(host, args[:key], args[:session].options) + end + raise exception + end + end + +end; end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/version.rb b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/version.rb new file mode 100644 index 00000000..263f36e7 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/lib/net/ssh/version.rb @@ -0,0 +1,62 @@ +module Net; module SSH + # A class for describing the current version of a library. The version + # consists of three parts: the +major+ number, the +minor+ number, and the + # +tiny+ (or +patch+) number. 
+ # + # Two Version instances may be compared, so that you can test that a version + # of a library is what you require: + # + # require 'net/ssh/version' + # + # if Net::SSH::Version::CURRENT < Net::SSH::Version[2,1,0] + # abort "your software is too old!" + # end + class Version + include Comparable + + # A convenience method for instantiating a new Version instance with the + # given +major+, +minor+, and +tiny+ components. + def self.[](major, minor, tiny) + new(major, minor, tiny) + end + + attr_reader :major, :minor, :tiny + + # Create a new Version object with the given components. + def initialize(major, minor, tiny) + @major, @minor, @tiny = major, minor, tiny + end + + # Compare this version to the given +version+ object. + def <=>(version) + to_i <=> version.to_i + end + + # Converts this version object to a string, where each of the three + # version components are joined by the '.' character. E.g., 2.0.0. + def to_s + @to_s ||= [@major, @minor, @tiny].join(".") + end + + # Converts this version to a canonical integer that may be compared + # against other version objects. + def to_i + @to_i ||= @major * 1_000_000 + @minor * 1_000 + @tiny + end + + # The major component of this version of the Net::SSH library + MAJOR = 2 + + # The minor component of this version of the Net::SSH library + MINOR = 0 + + # The tiny component of this version of the Net::SSH library + TINY = 15 + + # The current version of the Net::SSH library as a Version instance + CURRENT = new(MAJOR, MINOR, TINY) + + # The current version of the Net::SSH library as a String + STRING = CURRENT.to_s + end +end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/net-ssh.gemspec b/vendor/gems/gems/net-ssh-2.0.15/net-ssh.gemspec new file mode 100644 index 00000000..a7ac88ae --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/net-ssh.gemspec @@ -0,0 +1,131 @@ +@spec = Gem::Specification.new do |s| + s.name = "net-ssh" + s.rubyforge_project = 'net-ssh' + s.version = "2.0.15" + s.summary = "Net::SSH: a pure-Ruby implementation of the SSH2 client protocol." 
+ s.description = s.summary + s.authors = ["Jamis Buck", "Delano Mandelbaum"] + s.email = ["net-ssh@solutious.com", "net-ssh@solutious.com"] + s.homepage = "http://rubyforge.org/projects/net-ssh/" + + s.extra_rdoc_files = %w[README.rdoc THANKS.rdoc CHANGELOG.rdoc] + s.has_rdoc = true + s.rdoc_options = ["--line-numbers", "--title", s.summary, "--main", "README.rdoc"] + s.require_paths = %w[lib] + s.rubygems_version = '1.3.2' + + s.executables = %w[] + + # = MANIFEST = + s.files = %w( + CHANGELOG.rdoc + Manifest + README.rdoc + Rakefile + Rudyfile + THANKS.rdoc + lib/net/ssh.rb + lib/net/ssh/authentication/agent.rb + lib/net/ssh/authentication/constants.rb + lib/net/ssh/authentication/key_manager.rb + lib/net/ssh/authentication/methods/abstract.rb + lib/net/ssh/authentication/methods/hostbased.rb + lib/net/ssh/authentication/methods/keyboard_interactive.rb + lib/net/ssh/authentication/methods/password.rb + lib/net/ssh/authentication/methods/publickey.rb + lib/net/ssh/authentication/pageant.rb + lib/net/ssh/authentication/session.rb + lib/net/ssh/buffer.rb + lib/net/ssh/buffered_io.rb + lib/net/ssh/config.rb + lib/net/ssh/connection/channel.rb + lib/net/ssh/connection/constants.rb + lib/net/ssh/connection/session.rb + lib/net/ssh/connection/term.rb + lib/net/ssh/errors.rb + lib/net/ssh/key_factory.rb + lib/net/ssh/known_hosts.rb + lib/net/ssh/loggable.rb + lib/net/ssh/packet.rb + lib/net/ssh/prompt.rb + lib/net/ssh/proxy/errors.rb + lib/net/ssh/proxy/http.rb + lib/net/ssh/proxy/socks4.rb + lib/net/ssh/proxy/socks5.rb + lib/net/ssh/ruby_compat.rb + lib/net/ssh/service/forward.rb + lib/net/ssh/test.rb + lib/net/ssh/test/channel.rb + lib/net/ssh/test/extensions.rb + lib/net/ssh/test/kex.rb + lib/net/ssh/test/local_packet.rb + lib/net/ssh/test/packet.rb + lib/net/ssh/test/remote_packet.rb + lib/net/ssh/test/script.rb + lib/net/ssh/test/socket.rb + lib/net/ssh/transport/algorithms.rb + lib/net/ssh/transport/cipher_factory.rb + lib/net/ssh/transport/constants.rb + lib/net/ssh/transport/hmac.rb + lib/net/ssh/transport/hmac/abstract.rb + lib/net/ssh/transport/hmac/md5.rb + lib/net/ssh/transport/hmac/md5_96.rb + lib/net/ssh/transport/hmac/none.rb + lib/net/ssh/transport/hmac/sha1.rb + lib/net/ssh/transport/hmac/sha1_96.rb + lib/net/ssh/transport/identity_cipher.rb + lib/net/ssh/transport/kex.rb + lib/net/ssh/transport/kex/diffie_hellman_group1_sha1.rb + lib/net/ssh/transport/kex/diffie_hellman_group_exchange_sha1.rb + lib/net/ssh/transport/openssl.rb + lib/net/ssh/transport/packet_stream.rb + lib/net/ssh/transport/server_version.rb + lib/net/ssh/transport/session.rb + lib/net/ssh/transport/state.rb + lib/net/ssh/verifiers/lenient.rb + lib/net/ssh/verifiers/null.rb + lib/net/ssh/verifiers/strict.rb + lib/net/ssh/version.rb + net-ssh.gemspec + setup.rb + support/arcfour_check.rb + test/authentication/methods/common.rb + test/authentication/methods/test_abstract.rb + test/authentication/methods/test_hostbased.rb + test/authentication/methods/test_keyboard_interactive.rb + test/authentication/methods/test_password.rb + test/authentication/methods/test_publickey.rb + test/authentication/test_agent.rb + test/authentication/test_key_manager.rb + test/authentication/test_session.rb + test/common.rb + test/configs/eqsign + test/configs/exact_match + test/configs/multihost + test/configs/wild_cards + test/connection/test_channel.rb + test/connection/test_session.rb + test/test_all.rb + test/test_buffer.rb + test/test_buffered_io.rb + test/test_config.rb + test/test_key_factory.rb + 
test/transport/hmac/test_md5.rb + test/transport/hmac/test_md5_96.rb + test/transport/hmac/test_none.rb + test/transport/hmac/test_sha1.rb + test/transport/hmac/test_sha1_96.rb + test/transport/kex/test_diffie_hellman_group1_sha1.rb + test/transport/kex/test_diffie_hellman_group_exchange_sha1.rb + test/transport/test_algorithms.rb + test/transport/test_cipher_factory.rb + test/transport/test_hmac.rb + test/transport/test_identity_cipher.rb + test/transport/test_packet_stream.rb + test/transport/test_server_version.rb + test/transport/test_session.rb + test/transport/test_state.rb + ) + + +end diff --git a/vendor/gems/gems/eventmachine-0.12.10/setup.rb b/vendor/gems/gems/net-ssh-2.0.15/setup.rb similarity index 100% rename from vendor/gems/gems/eventmachine-0.12.10/setup.rb rename to vendor/gems/gems/net-ssh-2.0.15/setup.rb diff --git a/vendor/gems/gems/net-ssh-2.0.15/support/arcfour_check.rb b/vendor/gems/gems/net-ssh-2.0.15/support/arcfour_check.rb new file mode 100644 index 00000000..a823a1f1 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/support/arcfour_check.rb @@ -0,0 +1,20 @@ + +require 'net/ssh' + +# ARCFOUR CHECK +# +# Usage: +# $ ruby support/arcfour_check.rb +# +# Expected Output: +# arcfour128: [16, 8] OpenSSL::Cipher::Cipher +# arcfour256: [32, 8] OpenSSL::Cipher::Cipher +# arcfour512: [64, 8] OpenSSL::Cipher::Cipher + +[['arcfour128', 16], ['arcfour256', 32], ['arcfour512', 64]].each do |cipher| + print "#{cipher[0]}: " + a = Net::SSH::Transport::CipherFactory.get_lengths(cipher[0]) + b = Net::SSH::Transport::CipherFactory.get(cipher[0], :key => ([].fill('x', 0, cipher[1]).join)) + puts "#{a} #{b.class}" +end + diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/common.rb b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/common.rb new file mode 100644 index 00000000..735836d3 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/common.rb @@ -0,0 +1,28 @@ +module Authentication; module Methods + + module Common + include Net::SSH::Authentication::Constants + + private + + def socket(options={}) + @socket ||= stub("socket", :client_name => "me.ssh.test") + end + + def transport(options={}) + @transport ||= MockTransport.new(options.merge(:socket => socket)) + end + + def session(options={}) + @session ||= begin + sess = stub("auth-session", :logger => nil, :transport => transport(options)) + def sess.next_message + transport.next_message + end + sess + end + end + + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_abstract.rb b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_abstract.rb new file mode 100644 index 00000000..f0a18d55 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_abstract.rb @@ -0,0 +1,51 @@ +require 'common' +require 'authentication/methods/common' +require 'net/ssh/authentication/methods/abstract' + +module Authentication; module Methods + + class TestAbstract < Test::Unit::TestCase + include Common + + def test_constructor_should_set_defaults + assert_nil subject.key_manager + end + + def test_constructor_should_honor_options + assert_equal :manager, subject(:key_manager => :manager).key_manager + end + + def test_session_id_should_query_session_id_from_key_exchange + transport.stubs(:algorithms).returns(stub("algorithms", :session_id => "abcxyz123")) + assert_equal "abcxyz123", subject.session_id + end + + def test_send_message_should_delegate_to_transport + 
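The net-ssh.gemspec added above is plain Ruby that assigns a Gem::Specification to @spec, so it can be loaded and inspected directly; a sketch, with the path taken from the diff:

require 'rubygems'

spec = Gem::Specification.load("vendor/gems/gems/net-ssh-2.0.15/net-ssh.gemspec")
spec.name              # => "net-ssh"
spec.version.to_s      # => "2.0.15"
spec.extra_rdoc_files  # => ["README.rdoc", "THANKS.rdoc", "CHANGELOG.rdoc"]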
transport.expects(:send_message).with("abcxyz123") + subject.send_message("abcxyz123") + end + + def test_userauth_request_should_build_well_formed_userauth_packet + packet = subject.userauth_request("jamis", "ssh-connection", "password") + assert_equal "\062\0\0\0\005jamis\0\0\0\016ssh-connection\0\0\0\010password", packet.to_s + end + + def test_userauth_request_should_translate_extra_booleans_onto_end + packet = subject.userauth_request("jamis", "ssh-connection", "password", true, false) + assert_equal "\062\0\0\0\005jamis\0\0\0\016ssh-connection\0\0\0\010password\1\0", packet.to_s + end + + def test_userauth_request_should_translate_extra_strings_onto_end + packet = subject.userauth_request("jamis", "ssh-connection", "password", "foo", "bar") + assert_equal "\062\0\0\0\005jamis\0\0\0\016ssh-connection\0\0\0\010password\0\0\0\3foo\0\0\0\3bar", packet.to_s + end + + private + + def subject(options={}) + @subject ||= Net::SSH::Authentication::Methods::Abstract.new(session(options), options) + end + + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_hostbased.rb b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_hostbased.rb new file mode 100644 index 00000000..281c86d9 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_hostbased.rb @@ -0,0 +1,114 @@ +require 'common' +require 'net/ssh/authentication/methods/hostbased' +require 'authentication/methods/common' + +module Authentication; module Methods + + class TestHostbased < Test::Unit::TestCase + include Common + + def test_authenticate_should_return_false_when_no_key_manager_has_been_set + assert_equal false, subject(:key_manager => nil).authenticate("ssh-connection", "jamis") + end + + def test_authenticate_should_return_false_when_key_manager_has_no_keys + assert_equal false, subject(:keys => []).authenticate("ssh-connection", "jamis") + end + + def test_authenticate_should_return_false_if_no_keys_can_authenticate + ENV.stubs(:[]).with('USER').returns(nil) + key_manager.expects(:sign).with(&signature_parameters(keys.first)).returns("sig-one") + key_manager.expects(:sign).with(&signature_parameters(keys.last)).returns("sig-two") + + transport.expect do |t, packet| + assert_equal USERAUTH_REQUEST, packet.type + assert verify_userauth_request_packet(packet, keys.first) + assert_equal "sig-one", packet.read_string + t.return(USERAUTH_FAILURE, :string, "hostbased,password") + + t.expect do |t2, packet2| + assert_equal USERAUTH_REQUEST, packet2.type + assert verify_userauth_request_packet(packet2, keys.last) + assert_equal "sig-two", packet2.read_string + t2.return(USERAUTH_FAILURE, :string, "hostbased,password") + end + end + + assert_equal false, subject.authenticate("ssh-connection", "jamis") + end + + def test_authenticate_should_return_true_if_any_key_can_authenticate + ENV.stubs(:[]).with('USER').returns(nil) + key_manager.expects(:sign).with(&signature_parameters(keys.first)).returns("sig-one") + + transport.expect do |t, packet| + assert_equal USERAUTH_REQUEST, packet.type + assert verify_userauth_request_packet(packet, keys.first) + assert_equal "sig-one", packet.read_string + t.return(USERAUTH_SUCCESS) + end + + assert subject.authenticate("ssh-connection", "jamis") + end + + private + + def signature_parameters(key) + Proc.new do |given_key, data| + next false unless given_key.to_blob == key.to_blob + buffer = Net::SSH::Buffer.new(data) + buffer.read_string == "abcxyz123" && # session-id + buffer.read_byte == 
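The escaped byte strings asserted above are simply Net::SSH::Buffer serializations of an SSH_MSG_USERAUTH_REQUEST (type 50, i.e. "\062"); a minimal sketch that produces the same bytes:

require 'net/ssh/buffer'

USERAUTH_REQUEST = 50

packet = Net::SSH::Buffer.from(:byte,   USERAUTH_REQUEST,
                               :string, "jamis",            # user name
                               :string, "ssh-connection",   # next service
                               :string, "password")         # authentication method
packet.to_s   # => "\062\0\0\0\005jamis\0\0\0\016ssh-connection\0\0\0\010password"

Each :string is written as a 4-byte big-endian length followed by the bytes, which is where the "\0\0\0\005"-style prefixes in the assertions come from.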
USERAUTH_REQUEST && # type + verify_userauth_request_packet(buffer, key) + end + end + + def verify_userauth_request_packet(packet, key) + packet.read_string == "jamis" && # user-name + packet.read_string == "ssh-connection" && # next service + packet.read_string == "hostbased" && # auth-method + packet.read_string == key.ssh_type && # key type + packet.read_buffer.read_key.to_blob == key.to_blob && # key + packet.read_string == "me.ssh.test." && # client hostname + packet.read_string == "jamis" # client username + end + + @@keys = nil + def keys + @@keys ||= [OpenSSL::PKey::RSA.new(32), OpenSSL::PKey::DSA.new(32)] + end + + def key_manager(options={}) + @key_manager ||= begin + manager = stub("key_manager") + manager.stubs(:each_identity).multiple_yields(*(options[:keys] || keys)) + manager + end + end + + def subject(options={}) + options[:key_manager] = key_manager(options) unless options.key?(:key_manager) + @subject ||= Net::SSH::Authentication::Methods::Hostbased.new(session(options), options) + end + + def socket(options={}) + @socket ||= stub("socket", :client_name => "me.ssh.test") + end + + def transport(options={}) + @transport ||= MockTransport.new(options.merge(:socket => socket)) + end + + def session(options={}) + @session ||= begin + sess = stub("auth-session", :logger => nil, :transport => transport(options)) + def sess.next_message + transport.next_message + end + sess + end + end + + end + +end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_keyboard_interactive.rb b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_keyboard_interactive.rb new file mode 100644 index 00000000..b0922636 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_keyboard_interactive.rb @@ -0,0 +1,98 @@ +require 'common' +require 'net/ssh/authentication/methods/keyboard_interactive' +require 'authentication/methods/common' + +module Authentication; module Methods + + class TestKeyboardInteractive < Test::Unit::TestCase + include Common + + USERAUTH_INFO_REQUEST = 60 + USERAUTH_INFO_RESPONSE = 61 + + def test_authenticate_should_be_false_when_server_does_not_support_this_method + transport.expect do |t,packet| + assert_equal USERAUTH_REQUEST, packet.type + assert_equal "jamis", packet.read_string + assert_equal "ssh-connection", packet.read_string + assert_equal "keyboard-interactive", packet.read_string + assert_equal "", packet.read_string # language tags + assert_equal "", packet.read_string # submethods + + t.return(USERAUTH_FAILURE, :string, "password") + end + + assert_equal false, subject.authenticate("ssh-connection", "jamis") + end + + def test_authenticate_should_be_false_if_given_password_is_not_accepted + transport.expect do |t,packet| + assert_equal USERAUTH_REQUEST, packet.type + t.return(USERAUTH_INFO_REQUEST, :string, "", :string, "", :string, "", :long, 1, :string, "Password:", :bool, false) + t.expect do |t2,packet2| + assert_equal USERAUTH_INFO_RESPONSE, packet2.type + assert_equal 1, packet2.read_long + assert_equal "the-password", packet2.read_string + t2.return(USERAUTH_FAILURE, :string, "publickey") + end + end + + assert_equal false, subject.authenticate("ssh-connection", "jamis", "the-password") + end + + def test_authenticate_should_be_true_if_given_password_is_accepted + transport.expect do |t,packet| + assert_equal USERAUTH_REQUEST, packet.type + t.return(USERAUTH_INFO_REQUEST, :string, "", :string, "", :string, "", :long, 1, :string, "Password:", :bool, false) + t.expect do |t2,packet2| + 
assert_equal USERAUTH_INFO_RESPONSE, packet2.type + t2.return(USERAUTH_SUCCESS) + end + end + + assert subject.authenticate("ssh-connection", "jamis", "the-password") + end + + def test_authenticate_should_duplicate_password_as_needed_to_fill_request + transport.expect do |t,packet| + assert_equal USERAUTH_REQUEST, packet.type + t.return(USERAUTH_INFO_REQUEST, :string, "", :string, "", :string, "", :long, 2, :string, "Password:", :bool, false, :string, "Again:", :bool, false) + t.expect do |t2,packet2| + assert_equal USERAUTH_INFO_RESPONSE, packet2.type + assert_equal 2, packet2.read_long + assert_equal "the-password", packet2.read_string + assert_equal "the-password", packet2.read_string + t2.return(USERAUTH_SUCCESS) + end + end + + assert subject.authenticate("ssh-connection", "jamis", "the-password") + end + + def test_authenticate_should_prompt_for_input_when_password_is_not_given + subject.expects(:prompt).with("Name:", true).returns("name") + subject.expects(:prompt).with("Password:", false).returns("password") + + transport.expect do |t,packet| + assert_equal USERAUTH_REQUEST, packet.type + t.return(USERAUTH_INFO_REQUEST, :string, "", :string, "", :string, "", :long, 2, :string, "Name:", :bool, true, :string, "Password:", :bool, false) + t.expect do |t2,packet2| + assert_equal USERAUTH_INFO_RESPONSE, packet2.type + assert_equal 2, packet2.read_long + assert_equal "name", packet2.read_string + assert_equal "password", packet2.read_string + t2.return(USERAUTH_SUCCESS) + end + end + + assert subject.authenticate("ssh-connection", "jamis", nil) + end + + private + + def subject(options={}) + @subject ||= Net::SSH::Authentication::Methods::KeyboardInteractive.new(session(options), options) + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_password.rb b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_password.rb new file mode 100644 index 00000000..52f4196e --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_password.rb @@ -0,0 +1,50 @@ +require 'common' +require 'net/ssh/authentication/methods/password' +require 'authentication/methods/common' + +module Authentication; module Methods + + class TestPassword < Test::Unit::TestCase + include Common + + def test_authenticate_when_password_is_unacceptible_should_return_false + transport.expect do |t,packet| + assert_equal USERAUTH_REQUEST, packet.type + assert_equal "jamis", packet.read_string + assert_equal "ssh-connection", packet.read_string + assert_equal "password", packet.read_string + assert_equal false, packet.read_bool + assert_equal "the-password", packet.read_string + + t.return(USERAUTH_FAILURE, :string, "publickey") + end + + assert !subject.authenticate("ssh-connection", "jamis", "the-password") + end + + def test_authenticate_when_password_is_acceptible_should_return_true + transport.expect do |t,packet| + assert_equal USERAUTH_REQUEST, packet.type + t.return(USERAUTH_SUCCESS) + end + + assert subject.authenticate("ssh-connection", "jamis", "the-password") + end + + def test_authenticate_should_return_false_if_password_change_request_is_received + transport.expect do |t,packet| + assert_equal USERAUTH_REQUEST, packet.type + t.return(USERAUTH_PASSWD_CHANGEREQ, :string, "Change your password:", :string, "") + end + + assert !subject.authenticate("ssh-connection", "jamis", "the-password") + end + + private + + def subject(options={}) + @subject ||= 
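The keyboard-interactive tests above drive the USERAUTH_INFO_REQUEST / USERAUTH_INFO_RESPONSE round trip; the response the client sends back is just a count followed by one answer per prompt. A sketch for the single "Password:" prompt case asserted above:

require 'net/ssh/buffer'

USERAUTH_INFO_RESPONSE = 61   # as defined at the top of the test above

response = Net::SSH::Buffer.from(:byte,   USERAUTH_INFO_RESPONSE,
                                 :long,   1,                # number of answers
                                 :string, "the-password")   # one answer per prompt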
Net::SSH::Authentication::Methods::Password.new(session(options), options) + end + end + +end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_publickey.rb b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_publickey.rb new file mode 100644 index 00000000..f4f544f9 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/methods/test_publickey.rb @@ -0,0 +1,127 @@ +require 'common' +require 'net/ssh/authentication/methods/publickey' +require 'authentication/methods/common' + +module Authentication; module Methods + + class TestPublickey < Test::Unit::TestCase + include Common + + def test_authenticate_should_return_false_when_no_key_manager_has_been_set + assert_equal false, subject(:key_manager => nil).authenticate("ssh-connection", "jamis") + end + + def test_authenticate_should_return_false_when_key_manager_has_no_keys + assert_equal false, subject(:keys => []).authenticate("ssh-connection", "jamis") + end + + def test_authenticate_should_return_false_if_no_keys_can_authenticate + transport.expect do |t, packet| + assert_equal USERAUTH_REQUEST, packet.type + assert verify_userauth_request_packet(packet, keys.first, false) + t.return(USERAUTH_FAILURE, :string, "hostbased,password") + + t.expect do |t2, packet2| + assert_equal USERAUTH_REQUEST, packet2.type + assert verify_userauth_request_packet(packet2, keys.last, false) + t2.return(USERAUTH_FAILURE, :string, "hostbased,password") + end + end + + assert_equal false, subject.authenticate("ssh-connection", "jamis") + end + + def test_authenticate_should_return_false_if_signature_exchange_fails + key_manager.expects(:sign).with(&signature_parameters(keys.first)).returns("sig-one") + key_manager.expects(:sign).with(&signature_parameters(keys.last)).returns("sig-two") + + transport.expect do |t, packet| + assert_equal USERAUTH_REQUEST, packet.type + assert verify_userauth_request_packet(packet, keys.first, false) + t.return(USERAUTH_PK_OK, :string, keys.first.ssh_type, :string, Net::SSH::Buffer.from(:key, keys.first)) + + t.expect do |t2,packet2| + assert_equal USERAUTH_REQUEST, packet2.type + assert verify_userauth_request_packet(packet2, keys.first, true) + assert_equal "sig-one", packet2.read_string + t2.return(USERAUTH_FAILURE, :string, "hostbased,password") + + t2.expect do |t3, packet3| + assert_equal USERAUTH_REQUEST, packet3.type + assert verify_userauth_request_packet(packet3, keys.last, false) + t3.return(USERAUTH_PK_OK, :string, keys.last.ssh_type, :string, Net::SSH::Buffer.from(:key, keys.last)) + + t3.expect do |t4,packet4| + assert_equal USERAUTH_REQUEST, packet4.type + assert verify_userauth_request_packet(packet4, keys.last, true) + assert_equal "sig-two", packet4.read_string + t4.return(USERAUTH_FAILURE, :string, "hostbased,password") + end + end + end + end + + assert !subject.authenticate("ssh-connection", "jamis") + end + + def test_authenticate_should_return_true_if_any_key_can_authenticate + key_manager.expects(:sign).with(&signature_parameters(keys.first)).returns("sig-one") + + transport.expect do |t, packet| + assert_equal USERAUTH_REQUEST, packet.type + assert verify_userauth_request_packet(packet, keys.first, false) + t.return(USERAUTH_PK_OK, :string, keys.first.ssh_type, :string, Net::SSH::Buffer.from(:key, keys.first)) + + t.expect do |t2,packet2| + assert_equal USERAUTH_REQUEST, packet2.type + assert verify_userauth_request_packet(packet2, keys.first, true) + assert_equal "sig-one", packet2.read_string + t2.return(USERAUTH_SUCCESS) + end + end + + 
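These authentication method classes are normally driven through the public entry point rather than constructed by hand; a hedged sketch of the equivalent call (host, user and password are placeholders, and :auth_methods simply narrows the list the authentication session tries):

require 'net/ssh'

Net::SSH.start("example.test", "jamis",
               :password     => "the-password",
               :auth_methods => %w(password keyboard-interactive)) do |ssh|
  puts ssh.exec!("hostname")
end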
assert subject.authenticate("ssh-connection", "jamis") + end + + private + + def signature_parameters(key) + Proc.new do |given_key, data| + next false unless given_key.to_blob == key.to_blob + buffer = Net::SSH::Buffer.new(data) + buffer.read_string == "abcxyz123" && # session-id + buffer.read_byte == USERAUTH_REQUEST && # type + verify_userauth_request_packet(buffer, key, true) + end + end + + def verify_userauth_request_packet(packet, key, has_sig) + packet.read_string == "jamis" && # user-name + packet.read_string == "ssh-connection" && # next service + packet.read_string == "publickey" && # auth-method + packet.read_bool == has_sig && # whether a signature is appended + packet.read_string == key.ssh_type && # ssh key type + packet.read_buffer.read_key.to_blob == key.to_blob # key + end + + @@keys = nil + def keys + @@keys ||= [OpenSSL::PKey::RSA.new(32), OpenSSL::PKey::DSA.new(32)] + end + + def key_manager(options={}) + @key_manager ||= begin + manager = stub("key_manager") + manager.stubs(:each_identity).multiple_yields(*(options[:keys] || keys)) + manager + end + end + + def subject(options={}) + options[:key_manager] = key_manager(options) unless options.key?(:key_manager) + @subject ||= Net::SSH::Authentication::Methods::Publickey.new(session(options), options) + end + + end + +end; end diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/authentication/test_agent.rb b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/test_agent.rb new file mode 100644 index 00000000..248a3d79 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/test_agent.rb @@ -0,0 +1,205 @@ +require 'common' +require 'net/ssh/authentication/agent' + +module Authentication + + class TestAgent < Test::Unit::TestCase + + SSH2_AGENT_REQUEST_VERSION = 1 + SSH2_AGENT_REQUEST_IDENTITIES = 11 + SSH2_AGENT_IDENTITIES_ANSWER = 12 + SSH2_AGENT_SIGN_REQUEST = 13 + SSH2_AGENT_SIGN_RESPONSE = 14 + SSH2_AGENT_FAILURE = 30 + SSH2_AGENT_VERSION_RESPONSE = 103 + + SSH_COM_AGENT2_FAILURE = 102 + + SSH_AGENT_REQUEST_RSA_IDENTITIES = 1 + SSH_AGENT_RSA_IDENTITIES_ANSWER = 2 + SSH_AGENT_FAILURE = 5 + + def setup + @original, ENV['SSH_AUTH_SOCK'] = ENV['SSH_AUTH_SOCK'], "/path/to/ssh.agent.sock" + end + + def teardown + ENV['SSH_AUTH_SOCK'] = @original + end + + def test_connect_should_use_agent_factory_to_determine_connection_type + factory.expects(:open).with("/path/to/ssh.agent.sock").returns(socket) + agent(false).connect! + end + + def test_connect_should_raise_error_if_connection_could_not_be_established + factory.expects(:open).raises(SocketError) + assert_raises(Net::SSH::Authentication::AgentNotAvailable) { agent(false).connect! } + end + + def test_negotiate_should_raise_error_if_ssh2_agent_response_recieved + socket.expect do |s, type, buffer| + assert_equal SSH2_AGENT_REQUEST_VERSION, type + assert_equal Net::SSH::Transport::ServerVersion::PROTO_VERSION, buffer.read_string + s.return(SSH2_AGENT_VERSION_RESPONSE) + end + assert_raises(NotImplementedError) { agent.negotiate! } + end + + def test_negotiate_should_raise_error_if_response_was_unexpected + socket.expect do |s, type, buffer| + assert_equal SSH2_AGENT_REQUEST_VERSION, type + s.return(255) + end + assert_raises(Net::SSH::Authentication::AgentError) { agent.negotiate! } + end + + def test_negotiate_should_be_successful_with_expected_response + socket.expect do |s, type, buffer| + assert_equal SSH2_AGENT_REQUEST_VERSION, type + s.return(SSH_AGENT_RSA_IDENTITIES_ANSWER) + end + assert_nothing_raised { agent(:connect).negotiate! 
} + end + + def test_identities_should_fail_if_SSH_AGENT_FAILURE_recieved + socket.expect do |s, type, buffer| + assert_equal SSH2_AGENT_REQUEST_IDENTITIES, type + s.return(SSH_AGENT_FAILURE) + end + assert_raises(Net::SSH::Authentication::AgentError) { agent.identities } + end + + def test_identities_should_fail_if_SSH2_AGENT_FAILURE_recieved + socket.expect do |s, type, buffer| + assert_equal SSH2_AGENT_REQUEST_IDENTITIES, type + s.return(SSH2_AGENT_FAILURE) + end + assert_raises(Net::SSH::Authentication::AgentError) { agent.identities } + end + + def test_identities_should_fail_if_SSH_COM_AGENT2_FAILURE_recieved + socket.expect do |s, type, buffer| + assert_equal SSH2_AGENT_REQUEST_IDENTITIES, type + s.return(SSH_COM_AGENT2_FAILURE) + end + assert_raises(Net::SSH::Authentication::AgentError) { agent.identities } + end + + def test_identities_should_fail_if_response_is_not_SSH2_AGENT_IDENTITIES_ANSWER + socket.expect do |s, type, buffer| + assert_equal SSH2_AGENT_REQUEST_IDENTITIES, type + s.return(255) + end + assert_raises(Net::SSH::Authentication::AgentError) { agent.identities } + end + + def test_identities_should_augment_identities_with_comment_field + key1 = key + key2 = OpenSSL::PKey::DSA.new(32) + + socket.expect do |s, type, buffer| + assert_equal SSH2_AGENT_REQUEST_IDENTITIES, type + s.return(SSH2_AGENT_IDENTITIES_ANSWER, :long, 2, :string, Net::SSH::Buffer.from(:key, key1), :string, "My favorite key", :string, Net::SSH::Buffer.from(:key, key2), :string, "Okay, but not the best") + end + + result = agent.identities + assert_equal key1.to_blob, result.first.to_blob + assert_equal key2.to_blob, result.last.to_blob + assert_equal "My favorite key", result.first.comment + assert_equal "Okay, but not the best", result.last.comment + end + + def test_close_should_close_socket + socket.expects(:close) + agent.close + end + + def test_sign_should_fail_if_response_is_SSH_AGENT_FAILURE + socket.expect { |s,| s.return(SSH_AGENT_FAILURE) } + assert_raises(Net::SSH::Authentication::AgentError) { agent.sign(key, "hello world") } + end + + def test_sign_should_fail_if_response_is_SSH2_AGENT_FAILURE + socket.expect { |s,| s.return(SSH2_AGENT_FAILURE) } + assert_raises(Net::SSH::Authentication::AgentError) { agent.sign(key, "hello world") } + end + + def test_sign_should_fail_if_response_is_SSH_COM_AGENT2_FAILURE + socket.expect { |s,| s.return(SSH_COM_AGENT2_FAILURE) } + assert_raises(Net::SSH::Authentication::AgentError) { agent.sign(key, "hello world") } + end + + def test_sign_should_fail_if_response_is_not_SSH2_AGENT_SIGN_RESPONSE + socket.expect { |s,| s.return(255) } + assert_raises(Net::SSH::Authentication::AgentError) { agent.sign(key, "hello world") } + end + + def test_sign_should_return_signed_data_from_agent + socket.expect do |s,type,buffer| + assert_equal SSH2_AGENT_SIGN_REQUEST, type + assert_equal key.to_blob, Net::SSH::Buffer.new(buffer.read_string).read_key.to_blob + assert_equal "hello world", buffer.read_string + assert_equal 0, buffer.read_long + + s.return(SSH2_AGENT_SIGN_RESPONSE, :string, "abcxyz123") + end + + assert_equal "abcxyz123", agent.sign(key, "hello world") + end + + private + + class MockSocket + def initialize + @expectation = nil + @buffer = Net::SSH::Buffer.new + end + + def expect(&block) + @expectation = block + end + + def return(type, *args) + data = Net::SSH::Buffer.from(*args) + @buffer.append([data.length+1, type, data.to_s].pack("NCA*")) + end + + def send(data, flags) + raise "got #{data.inspect} but no packet was expected" unless @expectation + 
buffer = Net::SSH::Buffer.new(data) + buffer.read_long # skip the length + type = buffer.read_byte + @expectation.call(self, type, buffer) + @expectation = nil + end + + def read(length) + @buffer.read(length) + end + end + + def key + @key ||= OpenSSL::PKey::RSA.new(32) + end + + def socket + @socket ||= MockSocket.new + end + + def factory + @factory ||= stub("socket factory", :open => socket) + end + + def agent(auto=:connect) + @agent ||= begin + agent = Net::SSH::Authentication::Agent.new + agent.stubs(:agent_socket_factory).returns(factory) + agent.connect! if auto == :connect + agent + end + end + + end + +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/authentication/test_key_manager.rb b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/test_key_manager.rb new file mode 100644 index 00000000..af8422b4 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/test_key_manager.rb @@ -0,0 +1,105 @@ +require 'common' +require 'net/ssh/authentication/key_manager' + +module Authentication + + class TestKeyManager < Test::Unit::TestCase + def test_key_files_and_known_identities_are_empty_by_default + assert manager.key_files.empty? + assert manager.known_identities.empty? + end + + def test_assume_agent_is_available_by_default + assert manager.use_agent? + end + + def test_add_ensures_list_is_unique + manager.add "/first" + manager.add "/second" + manager.add "/third" + manager.add "/second" + assert_equal %w(/first /second /third), manager.key_files + end + + def test_use_agent_should_be_set_to_false_if_agent_could_not_be_found + Net::SSH::Authentication::Agent.expects(:connect).raises(Net::SSH::Authentication::AgentNotAvailable) + assert manager.use_agent? + assert_nil manager.agent + assert !manager.use_agent? 
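Outside the MockSocket used above, the same calls go over the UNIX socket named by SSH_AUTH_SOCK; a sketch of the happy path against a real agent:

require 'net/ssh'

agent = Net::SSH::Authentication::Agent.connect   # raises AgentNotAvailable if no agent is reachable

keys = agent.identities
keys.each do |key|
  # each identity is a public key augmented with the comment string returned
  # in the IDENTITIES_ANSWER, as the test above verifies
  puts "#{key.ssh_type} #{key.comment}"
end

signature = agent.sign(keys.first, "data to sign") unless keys.empty?
agent.close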
+ end + + def test_each_identity_should_load_from_key_files + manager.stubs(:agent).returns(nil) + + stub_file_key "/first", rsa + stub_file_key "/second", dsa + + identities = [] + manager.each_identity { |identity| identities << identity } + + assert_equal 2, identities.length + assert_equal rsa.to_blob, identities.first.to_blob + assert_equal dsa.to_blob, identities.last.to_blob + + assert_equal({:from => :file, :file => "/first", :key => rsa}, manager.known_identities[rsa]) + assert_equal({:from => :file, :file => "/second", :key => dsa}, manager.known_identities[dsa]) + end + + def test_identities_should_load_from_agent + manager.stubs(:agent).returns(agent) + + identities = [] + manager.each_identity { |identity| identities << identity } + + assert_equal 2, identities.length + assert_equal rsa.to_blob, identities.first.to_blob + assert_equal dsa.to_blob, identities.last.to_blob + + assert_equal({:from => :agent}, manager.known_identities[rsa]) + assert_equal({:from => :agent}, manager.known_identities[dsa]) + end + + def test_sign_with_agent_originated_key_should_request_signature_from_agent + manager.stubs(:agent).returns(agent) + manager.each_identity { |identity| } # preload the known_identities + agent.expects(:sign).with(rsa, "hello, world").returns("abcxyz123") + assert_equal "abcxyz123", manager.sign(rsa, "hello, world") + end + + def test_sign_with_file_originated_key_should_load_private_key_and_sign_with_it + manager.stubs(:agent).returns(nil) + stub_file_key "/first", rsa(512), true + rsa.expects(:ssh_do_sign).with("hello, world").returns("abcxyz123") + manager.each_identity { |identity| } # preload the known_identities + assert_equal "\0\0\0\assh-rsa\0\0\0\011abcxyz123", manager.sign(rsa, "hello, world") + end + + private + + def stub_file_key(name, key, also_private=false) + manager.add(name) + File.expects(:readable?).with(name).returns(true) + File.expects(:readable?).with(name + ".pub").returns(false) + Net::SSH::KeyFactory.expects(:load_private_key).with(name, nil).returns(key).at_least_once + key.expects(:public_key).returns(key) + end + + def rsa(size=32) + @rsa ||= OpenSSL::PKey::RSA.new(size) + end + + def dsa + @dsa ||= OpenSSL::PKey::DSA.new(32) + end + + def agent + @agent ||= stub("agent", :identities => [rsa, dsa]) + end + + def manager + @manager ||= Net::SSH::Authentication::KeyManager.new(nil) + end + + end + +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/authentication/test_session.rb b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/test_session.rb new file mode 100644 index 00000000..4cc9bd5a --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/authentication/test_session.rb @@ -0,0 +1,93 @@ +require 'common' +require 'net/ssh/authentication/session' + +module Authentication + + class TestSession < Test::Unit::TestCase + include Net::SSH::Transport::Constants + include Net::SSH::Authentication::Constants + + def test_constructor_should_set_defaults + assert_equal %w(publickey hostbased password keyboard-interactive), session.auth_methods + assert_equal session.auth_methods, session.allowed_auth_methods + end + + def test_authenticate_should_raise_error_if_service_request_fails + transport.expect do |t, packet| + assert_equal SERVICE_REQUEST, packet.type + assert_equal "ssh-userauth", packet.read_string + t.return(255) + end + + assert_raises(Net::SSH::Exception) { session.authenticate("next service", "username", "password") } + end + + def test_authenticate_should_return_false_if_all_auth_methods_fail + transport.expect do |t, packet| + 
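The key manager tests above cover both sources of identities (key files and the agent) and both signing paths; a sketch of the same flow without mocks, with placeholder key paths:

require 'net/ssh'

manager = Net::SSH::Authentication::KeyManager.new(nil)   # nil logger, as in the tests
manager.add("/home/jamis/.ssh/id_rsa")
manager.add("/home/jamis/.ssh/id_dsa")

manager.each_identity do |identity|
  # agent-held keys are signed by the agent; file-based keys are loaded via
  # KeyFactory and signed locally, mirroring the two sign tests above
  signature = manager.sign(identity, "data to sign")
end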
assert_equal SERVICE_REQUEST, packet.type + assert_equal "ssh-userauth", packet.read_string + t.return(SERVICE_ACCEPT) + end + + Net::SSH::Authentication::Methods::Publickey.any_instance.expects(:authenticate).with("next service", "username", "password").returns(false) + Net::SSH::Authentication::Methods::Hostbased.any_instance.expects(:authenticate).with("next service", "username", "password").returns(false) + Net::SSH::Authentication::Methods::Password.any_instance.expects(:authenticate).with("next service", "username", "password").returns(false) + Net::SSH::Authentication::Methods::KeyboardInteractive.any_instance.expects(:authenticate).with("next service", "username", "password").returns(false) + + assert_equal false, session.authenticate("next service", "username", "password") + end + + def test_next_message_should_silently_handle_USERAUTH_BANNER_packets + transport.return(USERAUTH_BANNER, :string, "Howdy, folks!") + transport.return(SERVICE_ACCEPT) + assert_equal SERVICE_ACCEPT, session.next_message.type + end + + def test_next_message_should_understand_USERAUTH_FAILURE + transport.return(USERAUTH_FAILURE, :string, "a,b,c", :bool, false) + packet = session.next_message + assert_equal USERAUTH_FAILURE, packet.type + assert_equal %w(a b c), session.allowed_auth_methods + end + + (60..79).each do |type| + define_method("test_next_message_should_return_packets_of_type_#{type}") do + transport.return(type) + assert_equal type, session.next_message.type + end + end + + def test_next_message_should_understand_USERAUTH_SUCCESS + transport.return(USERAUTH_SUCCESS) + assert !transport.hints[:authenticated] + assert_equal USERAUTH_SUCCESS, session.next_message.type + assert transport.hints[:authenticated] + end + + def test_next_message_should_raise_error_on_unrecognized_packet_types + transport.return(1) + assert_raises(Net::SSH::Exception) { session.next_message } + end + + def test_expect_message_should_raise_exception_if_next_packet_is_not_expected_type + transport.return(SERVICE_ACCEPT) + assert_raises(Net::SSH::Exception) { session.expect_message(USERAUTH_BANNER) } + end + + def test_expect_message_should_return_packet_if_next_packet_is_expected_type + transport.return(SERVICE_ACCEPT) + assert_equal SERVICE_ACCEPT, session.expect_message(SERVICE_ACCEPT).type + end + + private + + def session(options={}) + @session ||= Net::SSH::Authentication::Session.new(transport(options), options) + end + + def transport(options={}) + @transport ||= MockTransport.new(options) + end + end + +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/common.rb b/vendor/gems/gems/net-ssh-2.0.15/test/common.rb new file mode 100644 index 00000000..4637cf0a --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/common.rb @@ -0,0 +1,107 @@ +$LOAD_PATH.unshift "#{File.dirname(__FILE__)}/../lib" +gem "test-unit" # http://rubyforge.org/pipermail/test-unit-tracker/2009-July/000075.html +require 'test/unit' +require 'mocha' +require 'net/ssh/buffer' +require 'net/ssh/config' +require 'net/ssh/loggable' +require 'net/ssh/packet' +require 'net/ssh/transport/session' +require 'ostruct' + +# clear the default files out so that tests don't get confused by existing +# SSH config files. 
+$original_config_default_files = Net::SSH::Config.default_files.dup +Net::SSH::Config.default_files.clear + +def P(*args) + Net::SSH::Packet.new(Net::SSH::Buffer.from(*args)) +end + +class MockTransport < Net::SSH::Transport::Session + class BlockVerifier + def initialize(block) + @block = block + end + + def verify(data) + @block.call(data) + end + end + + attr_reader :host_key_verifier + attr_accessor :host_as_string + attr_accessor :server_version + + attr_reader :client_options + attr_reader :server_options + attr_reader :hints, :queue + + attr_accessor :mock_enqueue + + def initialize(options={}) + self.logger = options[:logger] + self.host_as_string = "net.ssh.test,127.0.0.1" + self.server_version = OpenStruct.new(:version => "SSH-2.0-Ruby/Net::SSH::Test") + @expectation = nil + @queue = [] + @hints = {} + @socket = options[:socket] + @algorithms = OpenStruct.new(:session_id => "abcxyz123") + verifier { |data| true } + end + + def send_message(message) + buffer = Net::SSH::Buffer.new(message.to_s) + if @expectation.nil? + raise "got #{message.to_s.inspect} but was not expecting anything" + else + block, @expectation = @expectation, nil + block.call(self, Net::SSH::Packet.new(buffer)) + end + end + + def enqueue_message(message) + if mock_enqueue + send_message(message) + else + super + end + end + + def poll_message + @queue.shift + end + + def next_message + @queue.shift or raise "expected a message from the server but nothing was ready to send" + end + + def return(type, *args) + @queue << P(:byte, type, *args) + end + + def expect(&block) + @expectation = block + end + + def expect! + expect {} + end + + def verifier(&block) + @host_key_verifier = BlockVerifier.new(block) + end + + def configure_client(options) + @client_options = options + end + + def configure_server(options) + @server_options = options + end + + def hint(name, value=true) + @hints[name] = value + end +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/configs/eqsign b/vendor/gems/gems/net-ssh-2.0.15/test/configs/eqsign new file mode 100644 index 00000000..ed362368 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/configs/eqsign @@ -0,0 +1,3 @@ +Host=test.test + Port =1234 + Compression yes diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/configs/exact_match b/vendor/gems/gems/net-ssh-2.0.15/test/configs/exact_match new file mode 100644 index 00000000..908d631f --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/configs/exact_match @@ -0,0 +1,8 @@ +Host other.host + Compression no + Port 1231 + +Host test.host + Compression yes + ForwardAgent yes + Port 1234 \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/configs/multihost b/vendor/gems/gems/net-ssh-2.0.15/test/configs/multihost new file mode 100644 index 00000000..732975cd --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/configs/multihost @@ -0,0 +1,4 @@ +Host other.host, test.host + Compression yes + Port 1980 + RekeyLimit 2G diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/configs/wild_cards b/vendor/gems/gems/net-ssh-2.0.15/test/configs/wild_cards new file mode 100644 index 00000000..9e02dc84 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/configs/wild_cards @@ -0,0 +1,14 @@ +Host test.* + Port 1234 + Compression no + +Host tes?.host + Port 4321 + ForwardAgent yes + +Host *.hos? 
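Nearly every test in this vendored suite leans on the expect/return pair that MockTransport (and the smaller mocks) define above; condensed, the idiom inside a test case looks like this:

transport = MockTransport.new

# Outgoing traffic: the block runs when the code under test sends its next packet.
transport.expect do |t, packet|
  assert_equal SERVICE_REQUEST, packet.type
  t.return(SERVICE_ACCEPT)   # queue the server's reply; t.expect can nest further exchanges
end

# Incoming traffic: return queues a packet for next_message / poll_message to hand back.
transport.return(USERAUTH_BANNER, :string, "Howdy, folks!")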
+ IdentityFile ~/.ssh/id_dsa + Compression yes + +Host k*.host + RekeyLimit 1G \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/connection/test_channel.rb b/vendor/gems/gems/net-ssh-2.0.15/test/connection/test_channel.rb new file mode 100644 index 00000000..3600cf64 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/connection/test_channel.rb @@ -0,0 +1,452 @@ +require 'common' +require 'net/ssh/connection/channel' + +module Connection + + class TestChannel < Test::Unit::TestCase + include Net::SSH::Connection::Constants + + def teardown + connection.test! + end + + def test_constructor_should_set_defaults + assert_equal 0x10000, channel.local_maximum_packet_size + assert_equal 0x20000, channel.local_maximum_window_size + assert channel.pending_requests.empty? + end + + def test_channel_properties + channel[:hello] = "some value" + assert_equal "some value", channel[:hello] + end + + def test_exec_should_be_syntactic_sugar_for_a_channel_request + channel.expects(:send_channel_request).with("exec", :string, "ls").yields + found_block = false + channel.exec("ls") { found_block = true } + assert found_block, "expected block to be passed to send_channel_request" + end + + def test_subsystem_should_be_syntactic_sugar_for_a_channel_request + channel.expects(:send_channel_request).with("subsystem", :string, "sftp").yields + found_block = false + channel.subsystem("sftp") { found_block = true } + assert found_block, "expected block to be passed to send_channel_request" + end + + def test_request_pty_with_invalid_option_should_raise_error + assert_raises(ArgumentError) do + channel.request_pty(:bogus => "thing") + end + end + + def test_request_pty_without_options_should_use_defaults + channel.expects(:send_channel_request).with("pty-req", :string, "xterm", + :long, 80, :long, 24, :long, 640, :long, 480, :string, "\0").yields + found_block = false + channel.request_pty { found_block = true } + assert found_block, "expected block to be passed to send_channel_request" + end + + def test_request_pty_with_options_should_honor_options + channel.expects(:send_channel_request).with("pty-req", :string, "vanilla", + :long, 60, :long, 15, :long, 400, :long, 200, :string, "\5\0\0\0\1\0") + channel.request_pty :term => "vanilla", :chars_wide => 60, :chars_high => 15, + :pixels_wide => 400, :pixels_high => 200, :modes => { 5 => 1 } + end + + def test_send_data_should_append_to_channels_output_buffer + channel.send_data("hello") + assert_equal "hello", channel.output.to_s + channel.send_data("world") + assert_equal "helloworld", channel.output.to_s + end + + def test_close_before_channel_has_been_confirmed_should_do_nothing + assert !channel.closing? + channel.close + assert !channel.closing? + end + + def test_close_should_set_closing_and_send_message + channel.do_open_confirmation(0, 100, 100) + assert !channel.closing? + + connection.expect { |t,packet| assert_equal CHANNEL_CLOSE, packet.type } + channel.close + + assert channel.closing? 
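The config fixtures above (eqsign, exact_match, multihost, wild_cards) feed Net::SSH::Config, which merges every Host block whose pattern matches the requested host, including '=' separators, comma-separated host lists, and the '?' and '*' wildcards. A sketch, with the fixture path taken from the diff:

require 'net/ssh'

opts = Net::SSH::Config.for("test.host", ["test/configs/wild_cards"])
opts[:port]            # merged from the matching Host blocks
opts[:forward_agent]   # contributed by the "tes?.host" block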
+ end + + def test_close_while_closing_should_do_nothing + test_close_should_set_closing_and_send_message + assert_nothing_raised { channel.close } + end + + def test_process_when_process_callback_is_not_set_should_just_enqueue_data + channel.expects(:enqueue_pending_output) + channel.process + end + + def test_process_when_process_callback_is_set_should_yield_self_before_enqueuing_data + channel.expects(:enqueue_pending_output).never + channel.on_process { |ch| ch.expects(:enqueue_pending_output).once } + channel.process + end + + def test_enqueue_pending_output_should_have_no_effect_if_channel_has_not_been_confirmed + channel.send_data("hello") + assert_nothing_raised { channel.enqueue_pending_output } + end + + def test_enqueue_pending_output_should_have_no_effect_if_there_is_no_output + channel.do_open_confirmation(0, 100, 100) + assert_nothing_raised { channel.enqueue_pending_output } + end + + def test_enqueue_pending_output_should_not_enqueue_more_than_output_length + channel.do_open_confirmation(0, 100, 100) + channel.send_data("hello world") + + connection.expect do |t,packet| + assert_equal CHANNEL_DATA, packet.type + assert_equal 0, packet[:local_id] + assert_equal 11, packet[:data].length + end + + channel.enqueue_pending_output + end + + def test_enqueue_pending_output_should_not_enqueue_more_than_max_packet_length_at_once + channel.do_open_confirmation(0, 100, 8) + channel.send_data("hello world") + + connection.expect do |t,packet| + assert_equal CHANNEL_DATA, packet.type + assert_equal 0, packet[:local_id] + assert_equal "hello wo", packet[:data] + + t.expect do |t2,packet2| + assert_equal CHANNEL_DATA, packet2.type + assert_equal 0, packet2[:local_id] + assert_equal "rld", packet2[:data] + end + end + + channel.enqueue_pending_output + end + + def test_enqueue_pending_output_should_not_enqueue_more_than_max_window_size + channel.do_open_confirmation(0, 8, 100) + channel.send_data("hello world") + + connection.expect do |t,packet| + assert_equal CHANNEL_DATA, packet.type + assert_equal 0, packet[:local_id] + assert_equal "hello wo", packet[:data] + end + + channel.enqueue_pending_output + end + + def test_on_data_with_block_should_set_callback + flag = false + channel.on_data { flag = !flag } + channel.do_data("") + assert(flag, "callback should have been invoked") + channel.on_data + channel.do_data("") + assert(flag, "callback should have been removed") + end + + def test_on_extended_data_with_block_should_set_callback + flag = false + channel.on_extended_data { flag = !flag } + channel.do_extended_data(0, "") + assert(flag, "callback should have been invoked") + channel.on_extended_data + channel.do_extended_data(0, "") + assert(flag, "callback should have been removed") + end + + def test_on_process_with_block_should_set_callback + flag = false + channel.on_process { flag = !flag } + channel.process + assert(flag, "callback should have been invoked") + channel.on_process + channel.process + assert(flag, "callback should have been removed") + end + + def test_on_close_with_block_should_set_callback + flag = false + channel.on_close { flag = !flag } + channel.do_close + assert(flag, "callback should have been invoked") + channel.on_close + channel.do_close + assert(flag, "callback should have been removed") + end + + def test_on_eof_with_block_should_set_callback + flag = false + channel.on_eof { flag = !flag } + channel.do_eof + assert(flag, "callback should have been invoked") + channel.on_eof + channel.do_eof + assert(flag, "callback should have been removed") + end + 
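exec, subsystem and request_pty above are thin wrappers around send_channel_request, and the on_* callbacks are what the dispatch tests later in this file exercise; a sketch of the same surface at the public API level (host and credentials are placeholders):

require 'net/ssh'

Net::SSH.start("example.test", "jamis", :password => "secret") do |ssh|
  ssh.open_channel do |ch|
    ch.request_pty                       # "pty-req" with the defaults asserted above
    ch.exec("ls") do |c, success|
      c.on_data          { |_, data| print data }
      c.on_extended_data { |_, type, data| warn data }
      c.on_close         { puts "channel closed" }
    end
  end
  ssh.loop
end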
+ def test_do_request_for_unhandled_request_should_do_nothing_if_not_wants_reply + channel.do_open_confirmation(0, 100, 100) + assert_nothing_raised { channel.do_request "exit-status", false, nil } + end + + def test_do_request_for_unhandled_request_should_send_CHANNEL_FAILURE_if_wants_reply + channel.do_open_confirmation(0, 100, 100) + connection.expect { |t,packet| assert_equal CHANNEL_FAILURE, packet.type } + channel.do_request "keepalive@openssh.com", true, nil + end + + def test_do_request_for_handled_request_should_invoke_callback_and_do_nothing_if_returns_true_and_not_wants_reply + channel.do_open_confirmation(0, 100, 100) + flag = false + channel.on_request("exit-status") { flag = true; true } + assert_nothing_raised { channel.do_request "exit-status", false, nil } + assert flag, "callback should have been invoked" + end + + def test_do_request_for_handled_request_should_invoke_callback_and_do_nothing_if_fails_and_not_wants_reply + channel.do_open_confirmation(0, 100, 100) + flag = false + channel.on_request("exit-status") { flag = true; raise Net::SSH::ChannelRequestFailed } + assert_nothing_raised { channel.do_request "exit-status", false, nil } + assert flag, "callback should have been invoked" + end + + def test_do_request_for_handled_request_should_invoke_callback_and_send_CHANNEL_SUCCESS_if_returns_true_and_wants_reply + channel.do_open_confirmation(0, 100, 100) + flag = false + channel.on_request("exit-status") { flag = true; true } + connection.expect { |t,p| assert_equal CHANNEL_SUCCESS, p.type } + assert_nothing_raised { channel.do_request "exit-status", true, nil } + assert flag, "callback should have been invoked" + end + + def test_do_request_for_handled_request_should_invoke_callback_and_send_CHANNEL_FAILURE_if_returns_false_and_wants_reply + channel.do_open_confirmation(0, 100, 100) + flag = false + channel.on_request("exit-status") { flag = true; raise Net::SSH::ChannelRequestFailed } + connection.expect { |t,p| assert_equal CHANNEL_FAILURE, p.type } + assert_nothing_raised { channel.do_request "exit-status", true, nil } + assert flag, "callback should have been invoked" + end + + def test_send_channel_request_without_callback_should_not_want_reply + channel.do_open_confirmation(0, 100, 100) + connection.expect do |t,p| + assert_equal CHANNEL_REQUEST, p.type + assert_equal 0, p[:local_id] + assert_equal "exec", p[:request] + assert_equal false, p[:want_reply] + assert_equal "ls", p[:request_data].read_string + end + channel.send_channel_request("exec", :string, "ls") + assert channel.pending_requests.empty? 
+ end + + def test_send_channel_request_with_callback_should_want_reply + channel.do_open_confirmation(0, 100, 100) + connection.expect do |t,p| + assert_equal CHANNEL_REQUEST, p.type + assert_equal 0, p[:local_id] + assert_equal "exec", p[:request] + assert_equal true, p[:want_reply] + assert_equal "ls", p[:request_data].read_string + end + callback = Proc.new {} + channel.send_channel_request("exec", :string, "ls", &callback) + assert_equal [callback], channel.pending_requests + end + + def test_do_open_confirmation_should_set_remote_parameters + channel.do_open_confirmation(1, 2, 3) + assert_equal 1, channel.remote_id + assert_equal 2, channel.remote_window_size + assert_equal 2, channel.remote_maximum_window_size + assert_equal 3, channel.remote_maximum_packet_size + end + + def test_do_open_confirmation_should_call_open_confirmation_callback + flag = false + channel { flag = true } + assert !flag, "callback should not have been invoked yet" + channel.do_open_confirmation(1,2,3) + assert flag, "callback should have been invoked" + end + + def test_do_open_confirmation_with_session_channel_should_invoke_agent_forwarding_if_agent_forwarding_requested + connection :forward_agent => true + forward = mock("forward") + forward.expects(:agent).with(channel) + connection.expects(:forward).returns(forward) + channel.do_open_confirmation(1,2,3) + end + + def test_do_open_confirmation_with_non_session_channel_should_not_invoke_agent_forwarding_even_if_agent_forwarding_requested + connection :forward_agent => true + channel :type => "direct-tcpip" + connection.expects(:forward).never + channel.do_open_confirmation(1,2,3) + end + + def test_do_window_adjust_should_adjust_remote_window_size_by_the_given_amount + channel.do_open_confirmation(0, 1000, 1000) + assert_equal 1000, channel.remote_window_size + assert_equal 1000, channel.remote_maximum_window_size + channel.do_window_adjust(500) + assert_equal 1500, channel.remote_window_size + assert_equal 1500, channel.remote_maximum_window_size + end + + def test_do_data_should_update_local_window_size + assert_equal 0x20000, channel.local_maximum_window_size + assert_equal 0x20000, channel.local_window_size + channel.do_data("here is some data") + assert_equal 0x20000, channel.local_maximum_window_size + assert_equal 0x1FFEF, channel.local_window_size + end + + def test_do_extended_data_should_update_local_window_size + assert_equal 0x20000, channel.local_maximum_window_size + assert_equal 0x20000, channel.local_window_size + channel.do_extended_data(1, "here is some data") + assert_equal 0x20000, channel.local_maximum_window_size + assert_equal 0x1FFEF, channel.local_window_size + end + + def test_do_data_when_local_window_size_drops_below_threshold_should_trigger_WINDOW_ADJUST_message + channel.do_open_confirmation(0, 1000, 1000) + assert_equal 0x20000, channel.local_maximum_window_size + assert_equal 0x20000, channel.local_window_size + + connection.expect do |t,p| + assert_equal CHANNEL_WINDOW_ADJUST, p.type + assert_equal 0, p[:local_id] + assert_equal 0x20000, p[:extra_bytes] + end + + channel.do_data("." * 0x10001) + assert_equal 0x40000, channel.local_maximum_window_size + assert_equal 0x2FFFF, channel.local_window_size + end + + def test_do_failure_should_grab_next_pending_request_and_call_it + result = nil + channel.pending_requests << Proc.new { |*args| result = args } + channel.do_failure + assert_equal [channel, false], result + assert channel.pending_requests.empty? 
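The window tests above pin down the flow-control arithmetic; restated as a sketch (MockConnection is the stub class defined at the bottom of this same test file, and the numbers are the ones the tests use):

ch = Net::SSH::Connection::Channel.new(MockConnection.new, "session", 0)
ch.do_open_confirmation(0, 1000, 1000)   # remote id, initial remote window, max packet size

ch.remote_window_size    # => 1000
ch.do_window_adjust(500)
ch.remote_window_size    # => 1500, grows by the announced amount

ch.local_window_size     # => 0x20000 to start with
ch.do_data("here is some data")
ch.local_window_size     # => 0x1FFEF, i.e. 0x20000 minus the 17 bytes received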
+ end + + def test_do_success_should_grab_next_pending_request_and_call_it + result = nil + channel.pending_requests << Proc.new { |*args| result = args } + channel.do_success + assert_equal [channel, true], result + assert channel.pending_requests.empty? + end + + def test_active_should_be_true_when_channel_appears_in_channel_list + connection.channels[channel.local_id] = channel + assert channel.active? + end + + def test_active_should_be_false_when_channel_is_not_in_channel_list + assert !channel.active? + end + + def test_wait_should_block_while_channel_is_active? + channel.expects(:active?).times(3).returns(true,true,false) + channel.wait + end + + def test_eof_bang_should_send_eof_to_server + channel.do_open_confirmation(0, 1000, 1000) + connection.expect { |t,p| assert_equal CHANNEL_EOF, p.type } + channel.eof! + end + + def test_eof_bang_should_not_send_eof_if_eof_was_already_declared + channel.do_open_confirmation(0, 1000, 1000) + connection.expect { |t,p| assert_equal CHANNEL_EOF, p.type } + channel.eof! + assert_nothing_raised { channel.eof! } + end + + def test_eof_q_should_return_true_if_eof_declared + channel.do_open_confirmation(0, 1000, 1000) + connection.expect { |t,p| assert_equal CHANNEL_EOF, p.type } + + assert !channel.eof? + channel.eof! + assert channel.eof? + end + + def test_send_data_should_raise_exception_if_eof_declared + channel.do_open_confirmation(0, 1000, 1000) + connection.expect { |t,p| assert_equal CHANNEL_EOF, p.type } + channel.eof! + assert_raises(EOFError) { channel.send_data("die! die! die!") } + end + + private + + class MockConnection + attr_reader :logger + attr_reader :options + attr_reader :channels + + def initialize(options={}) + @expectation = nil + @options = options + @channels = {} + end + + def expect(&block) + @expectation = block + end + + def send_message(msg) + raise "#{msg.to_s.inspect} recieved but no message was expected" unless @expectation + packet = Net::SSH::Packet.new(msg.to_s) + callback, @expectation = @expectation, nil + callback.call(self, packet) + end + + alias loop_forever loop + def loop(&block) + loop_forever { break unless block.call } + end + + def test! + raise "expected a packet but none were sent" if @expectation + end + end + + def connection(options={}) + @connection ||= MockConnection.new(options) + end + + def channel(options={}, &block) + @channel ||= Net::SSH::Connection::Channel.new(connection(options), + options[:type] || "session", + options[:local_id] || 0, + &block) + end + end + +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/connection/test_session.rb b/vendor/gems/gems/net-ssh-2.0.15/test/connection/test_session.rb new file mode 100644 index 00000000..05c3ad85 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/connection/test_session.rb @@ -0,0 +1,488 @@ +require 'common' +require 'net/ssh/connection/session' + +module Connection + + class TestSession < Test::Unit::TestCase + include Net::SSH::Connection::Constants + + def test_constructor_should_set_defaults + assert session.channels.empty? + assert session.pending_requests.empty? 
+ assert_equal({ socket => nil }, session.listeners) + end + + def test_on_open_channel_should_register_block_with_given_channel_type + flag = false + session.on_open_channel("testing") { flag = true } + assert_not_nil session.channel_open_handlers["testing"] + session.channel_open_handlers["testing"].call + assert flag, "callback should have been invoked" + end + + def test_forward_should_create_and_cache_instance_of_forward_service + assert_instance_of Net::SSH::Service::Forward, session.forward + assert_equal session.forward.object_id, session.forward.object_id + end + + def test_listen_to_without_callback_should_add_argument_as_listener + io = stub("io") + session.listen_to(io) + assert session.listeners.key?(io) + assert_nil session.listeners[io] + end + + def test_listen_to_should_add_argument_to_listeners_list_if_block_is_given + io = stub("io", :pending_write? => true) + flag = false + session.listen_to(io) { flag = true } + assert !flag, "callback should not be invoked immediately" + assert session.listeners.key?(io) + session.listeners[io].call + assert flag, "callback should have been invoked" + end + + def test_stop_listening_to_should_remove_argument_from_listeners + io = stub("io", :pending_write? => true) + + session.listen_to(io) + assert session.listeners.key?(io) + + session.stop_listening_to(io) + assert !session.listeners.key?(io) + end + + def test_send_message_should_enqueue_message_at_transport_layer + packet = P(:byte, REQUEST_SUCCESS) + session.send_message(packet) + assert_equal packet.to_s, socket.write_buffer + end + + def test_open_channel_defaults_should_use_session_channel + flag = false + channel = session.open_channel { flag = true } + assert !flag, "callback should not be invoked immediately" + channel.do_open_confirmation(1,2,3) + assert flag, "callback should have been invoked" + assert_equal "session", channel.type + assert_equal 0, channel.local_id + assert_equal channel, session.channels[channel.local_id] + + packet = P(:byte, CHANNEL_OPEN, :string, "session", :long, channel.local_id, + :long, channel.local_maximum_window_size, :long, channel.local_maximum_packet_size) + assert_equal packet.to_s, socket.write_buffer + end + + def test_open_channel_with_type_should_use_type + channel = session.open_channel("direct-tcpip") + assert_equal "direct-tcpip", channel.type + packet = P(:byte, CHANNEL_OPEN, :string, "direct-tcpip", :long, channel.local_id, + :long, channel.local_maximum_window_size, :long, channel.local_maximum_packet_size) + assert_equal packet.to_s, socket.write_buffer + end + + def test_open_channel_with_extras_should_append_extras_to_packet + channel = session.open_channel("direct-tcpip", :string, "other.host", :long, 1234) + packet = P(:byte, CHANNEL_OPEN, :string, "direct-tcpip", :long, channel.local_id, + :long, channel.local_maximum_window_size, :long, channel.local_maximum_packet_size, + :string, "other.host", :long, 1234) + assert_equal packet.to_s, socket.write_buffer + end + + def test_send_global_request_without_callback_should_not_expect_reply + packet = P(:byte, GLOBAL_REQUEST, :string, "testing", :bool, false) + session.send_global_request("testing") + assert_equal packet.to_s, socket.write_buffer + assert session.pending_requests.empty? 
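listen_to and stop_listening_to, tested above, let arbitrary IO objects ride along in the session's select loop; a sketch against an open session (ssh and some_io are placeholders):

ssh.listen_to(some_io) do
  some_io.read_nonblock(1024)   # serviced whenever select marks the IO readable
end

ssh.loop { ssh.busy? }          # the event loop now watches some_io as well

ssh.stop_listening_to(some_io)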
+ end + + def test_send_global_request_with_callback_should_expect_reply + packet = P(:byte, GLOBAL_REQUEST, :string, "testing", :bool, true) + proc = Proc.new {} + session.send_global_request("testing", &proc) + assert_equal packet.to_s, socket.write_buffer + assert_equal [proc], session.pending_requests + end + + def test_send_global_request_with_extras_should_append_extras_to_packet + packet = P(:byte, GLOBAL_REQUEST, :string, "testing", :bool, false, :string, "other.host", :long, 1234) + session.send_global_request("testing", :string, "other.host", :long, 1234) + assert_equal packet.to_s, socket.write_buffer + end + + def test_process_should_exit_immediately_if_block_is_false + session.channels[0] = stub("channel", :closing? => false) + session.channels[0].expects(:process).never + process_times(0) + end + + def test_process_should_exit_after_processing_if_block_is_true_then_false + session.channels[0] = stub("channel", :closing? => false) + session.channels[0].expects(:process) + IO.expects(:select).never + process_times(2) + end + + def test_process_should_not_process_channels_that_are_closing + session.channels[0] = stub("channel", :closing? => true) + session.channels[0].expects(:process).never + IO.expects(:select).never + process_times(2) + end + + def test_global_request_packets_should_be_silently_handled_if_no_handler_exists_for_them + transport.return(GLOBAL_REQUEST, :string, "testing", :bool, false) + process_times(2) + assert transport.queue.empty? + assert !socket.pending_write? + end + + def test_global_request_packets_should_be_auto_replied_to_even_if_no_handler_exists + transport.return(GLOBAL_REQUEST, :string, "testing", :bool, true) + process_times(2) + assert_equal P(:byte, REQUEST_FAILURE).to_s, socket.write_buffer + end + + def test_global_request_handler_should_not_trigger_auto_reply_if_no_reply_is_wanted + flag = false + session.on_global_request("testing") { flag = true } + assert !flag, "callback should not be invoked yet" + transport.return(GLOBAL_REQUEST, :string, "testing", :bool, false) + process_times(2) + assert transport.queue.empty? + assert !socket.pending_write? + assert flag, "callback should have been invoked" + end + + def test_global_request_handler_returning_true_should_trigger_success_auto_reply + flag = false + session.on_global_request("testing") { flag = true } + transport.return(GLOBAL_REQUEST, :string, "testing", :bool, true) + process_times(2) + assert_equal P(:byte, REQUEST_SUCCESS).to_s, socket.write_buffer + assert flag + end + + def test_global_request_handler_returning_false_should_trigger_failure_auto_reply + flag = false + session.on_global_request("testing") { flag = true; false } + transport.return(GLOBAL_REQUEST, :string, "testing", :bool, true) + process_times(2) + assert_equal P(:byte, REQUEST_FAILURE).to_s, socket.write_buffer + assert flag + end + + def test_global_request_handler_returning_sent_should_not_trigger_auto_reply + flag = false + session.on_global_request("testing") { flag = true; :sent } + transport.return(GLOBAL_REQUEST, :string, "testing", :bool, true) + process_times(2) + assert !socket.pending_write? 
+ assert flag + end + + def test_global_request_handler_returning_other_value_should_raise_error + session.on_global_request("testing") { "bug" } + transport.return(GLOBAL_REQUEST, :string, "testing", :bool, true) + assert_raises(RuntimeError) { process_times(2) } + end + + def test_request_success_packets_should_invoke_next_pending_request_with_true + result = nil + session.pending_requests << Proc.new { |*args| result = args } + transport.return(REQUEST_SUCCESS) + process_times(2) + assert_equal [true, P(:byte, REQUEST_SUCCESS)], result + assert session.pending_requests.empty? + end + + def test_request_failure_packets_should_invoke_next_pending_request_with_false + result = nil + session.pending_requests << Proc.new { |*args| result = args } + transport.return(REQUEST_FAILURE) + process_times(2) + assert_equal [false, P(:byte, REQUEST_FAILURE)], result + assert session.pending_requests.empty? + end + + def test_channel_open_packet_without_corresponding_channel_open_handler_should_result_in_channel_open_failure + transport.return(CHANNEL_OPEN, :string, "auth-agent", :long, 14, :long, 0x20000, :long, 0x10000) + process_times(2) + assert_equal P(:byte, CHANNEL_OPEN_FAILURE, :long, 14, :long, 3, :string, "unknown channel type auth-agent", :string, "").to_s, socket.write_buffer + end + + def test_channel_open_packet_with_corresponding_handler_should_result_in_channel_open_failure_when_handler_returns_an_error + transport.return(CHANNEL_OPEN, :string, "auth-agent", :long, 14, :long, 0x20000, :long, 0x10000) + session.on_open_channel "auth-agent" do |s, ch, p| + raise Net::SSH::ChannelOpenFailed.new(1234, "we iz in ur channelz!") + end + process_times(2) + assert_equal P(:byte, CHANNEL_OPEN_FAILURE, :long, 14, :long, 1234, :string, "we iz in ur channelz!", :string, "").to_s, socket.write_buffer + end + + def test_channel_open_packet_with_corresponding_handler_should_result_in_channel_open_confirmation_when_handler_succeeds + transport.return(CHANNEL_OPEN, :string, "auth-agent", :long, 14, :long, 0x20001, :long, 0x10001) + result = nil + session.on_open_channel("auth-agent") { |*args| result = args } + process_times(2) + assert_equal P(:byte, CHANNEL_OPEN_CONFIRMATION, :long, 14, :long, 0, :long, 0x20000, :long, 0x10000).to_s, socket.write_buffer + assert_not_nil(ch = session.channels[0]) + assert_equal [session, ch, P(:byte, CHANNEL_OPEN, :string, "auth-agent", :long, 14, :long, 0x20001, :long, 0x10001)], result + assert_equal 0, ch.local_id + assert_equal 14, ch.remote_id + assert_equal 0x20001, ch.remote_maximum_window_size + assert_equal 0x10001, ch.remote_maximum_packet_size + assert_equal 0x20000, ch.local_maximum_window_size + assert_equal 0x10000, ch.local_maximum_packet_size + assert_equal "auth-agent", ch.type + end + + def test_channel_open_failure_should_remove_channel_and_tell_channel_that_open_failed + session.channels[1] = stub("channel") + session.channels[1].expects(:do_open_failed).with(1234, "some reason") + transport.return(CHANNEL_OPEN_FAILURE, :long, 1, :long, 1234, :string, "some reason", :string, "lang tag") + process_times(2) + assert session.channels.empty? 
+ end + + def test_channel_open_confirmation_packet_should_be_routed_to_corresponding_channel + channel_at(14).expects(:do_open_confirmation).with(1234, 0x20001, 0x10001) + transport.return(CHANNEL_OPEN_CONFIRMATION, :long, 14, :long, 1234, :long, 0x20001, :long, 0x10001) + process_times(2) + end + + def test_channel_window_adjust_packet_should_be_routed_to_corresponding_channel + channel_at(14).expects(:do_window_adjust).with(5000) + transport.return(CHANNEL_WINDOW_ADJUST, :long, 14, :long, 5000) + process_times(2) + end + + def test_channel_request_for_nonexistant_channel_should_be_ignored + transport.return(CHANNEL_REQUEST, :long, 14, :string, "testing", :bool, false) + assert_nothing_raised { process_times(2) } + end + + def test_channel_request_packet_should_be_routed_to_corresponding_channel + channel_at(14).expects(:do_request).with("testing", false, Net::SSH::Buffer.new) + transport.return(CHANNEL_REQUEST, :long, 14, :string, "testing", :bool, false) + process_times(2) + end + + def test_channel_data_packet_should_be_routed_to_corresponding_channel + channel_at(14).expects(:do_data).with("bring it on down") + transport.return(CHANNEL_DATA, :long, 14, :string, "bring it on down") + process_times(2) + end + + def test_channel_extended_data_packet_should_be_routed_to_corresponding_channel + channel_at(14).expects(:do_extended_data).with(1, "bring it on down") + transport.return(CHANNEL_EXTENDED_DATA, :long, 14, :long, 1, :string, "bring it on down") + process_times(2) + end + + def test_channel_eof_packet_should_be_routed_to_corresponding_channel + channel_at(14).expects(:do_eof).with() + transport.return(CHANNEL_EOF, :long, 14) + process_times(2) + end + + def test_channel_success_packet_should_be_routed_to_corresponding_channel + channel_at(14).expects(:do_success).with() + transport.return(CHANNEL_SUCCESS, :long, 14) + process_times(2) + end + + def test_channel_failure_packet_should_be_routed_to_corresponding_channel + channel_at(14).expects(:do_failure).with() + transport.return(CHANNEL_FAILURE, :long, 14) + process_times(2) + end + + def test_channel_close_packet_should_be_routed_to_corresponding_channel_and_channel_should_be_closed_and_removed + channel_at(14).expects(:do_close).with() + session.channels[14].expects(:close).with() + transport.return(CHANNEL_CLOSE, :long, 14) + process_times(2) + assert session.channels.empty? + end + + def test_multiple_pending_dispatches_should_be_dispatched_together + channel_at(14).expects(:do_eof).with() + session.channels[14].expects(:do_success).with() + transport.return(CHANNEL_SUCCESS, :long, 14) + transport.return(CHANNEL_EOF, :long, 14) + process_times(2) + end + + def test_writers_without_pending_writes_should_not_be_considered_for_select + IO.expects(:select).with([socket],[],nil,nil).returns([[],[],[]]) + session.process + end + + def test_writers_with_pending_writes_should_be_considered_for_select + socket.enqueue("laksdjflasdkf") + IO.expects(:select).with([socket],[socket],nil,nil).returns([[],[],[]]) + session.process + end + + def test_ready_readers_should_be_filled + socket.expects(:recv).returns("this is some data") + IO.expects(:select).with([socket],[],nil,nil).returns([[socket],[],[]]) + session.process + assert_equal [socket], session.listeners.keys + end + + def test_ready_readers_that_cant_be_filled_should_be_removed + socket.expects(:recv).returns("") + socket.expects(:close) + IO.expects(:select).with([socket],[],nil,nil).returns([[socket],[],[]]) + session.process + assert session.listeners.empty? 
+ end + + def test_ready_readers_that_are_registered_with_a_block_should_call_block_instead_of_fill + io = stub("io", :pending_write? => false) + flag = false + session.stop_listening_to(socket) # so that we only have to test the presence of a single IO object + session.listen_to(io) { flag = true } + IO.expects(:select).with([io],[],nil,nil).returns([[io],[],[]]) + session.process + assert flag, "callback should have been invoked" + end + + def test_ready_writers_should_call_send_pending + socket.enqueue("laksdjflasdkf") + socket.expects(:send).with("laksdjflasdkf", 0).returns(13) + IO.expects(:select).with([socket],[socket],nil,nil).returns([[],[socket],[]]) + session.process + end + + def test_process_should_call_rekey_as_needed + transport.expects(:rekey_as_needed) + IO.expects(:select).with([socket],[],nil,nil).returns([[],[],[]]) + session.process + end + + def test_loop_should_call_process_until_process_returns_false + IO.stubs(:select).with([socket],[],nil,nil).returns([[],[],[]]) + session.expects(:process).with(nil).times(4).returns(true,true,true,false).yields + n = 0 + session.loop { n += 1 } + assert_equal 4, n + end + + def test_exec_should_open_channel_and_configure_default_callbacks + prep_exec("ls", :stdout, "data packet", :stderr, "extended data packet") + + call = :first + session.exec "ls" do |channel, type, data| + if call == :first + assert_equal :stdout, type + assert_equal "data packet", data + call = :second + elsif call == :second + assert_equal :stderr, type + assert_equal "extended data packet", data + call = :third + else + flunk "should never get here, call == #{call.inspect}" + end + end + + session.loop + assert_equal :third, call + end + + def test_exec_without_block_should_use_print_to_display_result + prep_exec("ls", :stdout, "data packet", :stderr, "extended data packet") + $stdout.expects(:print).with("data packet") + $stderr.expects(:print).with("extended data packet") + + session.exec "ls" + session.loop + end + + def test_exec_bang_should_block_until_command_finishes + prep_exec("ls", :stdout, "some data") + called = false + session.exec! 
"ls" do |channel, type, data| + called = true + assert_equal :stdout, type + assert_equal "some data", data + end + assert called + end + + def test_exec_bang_without_block_should_return_data_as_string + prep_exec("ls", :stdout, "some data") + assert_equal "some data", session.exec!("ls") + end + + private + + def prep_exec(command, *data) + transport.mock_enqueue = true + transport.expect do |t, p| + assert_equal CHANNEL_OPEN, p.type + t.return(CHANNEL_OPEN_CONFIRMATION, :long, p[:remote_id], :long, 0, :long, 0x20000, :long, 0x10000) + t.expect do |t2, p2| + assert_equal CHANNEL_REQUEST, p2.type + assert_equal "exec", p2[:request] + assert_equal true, p2[:want_reply] + assert_equal "ls", p2.read_string + + t2.return(CHANNEL_SUCCESS, :long, p[:remote_id]) + + 0.step(data.length-1, 2) do |index| + type = data[index] + datum = data[index+1] + + if type == :stdout + t2.return(CHANNEL_DATA, :long, p[:remote_id], :string, datum) + else + t2.return(CHANNEL_EXTENDED_DATA, :long, p[:remote_id], :long, 1, :string, datum) + end + end + + t2.return(CHANNEL_CLOSE, :long, p[:remote_id]) + t2.expect { |t3,p3| assert_equal CHANNEL_CLOSE, p3.type } + end + end + end + + module MockSocket + # so that we can easily test the contents that were enqueued, without + # worrying about all the packet stream overhead + def enqueue_packet(message) + enqueue(message.to_s) + end + end + + def socket + @socket ||= begin + socket ||= Object.new + socket.extend(Net::SSH::Transport::PacketStream) + socket.extend(MockSocket) + socket + end + end + + def channel_at(local_id) + session.channels[local_id] = stub("channel", :process => true, :closing? => false) + end + + def transport(options={}) + @transport ||= MockTransport.new(options.merge(:socket => socket)) + end + + def session(options={}) + @session ||= Net::SSH::Connection::Session.new(transport, options) + end + + def process_times(n) + i = 0 + session.process { (i += 1) < n } + end + end + +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/test_all.rb b/vendor/gems/gems/net-ssh-2.0.15/test/test_all.rb new file mode 100644 index 00000000..47305dc4 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/test_all.rb @@ -0,0 +1,8 @@ +# $ ruby -Ilib -Itest -rrubygems test/test_all.rb +# $ ruby -Ilib -Itest -rrubygems test/transport/test_server_version.rb +Dir.chdir(File.dirname(__FILE__)) do + test_files = Dir['**/test_*.rb'] + test_files = test_files.select { |f| f =~ Regexp.new(ENV['ONLY']) } if ENV['ONLY'] + test_files = test_files.reject { |f| f =~ Regexp.new(ENV['EXCEPT']) } if ENV['EXCEPT'] + test_files.each { |file| require(file) } +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/test_buffer.rb b/vendor/gems/gems/net-ssh-2.0.15/test/test_buffer.rb new file mode 100644 index 00000000..81a91302 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/test_buffer.rb @@ -0,0 +1,336 @@ +require 'common' +require 'net/ssh/buffer' + +class TestBuffer < Test::Unit::TestCase + def test_constructor_should_initialize_buffer_to_empty_by_default + buffer = new + assert buffer.empty? + assert_equal 0, buffer.position + end + + def test_constructor_with_string_should_initialize_buffer_to_the_string + buffer = new("hello") + assert !buffer.empty? 
+ assert_equal "hello", buffer.to_s + assert_equal 0, buffer.position + end + + def test_from_should_require_an_even_number_of_arguments + assert_raises(ArgumentError) { Net::SSH::Buffer.from("this") } + end + + def test_from_should_build_new_buffer_from_definition + buffer = Net::SSH::Buffer.from(:byte, 1, :long, 2, :int64, 3, :string, "4", :bool, true, :bool, false, :bignum, OpenSSL::BN.new("1234567890", 10), :raw, "something") + assert_equal "\1\0\0\0\2\0\0\0\0\0\0\0\3\0\0\0\0014\1\0\000\000\000\004I\226\002\322something", buffer.to_s + end + + def test_from_with_array_argument_should_write_multiple_of_the_given_type + buffer = Net::SSH::Buffer.from(:byte, [1,2,3,4,5]) + assert_equal "\1\2\3\4\5", buffer.to_s + end + + def test_read_without_argument_should_read_to_end + buffer = new("hello world") + assert_equal "hello world", buffer.read + assert buffer.eof? + assert_equal 11, buffer.position + end + + def test_read_with_argument_that_is_less_than_length_should_read_that_many_bytes + buffer = new "hello world" + assert_equal "hello", buffer.read(5) + assert_equal 5, buffer.position + end + + def test_read_with_argument_that_is_more_than_length_should_read_no_more_than_length + buffer = new "hello world" + assert_equal "hello world", buffer.read(500) + assert_equal 11, buffer.position + end + + def test_read_at_eof_should_return_empty_string + buffer = new "hello" + buffer.position = 5 + assert_equal "", buffer.read + end + + def test_consume_without_argument_should_resize_buffer_to_start_at_position + buffer = new "hello world" + buffer.read(5) + assert_equal 5, buffer.position + assert_equal 11, buffer.length + buffer.consume! + assert_equal 0, buffer.position + assert_equal 6, buffer.length + assert_equal " world", buffer.to_s + end + + def test_consume_with_argument_should_resize_buffer_starting_at_n + buffer = new "hello world" + assert_equal 0, buffer.position + buffer.consume!(5) + assert_equal 0, buffer.position + assert_equal 6, buffer.length + assert_equal " world", buffer.to_s + end + + def test_read_bang_should_read_and_consume_and_return_read_portion + buffer = new "hello world" + assert_equal "hello", buffer.read!(5) + assert_equal 0, buffer.position + assert_equal 6, buffer.length + assert_equal " world", buffer.to_s + end + + def test_available_should_return_length_after_position_to_end_of_string + buffer = new "hello world" + buffer.read(5) + assert_equal 6, buffer.available + end + + def test_clear_bang_should_reset_buffer_contents_and_counters + buffer = new "hello world" + buffer.read(5) + buffer.clear! 
+ assert_equal 0, buffer.length + assert_equal 0, buffer.position + assert_equal "", buffer.to_s + end + + def test_append_should_append_argument_without_changing_position_and_should_return_self + buffer = new "hello world" + buffer.read(5) + buffer.append(" again") + assert_equal 5, buffer.position + assert_equal 12, buffer.available + assert_equal 17, buffer.length + assert_equal "hello world again", buffer.to_s + end + + def test_remainder_as_buffer_should_return_a_new_buffer_filled_with_the_text_after_the_current_position + buffer = new "hello world" + buffer.read(6) + b2 = buffer.remainder_as_buffer + assert_equal 6, buffer.position + assert_equal 0, b2.position + assert_equal "world", b2.to_s + end + + def test_read_int64_should_return_8_byte_integer + buffer = new "\xff\xee\xdd\xcc\xbb\xaa\x99\x88" + assert_equal 0xffeeddccbbaa9988, buffer.read_int64 + assert_equal 8, buffer.position + end + + def test_read_int64_should_return_nil_on_partial_read + buffer = new "\0\0\0\0\0\0\0" + assert_nil buffer.read_int64 + assert buffer.eof? + end + + def test_read_long_should_return_4_byte_integer + buffer = new "\xff\xee\xdd\xcc\xbb\xaa\x99\x88" + assert_equal 0xffeeddcc, buffer.read_long + assert_equal 4, buffer.position + end + + def test_read_long_should_return_nil_on_partial_read + buffer = new "\0\0\0" + assert_nil buffer.read_long + assert buffer.eof? + end + + def test_read_byte_should_return_single_byte_integer + buffer = new "\xfe\xdc" + assert_equal 0xfe, buffer.read_byte + assert_equal 1, buffer.position + end + + def test_read_byte_should_return_nil_at_eof + assert_nil new.read_byte + end + + def test_read_string_should_read_length_and_data_from_buffer + buffer = new "\0\0\0\x0bhello world" + assert_equal "hello world", buffer.read_string + end + + def test_read_string_should_return_nil_if_4_byte_length_cannot_be_read + assert_nil new("\0\1").read_string + end + + def test_read_bool_should_return_true_if_non_zero_byte_is_read + buffer = new "\1\2\3\4\5\6" + 6.times { assert_equal true, buffer.read_bool } + end + + def test_read_bool_should_return_false_if_zero_byte_is_read + buffer = new "\0" + assert_equal false, buffer.read_bool + end + + def test_read_bool_should_return_nil_at_eof + assert_nil new.read_bool + end + + def test_read_bignum_should_read_openssl_formatted_bignum + buffer = new("\000\000\000\004I\226\002\322") + assert_equal OpenSSL::BN.new("1234567890", 10), buffer.read_bignum + end + + def test_read_bignum_should_return_nil_if_length_cannot_be_read + assert_nil new("\0\1\2").read_bignum + end + + def test_read_key_blob_should_read_dsa_keys + random_dss { |buffer| buffer.read_keyblob("ssh-dss") } + end + + def test_read_key_blob_should_read_rsa_keys + random_rsa { |buffer| buffer.read_keyblob("ssh-rsa") } + end + + def test_read_key_should_read_dsa_key_type_and_keyblob + random_dss do |buffer| + b2 = Net::SSH::Buffer.from(:string, "ssh-dss", :raw, buffer) + b2.read_key + end + end + + def test_read_key_should_read_rsa_key_type_and_keyblob + random_rsa do |buffer| + b2 = Net::SSH::Buffer.from(:string, "ssh-rsa", :raw, buffer) + b2.read_key + end + end + + def test_read_buffer_should_read_a_string_and_return_it_wrapped_in_a_buffer + buffer = new("\0\0\0\x0bhello world") + b2 = buffer.read_buffer + assert_equal 0, b2.position + assert_equal 11, b2.length + assert_equal "hello world", b2.read + end + + def test_read_to_should_return_nil_if_pattern_does_not_exist_in_buffer + buffer = new("one two three") + assert_nil buffer.read_to("\n") + end + + def 
test_read_to_should_grok_string_patterns + buffer = new("one two three") + assert_equal "one tw", buffer.read_to("tw") + assert_equal 6, buffer.position + end + + def test_read_to_should_grok_regex_patterns + buffer = new("one two three") + assert_equal "one tw", buffer.read_to(/tw/) + assert_equal 6, buffer.position + end + + def test_read_to_should_grok_fixnum_patterns + buffer = new("one two three") + assert_equal "one tw", buffer.read_to(?w) + assert_equal 6, buffer.position + end + + def test_reset_bang_should_reset_position_to_0 + buffer = new("hello world") + buffer.read(5) + assert_equal 5, buffer.position + buffer.reset! + assert_equal 0, buffer.position + end + + def test_write_should_write_arguments_directly_to_end_buffer + buffer = new("start") + buffer.write "hello", " ", "world" + assert_equal "starthello world", buffer.to_s + assert_equal 0, buffer.position + end + + def test_write_int64_should_write_arguments_as_8_byte_integers_to_end_of_buffer + buffer = new("start") + buffer.write_int64 0xffeeddccbbaa9988, 0x7766554433221100 + assert_equal "start\xff\xee\xdd\xcc\xbb\xaa\x99\x88\x77\x66\x55\x44\x33\x22\x11\x00", buffer.to_s + end + + def test_write_long_should_write_arguments_as_4_byte_integers_to_end_of_buffer + buffer = new("start") + buffer.write_long 0xffeeddcc, 0xbbaa9988 + assert_equal "start\xff\xee\xdd\xcc\xbb\xaa\x99\x88", buffer.to_s + end + + def test_write_byte_should_write_arguments_as_1_byte_integers_to_end_of_buffer + buffer = new("start") + buffer.write_byte 1, 2, 3, 4, 5 + assert_equal "start\1\2\3\4\5", buffer.to_s + end + + def test_write_bool_should_write_arguments_as_1_byte_boolean_values_to_end_of_buffer + buffer = new("start") + buffer.write_bool nil, false, true, 1, Object.new + assert_equal "start\0\0\1\1\1", buffer.to_s + end + + def test_write_bignum_should_write_arguments_as_ssh_formatted_bignum_values_to_end_of_buffer + buffer = new("start") + buffer.write_bignum OpenSSL::BN.new('1234567890', 10) + assert_equal "start\000\000\000\004I\226\002\322", buffer.to_s + end + + def test_write_dss_key_should_write_argument_to_end_of_buffer + buffer = new("start") + + key = OpenSSL::PKey::DSA.new + key.p = 0xffeeddccbbaa9988 + key.q = 0x7766554433221100 + key.g = 0xffddbb9977553311 + key.pub_key = 0xeeccaa8866442200 + + buffer.write_key(key) + assert_equal "start\0\0\0\7ssh-dss\0\0\0\011\0\xff\xee\xdd\xcc\xbb\xaa\x99\x88\0\0\0\010\x77\x66\x55\x44\x33\x22\x11\x00\0\0\0\011\0\xff\xdd\xbb\x99\x77\x55\x33\x11\0\0\0\011\0\xee\xcc\xaa\x88\x66\x44\x22\x00", buffer.to_s + end + + def test_write_rsa_key_should_write_argument_to_end_of_buffer + buffer = new("start") + + key = OpenSSL::PKey::RSA.new + key.e = 0xffeeddccbbaa9988 + key.n = 0x7766554433221100 + + buffer.write_key(key) + assert_equal "start\0\0\0\7ssh-rsa\0\0\0\011\0\xff\xee\xdd\xcc\xbb\xaa\x99\x88\0\0\0\010\x77\x66\x55\x44\x33\x22\x11\x00", buffer.to_s + end + + private + + def random_rsa + n1 = OpenSSL::BN.new(rand(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF).to_s, 10) + n2 = OpenSSL::BN.new(rand(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF).to_s, 10) + buffer = Net::SSH::Buffer.from(:bignum, [n1, n2]) + key = yield(buffer) + assert_equal "ssh-rsa", key.ssh_type + assert_equal n1, key.e + assert_equal n2, key.n + end + + def random_dss + n1 = OpenSSL::BN.new(rand(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF).to_s, 10) + n2 = OpenSSL::BN.new(rand(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF).to_s, 10) + n3 = OpenSSL::BN.new(rand(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF).to_s, 10) + n4 = 
OpenSSL::BN.new(rand(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF).to_s, 10) + buffer = Net::SSH::Buffer.from(:bignum, [n1, n2, n3, n4]) + key = yield(buffer) + assert_equal "ssh-dss", key.ssh_type + assert_equal n1, key.p + assert_equal n2, key.q + assert_equal n3, key.g + assert_equal n4, key.pub_key + end + + def new(*args) + Net::SSH::Buffer.new(*args) + end +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/test_buffered_io.rb b/vendor/gems/gems/net-ssh-2.0.15/test/test_buffered_io.rb new file mode 100644 index 00000000..6ee46ef4 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/test_buffered_io.rb @@ -0,0 +1,63 @@ +require 'common' +require 'net/ssh/buffered_io' + +class TestBufferedIo < Test::Unit::TestCase + def test_fill_should_pull_from_underlying_io + io.expects(:recv).with(8192).returns("here is some data") + assert_equal 17, io.fill + assert_equal 17, io.available + assert_equal "here is some data", io.read_available(20) + end + + def test_enqueue_should_not_write_to_underlying_io + assert !io.pending_write? + io.expects(:send).never + io.enqueue("here is some data") + assert io.pending_write? + end + + def test_send_pending_should_not_fail_when_no_writes_are_pending + assert !io.pending_write? + io.expects(:send).never + assert_nothing_raised { io.send_pending } + end + + def test_send_pending_with_pending_writes_should_write_to_underlying_io + io.enqueue("here is some data") + io.expects(:send).with("here is some data", 0).returns(17) + assert io.pending_write? + assert_nothing_raised { io.send_pending } + assert !io.pending_write? + end + + def test_wait_for_pending_sends_should_write_only_once_if_all_can_be_written_at_once + io.enqueue("here is some data") + io.expects(:send).with("here is some data", 0).returns(17) + assert io.pending_write? + assert_nothing_raised { io.wait_for_pending_sends } + assert !io.pending_write? + end + + def test_wait_for_pending_sends_should_write_multiple_times_if_first_write_was_partial + io.enqueue("here is some data") + + io.expects(:send).with("here is some data", 0).returns(10) + io.expects(:send).with("me data", 0).returns(4) + io.expects(:send).with("ata", 0).returns(3) + + IO.expects(:select).times(2).with(nil, [io]).returns([[], [io]]) + + assert_nothing_raised { io.wait_for_pending_sends } + assert !io.pending_write? 
+ end + + private + + def io + @io ||= begin + io = mock("io") + io.extend(Net::SSH::BufferedIo) + io + end + end +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/test_config.rb b/vendor/gems/gems/net-ssh-2.0.15/test/test_config.rb new file mode 100644 index 00000000..82ad815c --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/test_config.rb @@ -0,0 +1,99 @@ +require 'common' +require 'net/ssh/config' + +class TestConfig < Test::Unit::TestCase + def test_load_for_non_existant_file_should_return_empty_hash + File.expects(:readable?).with("/bogus/file").returns(false) + assert_equal({}, Net::SSH::Config.load("/bogus/file", "host.name")) + end + + def test_load_should_expand_path + expected = File.expand_path("~/.ssh/config") + File.expects(:readable?).with(expected).returns(false) + Net::SSH::Config.load("~/.ssh/config", "host.name") + end + + def test_load_with_exact_host_match_should_load_that_section + config = Net::SSH::Config.load(config(:exact_match), "test.host") + assert config['compression'] + assert config['forwardagent'] + assert_equal 1234, config['port'] + end + + def test_load_with_wild_card_matches_should_load_all_matches_with_first_match_taking_precedence + config = Net::SSH::Config.load(config(:wild_cards), "test.host") + assert_equal 1234, config['port'] + assert !config['compression'] + assert config['forwardagent'] + assert_equal %w(~/.ssh/id_dsa), config['identityfile'] + assert !config.key?('rekeylimit') + end + + def test_for_should_load_all_files_and_translate_to_net_ssh_options + config = Net::SSH::Config.for("test.host", [config(:exact_match), config(:wild_cards)]) + assert_equal 1234, config[:port] + assert config[:compression] + assert config[:forward_agent] + assert_equal %w(~/.ssh/id_dsa), config[:keys] + assert !config.key?(:rekey_limit) + end + + def test_load_with_multiple_hosts + config = Net::SSH::Config.load(config(:multihost), "test.host") + assert config['compression'] + assert_equal '2G', config['rekeylimit'] + assert_equal 1980, config['port'] + end + + def test_load_with_multiple_hosts_and_config_should_match_for_both + aconfig = Net::SSH::Config.load(config(:multihost), "test.host") + bconfig = Net::SSH::Config.load(config(:multihost), "other.host") + assert_equal aconfig['port'], bconfig['port'] + assert_equal aconfig['compression'], bconfig['compression'] + assert_equal aconfig['rekeylimit'], bconfig['rekeylimit'] + end + + def test_load_should_parse_equal_sign_delimiters + config = Net::SSH::Config.load(config(:eqsign), "test.test") + assert config['compression'] + assert_equal 1234, config['port'] + end + + def test_translate_should_correctly_translate_from_openssh_to_net_ssh_names + open_ssh = { + 'ciphers' => "a,b,c", + 'compression' => true, + 'compressionlevel' => 6, + 'connecttimeout' => 100, + 'forwardagent' => true, + 'hostbasedauthentication' => true, + 'hostkeyalgorithms' => "d,e,f", + 'identityfile' => %w(g h i), + 'macs' => "j,k,l", + 'passwordauthentication' => true, + 'port' => 1234, + 'pubkeyauthentication' => true, + 'rekeylimit' => 1024 + } + + net_ssh = Net::SSH::Config.translate(open_ssh) + + assert_equal %w(a b c), net_ssh[:encryption] + assert_equal true, net_ssh[:compression] + assert_equal 6, net_ssh[:compression_level] + assert_equal 100, net_ssh[:timeout] + assert_equal true, net_ssh[:forward_agent] + assert_equal %w(hostbased password publickey), net_ssh[:auth_methods].sort + assert_equal %w(d e f), net_ssh[:host_key] + assert_equal %w(g h i), net_ssh[:keys] + assert_equal %w(j k 
l), net_ssh[:hmac] + assert_equal 1234, net_ssh[:port] + assert_equal 1024, net_ssh[:rekey_limit] + end + + private + + def config(name) + "test/configs/#{name}" + end +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/test_key_factory.rb b/vendor/gems/gems/net-ssh-2.0.15/test/test_key_factory.rb new file mode 100644 index 00000000..e5a38bbb --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/test_key_factory.rb @@ -0,0 +1,67 @@ +require 'common' +require 'net/ssh/key_factory' + +class TestKeyFactory < Test::Unit::TestCase + def test_load_unencrypted_private_RSA_key_should_return_key + File.expects(:read).with("/key-file").returns(rsa_key.export) + assert_equal rsa_key.to_der, Net::SSH::KeyFactory.load_private_key("/key-file").to_der + end + + def test_load_unencrypted_private_DSA_key_should_return_key + File.expects(:read).with("/key-file").returns(dsa_key.export) + assert_equal dsa_key.to_der, Net::SSH::KeyFactory.load_private_key("/key-file").to_der + end + + def test_load_encrypted_private_RSA_key_should_prompt_for_password_and_return_key + File.expects(:read).with("/key-file").returns(encrypted(rsa_key, "password")) + Net::SSH::KeyFactory.expects(:prompt).with("Enter passphrase for /key-file:", false).returns("password") + assert_equal rsa_key.to_der, Net::SSH::KeyFactory.load_private_key("/key-file").to_der + end + + def test_load_encrypted_private_RSA_key_with_password_should_not_prompt_and_return_key + File.expects(:read).with("/key-file").returns(encrypted(rsa_key, "password")) + assert_equal rsa_key.to_der, Net::SSH::KeyFactory.load_private_key("/key-file", "password").to_der + end + + def test_load_encrypted_private_DSA_key_should_prompt_for_password_and_return_key + File.expects(:read).with("/key-file").returns(encrypted(dsa_key, "password")) + Net::SSH::KeyFactory.expects(:prompt).with("Enter passphrase for /key-file:", false).returns("password") + assert_equal dsa_key.to_der, Net::SSH::KeyFactory.load_private_key("/key-file").to_der + end + + def test_load_encrypted_private_DSA_key_with_password_should_not_prompt_and_return_key + File.expects(:read).with("/key-file").returns(encrypted(dsa_key, "password")) + assert_equal dsa_key.to_der, Net::SSH::KeyFactory.load_private_key("/key-file", "password").to_der + end + + def test_load_encrypted_private_key_should_give_three_tries_for_the_password_and_then_raise_exception + File.expects(:read).with("/key-file").returns(encrypted(rsa_key, "password")) + Net::SSH::KeyFactory.expects(:prompt).times(3).with("Enter passphrase for /key-file:", false).returns("passwod","passphrase","passwd") + assert_raises(OpenSSL::PKey::RSAError) { Net::SSH::KeyFactory.load_private_key("/key-file") } + end + + def test_load_public_rsa_key_should_return_key + File.expects(:read).with("/key-file").returns(public(rsa_key)) + assert_equal rsa_key.to_blob, Net::SSH::KeyFactory.load_public_key("/key-file").to_blob + end + + private + + def rsa_key + @rsa_key ||= OpenSSL::PKey::RSA.new("0@\002\001\000\002\t\000\300\030\317\2132\340 \267\002\003\001\000\001\002\t\000\236~\232\025\350Y=\341\002\005\000\352D\217\a\002\005\000\321\352\304\321\002\005\000\242\350\206%\002\005\000\270\021\217\361\002\004~\253\214j") + end + + def dsa_key + @dsa_key ||= 
OpenSSL::PKey::DSA.new("0\201\367\002\001\000\002A\000\203\316/\037u\272&J\265\003l3\315d\324h\372{\t8\252#\331_\026\006\035\270\266\255\343\353Z\302\276\335\336\306\220\375\202L\244\244J\206>\346\b\315\211\302L\246x\247u\a\376\366\345\302\016#\002\025\000\244\274\302\221Og\275/\302+\356\346\360\024\373wI\2573\361\002@\027\215\270r*\f\213\350C\245\021:\350 \006\\\376\345\022`\210b\262\3643\023XLKS\320\370\002\276\347A\nU\204\276\324\256`=\026\240\330\306J\316V\213\024\e\030\215\355\006\037q\337\356ln\002@\017\257\034\f\260\333'S\271#\237\230E\321\312\027\021\226\331\251Vj\220\305\316\036\v\266+\000\230\270\177B\003?t\a\305]e\344\261\334\023\253\323\251\223M\2175)a(\004\"lI8\312\303\307\a\002\024_\aznW\345\343\203V\326\246ua\203\376\201o\350\302\002") + end + + def encrypted(key, password) + key.export(OpenSSL::Cipher::Cipher.new("des-ede3-cbc"), password) + end + + def public(key) + result = "#{key.ssh_type} " + result << [Net::SSH::Buffer.from(:key, key).to_s].pack("m*").strip.tr("\n\r\t ", "") + result << " joe@host.test" + end +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_md5.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_md5.rb new file mode 100644 index 00000000..d7854f28 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_md5.rb @@ -0,0 +1,39 @@ +require 'common' +require 'net/ssh/transport/hmac/md5' + +module Transport; module HMAC + + class TestMD5 < Test::Unit::TestCase + def test_expected_digest_class + assert_equal OpenSSL::Digest::MD5, subject.digest_class + assert_equal OpenSSL::Digest::MD5, subject.new.digest_class + end + + def test_expected_key_length + assert_equal 16, subject.key_length + assert_equal 16, subject.new.key_length + end + + def test_expected_mac_length + assert_equal 16, subject.mac_length + assert_equal 16, subject.new.mac_length + end + + def test_expected_digest + hmac = subject.new("1234567890123456") + assert_equal "\275\345\006\307y~Oi\035<.\341\031\250<\257", hmac.digest("hello world") + end + + def test_key_should_be_truncated_to_required_length + hmac = subject.new("12345678901234567890") + assert_equal "1234567890123456", hmac.key + end + + private + + def subject + Net::SSH::Transport::HMAC::MD5 + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_md5_96.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_md5_96.rb new file mode 100644 index 00000000..575447be --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_md5_96.rb @@ -0,0 +1,25 @@ +require 'common' +require 'transport/hmac/test_md5' +require 'net/ssh/transport/hmac/md5_96' + +module Transport; module HMAC + + class TestMD5_96 < TestMD5 + def test_expected_mac_length + assert_equal 12, subject.mac_length + assert_equal 12, subject.new.mac_length + end + + def test_expected_digest + hmac = subject.new("1234567890123456") + assert_equal "\275\345\006\307y~Oi\035<.\341", hmac.digest("hello world") + end + + private + + def subject + Net::SSH::Transport::HMAC::MD5_96 + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_none.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_none.rb new file mode 100644 index 00000000..7b49fe8a --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_none.rb @@ -0,0 +1,34 @@ +require 'common' +require 'net/ssh/transport/hmac/none' + +module Transport; module HMAC + + 
class TestNone < Test::Unit::TestCase + def test_expected_digest_class + assert_equal nil, subject.digest_class + assert_equal nil, subject.new.digest_class + end + + def test_expected_key_length + assert_equal 0, subject.key_length + assert_equal 0, subject.new.key_length + end + + def test_expected_mac_length + assert_equal 0, subject.mac_length + assert_equal 0, subject.new.mac_length + end + + def test_expected_digest + hmac = subject.new("1234567890123456") + assert_equal "", hmac.digest("hello world") + end + + private + + def subject + Net::SSH::Transport::HMAC::None + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_sha1.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_sha1.rb new file mode 100644 index 00000000..4be100a8 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_sha1.rb @@ -0,0 +1,34 @@ +require 'common' +require 'net/ssh/transport/hmac/sha1' + +module Transport; module HMAC + + class TestSHA1 < Test::Unit::TestCase + def test_expected_digest_class + assert_equal OpenSSL::Digest::SHA1, subject.digest_class + assert_equal OpenSSL::Digest::SHA1, subject.new.digest_class + end + + def test_expected_key_length + assert_equal 20, subject.key_length + assert_equal 20, subject.new.key_length + end + + def test_expected_mac_length + assert_equal 20, subject.mac_length + assert_equal 20, subject.new.mac_length + end + + def test_expected_digest + hmac = subject.new("1234567890123456") + assert_equal "\000\004W\202\204+&\335\311\251P\266\250\214\276\206;\022U\365", hmac.digest("hello world") + end + + private + + def subject + Net::SSH::Transport::HMAC::SHA1 + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_sha1_96.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_sha1_96.rb new file mode 100644 index 00000000..866e15a2 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/hmac/test_sha1_96.rb @@ -0,0 +1,25 @@ +require 'common' +require 'transport/hmac/test_sha1' +require 'net/ssh/transport/hmac/sha1_96' + +module Transport; module HMAC + + class TestSHA1_96 < TestSHA1 + def test_expected_mac_length + assert_equal 12, subject.mac_length + assert_equal 12, subject.new.mac_length + end + + def test_expected_digest + hmac = subject.new("1234567890123456") + assert_equal "\000\004W\202\204+&\335\311\251P\266", hmac.digest("hello world") + end + + private + + def subject + Net::SSH::Transport::HMAC::SHA1_96 + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/kex/test_diffie_hellman_group1_sha1.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/kex/test_diffie_hellman_group1_sha1.rb new file mode 100644 index 00000000..619b7162 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/kex/test_diffie_hellman_group1_sha1.rb @@ -0,0 +1,146 @@ +require 'common' +require 'net/ssh/transport/kex/diffie_hellman_group1_sha1' +require 'ostruct' + +module Transport; module Kex + + class TestDiffieHellmanGroup1SHA1 < Test::Unit::TestCase + include Net::SSH::Transport::Constants + + def setup + @dh_options = @dh = @algorithms = @connection = @server_key = + @packet_data = @shared_secret = nil + end + + def test_exchange_keys_should_return_expected_results_when_successful + result = exchange! 
+ assert_equal session_id, result[:session_id] + assert_equal server_key.to_blob, result[:server_key].to_blob + assert_equal shared_secret, result[:shared_secret] + assert_equal OpenSSL::Digest::SHA1, result[:hashing_algorithm] + end + + def test_exchange_keys_with_unverifiable_host_should_raise_exception + connection.verifier { false } + assert_raises(Net::SSH::Exception) { exchange! } + end + + def test_exchange_keys_with_signature_key_type_mismatch_should_raise_exception + assert_raises(Net::SSH::Exception) { exchange! :key_type => "ssh-dss" } + end + + def test_exchange_keys_with_host_key_type_mismatch_should_raise_exception + algorithms :host_key => "ssh-dss" + assert_raises(Net::SSH::Exception) { exchange! :key_type => "ssh-dss" } + end + + def test_exchange_keys_when_server_signature_could_not_be_verified_should_raise_exception + @signature = "1234567890" + assert_raises(Net::SSH::Exception) { exchange! } + end + + def test_exchange_keys_should_pass_expected_parameters_to_host_key_verifier + verified = false + connection.verifier do |data| + verified = true + assert_equal server_key.to_blob, data[:key].to_blob + + blob = b(:key, data[:key]).to_s + fingerprint = OpenSSL::Digest::MD5.hexdigest(blob).scan(/../).join(":") + + assert_equal blob, data[:key_blob] + assert_equal fingerprint, data[:fingerprint] + assert_equal connection, data[:session] + + true + end + + assert_nothing_raised { exchange! } + assert verified + end + + private + + def exchange!(options={}) + connection.expect do |t, buffer| + assert_equal KEXDH_INIT, buffer.type + assert_equal dh.dh.pub_key, buffer.read_bignum + t.return(KEXDH_REPLY, :string, b(:key, server_key), :bignum, server_dh_pubkey, :string, b(:string, options[:key_type] || "ssh-rsa", :string, signature)) + connection.expect do |t2, buffer2| + assert_equal NEWKEYS, buffer2.type + t2.return(NEWKEYS) + end + end + + dh.exchange_keys + end + + def dh_options(options={}) + @dh_options = options + end + + def dh + @dh ||= subject.new(algorithms, connection, packet_data.merge(:need_bytes => 20).merge(@dh_options || {})) + end + + def algorithms(options={}) + @algorithms ||= OpenStruct.new(:host_key => options[:host_key] || "ssh-rsa") + end + + def connection + @connection ||= MockTransport.new + end + + def subject + Net::SSH::Transport::Kex::DiffieHellmanGroup1SHA1 + end + + # 368 bits is the smallest possible key that will work with this, so + # we use it for speed reasons + def server_key(bits=368) + @server_key ||= OpenSSL::PKey::RSA.new(bits) + end + + def packet_data + @packet_data ||= { :client_version_string => "client version string", + :server_version_string => "server version string", + :server_algorithm_packet => "server algorithm packet", + :client_algorithm_packet => "client algorithm packet" } + end + + def server_dh_pubkey + @server_dh_pubkey ||= bn(1234567890) + end + + def shared_secret + @shared_secret ||= OpenSSL::BN.new(dh.dh.compute_key(server_dh_pubkey), 2) + end + + def session_id + @session_id ||= begin + buffer = Net::SSH::Buffer.from(:string, packet_data[:client_version_string], + :string, packet_data[:server_version_string], + :string, packet_data[:client_algorithm_packet], + :string, packet_data[:server_algorithm_packet], + :string, Net::SSH::Buffer.from(:key, server_key), + :bignum, dh.dh.pub_key, + :bignum, server_dh_pubkey, + :bignum, shared_secret) + OpenSSL::Digest::SHA1.digest(buffer.to_s) + end + end + + def signature + @signature ||= server_key.ssh_do_sign(session_id) + end + + def bn(number, base=10) + 
OpenSSL::BN.new(number.to_s, base) + end + + def b(*args) + Net::SSH::Buffer.from(*args) + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/kex/test_diffie_hellman_group_exchange_sha1.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/kex/test_diffie_hellman_group_exchange_sha1.rb new file mode 100644 index 00000000..dc5a2bc1 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/kex/test_diffie_hellman_group_exchange_sha1.rb @@ -0,0 +1,92 @@ +require 'common' +require 'transport/kex/test_diffie_hellman_group1_sha1' +require 'net/ssh/transport/kex/diffie_hellman_group_exchange_sha1' + +module Transport; module Kex + + class TestDiffieHellmanGroupExchangeSHA1 < TestDiffieHellmanGroup1SHA1 + KEXDH_GEX_GROUP = 31 + KEXDH_GEX_INIT = 32 + KEXDH_GEX_REPLY = 33 + KEXDH_GEX_REQUEST = 34 + + def test_exchange_with_fewer_than_minimum_bits_uses_minimum_bits + dh_options :need_bytes => 20 + assert_equal 1024, need_bits + assert_nothing_raised { exchange! } + end + + def test_exchange_with_fewer_than_maximum_bits_uses_need_bits + dh_options :need_bytes => 500 + need_bits(4000) + assert_nothing_raised { exchange! } + end + + def test_exchange_with_more_than_maximum_bits_uses_maximum_bits + dh_options :need_bytes => 2000 + need_bits(8192) + assert_nothing_raised { exchange! } + end + + def test_that_p_and_g_are_provided_by_the_server + assert_nothing_raised { exchange! :p => default_p+2, :g => 3 } + assert_equal default_p+2, dh.dh.p + assert_equal 3, dh.dh.g + end + + private + + def need_bits(bits=1024) + @need_bits ||= bits + end + + def default_p + 142326151570335518660743995281621698377057354949884468943021767573608899048361360422513557553514790045512299468953431585300812548859419857171094366358158903433167915517332113861059747425408670144201099811846875730766487278261498262568348338476437200556998366087779709990807518291581860338635288400119315130179 + end + + def exchange!(options={}) + connection.expect do |t, buffer| + assert_equal KEXDH_GEX_REQUEST, buffer.type + assert_equal 1024, buffer.read_long + assert_equal need_bits, buffer.read_long + assert_equal 8192, buffer.read_long + t.return(KEXDH_GEX_GROUP, :bignum, bn(options[:p] || default_p), :bignum, bn(options[:g] || 2)) + t.expect do |t2, buffer2| + assert_equal KEXDH_GEX_INIT, buffer2.type + assert_equal dh.dh.pub_key, buffer2.read_bignum + t2.return(KEXDH_GEX_REPLY, :string, b(:key, server_key), :bignum, server_dh_pubkey, :string, b(:string, options[:key_type] || "ssh-rsa", :string, signature)) + t2.expect do |t3, buffer3| + assert_equal NEWKEYS, buffer3.type + t3.return(NEWKEYS) + end + end + end + + dh.exchange_keys + end + + def subject + Net::SSH::Transport::Kex::DiffieHellmanGroupExchangeSHA1 + end + + def session_id + @session_id ||= begin + buffer = Net::SSH::Buffer.from(:string, packet_data[:client_version_string], + :string, packet_data[:server_version_string], + :string, packet_data[:client_algorithm_packet], + :string, packet_data[:server_algorithm_packet], + :string, Net::SSH::Buffer.from(:key, server_key), + :long, 1024, + :long, need_bits, # need bits, figure this part out, + :long, 8192, + :bignum, dh.dh.p, + :bignum, dh.dh.g, + :bignum, dh.dh.pub_key, + :bignum, server_dh_pubkey, + :bignum, shared_secret) + OpenSSL::Digest::SHA1.digest(buffer.to_s) + end + end + end + +end; end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_algorithms.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_algorithms.rb 
new file mode 100644 index 00000000..4a864ec4 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_algorithms.rb @@ -0,0 +1,302 @@ +require 'common' +require 'net/ssh/transport/algorithms' + +module Transport + + class TestAlgorithms < Test::Unit::TestCase + include Net::SSH::Transport::Constants + + def test_allowed_packets + (0..255).each do |type| + packet = stub("packet", :type => type) + case type + when 1..4, 6..19, 21..49 then assert(Net::SSH::Transport::Algorithms.allowed_packet?(packet), "#{type} should be allowed during key exchange") + else assert(!Net::SSH::Transport::Algorithms.allowed_packet?(packet), "#{type} should not be allowed during key exchange") + end + end + end + + def test_constructor_should_build_default_list_of_preferred_algorithms + assert_equal %w(ssh-rsa ssh-dss), algorithms[:host_key] + assert_equal %w(diffie-hellman-group-exchange-sha1 diffie-hellman-group1-sha1), algorithms[:kex] + assert_equal %w(aes128-cbc 3des-cbc blowfish-cbc cast128-cbc aes192-cbc aes256-cbc rijndael-cbc@lysator.liu.se idea-cbc none arcfour128 arcfour256), algorithms[:encryption] + assert_equal %w(hmac-sha1 hmac-md5 hmac-sha1-96 hmac-md5-96 none), algorithms[:hmac] + assert_equal %w(none zlib@openssh.com zlib), algorithms[:compression] + assert_equal %w(), algorithms[:language] + end + + def test_constructor_should_set_client_and_server_prefs_identically + %w(encryption hmac compression language).each do |key| + assert_equal algorithms[key.to_sym], algorithms[:"#{key}_client"], key + assert_equal algorithms[key.to_sym], algorithms[:"#{key}_server"], key + end + end + + def test_constructor_with_preferred_host_key_type_should_put_preferred_host_key_type_first + assert_equal %w(ssh-dss ssh-rsa), algorithms(:host_key => "ssh-dss")[:host_key] + end + + def test_constructor_with_known_hosts_reporting_known_host_key_should_use_that_host_key_type + Net::SSH::KnownHosts.expects(:search_for).with("net.ssh.test,127.0.0.1", {}).returns([stub("key", :ssh_type => "ssh-dss")]) + assert_equal %w(ssh-dss ssh-rsa), algorithms[:host_key] + end + + def test_constructor_with_unrecognized_host_key_type_should_raise_exception + assert_raises(NotImplementedError) { algorithms(:host_key => "bogus") } + end + + def test_constructor_with_preferred_kex_should_put_preferred_kex_first + assert_equal %w(diffie-hellman-group1-sha1 diffie-hellman-group-exchange-sha1), algorithms(:kex => "diffie-hellman-group1-sha1")[:kex] + end + + def test_constructor_with_unrecognized_kex_should_raise_exception + assert_raises(NotImplementedError) { algorithms(:kex => "bogus") } + end + + def test_constructor_with_preferred_encryption_should_put_preferred_encryption_first + assert_equal %w(aes256-cbc aes128-cbc 3des-cbc blowfish-cbc cast128-cbc aes192-cbc rijndael-cbc@lysator.liu.se idea-cbc none arcfour128 arcfour256), algorithms(:encryption => "aes256-cbc")[:encryption] + end + + def test_constructor_with_multiple_preferred_encryption_should_put_all_preferred_encryption_first + assert_equal %w(aes256-cbc 3des-cbc idea-cbc aes128-cbc blowfish-cbc cast128-cbc aes192-cbc rijndael-cbc@lysator.liu.se none arcfour128 arcfour256), algorithms(:encryption => %w(aes256-cbc 3des-cbc idea-cbc))[:encryption] + end + + def test_constructor_with_unrecognized_encryption_should_raise_exception + assert_raises(NotImplementedError) { algorithms(:encryption => "bogus") } + end + + def test_constructor_with_preferred_hmac_should_put_preferred_hmac_first + assert_equal %w(hmac-md5-96 hmac-sha1 hmac-md5 hmac-sha1-96 none), 
algorithms(:hmac => "hmac-md5-96")[:hmac] + end + + def test_constructor_with_multiple_preferred_hmac_should_put_all_preferred_hmac_first + assert_equal %w(hmac-md5-96 hmac-sha1-96 hmac-sha1 hmac-md5 none), algorithms(:hmac => %w(hmac-md5-96 hmac-sha1-96))[:hmac] + end + + def test_constructor_with_unrecognized_hmac_should_raise_exception + assert_raises(NotImplementedError) { algorithms(:hmac => "bogus") } + end + + def test_constructor_with_preferred_compression_should_put_preferred_compression_first + assert_equal %w(zlib none zlib@openssh.com), algorithms(:compression => "zlib")[:compression] + end + + def test_constructor_with_multiple_preferred_compression_should_put_all_preferred_compression_first + assert_equal %w(zlib@openssh.com zlib none), algorithms(:compression => %w(zlib@openssh.com zlib))[:compression] + end + + def test_constructor_with_general_preferred_compression_should_put_none_last + assert_equal %w(zlib@openssh.com zlib none), algorithms(:compression => true)[:compression] + end + + def test_constructor_with_unrecognized_compression_should_raise_exception + assert_raises(NotImplementedError) { algorithms(:compression => "bogus") } + end + + def test_initial_state_should_be_neither_pending_nor_initialized + assert !algorithms.pending? + assert !algorithms.initialized? + end + + def test_key_exchange_when_initiated_by_server + transport.expect do |t, buffer| + assert_kexinit(buffer) + install_mock_key_exchange(buffer) + end + + install_mock_algorithm_lookups + algorithms.accept_kexinit(kexinit) + + assert_exchange_results + end + + def test_key_exchange_when_initiated_by_client + state = nil + transport.expect do |t, buffer| + assert_kexinit(buffer) + state = :sent_kexinit + install_mock_key_exchange(buffer) + end + + algorithms.rekey! + assert_equal state, :sent_kexinit + assert algorithms.pending? + + install_mock_algorithm_lookups + algorithms.accept_kexinit(kexinit) + + assert_exchange_results + end + + def test_key_exchange_when_server_does_not_support_preferred_kex_should_fallback_to_secondary + kexinit :kex => "diffie-hellman-group1-sha1" + transport.expect do |t,buffer| + assert_kexinit(buffer) + install_mock_key_exchange(buffer, :kex => Net::SSH::Transport::Kex::DiffieHellmanGroup1SHA1) + end + algorithms.accept_kexinit(kexinit) + end + + def test_key_exchange_when_server_does_not_support_any_preferred_kex_should_raise_error + kexinit :kex => "something-obscure" + transport.expect { |t,buffer| assert_kexinit(buffer) } + assert_raises(Net::SSH::Exception) { algorithms.accept_kexinit(kexinit) } + end + + def test_allow_when_not_pending_should_be_true_for_all_packets + (0..255).each do |type| + packet = stub("packet", :type => type) + assert algorithms.allow?(packet), type + end + end + + def test_allow_when_pending_should_be_true_only_for_packets_valid_during_key_exchange + transport.expect! + algorithms.rekey! + assert algorithms.pending? 
+ + (0..255).each do |type| + packet = stub("packet", :type => type) + case type + when 1..4, 6..19, 21..49 then assert(algorithms.allow?(packet), "#{type} should be allowed during key exchange") + else assert(!algorithms.allow?(packet), "#{type} should not be allowed during key exchange") + end + end + end + + def test_exchange_with_zlib_compression_enabled_sets_compression_to_standard + algorithms :compression => "zlib" + + transport.expect do |t, buffer| + assert_kexinit(buffer, :compression_client => "zlib,none,zlib@openssh.com", :compression_server => "zlib,none,zlib@openssh.com") + install_mock_key_exchange(buffer) + end + + install_mock_algorithm_lookups + algorithms.accept_kexinit(kexinit) + + assert_equal :standard, transport.client_options[:compression] + assert_equal :standard, transport.server_options[:compression] + end + + def test_exchange_with_zlib_at_openssh_dot_com_compression_enabled_sets_compression_to_delayed + algorithms :compression => "zlib@openssh.com" + + transport.expect do |t, buffer| + assert_kexinit(buffer, :compression_client => "zlib@openssh.com,none,zlib", :compression_server => "zlib@openssh.com,none,zlib") + install_mock_key_exchange(buffer) + end + + install_mock_algorithm_lookups + algorithms.accept_kexinit(kexinit) + + assert_equal :delayed, transport.client_options[:compression] + assert_equal :delayed, transport.server_options[:compression] + end + + private + + def install_mock_key_exchange(buffer, options={}) + kex = options[:kex] || Net::SSH::Transport::Kex::DiffieHellmanGroupExchangeSHA1 + + Net::SSH::Transport::Kex::MAP.each do |name, klass| + next if klass == kex + klass.expects(:new).never + end + + kex.expects(:new). + with(algorithms, transport, + :client_version_string => Net::SSH::Transport::ServerVersion::PROTO_VERSION, + :server_version_string => transport.server_version.version, + :server_algorithm_packet => kexinit.to_s, + :client_algorithm_packet => buffer.to_s, + :need_bytes => 20, + :logger => nil). + returns(stub("kex", :exchange_keys => { :shared_secret => shared_secret, :session_id => session_id, :hashing_algorithm => hashing_algorithm })) + end + + def install_mock_algorithm_lookups(options={}) + Net::SSH::Transport::CipherFactory.expects(:get). + with(options[:client_cipher] || "aes128-cbc", :iv => key("A"), :key => key("C"), :shared => shared_secret.to_ssh, :hash => session_id, :digester => hashing_algorithm, :encrypt => true). + returns(:client_cipher) + Net::SSH::Transport::CipherFactory.expects(:get). + with(options[:server_cipher] || "aes128-cbc", :iv => key("B"), :key => key("D"), :shared => shared_secret.to_ssh, :hash => session_id, :digester => hashing_algorithm, :decrypt => true). 
+ returns(:server_cipher) + + Net::SSH::Transport::HMAC.expects(:get).with(options[:client_hmac] || "hmac-sha1", key("E")).returns(:client_hmac) + Net::SSH::Transport::HMAC.expects(:get).with(options[:server_hmac] || "hmac-sha1", key("F")).returns(:server_hmac) + end + + def shared_secret + @shared_secret ||= OpenSSL::BN.new("1234567890", 10) + end + + def session_id + @session_id ||= "this is the session id" + end + + def hashing_algorithm + OpenSSL::Digest::SHA1 + end + + def key(salt) + hashing_algorithm.digest(shared_secret.to_ssh + session_id + salt + session_id) + end + + def cipher(type, options={}) + Net::SSH::Transport::CipherFactory.get(type, options) + end + + def kexinit(options={}) + @kexinit ||= P(:byte, KEXINIT, + :long, rand(0xFFFFFFFF), :long, rand(0xFFFFFFFF), :long, rand(0xFFFFFFFF), :long, rand(0xFFFFFFFF), + :string, options[:kex] || "diffie-hellman-group-exchange-sha1,diffie-hellman-group1-sha1", + :string, options[:host_key] || "ssh-rsa,ssh-dss", + :string, options[:encryption_client] || "aes128-cbc,3des-cbc,blowfish-cbc,cast128-cbc,aes192-cbc,aes256-cbc,rijndael-cbc@lysator.liu.se,idea-cbc", + :string, options[:encryption_server] || "aes128-cbc,3des-cbc,blowfish-cbc,cast128-cbc,aes192-cbc,aes256-cbc,rijndael-cbc@lysator.liu.se,idea-cbc", + :string, options[:hmac_client] || "hmac-sha1,hmac-md5,hmac-sha1-96,hmac-md5-96", + :string, options[:hmac_server] || "hmac-sha1,hmac-md5,hmac-sha1-96,hmac-md5-96", + :string, options[:compmression_client] || "none,zlib@openssh.com,zlib", + :string, options[:compmression_server] || "none,zlib@openssh.com,zlib", + :string, options[:language_client] || "", + :string, options[:langauge_server] || "", + :bool, options[:first_kex_follows]) + end + + def assert_kexinit(buffer, options={}) + assert_equal KEXINIT, buffer.type + assert_equal 16, buffer.read(16).length + assert_equal options[:kex] || "diffie-hellman-group-exchange-sha1,diffie-hellman-group1-sha1", buffer.read_string + assert_equal options[:host_key] || "ssh-rsa,ssh-dss", buffer.read_string + assert_equal options[:encryption_client] || "aes128-cbc,3des-cbc,blowfish-cbc,cast128-cbc,aes192-cbc,aes256-cbc,rijndael-cbc@lysator.liu.se,idea-cbc,none,arcfour128,arcfour256", buffer.read_string + assert_equal options[:encryption_server] || "aes128-cbc,3des-cbc,blowfish-cbc,cast128-cbc,aes192-cbc,aes256-cbc,rijndael-cbc@lysator.liu.se,idea-cbc,none,arcfour128,arcfour256", buffer.read_string + assert_equal options[:hmac_client] || "hmac-sha1,hmac-md5,hmac-sha1-96,hmac-md5-96,none", buffer.read_string + assert_equal options[:hmac_server] || "hmac-sha1,hmac-md5,hmac-sha1-96,hmac-md5-96,none", buffer.read_string + assert_equal options[:compression_client] || "none,zlib@openssh.com,zlib", buffer.read_string + assert_equal options[:compression_server] || "none,zlib@openssh.com,zlib", buffer.read_string + assert_equal options[:language_client] || "", buffer.read_string + assert_equal options[:language_server] || "", buffer.read_string + assert_equal options[:first_kex_follows] || false, buffer.read_bool + end + + def assert_exchange_results + assert algorithms.initialized? + assert !algorithms.pending? 
+ assert !transport.client_options[:compression] + assert !transport.server_options[:compression] + assert_equal :client_cipher, transport.client_options[:cipher] + assert_equal :server_cipher, transport.server_options[:cipher] + assert_equal :client_hmac, transport.client_options[:hmac] + assert_equal :server_hmac, transport.server_options[:hmac] + end + + def algorithms(options={}) + @algorithms ||= Net::SSH::Transport::Algorithms.new(transport, options) + end + + def transport + @transport ||= MockTransport.new + end + end + +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_cipher_factory.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_cipher_factory.rb new file mode 100644 index 00000000..bdb0f4d6 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_cipher_factory.rb @@ -0,0 +1,213 @@ +require 'common' +require 'net/ssh/transport/cipher_factory' + +module Transport + + class TestCipherFactory < Test::Unit::TestCase + def self.if_supported?(name) + yield if Net::SSH::Transport::CipherFactory.supported?(name) + end + + def test_lengths_for_none + assert_equal [0,0], factory.get_lengths("none") + assert_equal [0,0], factory.get_lengths("bogus") + end + + def test_lengths_for_blowfish_cbc + assert_equal [16,8], factory.get_lengths("blowfish-cbc") + end + + if_supported?("idea-cbc") do + def test_lengths_for_idea_cbc + assert_equal [16,8], factory.get_lengths("idea-cbc") + end + end + + def test_lengths_for_rijndael_cbc + assert_equal [32,16], factory.get_lengths("rijndael-cbc@lysator.liu.se") + end + + def test_lengths_for_cast128_cbc + assert_equal [16,8], factory.get_lengths("cast128-cbc") + end + + def test_lengths_for_3des_cbc + assert_equal [24,8], factory.get_lengths("3des-cbc") + end + + def test_lengths_for_aes192_cbc + assert_equal [24,16], factory.get_lengths("aes192-cbc") + end + + def test_lengths_for_aes128_cbc + assert_equal [16,16], factory.get_lengths("aes128-cbc") + end + + def test_lengths_for_aes256_cbc + assert_equal [32,16], factory.get_lengths("aes256-cbc") + end + + def test_lengths_for_arcfour128 + assert_equal [16,8], factory.get_lengths("arcfour128") + end + + def test_lengths_for_arcfour256 + assert_equal [32,8], factory.get_lengths("arcfour256") + end + + def test_lengths_for_arcfour512 + assert_equal [64,8], factory.get_lengths("arcfour512") + end + + BLOWFISH = "\210\021\200\315\240_\026$\352\204g\233\244\242x\332e\370\001\327\224Nv@9_\323\037\252kb\037\036\237\375]\343/y\037\237\312Q\f7]\347Y\005\275%\377\0010$G\272\250B\265Nd\375\342\372\025r6}+Y\213y\n\237\267\\\374^\346BdJ$\353\220Ik\023<\236&H\277=\225" + + def test_blowfish_cbc_for_encryption + assert_equal BLOWFISH, encrypt("blowfish-cbc") + end + + def test_blowfish_cbc_for_decryption + assert_equal TEXT, decrypt("blowfish-cbc", BLOWFISH) + end + + if_supported?("idea-cbc") do + IDEA = "W\234\017G\231\b\357\370H\b\256U]\343M\031k\233]~\023C\363\263\177\262-\261\341$\022\376mv\217\322\b\2763\270H\306\035\343z\313\312\3531\351\t\201\302U\022\360\300\354ul7$z\320O]\360g\024\305\005`V\005\335A\351\312\270c\320D\232\eQH1\340\265\2118\031g*\303v" + + def test_idea_cbc_for_encryption + assert_equal IDEA, encrypt("idea-cbc") + end + + def test_idea_cbc_for_decryption + assert_equal TEXT, decrypt("idea-cbc", IDEA) + end + end + + RIJNDAEL = 
"$\253\271\255\005Z\354\336&\312\324\221\233\307Mj\315\360\310Fk\241EfN\037\231\213\361{'\310\204\347I\343\271\005\240`\325;\034\346uM>#\241\231C`\374\261\vo\226;Z\302:\b\250\366T\330\\#V\330\340\226\363\374!\bm\266\232\207!\232\347\340\t\307\370\356z\236\343=v\210\206y" + + def test_rijndael_cbc_for_encryption + assert_equal RIJNDAEL, encrypt("rijndael-cbc@lysator.liu.se") + end + + def test_rijndael_cbc_for_decryption + assert_equal TEXT, decrypt("rijndael-cbc@lysator.liu.se", RIJNDAEL) + end + + CAST128 = "qW\302\331\333P\223t[9 ~(sg\322\271\227\272\022I\223\373p\255>k\326\314\260\2003\236C_W\211\227\373\205>\351\334\322\227\223\e\236\202Ii\032!P\214\035:\017\360h7D\371v\210\264\317\236a\262w1\2772\023\036\331\227\240:\f/X\351\324I\t[x\350\323E\2301\016m" + + def test_cast128_cbc_for_encryption + assert_equal CAST128, encrypt("cast128-cbc") + end + + def test_cast128_cbc_for_decryption + assert_equal TEXT, decrypt("cast128-cbc", CAST128) + end + + TRIPLE_DES = "\322\252\216D\303Q\375gg\367A{\177\313\3436\272\353%\223K?\257\206|\r&\353/%\340\336 \203E8rY\206\234\004\274\267\031\233T/{\"\227/B!i?[qGaw\306T\206\223\213n \212\032\244%]@\355\250\334\312\265E\251\017\361\270\357\230\274KP&^\031r+r%\370" + + def test_3des_cbc_for_encryption + assert_equal TRIPLE_DES, encrypt("3des-cbc") + end + + def test_3des_cbc_for_decryption + assert_equal TEXT, decrypt("3des-cbc", TRIPLE_DES) + end + + AES128 = "k\026\350B\366-k\224\313\3277}B\035\004\200\035\r\233\024$\205\261\231Q\2214r\245\250\360\315\237\266hg\262C&+\321\346Pf\267v\376I\215P\327\345-\232&HK\375\326_\030<\a\276\212\303g\342C\242O\233\260\006\001a&V\345`\\T\e\236.\207\223l\233ri^\v\252\363\245" + + def test_aes128_cbc_for_encryption + assert_equal AES128, encrypt("aes128-cbc") + end + + def test_aes128_cbc_for_decryption + assert_equal TEXT, decrypt("aes128-cbc", AES128) + end + + AES192 = "\256\017)x\270\213\336\303L\003f\235'jQ\3231k9\225\267\242\364C4\370\224\201\302~\217I\202\374\2167='\272\037\225\223\177Y\r\212\376(\275\n\3553\377\177\252C\254\236\016MA\274Z@H\331<\rL\317\205\323[\305X8\376\237=\374\352bH9\244\0231\353\204\352p\226\326~J\242" + + def test_aes192_cbc_for_encryption + assert_equal AES192, encrypt("aes192-cbc") + end + + def test_aes192_cbc_for_decryption + assert_equal TEXT, decrypt("aes192-cbc", AES192) + end + + AES256 = "$\253\271\255\005Z\354\336&\312\324\221\233\307Mj\315\360\310Fk\241EfN\037\231\213\361{'\310\204\347I\343\271\005\240`\325;\034\346uM>#\241\231C`\374\261\vo\226;Z\302:\b\250\366T\330\\#V\330\340\226\363\374!\bm\266\232\207!\232\347\340\t\307\370\356z\236\343=v\210\206y" + + def test_aes256_cbc_for_encryption + assert_equal AES256, encrypt("aes256-cbc") + end + + def test_aes256_cbc_for_decryption + assert_equal TEXT, decrypt("aes256-cbc", AES256) + end + + ARCFOUR128 = "\n\x90\xED*\xD4\xBE\xCBg5\xA5\a\xEC]\x97\xB7L\x06)6\x12FL\x90@\xF4Sqxqh\r\x11\x1Aq \xC8\xE6v\xC6\x12\xD9 "ABC", + :key => "abc", + :digester => OpenSSL::Digest::MD5, + :shared => "1234567890123456780", + :hash => '!@#$%#$^%$&^&%#$@$' + } + + def factory + Net::SSH::Transport::CipherFactory + end + + def encrypt(type) + cipher = factory.get(type, OPTIONS.merge(:encrypt => true)) + padding = TEXT.length % cipher.block_size + result = cipher.update(TEXT.dup) + result << cipher.update(" " * (cipher.block_size - padding)) if padding > 0 + result << cipher.final + end + + def decrypt(type, data) + cipher = factory.get(type, OPTIONS.merge(:decrypt => true)) + result = cipher.update(data.dup) + result << cipher.final + result.strip + end 
+ end + +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_hmac.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_hmac.rb new file mode 100644 index 00000000..51ba7aee --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_hmac.rb @@ -0,0 +1,34 @@ +require 'common' +require 'net/ssh/transport/hmac' + +module Transport + + class TestHMAC < Test::Unit::TestCase + Net::SSH::Transport::HMAC::MAP.each do |name, value| + method = name.tr("-", "_") + define_method("test_get_with_#{method}_returns_new_hmac_instance") do + key = "abcdefghijklmnopqrstuvwxyz"[0,Net::SSH::Transport::HMAC::MAP[name].key_length] + hmac = Net::SSH::Transport::HMAC.get(name, key) + assert_instance_of Net::SSH::Transport::HMAC::MAP[name], hmac + assert_equal key, hmac.key + end + + define_method("test_key_length_with_#{method}_returns_correct_key_length") do + assert_equal Net::SSH::Transport::HMAC::MAP[name].key_length, Net::SSH::Transport::HMAC.key_length(name) + end + end + + def test_get_with_unrecognized_hmac_raises_argument_error + assert_raises(ArgumentError) do + Net::SSH::Transport::HMAC.get("bogus") + end + end + + def test_key_length_with_unrecognized_hmac_raises_argument_error + assert_raises(ArgumentError) do + Net::SSH::Transport::HMAC.get("bogus") + end + end + end + +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_identity_cipher.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_identity_cipher.rb new file mode 100644 index 00000000..6bfcce04 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_identity_cipher.rb @@ -0,0 +1,40 @@ +require 'common' +require 'net/ssh/transport/identity_cipher' + +module Transport + + class TestIdentityCipher < Test::Unit::TestCase + + def test_block_size_should_be_8 + assert_equal 8, cipher.block_size + end + + def test_encrypt_should_return_self + assert_equal cipher, cipher.encrypt + end + + def test_decrypt_should_return_self + assert_equal cipher, cipher.decrypt + end + + def test_update_should_return_argument + assert_equal "hello, world", cipher.update("hello, world") + end + + def test_final_should_return_empty_string + assert_equal "", cipher.final + end + + def test_name_should_be_identity + assert_equal "identity", cipher.name + end + + private + + def cipher + Net::SSH::Transport::IdentityCipher + end + + end + +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_packet_stream.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_packet_stream.rb new file mode 100644 index 00000000..fc713fc9 --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_packet_stream.rb @@ -0,0 +1,441 @@ +require 'common' +require 'net/ssh/transport/packet_stream' + +module Transport + + class TestPacketStream < Test::Unit::TestCase + include Net::SSH::Transport::Constants + + def test_client_name_when_getnameinfo_works + stream.expects(:getsockname).returns(:sockaddr) + Socket.expects(:getnameinfo).with(:sockaddr, Socket::NI_NAMEREQD).returns(["net.ssh.test"]) + assert_equal "net.ssh.test", stream.client_name + end + + def test_client_name_when_getnameinfo_fails_first_and_then_works + stream.expects(:getsockname).returns(:sockaddr) + Socket.expects(:getnameinfo).with(:sockaddr, Socket::NI_NAMEREQD).raises(SocketError) + Socket.expects(:getnameinfo).with(:sockaddr).returns(["1.2.3.4"]) + assert_equal "1.2.3.4", stream.client_name + end + + def test_client_name_when_getnameinfo_fails_but_gethostbyname_works + 
stream.expects(:getsockname).returns(:sockaddr) + Socket.expects(:getnameinfo).with(:sockaddr, Socket::NI_NAMEREQD).raises(SocketError) + Socket.expects(:getnameinfo).with(:sockaddr).raises(SocketError) + Socket.expects(:gethostname).returns(:hostname) + Socket.expects(:gethostbyname).with(:hostname).returns(["net.ssh.test"]) + assert_equal "net.ssh.test", stream.client_name + end + + def test_client_name_when_getnameinfo_and_gethostbyname_all_fail + stream.expects(:getsockname).returns(:sockaddr) + Socket.expects(:getnameinfo).with(:sockaddr, Socket::NI_NAMEREQD).raises(SocketError) + Socket.expects(:getnameinfo).with(:sockaddr).raises(SocketError) + Socket.expects(:gethostname).returns(:hostname) + Socket.expects(:gethostbyname).with(:hostname).raises(SocketError) + assert_equal "unknown", stream.client_name + end + + def test_peer_ip_should_query_socket_for_info_about_peer + stream.expects(:getpeername).returns(:sockaddr) + Socket.expects(:getnameinfo).with(:sockaddr, Socket::NI_NUMERICHOST | Socket::NI_NUMERICSERV).returns(["1.2.3.4"]) + assert_equal "1.2.3.4", stream.peer_ip + end + + def test_available_for_read_should_return_nontrue_when_select_fails + IO.expects(:select).returns(nil) + assert !stream.available_for_read? + end + + def test_available_for_read_should_return_nontrue_when_self_is_not_ready + IO.expects(:select).with([stream], nil, nil, 0).returns([[],[],[]]) + assert !stream.available_for_read? + end + + def test_available_for_read_should_return_true_when_self_is_ready + IO.expects(:select).with([stream], nil, nil, 0).returns([[self],[],[]]) + assert stream.available_for_read? + end + + def test_cleanup_should_delegate_cleanup_to_client_and_server_states + stream.client.expects(:cleanup) + stream.server.expects(:cleanup) + stream.cleanup + end + + def test_if_needs_rekey_should_not_yield_if_neither_client_nor_server_states_need_rekey + stream.if_needs_rekey? { flunk "shouldn't need rekey" } + assert(true) + end + + def test_if_needs_rekey_should_yield_and_cleanup_if_client_needs_rekey + stream.client.stubs(:needs_rekey?).returns(true) + stream.client.expects(:reset!) + stream.server.expects(:reset!).never + rekeyed = false + stream.if_needs_rekey? { rekeyed = true } + assert(rekeyed) + end + + def test_if_needs_rekey_should_yield_and_cleanup_if_server_needs_rekey + stream.server.stubs(:needs_rekey?).returns(true) + stream.server.expects(:reset!) + stream.client.expects(:reset!).never + rekeyed = false + stream.if_needs_rekey? { rekeyed = true } + assert(rekeyed) + end + + def test_if_needs_rekey_should_yield_and_cleanup_if_both_need_rekey + stream.server.stubs(:needs_rekey?).returns(true) + stream.client.stubs(:needs_rekey?).returns(true) + stream.server.expects(:reset!) + stream.client.expects(:reset!) + rekeyed = false + stream.if_needs_rekey? { rekeyed = true } + assert(rekeyed) + end + + def test_next_packet_should_not_block_by_default + IO.expects(:select).returns(nil) + assert_nothing_raised do + timeout(1) { stream.next_packet } + end + end + + def test_next_packet_should_return_nil_when_non_blocking_and_not_ready + IO.expects(:select).returns(nil) + assert_nil stream.next_packet(:nonblock) + end + + def test_next_packet_should_return_nil_when_non_blocking_and_partial_read + IO.expects(:select).returns([[stream]]) + stream.expects(:recv).returns([8].pack("N")) + assert_nil stream.next_packet(:nonblock) + assert !stream.read_buffer.empty? 
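+ # only the 4-byte length field ([8].pack("N")) has arrived, so no complete packet + # can be returned yet; the partial bytes appear to remain in read_buffer for the next call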
+ end + + def test_next_packet_should_return_packet_when_non_blocking_and_full_read + IO.expects(:select).returns([[stream]]) + stream.expects(:recv).returns(packet) + packet = stream.next_packet(:nonblock) + assert_not_nil packet + assert_equal DEBUG, packet.type + end + + def test_next_packet_should_eventually_return_packet_when_non_blocking_and_partial_read + IO.stubs(:select).returns([[stream]]) + stream.stubs(:recv).returns(packet[0,10], packet[10..-1]) + assert_nil stream.next_packet(:nonblock) + packet = stream.next_packet(:nonblock) + assert_not_nil packet + assert_equal DEBUG, packet.type + end + + def test_next_packet_should_block_when_requested_until_entire_packet_is_available + IO.stubs(:select).returns([[stream]]) + stream.stubs(:recv).returns(packet[0,10], packet[10,20], packet[20..-1]) + packet = stream.next_packet(:block) + assert_not_nil packet + assert_equal DEBUG, packet.type + end + + def test_next_packet_when_blocking_should_fail_when_fill_could_not_read_any_data + IO.stubs(:select).returns([[stream]]) + stream.stubs(:recv).returns("") + assert_raises(Net::SSH::Disconnect) { stream.next_packet(:block) } + end + + def test_next_packet_fails_with_invalid_argument + assert_raises(ArgumentError) { stream.next_packet("invalid") } + end + + def test_send_packet_should_enqueue_and_send_data_immediately + stream.expects(:send).times(3).with { |a,b| a == stream.write_buffer && b == 0 }.returns(15) + IO.expects(:select).times(2).returns([[], [stream]]) + stream.send_packet(ssh_packet) + assert !stream.pending_write? + end + + def test_enqueue_short_packet_should_ensure_packet_is_at_least_16_bytes_long + packet = Net::SSH::Buffer.from(:byte, 0) + stream.enqueue_packet(packet) + # 12 originally, plus the block-size (8), plus the 4-byte length field + assert_equal 24, stream.write_buffer.length + end + + PACKETS = { + "3des-cbc" => { + "hmac-md5" => { + false => "\003\352\031\261k\243\200\204\301\203]!\a\306\217\201\a[^\304\317\322\264\265~\361\017\n\205\272, #[\343\200Sb\377\265\322\003=S\255N\2654", + :standard => "\317\222v\316\234<\310\377\310\034\346\351\020:\025{\372PDS\246\344\312J\364\301\n\262\r<\037\231Mu\031\240\255\026\362\200A\305\027\341\261\331x\353\0372\3643h`\177\202", + }, + "hmac-md5-96" => { + false => "\003\352\031\261k\243\200\204\301\203]!\a\306\217\201\a[^\304\317\322\264\265~\361\017\n\205\272, #[\343\200Sb\377\265\322\003=S", + :standard => "\317\222v\316\234<\310\377\310\034\346\351\020:\025{\372PDS\246\344\312J\364\301\n\262\r<\037\231Mu\031\240\255\026\362\200A\305\027\341\261\331x\353\0372\3643", + }, + "hmac-sha1" => { + false => "\003\352\031\261k\243\200\204\301\203]!\a\306\217\201\a[^\304\317\322\264\265~\361\017\n\205\272, \235J\004f\262\3730t\376\273\323n\260\275\202\223\214\370D\204", + :standard => "\317\222v\316\234<\310\377\310\034\346\351\020:\025{\372PDS\246\344\312J\364\301\n\262\r<\037\231Mu\031\240\255\026\362\200\345\a{|\0367\355\2735\310'\n\342\250\246\030*1\353\330", + }, + "hmac-sha1-96" => { + false => "\003\352\031\261k\243\200\204\301\203]!\a\306\217\201\a[^\304\317\322\264\265~\361\017\n\205\272, \235J\004f\262\3730t\376\273\323n", + :standard => "\317\222v\316\234<\310\377\310\034\346\351\020:\025{\372PDS\246\344\312J\364\301\n\262\r<\037\231Mu\031\240\255\026\362\200\345\a{|\0367\355\2735\310'\n", + }, + "none" => { + false => "\003\352\031\261k\243\200\204\301\203]!\a\306\217\201\a[^\304\317\322\264\265~\361\017\n\205\272, ", + :standard => 
"\317\222v\316\234<\310\377\310\034\346\351\020:\025{\372PDS\246\344\312J\364\301\n\262\r<\037\231Mu\031\240\255\026\362\200", + }, + }, + "aes128-cbc" => { + "hmac-md5" => { + false => "\240\016\243k]0\330\253\030\320\334\261(\034E\211\230#\326\374\267\311O\211E(\234\325n\306NY#[\343\200Sb\377\265\322\003=S\255N\2654", + :standard => "\273\367\324\032\3762\334\026\r\246\342\022\016\325\024\270.\273\005\314\036\312\211\261\037A\361\362:W\316\352K\204\216b\2124>A\265g\331\177\233dK\251\337\227`9L\324[bPd\253XY\205\241\310", + }, + "hmac-md5-96" => { + false => "\240\016\243k]0\330\253\030\320\334\261(\034E\211\230#\326\374\267\311O\211E(\234\325n\306NY#[\343\200Sb\377\265\322\003=S", + :standard => "\273\367\324\032\3762\334\026\r\246\342\022\016\325\024\270.\273\005\314\036\312\211\261\037A\361\362:W\316\352K\204\216b\2124>A\265g\331\177\233dK\251\337\227`9L\324[bPd\253X", + }, + "hmac-sha1" => { + false => "\240\016\243k]0\330\253\030\320\334\261(\034E\211\230#\326\374\267\311O\211E(\234\325n\306NY\235J\004f\262\3730t\376\273\323n\260\275\202\223\214\370D\204", + :standard => "\273\367\324\032\3762\334\026\r\246\342\022\016\325\024\270.\273\005\314\036\312\211\261\037A\361\362:W\316\352K\204\216b\2124>A\265g\331\177\233dK\251\314\r\224%\316I\370t\251\372]\031\322pH%\267\337r\247", + }, + "hmac-sha1-96" => { + false => "\240\016\243k]0\330\253\030\320\334\261(\034E\211\230#\326\374\267\311O\211E(\234\325n\306NY\235J\004f\262\3730t\376\273\323n", + :standard => "\273\367\324\032\3762\334\026\r\246\342\022\016\325\024\270.\273\005\314\036\312\211\261\037A\361\362:W\316\352K\204\216b\2124>A\265g\331\177\233dK\251\314\r\224%\316I\370t\251\372]\031", + }, + "none" => { + false => "\240\016\243k]0\330\253\030\320\334\261(\034E\211\230#\326\374\267\311O\211E(\234\325n\306NY", + :standard => "\273\367\324\032\3762\334\026\r\246\342\022\016\325\024\270.\273\005\314\036\312\211\261\037A\361\362:W\316\352K\204\216b\2124>A\265g\331\177\233dK\251", + }, + }, + "aes192-cbc" => { + "hmac-md5" => { + false => "P$\377\302\326\262\276\215\206\343&\257#\315>Mp\232P\345o\215\330\213\t\027\300\360\300\037\267\003#[\343\200Sb\377\265\322\003=S\255N\2654", + :standard => "se\347\230\026\311\212\250yH\241\302n\364:\276\270M=H1\317\222^\362\237D\225N\354:\343\205M\006[\313$U/yZ\330\235\032\307\320D\337\227`9L\324[bPd\253XY\205\241\310", + }, + "hmac-md5-96" => { + false => "P$\377\302\326\262\276\215\206\343&\257#\315>Mp\232P\345o\215\330\213\t\027\300\360\300\037\267\003#[\343\200Sb\377\265\322\003=S", + :standard => "se\347\230\026\311\212\250yH\241\302n\364:\276\270M=H1\317\222^\362\237D\225N\354:\343\205M\006[\313$U/yZ\330\235\032\307\320D\337\227`9L\324[bPd\253X", + }, + "hmac-sha1" => { + false => "P$\377\302\326\262\276\215\206\343&\257#\315>Mp\232P\345o\215\330\213\t\027\300\360\300\037\267\003\235J\004f\262\3730t\376\273\323n\260\275\202\223\214\370D\204", + :standard => "se\347\230\026\311\212\250yH\241\302n\364:\276\270M=H1\317\222^\362\237D\225N\354:\343\205M\006[\313$U/yZ\330\235\032\307\320D\314\r\224%\316I\370t\251\372]\031\322pH%\267\337r\247", + }, + "hmac-sha1-96" => { + false => "P$\377\302\326\262\276\215\206\343&\257#\315>Mp\232P\345o\215\330\213\t\027\300\360\300\037\267\003\235J\004f\262\3730t\376\273\323n", + :standard => "se\347\230\026\311\212\250yH\241\302n\364:\276\270M=H1\317\222^\362\237D\225N\354:\343\205M\006[\313$U/yZ\330\235\032\307\320D\314\r\224%\316I\370t\251\372]\031", + }, + "none" => { + false => 
"P$\377\302\326\262\276\215\206\343&\257#\315>Mp\232P\345o\215\330\213\t\027\300\360\300\037\267\003", + :standard => "se\347\230\026\311\212\250yH\241\302n\364:\276\270M=H1\317\222^\362\237D\225N\354:\343\205M\006[\313$U/yZ\330\235\032\307\320D", + }, + }, + "aes256-cbc" => { + "hmac-md5" => { + false => "\266\001oG(\201s\255[\202j\031-\354\353]\022\374\367j2\257\b#\273r\275\341\232\264\255\340#[\343\200Sb\377\265\322\003=S\255N\2654", + :standard => "\251!O/_\253\321\217e\225\202\202W\261p\r\357\357\375\231\264Y,nZ/\366\225G\256\3000\036\223\237\353\265vG\231\215cvY\236%\315\365\337\227`9L\324[bPd\253XY\205\241\310", + }, + "hmac-md5-96" => { + false => "\266\001oG(\201s\255[\202j\031-\354\353]\022\374\367j2\257\b#\273r\275\341\232\264\255\340#[\343\200Sb\377\265\322\003=S", + :standard => "\251!O/_\253\321\217e\225\202\202W\261p\r\357\357\375\231\264Y,nZ/\366\225G\256\3000\036\223\237\353\265vG\231\215cvY\236%\315\365\337\227`9L\324[bPd\253X", + }, + "hmac-sha1" => { + false => "\266\001oG(\201s\255[\202j\031-\354\353]\022\374\367j2\257\b#\273r\275\341\232\264\255\340\235J\004f\262\3730t\376\273\323n\260\275\202\223\214\370D\204", + :standard => "\251!O/_\253\321\217e\225\202\202W\261p\r\357\357\375\231\264Y,nZ/\366\225G\256\3000\036\223\237\353\265vG\231\215cvY\236%\315\365\314\r\224%\316I\370t\251\372]\031\322pH%\267\337r\247", + }, + "hmac-sha1-96" => { + false => "\266\001oG(\201s\255[\202j\031-\354\353]\022\374\367j2\257\b#\273r\275\341\232\264\255\340\235J\004f\262\3730t\376\273\323n", + :standard => "\251!O/_\253\321\217e\225\202\202W\261p\r\357\357\375\231\264Y,nZ/\366\225G\256\3000\036\223\237\353\265vG\231\215cvY\236%\315\365\314\r\224%\316I\370t\251\372]\031", + }, + "none" => { + false => "\266\001oG(\201s\255[\202j\031-\354\353]\022\374\367j2\257\b#\273r\275\341\232\264\255\340", + :standard => "\251!O/_\253\321\217e\225\202\202W\261p\r\357\357\375\231\264Y,nZ/\366\225G\256\3000\036\223\237\353\265vG\231\215cvY\236%\315\365", + }, + }, + "blowfish-cbc" => { + "hmac-md5" => { + false => "vT\353\203\247\206L\255e\371\001 6B/\234g\332\371\224l\227\257\346\373E\237C2\212u)#[\343\200Sb\377\265\322\003=S\255N\2654", + :standard => "U\257\231e\347\274\bh\016X\232h\334\v\005\316e1G$-\367##\256$rW\000\210\335_\360\f\000\205#\370\201\006A\305\027\341\261\331x\353\0372\3643h`\177\202", + }, + "hmac-md5-96" => { + false => "vT\353\203\247\206L\255e\371\001 6B/\234g\332\371\224l\227\257\346\373E\237C2\212u)#[\343\200Sb\377\265\322\003=S", + :standard => "U\257\231e\347\274\bh\016X\232h\334\v\005\316e1G$-\367##\256$rW\000\210\335_\360\f\000\205#\370\201\006A\305\027\341\261\331x\353\0372\3643", + }, + "hmac-sha1" => { + false => "vT\353\203\247\206L\255e\371\001 6B/\234g\332\371\224l\227\257\346\373E\237C2\212u)\235J\004f\262\3730t\376\273\323n\260\275\202\223\214\370D\204", + :standard => "U\257\231e\347\274\bh\016X\232h\334\v\005\316e1G$-\367##\256$rW\000\210\335_\360\f\000\205#\370\201\006\345\a{|\0367\355\2735\310'\n\342\250\246\030*1\353\330", + }, + "hmac-sha1-96" => { + false => "vT\353\203\247\206L\255e\371\001 6B/\234g\332\371\224l\227\257\346\373E\237C2\212u)\235J\004f\262\3730t\376\273\323n", + :standard => "U\257\231e\347\274\bh\016X\232h\334\v\005\316e1G$-\367##\256$rW\000\210\335_\360\f\000\205#\370\201\006\345\a{|\0367\355\2735\310'\n", + }, + "none" => { + false => "vT\353\203\247\206L\255e\371\001 6B/\234g\332\371\224l\227\257\346\373E\237C2\212u)", + :standard => "U\257\231e\347\274\bh\016X\232h\334\v\005\316e1G$-\367##\256$rW\000\210\335_\360\f\000\205#\370\201\006", + 
}, + }, + "cast128-cbc" => { + "hmac-md5" => { + false => "\361\026\313!\31235|w~\n\261\257\277\e\277b\246b\342\333\eE\021N\345\343m\314\272\315\376#[\343\200Sb\377\265\322\003=S\255N\2654", + :standard => "\375i\253\004\311E\2011)\220$\251A\245\f(\371\263\314\242\353\260\272\367\276\"\031\224$\244\311W\307Oe\224\0017\336\325A\305\027\341\261\331x\353\0372\3643h`\177\202", + }, + "hmac-md5-96" => { + false => "\361\026\313!\31235|w~\n\261\257\277\e\277b\246b\342\333\eE\021N\345\343m\314\272\315\376#[\343\200Sb\377\265\322\003=S", + :standard => "\375i\253\004\311E\2011)\220$\251A\245\f(\371\263\314\242\353\260\272\367\276\"\031\224$\244\311W\307Oe\224\0017\336\325A\305\027\341\261\331x\353\0372\3643", + }, + "hmac-sha1" => { + false => "\361\026\313!\31235|w~\n\261\257\277\e\277b\246b\342\333\eE\021N\345\343m\314\272\315\376\235J\004f\262\3730t\376\273\323n\260\275\202\223\214\370D\204", + :standard => "\375i\253\004\311E\2011)\220$\251A\245\f(\371\263\314\242\353\260\272\367\276\"\031\224$\244\311W\307Oe\224\0017\336\325\345\a{|\0367\355\2735\310'\n\342\250\246\030*1\353\330", + }, + "hmac-sha1-96" => { + false => "\361\026\313!\31235|w~\n\261\257\277\e\277b\246b\342\333\eE\021N\345\343m\314\272\315\376\235J\004f\262\3730t\376\273\323n", + :standard => "\375i\253\004\311E\2011)\220$\251A\245\f(\371\263\314\242\353\260\272\367\276\"\031\224$\244\311W\307Oe\224\0017\336\325\345\a{|\0367\355\2735\310'\n", + }, + "none" => { + false => "\361\026\313!\31235|w~\n\261\257\277\e\277b\246b\342\333\eE\021N\345\343m\314\272\315\376", + :standard => "\375i\253\004\311E\2011)\220$\251A\245\f(\371\263\314\242\353\260\272\367\276\"\031\224$\244\311W\307Oe\224\0017\336\325", + }, + }, + "idea-cbc" => { + "hmac-md5" => { + false => "\342\255\202$\273\201\025#\245\2341F\263\005@{\000<\266&s\016\251NH=J\322/\220 H#[\343\200Sb\377\265\322\003=S\255N\2654", + :standard => "F\3048\360\357\265\215I\021)\a\254/\315%\354M\004\330\006\356\vFr\250K\225\223x\277+Q)\022\327\311K\025\322\317A\305\027\341\261\331x\353\0372\3643h`\177\202", + }, + "hmac-md5-96" => { + false => "\342\255\202$\273\201\025#\245\2341F\263\005@{\000<\266&s\016\251NH=J\322/\220 H#[\343\200Sb\377\265\322\003=S", + :standard => "F\3048\360\357\265\215I\021)\a\254/\315%\354M\004\330\006\356\vFr\250K\225\223x\277+Q)\022\327\311K\025\322\317A\305\027\341\261\331x\353\0372\3643", + }, + "hmac-sha1" => { + false => "\342\255\202$\273\201\025#\245\2341F\263\005@{\000<\266&s\016\251NH=J\322/\220 H\235J\004f\262\3730t\376\273\323n\260\275\202\223\214\370D\204", + :standard => "F\3048\360\357\265\215I\021)\a\254/\315%\354M\004\330\006\356\vFr\250K\225\223x\277+Q)\022\327\311K\025\322\317\345\a{|\0367\355\2735\310'\n\342\250\246\030*1\353\330", + }, + "hmac-sha1-96" => { + false => "\342\255\202$\273\201\025#\245\2341F\263\005@{\000<\266&s\016\251NH=J\322/\220 H\235J\004f\262\3730t\376\273\323n", + :standard => "F\3048\360\357\265\215I\021)\a\254/\315%\354M\004\330\006\356\vFr\250K\225\223x\277+Q)\022\327\311K\025\322\317\345\a{|\0367\355\2735\310'\n", + }, + "none" => { + false => "\342\255\202$\273\201\025#\245\2341F\263\005@{\000<\266&s\016\251NH=J\322/\220 H", + :standard => "F\3048\360\357\265\215I\021)\a\254/\315%\354M\004\330\006\356\vFr\250K\225\223x\277+Q)\022\327\311K\025\322\317", + }, + }, + "none" => { + "hmac-md5" => { + false => "\000\000\000\034\b\004\001\000\000\000\tdebugging\000\000\000\000\b\030CgWO\260\212#[\343\200Sb\377\265\322\003=S\255N\2654", + :standard => 
"\000\000\000$\tx\234bad``\340LIM*MO\317\314K\ar\030\000\000\000\000\377\377\b\030CgWO\260\212^A\305\027\341\261\331x\353\0372\3643h`\177\202", + }, + "hmac-md5-96" => { + false => "\000\000\000\034\b\004\001\000\000\000\tdebugging\000\000\000\000\b\030CgWO\260\212#[\343\200Sb\377\265\322\003=S", + :standard => "\000\000\000$\tx\234bad``\340LIM*MO\317\314K\ar\030\000\000\000\000\377\377\b\030CgWO\260\212^A\305\027\341\261\331x\353\0372\3643", + }, + "hmac-sha1" => { + false => "\000\000\000\034\b\004\001\000\000\000\tdebugging\000\000\000\000\b\030CgWO\260\212\235J\004f\262\3730t\376\273\323n\260\275\202\223\214\370D\204", + :standard => "\000\000\000$\tx\234bad``\340LIM*MO\317\314K\ar\030\000\000\000\000\377\377\b\030CgWO\260\212^\345\a{|\0367\355\2735\310'\n\342\250\246\030*1\353\330", + }, + "hmac-sha1-96" => { + false => "\000\000\000\034\b\004\001\000\000\000\tdebugging\000\000\000\000\b\030CgWO\260\212\235J\004f\262\3730t\376\273\323n", + :standard => "\000\000\000$\tx\234bad``\340LIM*MO\317\314K\ar\030\000\000\000\000\377\377\b\030CgWO\260\212^\345\a{|\0367\355\2735\310'\n", + }, + "none" => { + false => "\000\000\000\034\b\004\001\000\000\000\tdebugging\000\000\000\000\b\030CgWO\260\212", + :standard => "\000\000\000$\tx\234bad``\340LIM*MO\317\314K\ar\030\000\000\000\000\377\377\b\030CgWO\260\212^", + }, + }, + "rijndael-cbc@lysator.liu.se" => { + "hmac-md5" => { + false => "\266\001oG(\201s\255[\202j\031-\354\353]\022\374\367j2\257\b#\273r\275\341\232\264\255\340#[\343\200Sb\377\265\322\003=S\255N\2654", + :standard => "\251!O/_\253\321\217e\225\202\202W\261p\r\357\357\375\231\264Y,nZ/\366\225G\256\3000\036\223\237\353\265vG\231\215cvY\236%\315\365\337\227`9L\324[bPd\253XY\205\241\310", + }, + "hmac-md5-96" => { + false => "\266\001oG(\201s\255[\202j\031-\354\353]\022\374\367j2\257\b#\273r\275\341\232\264\255\340#[\343\200Sb\377\265\322\003=S", + :standard => "\251!O/_\253\321\217e\225\202\202W\261p\r\357\357\375\231\264Y,nZ/\366\225G\256\3000\036\223\237\353\265vG\231\215cvY\236%\315\365\337\227`9L\324[bPd\253X", + }, + "hmac-sha1" => { + false => "\266\001oG(\201s\255[\202j\031-\354\353]\022\374\367j2\257\b#\273r\275\341\232\264\255\340\235J\004f\262\3730t\376\273\323n\260\275\202\223\214\370D\204", + :standard => "\251!O/_\253\321\217e\225\202\202W\261p\r\357\357\375\231\264Y,nZ/\366\225G\256\3000\036\223\237\353\265vG\231\215cvY\236%\315\365\314\r\224%\316I\370t\251\372]\031\322pH%\267\337r\247", + }, + "hmac-sha1-96" => { + false => "\266\001oG(\201s\255[\202j\031-\354\353]\022\374\367j2\257\b#\273r\275\341\232\264\255\340\235J\004f\262\3730t\376\273\323n", + :standard => "\251!O/_\253\321\217e\225\202\202W\261p\r\357\357\375\231\264Y,nZ/\366\225G\256\3000\036\223\237\353\265vG\231\215cvY\236%\315\365\314\r\224%\316I\370t\251\372]\031", + }, + "none" => { + false => "\266\001oG(\201s\255[\202j\031-\354\353]\022\374\367j2\257\b#\273r\275\341\232\264\255\340", + :standard => "\251!O/_\253\321\217e\225\202\202W\261p\r\357\357\375\231\264Y,nZ/\366\225G\256\3000\036\223\237\353\265vG\231\215cvY\236%\315\365", + }, + }, + } + + ciphers = Net::SSH::Transport::CipherFactory::SSH_TO_OSSL.keys + hmacs = Net::SSH::Transport::HMAC::MAP.keys + + ciphers.each do |cipher_name| + next unless Net::SSH::Transport::CipherFactory.supported?(cipher_name) + + # TODO: How are the expected packets generated? 
+ if cipher_name =~ /arcfour/ + puts "Skipping packet stream test for #{cipher_name}" + next + end + + hmacs.each do |hmac_name| + [false, :standard].each do |compress| + cipher_method_name = cipher_name.gsub(/\W/, "_") + hmac_method_name = hmac_name.gsub(/\W/, "_") + + define_method("test_next_packet_with_#{cipher_method_name}_and_#{hmac_method_name}_and_#{compress}_compression") do + cipher = Net::SSH::Transport::CipherFactory.get(cipher_name, :key => "ABC", :iv => "abc", :shared => "123", :digester => OpenSSL::Digest::SHA1, :hash => "^&*", :decrypt => true) + hmac = Net::SSH::Transport::HMAC.get(hmac_name, "{}|") + + stream.server.set :cipher => cipher, :hmac => hmac, :compression => compress + stream.stubs(:recv).returns(PACKETS[cipher_name][hmac_name][compress]) + IO.stubs(:select).returns([[stream]]) + packet = stream.next_packet(:nonblock) + assert_not_nil packet + assert_equal DEBUG, packet.type + assert packet[:always_display] + assert_equal "debugging", packet[:message] + assert_equal "", packet[:language] + stream.cleanup + end + + define_method("test_enqueue_packet_with_#{cipher_method_name}_and_#{hmac_method_name}_and_#{compress}_compression") do + cipher = Net::SSH::Transport::CipherFactory.get(cipher_name, :key => "ABC", :iv => "abc", :shared => "123", :digester => OpenSSL::Digest::SHA1, :hash => "^&*", :encrypt => true) + hmac = Net::SSH::Transport::HMAC.get(hmac_name, "{}|") + + srand(100) + stream.client.set :cipher => cipher, :hmac => hmac, :compression => compress + stream.enqueue_packet(ssh_packet) + assert_equal stream.write_buffer, PACKETS[cipher_name][hmac_name][compress] + stream.cleanup + end + end + end + end + + private + + def stream + @stream ||= begin + stream = mock("packet_stream") + stream.extend(Net::SSH::Transport::PacketStream) + stream + end + end + + def ssh_packet + Net::SSH::Buffer.from(:byte, DEBUG, :bool, true, :string, "debugging", :string, "") + end + + def packet + @packet ||= begin + data = ssh_packet + length = data.length + 4 + 1 # length + padding length + padding = stream.server.cipher.block_size - (length % stream.server.cipher.block_size) + padding += stream.server.cipher.block_size if padding < 4 + Net::SSH::Buffer.from(:long, length + padding - 4, :byte, padding, :raw, data, :raw, "\0" * padding).to_s + end + end + end + +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_server_version.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_server_version.rb new file mode 100644 index 00000000..c297a10c --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_server_version.rb @@ -0,0 +1,68 @@ +require 'common' +require 'net/ssh/transport/server_version' + +module Transport + + class TestServerVersion < Test::Unit::TestCase + + def test_1_99_server_version_should_be_acceptible + s = subject(socket(true, "SSH-1.99-Testing_1.0\r\n")) + assert s.header.empty? + assert_equal "SSH-1.99-Testing_1.0", s.version + end + + def test_2_0_server_version_should_be_acceptible + s = subject(socket(true, "SSH-2.0-Testing_1.0\r\n")) + assert s.header.empty? + assert_equal "SSH-2.0-Testing_1.0", s.version + end + + def test_trailing_whitespace_should_be_preserved + # some servers, like Mocana, send a version string with trailing + # spaces, which are significant when exchanging keys later. 
+ s = subject(socket(true, "SSH-2.0-Testing_1.0 \r\n")) + assert_equal "SSH-2.0-Testing_1.0 ", s.version + end + + def test_unacceptible_server_version_should_raise_exception + assert_raises(Net::SSH::Exception) { subject(socket(false, "SSH-1.4-Testing_1.0\r\n")) } + end + + def test_header_lines_should_be_accumulated + s = subject(socket(true, "Welcome\r\nAnother line\r\nSSH-2.0-Testing_1.0\r\n")) + assert_equal "Welcome\r\nAnother line\r\n", s.header + assert_equal "SSH-2.0-Testing_1.0", s.version + end + + def test_server_disconnect_should_raise_exception + assert_raises(Net::SSH::Disconnect) { subject(socket(false, "SSH-2.0-Aborting")) } + end + + private + + def socket(good, version_header) + socket = mock("socket") + + data = version_header.split('') + recv_times = data.length + if data[-1] != "\n" + recv_times += 1 + end + socket.expects(:recv).with(1).times(recv_times).returns(*data).then.returns(nil) + + if good + socket.expects(:write).with("#{Net::SSH::Transport::ServerVersion::PROTO_VERSION}\r\n") + socket.expects(:flush) + else + socket.expects(:write).never + end + + socket + end + + def subject(socket) + Net::SSH::Transport::ServerVersion.new(socket, nil) + end + end + +end diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_session.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_session.rb new file mode 100644 index 00000000..206e134a --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_session.rb @@ -0,0 +1,315 @@ +require 'common' +require 'net/ssh/transport/session' + +# mocha adds #verify to Object, which throws off the host-key-verifier part of +# these tests. + +# can't use .include? because ruby18 uses strings and ruby19 uses symbols :/ +if Object.instance_methods.any? { |v| v.to_sym == :verify } + Object.send(:undef_method, :verify) +end + +module Transport + + class TestSession < Test::Unit::TestCase + include Net::SSH::Transport::Constants + + def test_constructor_defaults + assert_equal "net.ssh.test", session.host + assert_equal 22, session.port + assert_instance_of Net::SSH::Verifiers::Lenient, session.host_key_verifier + end + + def test_paranoid_true_uses_lenient_verifier + assert_instance_of Net::SSH::Verifiers::Lenient, session(:paranoid => true).host_key_verifier + end + + def test_paranoid_very_uses_strict_verifier + assert_instance_of Net::SSH::Verifiers::Strict, session(:paranoid => :very).host_key_verifier + end + + def test_paranoid_false_uses_null_verifier + assert_instance_of Net::SSH::Verifiers::Null, session(:paranoid => false).host_key_verifier + end + + def test_unknown_paranoid_value_raises_exception_if_value_does_not_respond_to_verify + assert_raises(ArgumentError) { session(:paranoid => :bogus).host_key_verifier } + end + + def test_paranoid_value_responding_to_verify_should_pass_muster + object = stub("thingy", :verify => true) + assert_equal object, session(:paranoid => object).host_key_verifier + end + + def test_host_as_string_should_return_host_and_ip_when_port_is_default + session! 
+ socket.stubs(:peer_ip).returns("1.2.3.4") + assert_equal "net.ssh.test,1.2.3.4", session.host_as_string + end + + def test_host_as_string_should_return_host_and_ip_with_port_when_port_is_not_default + session(:port => 1234) # force session to be instantiated + socket.stubs(:peer_ip).returns("1.2.3.4") + assert_equal "[net.ssh.test]:1234,[1.2.3.4]:1234", session.host_as_string + end + + def test_host_as_string_should_return_only_host_when_host_is_ip + session!(:host => "1.2.3.4") + socket.stubs(:peer_ip).returns("1.2.3.4") + assert_equal "1.2.3.4", session.host_as_string + end + + def test_host_as_string_should_return_only_host_and_port_when_host_is_ip_and_port_is_not_default + session!(:host => "1.2.3.4", :port => 1234) + socket.stubs(:peer_ip).returns("1.2.3.4") + assert_equal "[1.2.3.4]:1234", session.host_as_string + end + + def test_close_should_cleanup_and_close_socket + session! + socket.expects(:cleanup) + socket.expects(:close) + session.close + end + + def test_service_request_should_return_buffer + assert_equal "\005\000\000\000\004sftp", session.service_request('sftp').to_s + end + + def test_rekey_when_kex_is_pending_should_do_nothing + algorithms.stubs(:pending? => true) + algorithms.expects(:rekey!).never + session.rekey! + end + + def test_rekey_when_no_kex_is_pending_should_initiate_rekey_and_block_until_it_completes + algorithms.stubs(:pending? => false) + algorithms.expects(:rekey!) + session.expects(:wait).yields + algorithms.expects(:initialized?).returns(true) + session.rekey! + end + + def test_rekey_as_needed_when_kex_is_pending_should_do_nothing + session! + algorithms.stubs(:pending? => true) + socket.expects(:if_needs_rekey?).never + session.rekey_as_needed + end + + def test_rekey_as_needed_when_no_kex_is_pending_and_no_rekey_is_needed_should_do_nothing + session! + algorithms.stubs(:pending? => false) + socket.stubs(:if_needs_rekey? => false) + session.expects(:rekey!).never + session.rekey_as_needed + end + + def test_rekey_as_needed_when_no_kex_is_pending_and_rekey_is_needed_should_initiate_rekey_and_block + session! + algorithms.stubs(:pending? => false) + socket.expects(:if_needs_rekey?).yields + session.expects(:rekey!) + session.rekey_as_needed + end + + def test_peer_should_return_hash_of_info_about_peer + session! + socket.stubs(:peer_ip => "1.2.3.4") + assert_equal({:ip => "1.2.3.4", :port => 22, :host => "net.ssh.test", :canonized => "net.ssh.test,1.2.3.4"}, session.peer) + end + + def test_next_message_should_block_until_next_message_is_available + session.expects(:poll_message).with(:block) + session.next_message + end + + def test_poll_message_should_query_next_packet_using_the_given_blocking_parameter + session! + socket.expects(:next_packet).with(:blocking_parameter).returns(nil) + session.poll_message(:blocking_parameter) + end + + def test_poll_message_should_default_to_non_blocking + session! + socket.expects(:next_packet).with(:nonblock).returns(nil) + session.poll_message + end + + def test_poll_message_should_silently_handle_disconnect_packets + session! + socket.expects(:next_packet).returns(P(:byte, DISCONNECT, :long, 1, :string, "testing", :string, "")) + assert_raises(Net::SSH::Disconnect) { session.poll_message } + end + + def test_poll_message_should_silently_handle_ignore_packets + session! + socket.expects(:next_packet).times(2).returns(P(:byte, IGNORE, :string, "test"), nil) + assert_nil session.poll_message + end + + def test_poll_message_should_silently_handle_unimplemented_packets + session! 
+ socket.expects(:next_packet).times(2).returns(P(:byte, UNIMPLEMENTED, :long, 15), nil) + assert_nil session.poll_message + end + + def test_poll_message_should_silently_handle_debug_packets_with_always_display + session! + socket.expects(:next_packet).times(2).returns(P(:byte, DEBUG, :bool, true, :string, "testing", :string, ""), nil) + assert_nil session.poll_message + end + + def test_poll_message_should_silently_handle_debug_packets_without_always_display + session! + socket.expects(:next_packet).times(2).returns(P(:byte, DEBUG, :bool, false, :string, "testing", :string, ""), nil) + assert_nil session.poll_message + end + + def test_poll_message_should_silently_handle_kexinit_packets + session! + packet = P(:byte, KEXINIT, :raw, "lasdfalksdjfa;slkdfja;slkfjsdfaklsjdfa;df") + socket.expects(:next_packet).times(2).returns(packet, nil) + algorithms.expects(:accept_kexinit).with(packet) + assert_nil session.poll_message + end + + def test_poll_message_should_return_other_packets + session! + packet = P(:byte, SERVICE_ACCEPT, :string, "test") + socket.expects(:next_packet).returns(packet) + assert_equal packet, session.poll_message + end + + def test_poll_message_should_enqueue_packets_when_algorithm_disallows_packet + session! + packet = P(:byte, SERVICE_ACCEPT, :string, "test") + algorithms.stubs(:allow?).with(packet).returns(false) + socket.expects(:next_packet).times(2).returns(packet, nil) + assert_nil session.poll_message + assert_equal [packet], session.queue + end + + def test_poll_message_should_read_from_queue_when_next_in_queue_is_allowed_and_consume_queue_is_true + session! + packet = P(:byte, SERVICE_ACCEPT, :string, "test") + session.push(packet) + socket.expects(:next_packet).never + assert_equal packet, session.poll_message + assert session.queue.empty? + end + + def test_poll_message_should_not_read_from_queue_when_next_in_queue_is_not_allowed + session! + packet = P(:byte, SERVICE_ACCEPT, :string, "test") + algorithms.stubs(:allow?).with(packet).returns(false) + session.push(packet) + socket.expects(:next_packet).returns(nil) + assert_nil session.poll_message + assert_equal [packet], session.queue + end + + def test_poll_message_should_not_read_from_queue_when_consume_queue_is_false + session! + packet = P(:byte, SERVICE_ACCEPT, :string, "test") + session.push(packet) + socket.expects(:next_packet).returns(nil) + assert_nil session.poll_message(:nonblock, false) + assert_equal [packet], session.queue + end + + def test_wait_with_block_should_return_immediately_if_block_returns_truth + session.expects(:poll_message).never + session.wait { true } + end + + def test_wait_should_not_consume_queue_on_reads + n = 0 + session.expects(:poll_message).with(:nonblock, false).returns(nil) + session.wait { (n += 1) > 1 } + end + + def test_wait_without_block_should_return_after_first_read + session.expects(:poll_message).returns(nil) + session.wait + end + + def test_wait_should_enqueue_packets + session! + + p1 = P(:byte, SERVICE_REQUEST, :string, "test") + p2 = P(:byte, SERVICE_ACCEPT, :string, "test") + socket.expects(:next_packet).times(2).returns(p1, p2) + + n = 0 + session.wait { (n += 1) > 2 } + assert_equal [p1, p2], session.queue + end + + def test_push_should_enqueue_packet + packet = P(:byte, SERVICE_ACCEPT, :string, "test") + session.push(packet) + assert_equal [packet], session.queue + end + + def test_send_message_should_delegate_to_socket + session! 
+ packet = P(:byte, SERVICE_ACCEPT, :string, "test") + socket.expects(:send_packet).with(packet) + session.send_message(packet) + end + + def test_enqueue_message_should_delegate_to_socket + session! + packet = P(:byte, SERVICE_ACCEPT, :string, "test") + socket.expects(:enqueue_packet).with(packet) + session.enqueue_message(packet) + end + + def test_configure_client_should_pass_options_to_socket_client_state + session.configure_client :compression => :standard + assert_equal :standard, socket.client.compression + end + + def test_configure_server_should_pass_options_to_socket_server_state + session.configure_server :compression => :standard + assert_equal :standard, socket.server.compression + end + + def test_hint_should_set_hint_on_socket + assert !socket.hints[:authenticated] + session.hint :authenticated + assert socket.hints[:authenticated] + end + + private + + def socket + @socket ||= stub("socket", :hints => {}) + end + + def server_version + @server_version ||= stub("server_version") + end + + def algorithms + @algorithms ||= stub("algorithms", :initialized? => true, :allow? => true) + end + + def session(options={}) + @session ||= begin + host = options.delete(:host) || "net.ssh.test" + TCPSocket.stubs(:open).with(host, options[:port] || 22).returns(socket) + Net::SSH::Transport::ServerVersion.stubs(:new).returns(server_version) + Net::SSH::Transport::Algorithms.stubs(:new).returns(algorithms) + + Net::SSH::Transport::Session.new(host, options) + end + end + + # a simple alias to make the tests more self-documenting. the bang + # version makes it look more like the session is being instantiated + alias session! session + end + +end \ No newline at end of file diff --git a/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_state.rb b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_state.rb new file mode 100644 index 00000000..87fad5cf --- /dev/null +++ b/vendor/gems/gems/net-ssh-2.0.15/test/transport/test_state.rb @@ -0,0 +1,173 @@ +require 'common' +require 'net/ssh/transport/state' + +module Transport + + class TestState < Test::Unit::TestCase + + def setup + @socket = @state = @deflater = @inflater = nil + end + + def teardown + if @deflater + @deflater.finish if !@deflater.finished? + @deflater.close + end + + if @inflater + @inflater.finish if !@inflater.finished? + @inflater.close + end + + state.cleanup + end + + def test_constructor_should_initialize_all_values + assert_equal 0, state.sequence_number + assert_equal 0, state.packets + assert_equal 0, state.blocks + + assert_nil state.compression + assert_nil state.compression_level + assert_nil state.max_packets + assert_nil state.max_blocks + assert_nil state.rekey_limit + + assert_equal "identity", state.cipher.name + assert_instance_of Net::SSH::Transport::HMAC::None, state.hmac + end + + def test_increment_should_increment_counters + state.increment(24) + assert_equal 1, state.sequence_number + assert_equal 1, state.packets + assert_equal 3, state.blocks + end + + def test_reset_should_reset_counters_and_fix_defaults_for_maximums + state.increment(24) + state.reset! + assert_equal 1, state.sequence_number + assert_equal 0, state.packets + assert_equal 0, state.blocks + assert_equal 2147483648, state.max_packets + assert_equal 134217728, state.max_blocks + end + + def test_set_should_set_variables_and_reset_counters + state.expects(:reset!) 
+ state.set :cipher => :a, :hmac => :b, :compression => :c, + :compression_level => :d, :max_packets => 500, :max_blocks => 1000, + :rekey_limit => 1500 + assert_equal :a, state.cipher + assert_equal :b, state.hmac + assert_equal :c, state.compression + assert_equal :d, state.compression_level + assert_equal 500, state.max_packets + assert_equal 1000, state.max_blocks + assert_equal 1500, state.rekey_limit + end + + def test_set_with_max_packets_should_respect_max_packets_setting + state.set :max_packets => 500 + assert_equal 500, state.max_packets + end + + def test_set_with_max_blocks_should_respect_max_blocks_setting + state.set :max_blocks => 1000 + assert_equal 1000, state.max_blocks + end + + def test_set_with_rekey_limit_should_include_rekey_limit_in_computation_of_max_blocks + state.set :rekey_limit => 4000 + assert_equal 500, state.max_blocks + end + + def test_compressor_defaults_to_default_zlib_compression + expect = deflater.deflate("hello world") + assert_equal expect, state.compressor.deflate("hello world") + end + + def test_compressor_uses_compression_level_when_given + state.set :compression_level => 1 + expect = deflater(1).deflate("hello world") + assert_equal expect, state.compressor.deflate("hello world") + end + + def test_compress_when_no_compression_is_active_returns_text + assert_equal "hello everybody", state.compress("hello everybody") + end + + def test_decompress_when_no_compression_is_active_returns_text + assert_equal "hello everybody", state.decompress("hello everybody") + end + + def test_compress_when_compression_is_delayed_and_no_auth_hint_is_set_should_return_text + state.set :compression => :delayed + assert_equal "hello everybody", state.compress("hello everybody") + end + + def test_decompress_when_compression_is_delayed_and_no_auth_hint_is_set_should_return_text + state.set :compression => :delayed + assert_equal "hello everybody", state.decompress("hello everybody") + end + + def test_compress_when_compression_is_enabled_should_return_compressed_text + state.set :compression => :standard + assert_equal "x\234\312H\315\311\311WH-K-\252L\312O\251\004\000\000\000\377\377", state.compress("hello everybody") + end + + def test_decompress_when_compression_is_enabled_should_return_decompressed_text + state.set :compression => :standard + assert_equal "hello everybody", state.decompress("x\234\312H\315\311\311WH-K-\252L\312O\251\004\000\000\000\377\377") + end + + def test_compress_when_compression_is_delayed_and_auth_hint_is_set_should_return_compressed_text + socket.hints[:authenticated] = true + state.set :compression => :delayed + assert_equal "x\234\312H\315\311\311WH-K-\252L\312O\251\004\000\000\000\377\377", state.compress("hello everybody") + end + + def test_decompress_when_compression_is_delayed_and_auth_hint_is_set_should_return_decompressed_text + socket.hints[:authenticated] = true + state.set :compression => :delayed + assert_equal "hello everybody", state.decompress("x\234\312H\315\311\311WH-K-\252L\312O\251\004\000\000\000\377\377") + end + + def test_needs_rekey_should_be_true_if_packets_exceeds_max_packets + state.set :max_packets => 2 + state.increment(8) + state.increment(8) + assert !state.needs_rekey? + state.increment(8) + assert state.needs_rekey? + end + + def test_needs_rekey_should_be_true_if_blocks_exceeds_max_blocks + state.set :max_blocks => 10 + assert !state.needs_rekey? + state.increment(88) + assert state.needs_rekey? 
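+ # the 88-byte packet appears to count as 11 eight-byte blocks (cf. increment(24) + # yielding 3 blocks above), which pushes the counter past the max_blocks of 10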
+ end + + private + + def deflater(level=Zlib::DEFAULT_COMPRESSION) + @deflater ||= Zlib::Deflate.new(level) + end + + def inflater + @inflater ||= Zlib::Inflate.new(nil) + end + + def socket + @socket ||= stub("socket", :hints => {}) + end + + def state + @state ||= Net::SSH::Transport::State.new(socket, :test) + end + end + +end \ No newline at end of file diff --git a/vendor/gems/gems/rubyforge-2.0.3/History.txt b/vendor/gems/gems/rubyforge-2.0.3/History.txt new file mode 100644 index 00000000..33430cdf --- /dev/null +++ b/vendor/gems/gems/rubyforge-2.0.3/History.txt @@ -0,0 +1,124 @@ +=== 2.0.3 / 2009-10-11 + +* Added check to ensure user-config contains 'api.rubyforge.org' (Ryan Davis) +* Removed DEBUG global + +=== 2.0.2 / 2009-10-05 + +* Switched JSON gem dependency over to json_pure. + +=== 2.0.1 / 2009-10-02 + +* Added a missing dependency on JSON gem to the spec. + +=== 2.0.0 / 2009-09-21 + +* Modified to use RubyForge REST API rather than scraping HTML. + +=== 1.0.5 / 2009-09-15 + +* Added experimental script to sync RF trackers with release names. +* Fixed that damned processor_id bug. (hinegardner) +* The century usually doesn't change across runs. Refactor. +* Fix use of Time.utc if args.size < 10, tho seems unnecessary. + +=== 1.0.4 / 2009-07-21 + +* Uses the passed in proxy, if provided. (Thanks sdabet, RF #24071). +* Update group_id pattern for scraping project configs you are only a member of. +* Update regexp used to validate login page to be less restrictive. +* Fixed --help to not require an argument. +* add --force flag for login command to ignore previous cookie. (or use logout) + +=== 1.0.3 / 2009-02-26 + +* Fixed nil error in our Net::HTTP patches. +* Removed password from warning if the login possibly failed. + +=== 1.0.2 / 2009-01-05 + +* All webby commands now login automatically. +* Login now no-ops if it already has a session cookie. +* Added logout command. +* Much more of the config is self-repairing, but still not bulletproof yet. + +=== 1.0.1 / 2008-10-22 + +* Fixed multipart form upload so it isn't url escaping the data. DOH. + * Affects release_notes and release_changes, but never reported for 5 months. + +=== 1.0.0 / 2008-05-20 + +* Removed HTTPAccess2, thanks to Aaron Patterson. Even tho he's whiny. +* Changed initialize/configure to make testing scream. 100x faster. + +=== 0.4.5 / 2008-03-11 + +* Update for Ruby 1.9.0. +* Updated History, Rakefile, and Readme for new hoe abilities. +* Added config backup/restore rake tasks (for testing). + +=== 0.4.4 / 2007-08-13 + +* New type_id values will merge with extant data. (self-repairing data is Good) +* Scrape processor_ids, merging in with extant data. +* Default to "Other" if a file's type is unrecognized. + +=== 0.4.3 / 2007-07-23 + +* Set mode on .rubyforge directory to 700. +* Fix fetching of user id when user has no releases. + +=== 0.4.2 / 2007-05-21 + +* Fix for windoze users (spaces in path). +* Added check for extant release. +* Added default hash for first-time releases. + +=== 0.4.1 / 2007-03-08 + +* Verify that login succeeded and warn against if not (prolly should raise). +* Print a friendly error if you have the wrong package id. +* Handle upload error in add_release a bit better. + +=== 0.4.0 / 2007-01-09 + +* config.yml split and moved to user-config.yml (up to the user to do). +* auto-config.yml now generated via config command. +* @config renamed to @userconfig. +* @config["rubyforge"] moved to @autoconfig. +* Added save_autoconfig. +* Pulled scrape_project from scrape_config. 
+* scrape_config no longer takes a user param. Use opts to specify. +* scrape_project, add_project, add/remove_release now save automatically. + +=== 0.3.2 / 2006-11-29 + +* Fixed file uploads for windows. +* Correctly scrape releases with funky characters. + +=== 0.3.1 / 2006-10-24 + +* Added SSL login. +* Added yet more debugging output if $DEBUG. + +=== 0.3.0 / 2006-09-30 + +* Added more debugging output if $DEBUG +* Added news posting. +* Added multiple file release to add_release (uses add_file for extras). +* add_release now returns release_id +* Fixed config scraper to include '-' in names. + +=== 0.2.1 / 2006-09-14 + +* Gemspec was too loose about packaging. Now using manifest. + +=== 0.2.0 / 2006-09-13 + +* Split original script into script and library. +* Added tests for library. +* Refactored heavily. +* Added "config" command to scrape group/project/release ids from rubyforge. +* Added "names" command to help pick groups and projects. +* Added "add_file" command to add a file to an existing release. diff --git a/vendor/gems/gems/rubyforge-2.0.3/Manifest.txt b/vendor/gems/gems/rubyforge-2.0.3/Manifest.txt new file mode 100644 index 00000000..0a7d55f5 --- /dev/null +++ b/vendor/gems/gems/rubyforge-2.0.3/Manifest.txt @@ -0,0 +1,9 @@ +History.txt +Manifest.txt +README.txt +Rakefile +bin/rubyforge +lib/rubyforge.rb +lib/rubyforge/client.rb +test/test_rubyforge.rb +test/test_rubyforge_client.rb diff --git a/vendor/gems/gems/rubyforge-2.0.3/README.txt b/vendor/gems/gems/rubyforge-2.0.3/README.txt new file mode 100644 index 00000000..d1e0dee4 --- /dev/null +++ b/vendor/gems/gems/rubyforge-2.0.3/README.txt @@ -0,0 +1,55 @@ += Rubyforge + +* http://codeforpeople.rubyforge.org/rubyforge/ +* http://rubyforge.org/projects/codeforpeople/ + +== Description + +A script which automates a limited set of rubyforge operations. + +* Run 'rubyforge help' for complete usage. +* Setup: For first time users AND upgrades to 0.4.0: + * rubyforge setup (deletes your username and password, so run sparingly!) + * edit ~/.rubyforge/user-config.yml + * rubyforge config +* For all rubyforge upgrades, run 'rubyforge config' to ensure you have latest. + +== Synopsis + + rubyforge [options]* mode [mode_args]* + +== REQUIREMENTS + +* hoe +* json +* rubygems + +== INSTALL + +* sudo gem install rubyforge + +== LICENSE + +(The MIT License) + +Copyright (c) Ryan Davis, Eric Hodel, Ara T Howard. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/vendor/gems/gems/rubyforge-2.0.3/Rakefile b/vendor/gems/gems/rubyforge-2.0.3/Rakefile new file mode 100644 index 00000000..eaeed079 --- /dev/null +++ b/vendor/gems/gems/rubyforge-2.0.3/Rakefile @@ -0,0 +1,40 @@ +# -*- ruby -*- + +require 'rubygems' +require 'hoe' +require 'json' + +abort "you _must_ install this gem to release it" if + ENV['VERSION'] && ENV['VERSION'] != RubyForge::VERSION + +Hoe.plugin :email + +Hoe.spec "rubyforge" do + developer 'Ryan Davis', 'ryand-ruby@zenspider.com' + developer 'Eric Hodel', 'drbrain@segment7.net' + developer 'Ara T Howard', 'ara.t.howard@gmail.com' + developer 'Tom Copeland', 'tom@infoether.com' + + multiruby_skip << "rubinius" + extra_deps << ["json_pure",">= 1.1.7"] + self.rubyforge_name = "codeforpeople" + self.need_tar = false +end + +task :postrelease => :announce + +task :backup do + Dir.chdir File.expand_path("~/.rubyforge") do + cp "user-config.yml", "user-config.yml.bak" + cp "auto-config.yml", "auto-config.yml.bak" + end +end + +task :restore do + Dir.chdir File.expand_path("~/.rubyforge") do + cp "user-config.yml.bak", "user-config.yml" + cp "auto-config.yml.bak", "auto-config.yml" + end +end + +# vim:syntax=ruby diff --git a/vendor/gems/gems/rubyforge-2.0.3/bin/rubyforge b/vendor/gems/gems/rubyforge-2.0.3/bin/rubyforge new file mode 100755 index 00000000..cf54b6b8 --- /dev/null +++ b/vendor/gems/gems/rubyforge-2.0.3/bin/rubyforge @@ -0,0 +1,218 @@ +#! /usr/bin/env ruby + +$VERBOSE = true + +$:.unshift(File::join(File::dirname(File::dirname(__FILE__)), "lib")) + +require 'getoptlong' +require 'rubyforge' + +PROGRAM = File::basename $0 + +USAGE = <<-EOL +SYNOPSIS + + #{ PROGRAM } [options]* mode [mode_args]* + +DESCRIPTION + + simplistic script which automates a limited set of rubyforge operations + +MODES + + setup() + initializes your .rubyforge directory. you need to run this first before + doing anything else. + + example : + #{ PROGRAM } setup + + config([project]) + Helps you populate your auto-config.yml file by scraping rubyforge and + getting your groups, projects, and releases. + + example : + #{ PROGRAM } config + #{ PROGRAM } config myproject + + names() + Prints out the names of your configured groups and projects. + + example : + #{ PROGRAM } names + + create_package(group_id, package_name) + creates the named package under the specified group. + + example : + #{ PROGRAM } create_package 1024 traits + #{ PROGRAM } create_package codeforpeople.com traits + + add_release(group_id, package_id, release_name, userfile) + release a file as release_name under the specified group_id and + package_id. + + example : + #{ PROGRAM } add_release codeforpeople.com traits 0.8.0 traits-0.8.0.gem + #{ PROGRAM } add_release codeforpeople.com traits 0.8.0 traits-0.8.0.tgz + #{ PROGRAM } add_release 1024 1242 0.8.0 traits-0.8.0.gem + #{ PROGRAM } add_release 1024 1242 0.8.0 traits-0.8.0.gem + + add_file(group_id, package_id, release_id, userfile) + add a file to an existing release under the specified group_id, + package_id, and release_id + + example : + #{ PROGRAM } add_file codeforpeople.com traits 0.8.0 traits-0.8.0.gem + #{ PROGRAM } add_file codeforpeople.com traits 0.8.0 traits-0.8.0.tgz + #{ PROGRAM } add_file 1024 1242 0.8.0 traits-0.8.0.gem + + delete_package(group_id, package_name) + deletes a package and all its files. 
+ + example : + #{ PROGRAM } delete_package codeforpeople.com traits + #{ PROGRAM } delete_package 1024 traits + + post_news(group_id, summary, details) + posts a news item to the specified group + + example : + #{ PROGRAM } post_news codeforpeople.com "new release" "this release is great!" + #{ PROGRAM } post_news 1024 traits "new release" "this release is great!" +NOTES + + - In order to use group_id, package_id, or release_id by name, + rather than number, you must edit the rubyforge[group_ids] and + rubyforge[package_ids] translation tables in your config.yml. See + the config command for more information and help. + +TODO + + - add error checking. this may require mods to the REST API as + well to ensure that it returns useful error codes. + +OPTIONS + + global : + --help , -h + this message + --config , -c + specify a config file (default #{ RubyForge::CONFIG_F }) + --username , -u + specify username, taken from config otherwise + --password , -p + specify password, taken from config otherwise + + add_release : + --is_private , -P + if true, release is not public + --release_date , -r + specify time of release (default 'now') + --type_id , -t + specify filetype code (default determined by ext) + --processor_id , -o + specify processor (default 'Any') + --release_notes , -n + specify release notes as string or file + --release_changes , -a + specify release changes as string or file + --preformatted , -f + specify whether release_notes/changes are preformatted + +EOL + +mode = ARGV.shift + +opts = GetoptLong::new( + [ "--help" , "-h" , GetoptLong::NO_ARGUMENT ], + [ "--force" , "-F" , GetoptLong::NO_ARGUMENT ], + [ "--username" , "-u" , GetoptLong::REQUIRED_ARGUMENT ], + [ "--password" , "-p" , GetoptLong::REQUIRED_ARGUMENT ], + [ "--is_private" , "-P" , GetoptLong::REQUIRED_ARGUMENT ], + [ "--release_date" , "-r" , GetoptLong::REQUIRED_ARGUMENT ], + [ "--type_id" , "-t" , GetoptLong::REQUIRED_ARGUMENT ], + [ "--processor_id" , "-o" , GetoptLong::REQUIRED_ARGUMENT ], + [ "--release_notes" , "-n" , GetoptLong::REQUIRED_ARGUMENT ], + [ "--release_changes" , "-a" , GetoptLong::REQUIRED_ARGUMENT ], + [ "--preformatted" , "-f" , GetoptLong::NO_ARGUMENT ] + ).enum_for.inject({}) { |h, (k, v)| h.update k.delete('-') => v } + +rubyforge = RubyForge.new +rubyforge.configure opts + +mode = "help" if opts["help"] + +case mode +when %r/help/ + USAGE.display +when %r/setup/ + rubyforge.setup +when %r/config/ + project = ARGV.shift + if project then + rubyforge.scrape_project(project) + else + rubyforge.scrape_config + end +when %r/names/ + rf = rubyforge.autoconfig + puts "groups : #{rf["group_ids"].keys.sort.join(", ")}" + puts "packages: #{rf["package_ids"].keys.sort.join(", ")}" +when %r/create_package/ + page, msg = "/frs/admin/index.php", "post_content" + + group_id, package_name = ARGV + + abort "no " unless group_id + abort "no " unless package_name + + group_id = Integer(group_id) rescue group_id + + rubyforge.create_package group_id, package_name +when %r/post_news/ + group_id, summary, details = ARGV + + abort "no " unless group_id + + group_id = Integer(group_id) rescue group_id + + rubyforge.post_news group_id, summary, details +when %r/delete_package/ + group_id, package_id = ARGV + + abort "no " unless group_id + abort "no " unless package_id + + group_id = Integer(group_id) rescue group_id + package_id = Integer(package_id) rescue package_id + + rubyforge.delete_package group_id, package_id +when %r/add_release/ + group_id, package_id, release_name, userfile = ARGV + + abort "no " 
unless group_id + abort "no " unless package_id + abort "no " unless release_name + abort "no " unless userfile + + group_id = Integer(group_id) rescue group_id + package_id = Integer(package_id) rescue package_id + + rubyforge.add_release group_id, package_id, release_name, userfile +when %r/add_file/ + group_id, package_id, release_id, userfile = ARGV + + abort "no " unless group_id + abort "no " unless package_id + abort "no " unless release_id + abort "no " unless userfile + + group_id = Integer(group_id) rescue group_id + package_id = Integer(package_id) rescue package_id + release_id = Integer(release_id) rescue release_id + + rubyforge.add_file group_id, package_id, release_id, userfile +else + abort USAGE +end diff --git a/vendor/gems/gems/rubyforge-2.0.3/lib/rubyforge.rb b/vendor/gems/gems/rubyforge-2.0.3/lib/rubyforge.rb new file mode 100644 index 00000000..49d57284 --- /dev/null +++ b/vendor/gems/gems/rubyforge-2.0.3/lib/rubyforge.rb @@ -0,0 +1,389 @@ +#! /usr/bin/env ruby -w + +require 'json' +require 'enumerator' +require 'fileutils' +require 'yaml' +require 'open-uri' +require 'rubyforge/client' + +$TESTING = false unless defined? $TESTING + +class RubyForge + + # :stopdoc: + VERSION = '2.0.3' + HOME = ENV["HOME"] || ENV["HOMEPATH"] || File::expand_path("~") + RUBYFORGE_D = File::join HOME, ".rubyforge" + CONFIG_F = File::join RUBYFORGE_D, "user-config.yml" + + # We must use __FILE__ instead of DATA because this is now a library + # and DATA is relative to $0, not __FILE__. + config = File.read(__FILE__).split(/__END__/).last.gsub(/#\{(.*)\}/) {eval $1} + CONFIG = YAML.load(config) + # :startdoc: + + # TODO: add an autoconfig method that is self-repairing, removing key checks + attr_reader :userconfig, :autoconfig + + def initialize(userconfig=nil, autoconfig=nil, opts=nil) + # def initialize(userconfig=CONFIG_F, opts={}) + @userconfig, @autoconfig = userconfig, autoconfig + + @autoconfig ||= CONFIG["rubyforge"].dup + @userconfig.merge! opts if opts + + @client = nil + @uri = nil + end + + # These are no-ops now, but we'll keep them here for backwards compatibility + def login ; end + def logout ; end + + def configure opts = {} + user_path = CONFIG_F + dir, file = File.split(user_path) + + @userconfig = if test(?e, user_path) then + YAML.load_file(user_path) + else + CONFIG + end.merge(opts) + @autoconfig_path = File.join(dir, file.sub(/^user/, 'auto')) + @autoconfig = if test(?e, @autoconfig_path) then + YAML.load_file(@autoconfig_path) + else + CONFIG["rubyforge"].dup + end + @autoconfig["type_ids"] = CONFIG['rubyforge']['type_ids'].dup + + raise "no " unless @userconfig["username"] + raise "no " unless @userconfig["password"] + + self + end + + def force + @userconfig['force'] + end + + def uri + uri = @userconfig['uri'] + abort "Using new REST api, but uri isn't api.rubyforge.org. 
+run `rubyforge setup` and fix please" if + uri =~ /rubyforge.org/ and uri !~ /api.rubyforge.org/ + + @uri ||= URI.parse uri + end + + def setup + FileUtils::mkdir_p RUBYFORGE_D, :mode => 0700 unless test ?d, RUBYFORGE_D + test ?e, CONFIG_F and FileUtils::mv CONFIG_F, "#{CONFIG_F}.bak" + config = CONFIG.dup + config.delete "rubyforge" + + open(CONFIG_F, "w") { |f| + f.write YAML.dump(config) + } + edit = (ENV["EDITOR"] || ENV["EDIT"] || "vi") + " '#{CONFIG_F}'" + system edit or puts "edit '#{CONFIG_F}'" + end + + def save_autoconfig + File.open(@autoconfig_path, "w") do |file| + YAML.dump @autoconfig, file + end + end + + def scrape_config + username = @userconfig['username'] + + %w(group package processor release).each do |type| + @autoconfig["#{type}_ids"].clear if @autoconfig["#{type}_ids"] + end + + json = get_via_rest_api "/users/#{username}/groups.js" + + projects = json.collect {|group| group['group']['unix_group_name'] } + puts "Fetching #{projects.size} projects" + projects.each do |project| + scrape_project(project) + end + end + + def get_via_rest_api(path) + url = "#{self.uri}#{path}" + puts "Hitting REST API: #{url}" if $DEBUG + JSON.parse(client.get_content(url, {}, {}, @userconfig)) + end + + def scrape_project(project) + data = { + "group_ids" => {}, + "package_ids" => {}, + "processor_ids" => Hash.new { |h,k| h[k] = {} }, + "release_ids" => Hash.new { |h,k| h[k] = {} }, + } + + unless data["group_ids"].has_key? project then + json = get_via_rest_api "/groups/#{project}.js" + group_id = json["group"]["group_id"].to_i + data["group_ids"][project] = group_id + end + + # Get project's packages + json = get_via_rest_api "/groups/#{project}/packages.js" + json.each do |package| + data["package_ids"][package["package"]["name"]] = package["package"]["package_id"] + # Get releases for this package + json = get_via_rest_api "/packages/#{package["package"]["package_id"]}/releases.js" + json.each do |release| + data["release_ids"][package["package"]["name"]][release["name"]] = release["release_id"] + end + end + + # Get processor ids + if @autoconfig['processor_ids'].nil? || @autoconfig['processor_ids'].empty? + puts "Fetching processor ids" if $DEBUG + json = get_via_rest_api "/processors.js" + json.each do |processor| + data["processor_ids"][processor["processor"]["name"]] = processor["processor"]["processor_id"] + end + end + + data.each do |key, val| + @autoconfig[key] ||= {} + @autoconfig[key].merge! val + end + + save_autoconfig + end + + def create_package(group_id, package_name) + page = "/groups/#{group_id}/packages" + + group_id = lookup "group", group_id + is_private = @userconfig["is_private"] + is_public = is_private ? 0 : 1 + + form = { + "package[name]" => package_name, + "package[is_public]" => is_public + } + + run page, form + + group_name = @autoconfig["group_ids"].invert[group_id] + scrape_project(group_name) + end + + ## + # Posts news item to +group_id+ (can be name) with +subject+ and +body+ + + def post_news(group_id, subject, body) + # TODO - what was the post_changes parameter for? 
+ form = { + "news_byte[summary]" => subject, + "news_byte[details]" => body + } + group_id = lookup "group", group_id + url = "/groups/#{group_id}/news_bytes" + run url, form + end + + def delete_package(group_id, package_id) + group_id = lookup "group", group_id + package_id = lookup "package", package_id + package_name = @autoconfig["package_ids"].invert[package_id] + @autoconfig["package_ids"].delete package_name + @autoconfig["release_ids"].delete package_name + save_autoconfig + url = "/packages/#{package_id}" + run url, {"_method" => "delete"} + end + + def add_release(group_id, package_id, release_name, *files) + group_id = lookup "group", group_id + package_id = lookup "package", package_id + release_date = @userconfig["release_date"] + release_notes = @userconfig["release_notes"] + release_changes = @userconfig["release_changes"] + preformatted = @userconfig["preformatted"] + release_date ||= Time.now.strftime("%Y-%m-%d %H:%M") + release_notes = IO::read(release_notes) if + test(?e, release_notes) if release_notes + release_changes = IO::read(release_changes) if + test(?e, release_changes) if release_changes + preformatted = preformatted ? 1 : 0 + + form = { + "release[name]" => release_name, + "release[release_date]" => release_date, + "release[notes]" => release_notes, + "release[changes]" => release_changes, + "release[preformatted]" => preformatted, + } + + url = "/packages/#{package_id}/releases" + json = run url, form + + release_id = JSON.parse(json)["release_id"].to_i rescue nil + unless release_id then + puts json if $DEBUG + raise "Couldn't get release_id, upload failed?" + end + + # FIXME + #raise "Invalid package_id #{package_id}" if html[/Invalid package_id/] + #raise "You have already released this version." if html[/That filename already exists in this project/] + + files.each do |file| + add_file(group_id, package_id, release_id, file) + end + + package_name = @autoconfig["package_ids"].invert[package_id] + raise "unknown package name for #{package_id}" if package_name.nil? + @autoconfig["release_ids"][package_name] ||= {} + @autoconfig["release_ids"][package_name][release_name] = release_id + save_autoconfig + + release_id + end + + ## + # add a file to an existing release under the specified group_id, + # package_id, and release_id + # + # example : + # add_file("codeforpeople", "traits", "0.8.0", "traits-0.8.0.gem") + # add_file("codeforpeople", "traits", "0.8.0", "traits-0.8.0.tgz") + # add_file(1024, 1242, "0.8.0", "traits-0.8.0.gem") + + def add_file(group_name, package_name, release_name, userfile) + type_id = @userconfig["type_id"] + group_id = lookup "group", group_name + package_id = lookup "package", package_name + release_id = (Integer === release_name) ? 
release_name : lookup("release", package_name)[release_name] + url = "/releases/#{release_id}/files.js" + + userfile = open userfile, 'rb' + + type_id ||= userfile.path[%r|\.[^\./]+$|] + type_id = (lookup "type", type_id rescue lookup "type", ".oth") + + processor_id = @userconfig["processor_id"] + processor_id ||= "Any" + processor_id = lookup "processor", processor_id + + form = { + "file[filename]" => File.basename(userfile.path), + "file[processor_id]" => processor_id, + "file[type_id]" => type_id, + "contents" => userfile.read + } + + run url, form + end + + def client + return @client if @client + + @client = RubyForge::Client::new ENV["HTTP_PROXY"] + @client.debug_dev = STDERR if ENV["RUBYFORGE_DEBUG"] || ENV["DEBUG"] || $DEBUG + + @client + end + + def run(page, form, extheader={}) # :nodoc: + uri = self.uri + page + puts "client.post_content #{uri.inspect}, #{form.inspect}, #{extheader.inspect}" if $DEBUG + response = client.post_content uri, form, extheader, @userconfig + puts response if $DEBUG + response + end + + def lookup(type, val) # :nodoc: + unless Fixnum === val then + key = val.to_s + val = @autoconfig["#{type}_ids"][key] + raise "no <#{type}_id> configured for <#{ key }>" unless val + end + val + end +end + +__END__ +# +# base rubyforge uri - store in #{ CONFIG_F } +# + uri : http://api.rubyforge.org +# +# this must be your username +# + username : tom +# +# this must be your password +# + password : password +# +# defaults for some values +# + is_private : false +# AUTOCONFIG: + rubyforge : + # + # map your group names to their rubyforge ids + # + group_ids : + codeforpeople : 1024 + support : 5 + # + # map your package names to their rubyforge ids + # + package_ids : + traits : 1241 + # + # map your package names to their rubyforge ids + # + release_ids : + traits : + 1.2.3 : 666 + # + # mapping file exts to rubyforge ids + # + type_ids : + .deb : 1000 + .rpm : 2000 + .zip : 3000 + .bz2 : 3100 + .gz : 3110 + .src.zip : 5000 + .src.bz2 : 5010 + .src.tar.bz2 : 5010 + .src.gz : 5020 + .src.tar.gz : 5020 + .src.rpm : 5100 + .src : 5900 + .jpg : 8000 + .txt : 8100 + .text : 8100 + .htm : 8200 + .html : 8200 + .pdf : 8300 + .oth : 9999 + .ebuild : 1300 + .exe : 1100 + .dmg : 1200 + .tar.gz : 5000 + .tgz : 5000 + .gem : 1400 + .pgp : 8150 + .sig : 8150 + .pem : 1500 + + # + # map processor names to rubyforge ids + # + processor_ids : + Other : 9999 diff --git a/vendor/gems/gems/rubyforge-2.0.3/lib/rubyforge/client.rb b/vendor/gems/gems/rubyforge-2.0.3/lib/rubyforge/client.rb new file mode 100644 index 00000000..b17b8f16 --- /dev/null +++ b/vendor/gems/gems/rubyforge-2.0.3/lib/rubyforge/client.rb @@ -0,0 +1,124 @@ +require 'webrick/cookie' +require 'net/http' +require 'net/https' + +# clean up warnings caused by web servers that send down 2 digit years +class Time + CENTURY = Time.now.year / 100 * 100 + + class << self + alias :old_utc :utc + + def utc(*args) + args[0] += CENTURY if args[0] < 100 + old_utc(*args) + end + end +end unless Time.respond_to? :old_utc + +# clean up "using default DH parameters" warning for https +class Net::HTTP + alias :old_use_ssl= :use_ssl= + def use_ssl= flag + self.old_use_ssl = flag + @ssl_context.tmp_dh_callback = proc {} if @ssl_context + end +end unless Net::HTTP.public_instance_methods.include? 
"old_use_ssl=" + +class RubyForge + class Client + attr_accessor :debug_dev, :ssl_verify_mode, :agent_class + + def initialize(proxy = nil) + @debug_dev = nil + @ssl_verify_mode = OpenSSL::SSL::VERIFY_NONE + if proxy + begin + proxy_uri = URI.parse(proxy) + @agent_class = Net::HTTP::Proxy(proxy_uri.host,proxy_uri.port) + rescue URI::InvalidURIError + end + end + @agent_class ||= Net::HTTP + end + + def post_content(uri, form = {}, headers = {}, userconfig = nil) + uri = URI.parse(uri) unless uri.is_a?(URI) + request = agent_class::Post.new(uri.request_uri) + execute(request, uri, form, headers, userconfig) + end + + def get_content(uri, query = {}, headers = {}, userconfig = nil) + uri = URI.parse(uri) unless uri.is_a?(URI) + request = agent_class::Get.new(uri.request_uri) + execute(request, uri, query, headers, userconfig) + end + + def execute(request, uri, parameters = {}, headers = {}, userconfig = nil) + { + 'content-type' => 'application/x-www-form-urlencoded' + }.merge(headers).each { |k,v| request[k] = v } + + http = agent_class.new( uri.host, uri.port ) + + if uri.scheme == 'https' && uri.host !~ /localhost/ + http.use_ssl = true + http.verify_mode = OpenSSL::SSL::VERIFY_NONE + end + + request.basic_auth(userconfig["username"], userconfig["password"]) + + request_data = case request['Content-Type'] + when /boundary=(.*)$/ + boundary_data_for($1, parameters) + else + query_string_for(parameters) + end + request['Content-Length'] = request_data.length.to_s + + response = http.request(request, request_data) + + return response.body if response.class <= Net::HTTPSuccess + + if response.class <= Net::HTTPRedirection + location = response['Location'] + unless location =~ /^http/ + location = "#{uri.scheme}://#{uri.host}#{location}" + end + uri = URI.parse(location) + + execute(agent_class::Get.new(uri.request_uri), uri) + end + end + + def boundary_data_for(boundary, parameters) + parameters.sort_by {|k,v| k.to_s }.map { |k,v| + parameter = "--#{boundary}\r\nContent-Disposition: form-data; name=\"" + + WEBrick::HTTPUtils.escape_form(k.to_s) + "\"" + + if v.respond_to? :path + parameter += "; filename=\"#{File.basename(v.path)}\"\r\n" + parameter += "Content-Transfer-Encoding: binary\r\n" + parameter += "Content-Type: text/plain" + end + parameter += "\r\n\r\n" + + if v.respond_to? :path + parameter += v.read + else + parameter += v.to_s + end + + parameter + }.join("\r\n") + "\r\n--#{boundary}--\r\n" + end + + def query_string_for(parameters) + parameters.sort_by {|k,v| k.to_s }.map { |k,v| + k && [ WEBrick::HTTPUtils.escape_form(k.to_s), + WEBrick::HTTPUtils.escape_form(v.to_s) ].join('=') + }.compact.join('&') + end + + end +end diff --git a/vendor/gems/gems/rubyforge-2.0.3/test/test_rubyforge.rb b/vendor/gems/gems/rubyforge-2.0.3/test/test_rubyforge.rb new file mode 100644 index 00000000..8dcd4fde --- /dev/null +++ b/vendor/gems/gems/rubyforge-2.0.3/test/test_rubyforge.rb @@ -0,0 +1,329 @@ +require 'test/unit' unless defined? 
$ZENTEST and $ZENTEST + +$TESTING = true +require 'rubyforge' +require 'tmpdir' + +class RubyForge + attr_writer :client + + alias :old_save_autoconfig :save_autoconfig + def save_autoconfig + # raise "not during test" + end +end + +class RubyForge::FakeClient + def form; end + + def post_content(*args) + FakeRubyForge::HTML + end + + def get_content(*args) + URI::HTTP.data.join("\n") + end +end + +class FakeRubyForge < RubyForge + JSON = '{"release_id" : 42}' + + attr_accessor :page, :form, :extheader, :requests, :scrape + def run(page, form, extheader={}) + @page, @form, @extheader = page, form, extheader + @requests ||= [] + @requests << { :url => page, :form => form, :headers => extheader } + JSON + end + + def scrape_project(proj) + @scrape ||= [] + @scrape << proj + end +end + +# TODO: remove this and make rubyforge use Client exclusively +class URI::HTTP + def self.data + @data ||= [] + end + + def read + self.class.data.shift or raise "no more data" + end +end + +class TestRubyForge < Test::Unit::TestCase + def setup + srand(0) + util_new FakeRubyForge + end + + def teardown + # if defined? @old_autoconfig then + # @rubyforge.autoconfig.replace @old_autoconfig + # @rubyforge.save_autoconfig + # end + end + + def test_new_with_proxy_uses_a_proxy_class + client = RubyForge::Client.new('http://localhost:8808/') + assert client.agent_class.proxy_class?, 'agent class should be a proxy' + end + + def test_new_with_bad_proxy_uses_normal_http + client = RubyForge::Client.new('asdfkjhalksdfjh') + assert !client.agent_class.proxy_class?, 'agent class should not be a proxy' + end + + def test_initialize_bad + user_data = { + "uri" => "http://api.rubyforge.org", + "is_private" => false, + "username" => "username", + "password" => "password" + } + + assert_raise RuntimeError do + rf = RubyForge.new user_data + rf.configure "username" => nil + end + assert_raise RuntimeError do + rf = RubyForge.new user_data + rf.configure "password" => nil + end + end + + def test_setup + # TODO raise NotImplementedError, 'Need to write test_setup' + end + + def test_create_package + @rubyforge.create_package(42, 'woot_pkg') + + util_run('/groups/42/packages', + "package[is_public]" => 1, + "package[name]" => "woot_pkg") + end + + def test_delete_package + @rubyforge.delete_package(42, 666) + util_delete_package + end + + def test_delete_package_package_name + @rubyforge.delete_package(42, "woot_pkg") + util_delete_package + end + + def test_delete_package_undefined_package_name + assert_raise RuntimeError do + @rubyforge.delete_package(42, "blah") + end + end + + def test_delete_package_group_name + @rubyforge.delete_package("seattlerb", 666) + util_delete_package + end + + def test_delete_package_undefined_group_name + assert_raise RuntimeError do + @rubyforge.delete_package("blah", 666) + end + end + + def test_post_news + @rubyforge.post_news("seattlerb", "my summary", "my news") + + util_run("/groups/42/news_bytes", + "news_byte[details]" => "my news", + "news_byte[summary]" => "my summary") + end + + def test_add_release_undefined_package_name + assert_raise RuntimeError do + @rubyforge.add_release(42, "blah", '1.2.3', __FILE__) + end + end + + def test_add_release_undefined_group_name + assert_raise RuntimeError do + @rubyforge.add_release("blah", 666, '1.2.3', __FILE__) + end + end + + def test_lookup_id + assert_equal 43, @rubyforge.lookup("package", 43) + end + + def test_lookup_string_number + assert_raise RuntimeError do + @rubyforge.lookup("package", "43") + end + end + + def test_lookup_name + 
@rubyforge.autoconfig["package_ids"]["ringy_dingy"] = 314 + assert_equal 314, @rubyforge.lookup("package", "ringy_dingy") + end + + def test_lookup_undefined + assert_raise RuntimeError do + @rubyforge.lookup("package", "blah") + end + end + + def test_add_file + @rubyforge.autoconfig["package_ids"]["ringy_dingy"] = 314 + @rubyforge.autoconfig["release_ids"]["ringy_dingy"] ||= {} + @rubyforge.autoconfig["release_ids"]["ringy_dingy"]["1.2.3"] = 43 + + filepath, contents = make_a_tmp_file + + @rubyforge.add_file('seattlerb', 'ringy_dingy', '1.2.3', filepath) + + util_run('/releases/43/files.js', { + "file[type_id]" => 9999, + "file[processor_id]" => 8000, + "file[filename]"=> File.basename(filepath), + "contents" => File.read(filepath) + }) + end + + def test_add_release + @rubyforge.add_release(42, 666, '1.2.3') + util_add_release + end + + def test_add_release_with_a_file + filepath, contents = make_a_tmp_file + + @rubyforge.add_release(42, 666, '1.2.3', filepath) + add_release = ({ :url=>"/packages/666/releases", + :form=>{ "release[name]" => "1.2.3", + "release[notes]" => nil, + "release[changes]" => nil, + "release[preformatted]"=>0, + "release[release_date]" => "today"}, + :headers=> {}}) + add_file = ({ :url => '/releases/42/files.js', + :form => {"file[type_id]" => 9999, + "file[processor_id]" => 8000, + "file[filename]"=> File.basename(filepath), + "contents" => File.read(filepath) + }, + :headers => {}}) + expected = [add_release, add_file] + + result = @rubyforge.requests + result.each do |r| + r[:form].delete "userfile" + end + + assert_equal expected, result + end + + def test_add_release_package_name + @rubyforge.add_release(42, "woot_pkg", '1.2.3') + util_add_release + end + + def test_add_release_group_name + @rubyforge.add_release("seattlerb", 666, '1.2.3') + util_add_release + end + + + def test_scrape_project + orig_stdout = $stdout + orig_stderr = $stderr + $stdout = StringIO.new + $stderr = StringIO.new + util_new RubyForge # TODO: switch to Fake + @rubyforge.autoconfig.each { |k,v| v.clear } + + URI::HTTP.data << '{"group" : {"group_id":1513}}' + URI::HTTP.data << '[{"package" : {"package_id":4566, "package_name":"1.3.1"}}]' + + # @rubyforge.scrape << < <-EOF +# URI::HTTP.data << <<-EOF +# +# EOF + # + @rubyforge.scrape_project('my_project') rescue "Hm, for some reason this technique of loading up data on URI::HTTP isn't working here. Not sure why." 
+ # + # expected = { + # "group_ids" => { "my_project" => 1513 }, + # "package_ids" => { "ar_mailer" => 4566 }, + # "processor_ids" => { "i386" => 1000, "i387" => 1001 }, + # "release_ids" => { + # "ar_mailer" => { "1.2.0" => 12185, "1.3.1" => 13368 } + # }, + # "type_ids" => {}, + # } + # + # assert_equal expected, @rubyforge.autoconfig + ensure + $stdout = orig_stdout + $stderr = orig_stderr + end + + def util_new(klass) + user_data = { + "uri" => "http://api.rubyforge.org", + "is_private" => false, + "username" => "username", + "password" => "password" + } + + auto_data = { + "group_ids" => {}, + "package_ids" => {}, + "release_ids" => Hash.new { |h,k| h[k] = {} }, + "type_ids" => {}, + "processor_ids" => {"Any"=>8000}, + } + + @rubyforge = klass.new user_data, auto_data + + @rubyforge.client = RubyForge::FakeClient.new + + @rubyforge.userconfig["release_date"] = "today" + @rubyforge.autoconfig["type_ids"][".rb"] = 9999 + @rubyforge.autoconfig["group_ids"]["seattlerb"] = 42 + @rubyforge.autoconfig["package_ids"]["woot_pkg"] = 666 + end + + def util_run(page, form={}, extheader={}) + form_result = @rubyforge.form + assert_equal page, @rubyforge.page.to_s + assert_equal form, form_result + assert_equal extheader, @rubyforge.extheader + end + + def util_add_release + util_run("/packages/666/releases", + { "release[name]" => "1.2.3", + "release[notes]" => nil, + "release[changes]" => nil, + "release[preformatted]"=>0, + "release[release_date]" => "today"}) + end + + def util_delete_package + util_run('/packages/666', "_method" => "delete") + end + + def make_a_tmp_file + content = "Merely a test" + tmp_file = File.join(Dir.tmpdir, "test.rb") + File.open(tmp_file, "w") { |f| f.syswrite(content) } + [tmp_file, content] + end + +end diff --git a/vendor/gems/gems/rubyforge-2.0.3/test/test_rubyforge_client.rb b/vendor/gems/gems/rubyforge-2.0.3/test/test_rubyforge_client.rb new file mode 100644 index 00000000..c5ac303e --- /dev/null +++ b/vendor/gems/gems/rubyforge-2.0.3/test/test_rubyforge_client.rb @@ -0,0 +1,57 @@ +require 'test/unit' unless defined? 
$ZENTEST and $ZENTEST +require 'rubyforge' + +class RubyForge::FakeAgent + class << self + attr_accessor :t_data, :t_request + end + + def initialize(*args) + end + + def request(request, data) + self.class.t_request = request + self.class.t_data = data + response = Net::HTTPOK.new('1.1', 200, '') + def response.read_body; ''; end + return response + end + + class Post + def initialize(*args) + @args = args + @stuff = {} + end + + def [] key + @stuff[key.downcase] + end + + def []= key, val + @stuff[key.downcase] = val + end + + def method_missing(*stuff) + # warn stuff.inspect + end + end +end + +class TestRubyForgeClient < Test::Unit::TestCase + def setup + @client = RubyForge::Client.new + @client.agent_class = RubyForge::FakeAgent + RubyForge::FakeAgent.t_data = :unassigned + RubyForge::FakeAgent.t_request = :unassigned + end + + def test_post_with_params + @client.post_content('http://example.com', { :f => 'adsf aoeu'}, {}, {"username" => "tom", "password" => "secret"}) + assert_equal('f=adsf+aoeu', RubyForge::FakeAgent.t_data) + + @client.post_content('http://example.com', { :a => 'b', :c => 'd' }, {}, {"username" => "tom", "password" => "secret"}) + assert_equal('a=b&c=d', RubyForge::FakeAgent.t_data) + end + + +end diff --git a/vendor/gems/gems/syntax-1.0.0/data/ruby.css b/vendor/gems/gems/syntax-1.0.0/data/ruby.css new file mode 100644 index 00000000..2bc85c46 --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/data/ruby.css @@ -0,0 +1,18 @@ +.ruby .normal {} +.ruby .comment { color: #005; font-style: italic; } +.ruby .keyword { color: #A00; font-weight: bold; } +.ruby .method { color: #077; } +.ruby .class { color: #074; } +.ruby .module { color: #050; } +.ruby .punct { color: #447; font-weight: bold; } +.ruby .symbol { color: #099; } +.ruby .string { color: #944; background: #FFE; } +.ruby .char { color: #F07; } +.ruby .ident { color: #004; } +.ruby .constant { color: #07F; } +.ruby .regex { color: #B66; background: #FEF; } +.ruby .number { color: #F99; } +.ruby .attribute { color: #7BB; } +.ruby .global { color: #7FB; } +.ruby .expr { color: #227; } +.ruby .escape { color: #277; } diff --git a/vendor/gems/gems/syntax-1.0.0/data/xml.css b/vendor/gems/gems/syntax-1.0.0/data/xml.css new file mode 100644 index 00000000..c3efc661 --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/data/xml.css @@ -0,0 +1,8 @@ +.xml .normal {} +.xml .namespace { color: #B66; font-weight: bold; } +.xml .tag { color: #F88; } +.xml .comment { color: #005; font-style: italic; } +.xml .punct { color: #447; font-weight: bold; } +.xml .string { color: #944; } +.xml .number { color: #F99; } +.xml .attribute { color: #BB7; } diff --git a/vendor/gems/gems/syntax-1.0.0/data/yaml.css b/vendor/gems/gems/syntax-1.0.0/data/yaml.css new file mode 100644 index 00000000..610e0057 --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/data/yaml.css @@ -0,0 +1,12 @@ +.yaml .normal {} +.yaml .document { font-weight: bold; color: #07F; } +.yaml .type { font-weight: bold; color: #05C; } +.yaml .key { color: #F88; } +.yaml .comment { color: #005; font-style: italic; } +.yaml .punct { color: #447; font-weight: bold; } +.yaml .string { color: #944; } +.yaml .number { color: #F99; } +.yaml .time { color: #F99; } +.yaml .date { color: #F99; } +.yaml .ref { color: #944; } +.yaml .anchor { color: #944; } diff --git a/vendor/gems/gems/syntax-1.0.0/lib/syntax.rb b/vendor/gems/gems/syntax-1.0.0/lib/syntax.rb new file mode 100644 index 00000000..604dcc2f --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/lib/syntax.rb @@ -0,0 +1,38 @@ +require 
'syntax/common' + +module Syntax + + # A default tokenizer for handling syntaxes that are not explicitly handled + # elsewhere. It simply yields the given text as a single token. + class Default + + # Yield the given text as a single token. + def tokenize( text ) + yield Token.new( text, :normal ) + end + + end + + # A hash for registering syntax implementations. + SYNTAX = Hash.new( Default ) + + # Load the implementation of the requested syntax. If the syntax cannot be + # found, or if it cannot be loaded for whatever reason, the Default syntax + # handler will be returned. + def load( syntax ) + begin + require "syntax/lang/#{syntax}" + rescue LoadError + end + SYNTAX[ syntax ].new + end + module_function :load + + # Return an array of the names of supported syntaxes. + def all + lang_dir = File.join(File.dirname(__FILE__), "syntax", "lang") + Dir["#{lang_dir}/*.rb"].map { |path| File.basename(path, ".rb") } + end + module_function :all + +end diff --git a/vendor/gems/gems/syntax-1.0.0/lib/syntax/common.rb b/vendor/gems/gems/syntax-1.0.0/lib/syntax/common.rb new file mode 100644 index 00000000..a986e656 --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/lib/syntax/common.rb @@ -0,0 +1,163 @@ +require 'strscan' + +module Syntax + + # A single token extracted by a tokenizer. It is simply the lexeme + # itself, decorated with a 'group' attribute to identify the type of the + # lexeme. + class Token < String + + # the type of the lexeme that was extracted. + attr_reader :group + + # the instruction associated with this token (:none, :region_open, or + # :region_close) + attr_reader :instruction + + # Create a new Token representing the given text, and belonging to the + # given group. + def initialize( text, group, instruction = :none ) + super text + @group = group + @instruction = instruction + end + + end + + # The base class of all tokenizers. It sets up the scanner and manages the + # looping until all tokens have been extracted. It also provides convenience + # methods to make sure adjacent tokens of identical groups are returned as + # a single token. + class Tokenizer + + # The current group being processed by the tokenizer + attr_reader :group + + # The current chunk of text being accumulated + attr_reader :chunk + + # Start tokenizing. This sets up the state in preparation for tokenization, + # such as creating a new scanner for the text and saving the callback block. + # The block will be invoked for each token extracted. + def start( text, &block ) + @chunk = "" + @group = :normal + @callback = block + @text = StringScanner.new( text ) + setup + end + + # Subclasses may override this method to provide implementation-specific + # setup logic. + def setup + end + + # Finish tokenizing. This flushes the buffer, yielding any remaining text + # to the client. + def finish + start_group nil + teardown + end + + # Subclasses may override this method to provide implementation-specific + # teardown logic. + def teardown + end + + # Subclasses must implement this method, which is called for each iteration + # of the tokenization process. This method may extract multiple tokens. + def step + raise NotImplementedError, "subclasses must implement #step" + end + + # Begins tokenizing the given text, calling #step until the text has been + # exhausted. + def tokenize( text, &block ) + start text, &block + step until @text.eos? + finish + end + + # Specify a set of tokenizer-specific options. 
Each tokenizer may (or may + # not) publish any options, but if a tokenizer does those options may be + # used to specify optional behavior. + def set( opts={} ) + ( @options ||= Hash.new ).update opts + end + + # Get the value of the specified option. + def option(opt) + @options ? @options[opt] : nil + end + + private + + EOL = /(?=\r\n?|\n|$)/ + + # A convenience for delegating method calls to the scanner. + def self.delegate( sym ) + define_method( sym ) { |*a| @text.__send__( sym, *a ) } + end + + delegate :bol? + delegate :eos? + delegate :scan + delegate :scan_until + delegate :check + delegate :check_until + delegate :getch + delegate :matched + delegate :pre_match + delegate :peek + delegate :pos + + # Access the n-th subgroup from the most recent match. + def subgroup(n) + @text[n] + end + + # Append the given data to the currently active chunk. + def append( data ) + @chunk << data + end + + # Request that a new group be started. If the current group is the same + # as the group being requested, a new group will not be created. If a new + # group is created and the current chunk is not empty, the chunk's + # contents will be yielded to the client as a token, and then cleared. + # + # After the new group is started, if +data+ is non-nil it will be appended + # to the chunk. + def start_group( gr, data=nil ) + flush_chunk if gr != @group + @group = gr + @chunk << data if data + end + + def start_region( gr, data=nil ) + flush_chunk + @group = gr + @callback.call( Token.new( data||"", @group, :region_open ) ) + end + + def end_region( gr, data=nil ) + flush_chunk + @group = gr + @callback.call( Token.new( data||"", @group, :region_close ) ) + end + + def flush_chunk + @callback.call( Token.new( @chunk, @group ) ) unless @chunk.empty? + @chunk = "" + end + + def subtokenize( syntax, text ) + tokenizer = Syntax.load( syntax ) + tokenizer.set @options if @options + flush_chunk + tokenizer.tokenize( text, &@callback ) + end + + end + +end diff --git a/vendor/gems/gems/syntax-1.0.0/lib/syntax/convertors/abstract.rb b/vendor/gems/gems/syntax-1.0.0/lib/syntax/convertors/abstract.rb new file mode 100644 index 00000000..46c2f6fe --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/lib/syntax/convertors/abstract.rb @@ -0,0 +1,27 @@ +require 'syntax' + +module Syntax + module Convertors + + # The abstract ancestor class for all convertors. It implements a few + # convenience methods to provide a common interface for all convertors. + class Abstract + + # A reference to the tokenizer used by this convertor. + attr_reader :tokenizer + + # A convenience method for instantiating a new convertor for a + # specific syntax. + def self.for_syntax( syntax ) + new( Syntax.load( syntax ) ) + end + + # Creates a new convertor that uses the given tokenizer. + def initialize( tokenizer ) + @tokenizer = tokenizer + end + + end + + end +end diff --git a/vendor/gems/gems/syntax-1.0.0/lib/syntax/convertors/html.rb b/vendor/gems/gems/syntax-1.0.0/lib/syntax/convertors/html.rb new file mode 100644 index 00000000..5df416a8 --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/lib/syntax/convertors/html.rb @@ -0,0 +1,51 @@ +require 'syntax/convertors/abstract' + +module Syntax + module Convertors + + # A simple class for converting a text into HTML. + class HTML < Abstract + + # Converts the given text to HTML, using spans to represent token groups + # of any type but :normal (which is always unhighlighted). If + # +pre+ is +true+, the html is automatically wrapped in pre tags. 
+      def convert( text, pre=true )
+        html = ""
+        html << "<pre>" if pre
+        regions = []
+        @tokenizer.tokenize( text ) do |tok|
+          value = html_escape(tok)
+          case tok.instruction
+            when :region_close then
+              regions.pop
+              html << "</span>"
+            when :region_open then
+              regions.push tok.group
+              html << "<span class=\"#{tok.group}\">#{value}"
+            else
+              if tok.group == ( regions.last || :normal )
+                html << value
+              else
+                html << "<span class=\"#{tok.group}\">#{value}</span>"
+              end
+          end
+        end
+        html << "</span>" while regions.pop
+        html << "</pre>
" if pre + html + end + + private + + # Replaces some characters with their corresponding HTML entities. + def html_escape( string ) + string.gsub( /&/, "&" ). + gsub( //, ">" ). + gsub( /"/, """ ) + end + + end + + end +end diff --git a/vendor/gems/gems/syntax-1.0.0/lib/syntax/lang/ruby.rb b/vendor/gems/gems/syntax-1.0.0/lib/syntax/lang/ruby.rb new file mode 100644 index 00000000..66afaa47 --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/lib/syntax/lang/ruby.rb @@ -0,0 +1,317 @@ +require 'syntax' + +module Syntax + + # A tokenizer for the Ruby language. It recognizes all common syntax + # (and some less common syntax) but because it is not a true lexer, it + # will make mistakes on some ambiguous cases. + class Ruby < Tokenizer + + # The list of all identifiers recognized as keywords. + KEYWORDS = + %w{if then elsif else end begin do rescue ensure while for + class module def yield raise until unless and or not when + case super undef break next redo retry in return alias + defined?} + + # Perform ruby-specific setup + def setup + @selector = false + @allow_operator = false + @heredocs = [] + end + + # Step through a single iteration of the tokenization process. + def step + case + when bol? && check( /=begin/ ) + start_group( :comment, scan_until( /^=end#{EOL}/ ) ) + when bol? && check( /__END__#{EOL}/ ) + start_group( :comment, scan_until( /\Z/ ) ) + else + case + when check( /def\s+/ ) + start_group :keyword, scan( /def\s+/ ) + start_group :method, scan_until( /(?=[;(\s]|#{EOL})/ ) + when check( /class\s+/ ) + start_group :keyword, scan( /class\s+/ ) + start_group :class, scan_until( /(?=[;\s<]|#{EOL})/ ) + when check( /module\s+/ ) + start_group :keyword, scan( /module\s+/ ) + start_group :module, scan_until( /(?=[;\s]|#{EOL})/ ) + when check( /::/ ) + start_group :punct, scan(/::/) + when check( /:"/ ) + start_group :symbol, scan(/:/) + scan_delimited_region :symbol, :symbol, "", true + @allow_operator = true + when check( /:'/ ) + start_group :symbol, scan(/:/) + scan_delimited_region :symbol, :symbol, "", false + @allow_operator = true + when scan( /:[_a-zA-Z@$][$@\w]*[=!?]?/ ) + start_group :symbol, matched + @allow_operator = true + when scan( /\?(\\[^\n\r]|[^\\\n\r\s])/ ) + start_group :char, matched + @allow_operator = true + when check( /(__FILE__|__LINE__|true|false|nil|self)[?!]?/ ) + if @selector || matched[-1] == ?? || matched[-1] == ?! 
+ start_group :ident, + scan(/(__FILE__|__LINE__|true|false|nil|self)[?!]?/) + else + start_group :constant, + scan(/(__FILE__|__LINE__|true|false|nil|self)/) + end + @selector = false + @allow_operator = true + when scan(/0([bB][01]+|[oO][0-7]+|[dD][0-9]+|[xX][0-9a-fA-F]+)/) + start_group :number, matched + @allow_operator = true + else + case peek(2) + when "%r" + scan_delimited_region :punct, :regex, scan( /../ ), true + @allow_operator = true + when "%w", "%q" + scan_delimited_region :punct, :string, scan( /../ ), false + @allow_operator = true + when "%s" + scan_delimited_region :punct, :symbol, scan( /../ ), false + @allow_operator = true + when "%W", "%Q", "%x" + scan_delimited_region :punct, :string, scan( /../ ), true + @allow_operator = true + when /%[^\sa-zA-Z0-9]/ + scan_delimited_region :punct, :string, scan( /./ ), true + @allow_operator = true + when "<<" + saw_word = ( chunk[-1,1] =~ /[\w!?]/ ) + start_group :punct, scan( /<(\[\{}:;,&|%]/ + start_group :punct, scan(/./) + @allow_operator = false + when /[)\]]/ + start_group :punct, scan(/./) + @allow_operator = true + else + # all else just falls through this, to prevent + # infinite loops... + append getch + end + end + end + end + end + + private + + # Scan a delimited region of text. This handles the simple cases (strings + # delimited with quotes) as well as the more complex cases of %-strings + # and here-documents. + # + # * +delim_group+ is the group to use to classify the delimiters of the + # region + # * +inner_group+ is the group to use to classify the contents of the + # region + # * +starter+ is the text to use as the starting delimiter + # * +exprs+ is a boolean flag indicating whether the region is an + # interpolated string or not + # * +delim+ is the text to use as the delimiter of the region. If +nil+, + # the next character will be treated as the delimiter. + # * +heredoc+ is either +false+, meaning the region is not a heredoc, or + # :flush (meaning the delimiter must be flushed left), or + # :float (meaning the delimiter doens't have to be flush left). + def scan_delimited_region( delim_group, inner_group, starter, exprs, + delim=nil, heredoc=false ) + # begin + if !delim + start_group delim_group, starter + delim = scan( /./ ) + append delim + + delim = case delim + when '{' then '}' + when '(' then ')' + when '[' then ']' + when '<' then '>' + else delim + end + end + + start_region inner_group + + items = "\\\\|" + if heredoc + items << "(^" + items << '\s*' if heredoc == :float + items << "#{Regexp.escape(delim)}\s*?)#{EOL}" + else + items << "#{Regexp.escape(delim)}" + end + items << "|#(\\$|@@?|\\{)" if exprs + items = Regexp.new( items ) + + loop do + p = pos + match = scan_until( items ) + if match.nil? 
+ start_group inner_group, scan_until( /\Z/ ) + break + else + text = pre_match[p..-1] + start_group inner_group, text if text.length > 0 + case matched.strip + when "\\" + unless exprs + case peek(1) + when "'" + scan(/./) + start_group :escape, "\\'" + when "\\" + scan(/./) + start_group :escape, "\\\\" + else + start_group inner_group, "\\" + end + else + start_group :escape, "\\" + c = getch + append c + case c + when 'x' + append scan( /[a-fA-F0-9]{1,2}/ ) + when /[0-7]/ + append scan( /[0-7]{0,2}/ ) + end + end + when delim + end_region inner_group + start_group delim_group, matched + break + when /^#/ + do_highlight = (option(:expressions) == :highlight) + start_region :expr if do_highlight + start_group :expr, matched + case matched[1] + when ?{ + depth = 1 + content = "" + while depth > 0 + p = pos + c = scan_until( /[\{}]/ ) + if c.nil? + content << scan_until( /\Z/ ) + break + else + depth += ( matched == "{" ? 1 : -1 ) + content << pre_match[p..-1] + content << matched if depth > 0 + end + end + if do_highlight + subtokenize "ruby", content + start_group :expr, "}" + else + append content + "}" + end + when ?$, ?@ + append scan( /\w+/ ) + end + end_region :expr if do_highlight + else raise "unexpected match on #{matched}" + end + end + end + end + + # Scan a heredoc beginning at the current position. + # + # * +float+ indicates whether the delimiter may be floated to the right + # * +type+ is +nil+, a single quote, or a double quote + # * +delim+ is the delimiter to look for + def scan_heredoc(float, type, delim) + scan_delimited_region( :constant, :string, "", type != "'", + delim, float ? :float : :flush ) + end + end + + SYNTAX["ruby"] = Ruby + +end diff --git a/vendor/gems/gems/syntax-1.0.0/lib/syntax/lang/xml.rb b/vendor/gems/gems/syntax-1.0.0/lib/syntax/lang/xml.rb new file mode 100644 index 00000000..7d530e02 --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/lib/syntax/lang/xml.rb @@ -0,0 +1,108 @@ +require 'syntax' + +module Syntax + + # A simple implementation of an XML lexer. It handles most cases. It is + # not a validating lexer, meaning it will happily process invalid XML without + # complaining. + class XML < Tokenizer + + # Initialize the lexer. + def setup + @in_tag = false + end + + # Step through a single iteration of the tokenization process. This will + # yield (potentially) many tokens, and possibly zero tokens. + def step + start_group :normal, matched if scan( /\s+/ ) + if @in_tag + case + when scan( /([-\w]+):([-\w]+)/ ) + start_group :namespace, subgroup(1) + start_group :punct, ":" + start_group :attribute, subgroup(2) + when scan( /\d+/ ) + start_group :number, matched + when scan( /[-\w]+/ ) + start_group :attribute, matched + when scan( %r{[/?]?>} ) + @in_tag = false + start_group :punct, matched + when scan( /=/ ) + start_group :punct, matched + when scan( /["']/ ) + scan_string matched + else + append getch + end + elsif ( text = scan_until( /(?=[<&])/ ) ) + start_group :normal, text unless text.empty? + if scan(/|\Z)/m) + start_group :comment, matched + else + case peek(1) + when "<" + start_group :punct, getch + case peek(1) + when "?" + append getch + when "/" + append getch + when "!" 
+ append getch + end + start_group :normal, matched if scan( /\s+/ ) + if scan( /([-\w]+):([-\w]+)/ ) + start_group :namespace, subgroup(1) + start_group :punct, ":" + start_group :tag, subgroup(2) + elsif scan( /[-\w]+/ ) + start_group :tag, matched + end + @in_tag = true + when "&" + if scan( /&\S{1,10};/ ) + start_group :entity, matched + else + start_group :normal, scan( /&/ ) + end + end + end + else + append scan_until( /\Z/ ) + end + end + + private + + # Scan the string starting at the current position, with the given + # delimiter character. + def scan_string( delim ) + start_group :punct, delim + match = /(?=[&\\]|#{delim})/ + loop do + break unless ( text = scan_until( match ) ) + start_group :string, text unless text.empty? + case peek(1) + when "&" + if scan( /&\S{1,10};/ ) + start_group :entity, matched + else + start_group :string, getch + end + when "\\" + start_group :string, getch + append getch || "" + when delim + start_group :punct, getch + break + end + end + end + + end + + SYNTAX["xml"] = XML + +end diff --git a/vendor/gems/gems/syntax-1.0.0/lib/syntax/lang/yaml.rb b/vendor/gems/gems/syntax-1.0.0/lib/syntax/lang/yaml.rb new file mode 100644 index 00000000..53b052db --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/lib/syntax/lang/yaml.rb @@ -0,0 +1,105 @@ +require 'syntax' + +module Syntax + + # A simple implementation of an YAML lexer. It handles most cases. It is + # not a validating lexer. + class YAML < Tokenizer + + # Step through a single iteration of the tokenization process. This will + # yield (potentially) many tokens, and possibly zero tokens. + def step + if bol? + case + when scan(/---(\s*.+)?$/) + start_group :document, matched + when scan(/(\s*)([a-zA-Z][-\w]*)(\s*):/) + start_group :normal, subgroup(1) + start_group :key, subgroup(2) + start_group :normal, subgroup(3) + start_group :punct, ":" + when scan(/(\s*)-/) + start_group :normal, subgroup(1) + start_group :punct, "-" + when scan(/\s*$/) + start_group :normal, matched + when scan(/#.*$/) + start_group :comment, matched + else + append getch + end + else + case + when scan(/[\n\r]+/) + start_group :normal, matched + when scan(/[ \t]+/) + start_group :normal, matched + when scan(/!+(.*?^)?\S+/) + start_group :type, matched + when scan(/&\S+/) + start_group :anchor, matched + when scan(/\*\S+/) + start_group :ref, matched + when scan(/\d\d:\d\d:\d\d/) + start_group :time, matched + when scan(/\d\d\d\d-\d\d-\d\d\s\d\d:\d\d:\d\d(\.\d+)? [-+]\d\d:\d\d/) + start_group :date, matched + when scan(/['"]/) + start_group :punct, matched + scan_string matched + when scan(/:\w+/) + start_group :symbol, matched + when scan(/[:]/) + start_group :punct, matched + when scan(/#.*$/) + start_group :comment, matched + when scan(/>-?/) + start_group :punct, matched + start_group :normal, scan(/.*$/) + append getch until eos? || bol? + return if eos? + indent = check(/ */) + start_group :string + loop do + line = check_until(/[\n\r]|\Z/) + break if line.nil? + if line.chomp.length > 0 + this_indent = line.chomp.match( /^\s*/ )[0] + break if this_indent.length < indent.length + end + append scan_until(/[\n\r]|\Z/) + end + else + start_group :normal, scan_until(/(?=$|#)/) + end + end + end + + private + + def scan_string( delim ) + regex = /(?=[#{delim=="'" ? "" : "\\\\"}#{delim}])/ + loop do + text = scan_until( regex ) + if text.nil? + start_group :string, scan_until( /\Z/ ) + break + else + start_group :string, text unless text.empty? 
+ end + + case peek(1) + when "\\" + start_group :expr, scan(/../) + else + start_group :punct, getch + break + end + end + end + + end + + SYNTAX["yaml"] = YAML + +end diff --git a/vendor/gems/gems/syntax-1.0.0/lib/syntax/version.rb b/vendor/gems/gems/syntax-1.0.0/lib/syntax/version.rb new file mode 100644 index 00000000..d5330468 --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/lib/syntax/version.rb @@ -0,0 +1,9 @@ +module Syntax + module Version + MAJOR=1 + MINOR=0 + TINY=0 + + STRING=[MAJOR,MINOR,TINY].join('.') + end +end diff --git a/vendor/gems/gems/syntax-1.0.0/test/ALL-TESTS.rb b/vendor/gems/gems/syntax-1.0.0/test/ALL-TESTS.rb new file mode 100644 index 00000000..4014cdcb --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/test/ALL-TESTS.rb @@ -0,0 +1,5 @@ +#!/usr/bin/env ruby +$:.unshift "../lib" + +Dir.chdir File.dirname(__FILE__) +Dir["**/tc_*.rb"].each { |file| load file } diff --git a/vendor/gems/gems/syntax-1.0.0/test/syntax/tc_ruby.rb b/vendor/gems/gems/syntax-1.0.0/test/syntax/tc_ruby.rb new file mode 100644 index 00000000..b3a0cb2d --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/test/syntax/tc_ruby.rb @@ -0,0 +1,871 @@ +require File.dirname(__FILE__) + "/tokenizer_testcase" + +class TC_Syntax_Ruby < TokenizerTestCase + + syntax "ruby" + + def test_empty + tokenize "" + assert_no_next_token + end + + def test_constant + tokenize "Foo" + assert_next_token :constant, "Foo" + end + + def test_ident + tokenize "foo" + assert_next_token :ident, "foo" + end + + def test_comment_eol + tokenize "# a comment\nfoo" + assert_next_token :comment, "# a comment" + assert_next_token :normal, "\n" + assert_next_token :ident, "foo" + end + + def test_comment_block + tokenize "=begin\nthis is a comment\n=end\nnoncomment" + assert_next_token :comment, "=begin\nthis is a comment\n=end" + assert_next_token :normal, "\n" + assert_next_token :ident, "noncomment" + end + + def test_comment_block_with_CRNL + tokenize "=begin\r\nthis is a comment\r\n=end\r\nnoncomment" + assert_next_token :comment, "=begin\r\nthis is a comment\r\n=end" + assert_next_token :normal, "\r\n" + assert_next_token :ident, "noncomment" + end + + def test_keyword + Syntax::Ruby::KEYWORDS.each do |word| + tokenize word + assert_next_token :keyword, word + end + Syntax::Ruby::KEYWORDS.each do |word| + tokenize "foo.#{word}" + skip_token 2 + assert_next_token :ident, word + end + end + + def test__END__ + tokenize "__END__\n\nblah blah blah" + assert_next_token :comment, "__END__\n\nblah blah blah" + end + + def test__END__with_CRNL + tokenize "__END__\r\nblah blah blah" + assert_next_token :comment, "__END__\r\nblah blah blah" + end + + def test_def_paren + tokenize "def foo(bar)" + assert_next_token :keyword, "def " + assert_next_token :method, "foo" + assert_next_token :punct, "(" + assert_next_token :ident, "bar" + assert_next_token :punct, ")" + end + + def test_def_space + tokenize "def foo bar" + assert_next_token :keyword, "def " + assert_next_token :method, "foo" + assert_next_token :normal, " " + assert_next_token :ident, "bar" + end + + def test_def_semicolon + tokenize "def foo;" + assert_next_token :keyword, "def " + assert_next_token :method, "foo" + assert_next_token :punct, ";" + end + + def test_def_eol + tokenize "def foo" + assert_next_token :keyword, "def " + assert_next_token :method, "foo" + end + + def test_class_space + tokenize "class Foo\n" + assert_next_token :keyword, "class " + assert_next_token :class, "Foo" + assert_next_token :normal, "\n" + end + + def test_class_semicolon + tokenize "class 
Foo;" + assert_next_token :keyword, "class " + assert_next_token :class, "Foo" + assert_next_token :punct, ";" + end + + def test_class_extend + tokenize "class Foo< Bang" + assert_next_token :keyword, "class " + assert_next_token :class, "Foo" + assert_next_token :punct, "<" + assert_next_token :normal, " " + assert_next_token :constant, "Bang" + end + + def test_module_space + tokenize "module Foo\n" + assert_next_token :keyword, "module " + assert_next_token :module, "Foo" + assert_next_token :normal, "\n" + end + + def test_module_semicolon + tokenize "module Foo;" + assert_next_token :keyword, "module " + assert_next_token :module, "Foo" + assert_next_token :punct, ";" + end + + def test_module_other + tokenize "module Foo!\n" + assert_next_token :keyword, "module " + assert_next_token :module, "Foo!" + end + + def test_scope_operator + tokenize "Foo::Bar" + assert_next_token :constant, "Foo" + assert_next_token :punct, "::" + assert_next_token :constant, "Bar" + end + + def test_symbol_dquote + tokenize ':"foo"' + assert_next_token :symbol, ':"' + assert_next_token :symbol, '', :region_open + assert_next_token :symbol, 'foo' + assert_next_token :symbol, '', :region_close + assert_next_token :symbol, '"' + assert_no_next_token + end + + def test_symbol_squote + tokenize ":'foo'" + assert_next_token :symbol, ":'" + assert_next_token :symbol, "", :region_open + assert_next_token :symbol, "foo" + assert_next_token :symbol, "", :region_close + assert_next_token :symbol, "'" + assert_no_next_token + end + + def test_symbol + tokenize ":foo_123" + assert_next_token :symbol, ":foo_123" + + tokenize ":123" + assert_next_token :punct, ":" + assert_next_token :number, "123" + + tokenize ":foo=" + assert_next_token :symbol, ":foo=" + + tokenize ":foo!" + assert_next_token :symbol, ":foo!" + + tokenize ":foo?" + assert_next_token :symbol, ":foo?" + end + + def test_char + tokenize "?." + assert_next_token :char, "?." + + tokenize '?\n' + assert_next_token :char, '?\n' + end + + def test_specials + %w{__FILE__ __LINE__ true false nil self}.each do |word| + tokenize word + assert_next_token :constant, word + end + + %w{__FILE__ __LINE__ true false nil self}.each do |word| + tokenize "#{word}?" + assert_next_token :ident, "#{word}?" + end + + %w{__FILE__ __LINE__ true false nil self}.each do |word| + tokenize "#{word}!" + assert_next_token :ident, "#{word}!" 
+ end + + %w{__FILE__ __LINE__ true false nil self}.each do |word| + tokenize "x.#{word}" + skip_token 2 + assert_next_token :ident, word + end + end + + def test_pct_r + tokenize '%r{foo#{x}bar}' + assert_next_token :punct, "%r{" + assert_next_token :regex, "", :region_open + assert_next_token :regex, "foo" + assert_next_token :expr, '#{x}' + assert_next_token :regex, "bar" + assert_next_token :regex, "", :region_close + assert_next_token :punct, "}" + + tokenize '%r-foo#{x}bar-' + assert_next_token :punct, "%r-" + assert_next_token :regex, "", :region_open + assert_next_token :regex, "foo" + assert_next_token :expr, '#{x}' + assert_next_token :regex, "bar" + assert_next_token :regex, "", :region_close + assert_next_token :punct, "-" + end + + def test_pct_r_with_wakas + tokenize '%r foo' + assert_next_token :punct, "%r<" + assert_next_token :regex, "", :region_open + assert_next_token :regex, "foo" + assert_next_token :expr, '#{x}' + assert_next_token :regex, "bar" + assert_next_token :regex, "", :region_close + assert_next_token :punct, ">" + assert_next_token :normal, " " + assert_next_token :ident, "foo" + end + + def test_pct_w_brace + tokenize '%w{foo bar baz}' + assert_next_token :punct, "%w{" + assert_next_token :string, '', :region_open + assert_next_token :string, 'foo bar baz' + assert_next_token :string, '', :region_close + assert_next_token :punct, "}" + end + + def test_pct_w + tokenize '%w-foo#{x} bar baz-' + assert_next_token :punct, "%w-" + assert_next_token :string, '', :region_open + assert_next_token :string, 'foo#{x} bar baz' + assert_next_token :string, '', :region_close + assert_next_token :punct, "-" + end + + def test_pct_q + tokenize '%q-hello #{world}-' + assert_next_token :punct, "%q-" + assert_next_token :string, '', :region_open + assert_next_token :string, 'hello #{world}' + assert_next_token :string, '', :region_close + assert_next_token :punct, "-" + end + + def test_pct_s + tokenize '%s-hello #{world}-' + assert_next_token :punct, "%s-" + assert_next_token :symbol, '', :region_open + assert_next_token :symbol, 'hello #{world}' + assert_next_token :symbol, '', :region_close + assert_next_token :punct, "-" + end + + def test_pct_W + tokenize '%W-foo#{x} bar baz-' + assert_next_token :punct, "%W-" + assert_next_token :string, '', :region_open + assert_next_token :string, 'foo' + assert_next_token :expr, '#{x}' + assert_next_token :string, ' bar baz' + assert_next_token :string, '', :region_close + assert_next_token :punct, "-" + end + + def test_pct_Q + tokenize '%Q-hello #{world}-' + assert_next_token :punct, "%Q-" + assert_next_token :string, '', :region_open + assert_next_token :string, 'hello ' + assert_next_token :expr, '#{world}' + assert_next_token :string, '', :region_close + assert_next_token :punct, "-" + end + + def test_pct_x + tokenize '%x-ls /blah/#{foo}-' + assert_next_token :punct, "%x-" + assert_next_token :string, '', :region_open + assert_next_token :string, 'ls /blah/' + assert_next_token :expr, '#{foo}' + assert_next_token :string, '', :region_close + assert_next_token :punct, "-" + end + + def test_pct_string + tokenize '%-hello #{world}-' + assert_next_token :punct, "%-" + assert_next_token :string, '', :region_open + assert_next_token :string, 'hello ' + assert_next_token :expr, '#{world}' + assert_next_token :string, '', :region_close + assert_next_token :punct, "-" + end + + def test_bad_pct_string + tokenize '%0hello #{world}0' + assert_next_token :punct, "%" + assert_next_token :number, '0' + assert_next_token :ident, 'hello' + 
assert_next_token :normal, ' ' + assert_next_token :comment, '#{world}0' + end + + def test_shift_left + tokenize 'foo << 5' + assert_next_token :ident, "foo" + assert_next_token :normal, " " + assert_next_token :punct, "<<" + assert_next_token :normal, " " + assert_next_token :number, "5" + end + + def test_shift_left_no_white + tokenize 'foo<<5' + assert_next_token :ident, "foo" + assert_next_token :punct, "<<" + assert_next_token :number, "5" + end + + def test_here_doc_no_opts + tokenize "foo < :highlight + tokenize '"la la #{["hello", "world"].each { |f| puts "string #{f}" }}"' + assert_next_token :punct, '"' + assert_next_token :string, "", :region_open + assert_next_token :string, "la la " + assert_next_token :expr, "", :region_open + assert_next_token :expr, '#{' + assert_next_token :punct, '["' + assert_next_token :string, "", :region_open + assert_next_token :string, 'hello' + assert_next_token :string, "", :region_close + assert_next_token :punct, '",' + assert_next_token :normal, ' ' + assert_next_token :punct, '"' + assert_next_token :string, "", :region_open + assert_next_token :string, "world" + assert_next_token :string, "", :region_close + assert_next_token :punct, '"].' + assert_next_token :ident, 'each' + assert_next_token :normal, ' ' + assert_next_token :punct, '{' + assert_next_token :normal, ' ' + assert_next_token :punct, '|' + assert_next_token :ident, 'f' + assert_next_token :punct, '|' + assert_next_token :normal, ' ' + assert_next_token :ident, 'puts' + assert_next_token :normal, ' ' + assert_next_token :punct, '"' + assert_next_token :string, "", :region_open + assert_next_token :string, "string " + assert_next_token :expr, "", :region_open + assert_next_token :expr, '#{' + assert_next_token :ident, 'f' + assert_next_token :expr, '}' + assert_next_token :expr, "", :region_close + assert_next_token :string, "", :region_close + assert_next_token :punct, '"' + assert_next_token :normal, ' ' + assert_next_token :punct, '}' + assert_next_token :expr, '}' + assert_next_token :expr, "", :region_close + assert_next_token :string, "", :region_close + assert_next_token :punct, '"' + end + + def test_expr_in_braces + tokenize '"#{f}"' + assert_next_token :punct, '"' + assert_next_token :string, "", :region_open + assert_next_token :expr, '#{f}' + assert_next_token :string, "", :region_close + assert_next_token :punct, '"' + end + + def test_expr_in_braces_with_nested_braces + tokenize '"#{loop{break}}"' + assert_next_token :punct, '"' + assert_next_token :string, "", :region_open + assert_next_token :expr, '#{loop{break}}' + assert_next_token :string, "", :region_close + assert_next_token :punct, '"' + end + + def test_expr_with_global_var + tokenize '"#$f"' + assert_next_token :punct, '"' + assert_next_token :string, "", :region_open + assert_next_token :expr, '#$f' + assert_next_token :string, "", :region_close + assert_next_token :punct, '"' + end + + def test_expr_with_instance_var + tokenize '"#@f"' + assert_next_token :punct, '"' + assert_next_token :string, "", :region_open + assert_next_token :expr, '#@f' + assert_next_token :string, "", :region_close + assert_next_token :punct, '"' + end + + def test_expr_with_class_var + tokenize '"#@@f"' + assert_next_token :punct, '"' + assert_next_token :string, "", :region_open + assert_next_token :expr, '#@@f' + assert_next_token :string, "", :region_close + assert_next_token :punct, '"' + end + + def test_qmark_space + tokenize "? " + assert_next_token :punct, "?" 
+ assert_next_token :normal, " " + end + + def test_capitalized_method + tokenize "obj.Foo" + skip_token 2 + assert_next_token :ident, "Foo" + end + + def test_hexadecimal_literal + tokenize "0xDEADbeef 0X1234567890ABCDEFG" + assert_next_token :number, "0xDEADbeef" + skip_token + assert_next_token :number, "0X1234567890ABCDEF" + assert_next_token :constant, "G" + end + + def test_binary_literal + tokenize "0b2 0b0 0b101 0B123" + assert_next_token :number, "0" + assert_next_token :ident, "b2" + skip_token + assert_next_token :number, "0b0" + skip_token + assert_next_token :number, "0b101" + skip_token + assert_next_token :number, "0B123" + end + + def test_octal_literal + tokenize "0o9 0o12345670abc 0O12345678" + assert_next_token :number, "0" + assert_next_token :ident, "o9" + skip_token + assert_next_token :number, "0o12345670" + assert_next_token :ident, "abc" + skip_token + assert_next_token :number, "0O12345678" + end + + def test_decimal_literal + tokenize "0dA 0d1234567890abc 0D1234567890" + assert_next_token :number, "0" + assert_next_token :ident, "dA" + skip_token + assert_next_token :number, "0d1234567890" + assert_next_token :ident, "abc" + skip_token + assert_next_token :number, "0D1234567890" + end +end diff --git a/vendor/gems/gems/syntax-1.0.0/test/syntax/tc_xml.rb b/vendor/gems/gems/syntax-1.0.0/test/syntax/tc_xml.rb new file mode 100644 index 00000000..83cd26b1 --- /dev/null +++ b/vendor/gems/gems/syntax-1.0.0/test/syntax/tc_xml.rb @@ -0,0 +1,202 @@ +$:.unshift File.dirname(__FILE__) +"/../../lib" + +require 'test/unit' +require 'syntax/lang/xml' + +class TC_Syntax_XML < Test::Unit::TestCase + + def setup + @xml = Syntax::XML.new + end + + def test_empty + called = false + @xml.tokenize( "" ) { |tok| called = true } + assert !called + end + + def test_no_tag + tok = [] + @xml.tokenize( "foo bar baz" ) { |t| tok << t } + assert_equal [ :normal, "foo bar baz" ], [ tok.first.group, tok.shift ] + end + + def test_entity_outside_tag + tok = [] + @xml.tokenize( "& &x157; &nosemi & foo;" ) { |t| tok << t } + assert_equal [ :entity, "&" ], [ tok.first.group, tok.shift ] + tok.shift + assert_equal [ :entity, " " ], [ tok.first.group, tok.shift ] + tok.shift + assert_equal [ :entity, "&x157;" ], [ tok.first.group, tok.shift ] + assert_equal [ :normal, " &nosemi & foo;" ], [ tok.first.group, tok.shift ] + end + + def test_start_tag + tok = [] + @xml.tokenize( "" ) { |t| tok << t } + assert_equal [ :normal, "/>" ], [ tok.first.group, tok.shift ] + end + + def test_start_namespaced_tag + tok = [] + @xml.tokenize( " foo' ) { |t| tok << t } + assert_equal [ :punct, "<" ], [ tok.first.group, tok.shift ] + assert_equal [ :tag, "name" ], [ tok.first.group, tok.shift ] + assert_equal [ :punct, ">" ], [ tok.first.group, tok.shift ] + assert_equal [ :normal, " foo" ], [ tok.first.group, tok.shift ] + end + + def test_close_self_tag + tok = [] + @xml.tokenize( ' foo' ) { |t| tok << t } + assert_equal [ :punct, "<" ], [ tok.first.group, tok.shift ] + assert_equal [ :tag, "name" ], [ tok.first.group, tok.shift ] + assert_equal [ :normal, " " ], [ tok.first.group, tok.shift ] + assert_equal [ :punct, "/>" ], [ tok.first.group, tok.shift ] + assert_equal [ :normal, " foo" ], [ tok.first.group, tok.shift ] + end + + def test_close_decl_tag + tok = [] + @xml.tokenize( ' foo' ) { |t| tok << t } + assert_equal [ :punct, "" ], [ tok.first.group, tok.shift ] + assert_equal [ :normal, " foo" ], [ tok.first.group, tok.shift ] + end + + def test_comment + tok = [] + @xml.tokenize( "foo bar" ) { |t| tok << t 
} + assert_equal [ :normal, "foo " ], [ tok.first.group, tok.shift ] + assert_equal [ :comment, "" ], [ tok.first.group, tok.shift ] + assert_equal [ :normal, " bar" ], [ tok.first.group, tok.shift ] + end + + def test_comment_unterminated + tok = [] + @xml.tokenize( "foo - - - - - - - - - <% 0.upto(thin_max_instances - 1) do |instance| %> - - - - - - - - - - - - - - - - - <% end %> - - - diff --git a/vendor/gems/gems/thin-1.2.5/example/thin_solaris_smf.readme.txt b/vendor/gems/gems/thin-1.2.5/example/thin_solaris_smf.readme.txt deleted file mode 100644 index aea4e938..00000000 --- a/vendor/gems/gems/thin-1.2.5/example/thin_solaris_smf.readme.txt +++ /dev/null @@ -1,150 +0,0 @@ -Using Thin with Solaris' SMF Monitoring Framework -- - - - - - - - - - - - - - - - - - - - - - - - - - -Solaris uses the Service Management Framework (SMF) at the OS level to manage, monitor, and restart long running processes. This replaces init scripts, and tools like monit and god. - -The sample XML file (thin_solaris_smf.erb) is an example SMF manifest which I use on a Joyent accelerator which runs on OpenSolaris. - -This setup will: - -- ensure the right dependencies are loaded -- start n instances of Thin, and monitor each individually. If any single one dies it will be restarted instantly (test it by killing a single thin instance and it will be back alive before you can type 'ps -ef'). - -This is better than using clustering since if you start the cluster with SMF it will only notice a problem and restart individual Thin's if ALL of them are dead, at which point it will restart the whole cluster. This approach makes sure that all of your Thins start together and are monitored and managed independant of each other. This problem likely exists if you are using god or monit to monitor only the start of the master cluster, and don't then monitor the individual processes started. - -This example is in .erb format instead of plain XML since I dynamically generate this file as part of a Capistrano deployment. In my deploy.rb file I define the variables found in this erb. Of course you don't need to use this with Capistrano. Just replace the few ERB variables from the xml file, change its extension, and load that directly in Solaris if you prefer. - -Here are some examples for usage to get you started with Capistrano, and Thin: - -FILE : config/deploy.rb --- - - require 'config/accelerator/accelerator_tasks' - - set :application, "yourapp" - set :svcadm_bin, "/usr/sbin/svcadm" - set :svccfg_bin, "/usr/sbin/svccfg" - set :svcs_bin, "/usr/bin/svcs" - - # gets the list of remote service SMF names that we need to start - # like (depending on thin_max_instances settings): - # svc:/network/thin/yourapp-production:i_0 - # svc:/network/thin/yourapp-production:i_1 - # svc:/network/thin/yourapp-production:i_2 - set :service_list, "`svcs -H -o FMRI svc:network/thin/#{application}-production`" - - # how many Thin instances should be setup to run? 
- # this affects the generated thin smf file, and the nginx vhost conf - # need to re-run setup for thin smf and nginx vhost conf when changed - set :thin_max_instances, 3 - - # OVERRIDE STANDARD TASKS - desc "Restart the entire application" - deploy.task :restart do - accelerator.thin.restart - accelerator.nginx.restart - end - - desc "Start the entire application" - deploy.task :start do - accelerator.thin.restart - accelerator.nginx.restart - end - - desc "Stop the entire application" - deploy.task :stop do - accelerator.thin.disable - accelerator.nginx.disable - end - - -FILE : config/accelerator/accelerator_tasks.rb --- - - desc "Create and deploy Thin SMF config" - task :create_thin_smf, :roles => :app do - service_name = application - working_directory = current_path - template = File.read("config/accelerator/thin_solaris_smf.erb") - buffer = ERB.new(template).result(binding) - put buffer, "#{shared_path}/#{application}-thin-smf.xml" - sudo "#{svccfg_bin} import #{shared_path}/#{application}-thin-smf.xml" - end - - desc "Delete Thin SMF config" - task :delete_thin_smf, :roles => :app do - accelerator.thin.disable - sudo "#{svccfg_bin} delete /network/thin/#{application}-production" - end - - desc "Show all SMF services" - task :svcs do - run "#{svcs_bin} -a" do |ch, st, data| - puts data - end - end - - desc "Shows all non-functional SMF services" - task :svcs_broken do - run "#{svcs_bin} -vx" do |ch, st, data| - puts data - end - end - - - namespace :thin do - - desc "Disable all Thin servers" - task :disable, :roles => :app do - # temporarily disable, until next reboot (-t) - sudo "#{svcadm_bin} disable -t #{service_list}" - end - - desc "Enable all Thin servers" - task :enable, :roles => :app do - # start the app with all recursive dependencies - sudo "#{svcadm_bin} enable -r #{service_list}" - end - - desc "Restart all Thin servers" - task :restart, :roles => :app do - # svcadm restart doesn't seem to work right, so we'll brute force it - disable - enable - end - - end # namespace thin - - -FILE : config/thin.yml --- - ---- -pid: tmp/pids/thin.pid -socket: /tmp/thin.sock -log: log/thin.log -max_conns: 1024 -timeout: 30 -chdir: /your/app/dir/rails/root -environment: production -max_persistent_conns: 512 -daemonize: true -servers: 3 - - -FILE : config/accelerator/thin_solaris_smf.erb --- -This is of course an example. It works for me, but YMMV - -You may need to change this line to match your environment and config: - exec='/opt/csw/bin/thin -C config/thin.yml --only <%= instance.to_s %> start' - - -CONTRIBUTE: - -If you see problems or enhancements for this approach please send me an email at glenn [at] rempe dot us. Sadly, I won't be able to provide support for this example as time and my limited Solaris admin skills won't allow. 
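The pieces in the deleted readme fit together as follows: config/thin.yml declares how many Thin servers to run, the ERB manifest expands one SMF service instance per server (i_0, i_1, ...), and each instance execs `thin -C config/thin.yml --only N start`. A minimal Ruby sketch of that expansion, assuming only the sample config/thin.yml shown above exists at that path; the binary path and the --only flag are copied from the erb exec line quoted just before this, and the whole snippet is illustrative, not part of the patch:

    require 'yaml'

    config_path = 'config/thin.yml'            # path taken from the sample config above
    settings    = YAML.load_file(config_path)  # expects the 'servers' key shown there

    settings['servers'].times do |instance|
      # one SMF instance (i_0, i_1, ...) per Thin server, selected with --only
      puts "/opt/csw/bin/thin -C #{config_path} --only #{instance} start"
    end

Run against the sample config (servers: 3) this would print three start commands, one per SMF service instance, which is exactly what lets SMF restart a single crashed Thin without touching the others.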
- -Cheers, - -Glenn Rempe -2008/03/20 diff --git a/vendor/gems/gems/thin-1.2.5/example/vlad.rake b/vendor/gems/gems/thin-1.2.5/example/vlad.rake deleted file mode 100644 index 51b4ae69..00000000 --- a/vendor/gems/gems/thin-1.2.5/example/vlad.rake +++ /dev/null @@ -1,64 +0,0 @@ -# $GEM_HOME/gems/vlad-1.2.0/lib/vlad/thin.rb -# Thin tasks for Vlad the Deployer -# By cnantais -require 'vlad' - -namespace :vlad do - ## - # Thin app server - - set :thin_address, "127.0.0.1" - set :thin_command, 'thin' - set(:thin_conf) { "#{shared_path}/thin_cluster.conf" } - set :thin_environment, "production" - set :thin_group, nil - set :thin_log_file, nil - set :thin_pid_file, nil - set :thin_port, nil - set :thin_socket, nil - set :thin_prefix, nil - set :thin_servers, 2 - set :thin_user, nil - - desc "Prepares application servers for deployment. thin -configuration is set via the thin_* variables.".cleanup - - remote_task :setup_app, :roles => :app do - - raise(ArgumentError, "Please provide either thin_socket or thin_port") if thin_port.nil? && thin_socket.nil? - - cmd = [ - "#{thin_command} config", - "-s #{thin_servers}", - ("-S #{thin_socket}" if thin_socket), - "-e #{thin_environment}", - "-a #{thin_address}", - "-c #{current_path}", - "-C #{thin_conf}", - ("-P #{thin_pid_file}" if thin_pid_file), - ("-l #{thin_log_file}" if thin_log_file), - ("--user #{thin_user}" if thin_user), - ("--group #{thin_group}" if thin_group), - ("--prefix #{thin_prefix}" if thin_prefix), - ("-p #{thin_port}" if thin_port), - ].compact.join ' ' - - run cmd - end - - def thin(cmd) # :nodoc: - "#{thin_command} #{cmd} -C #{thin_conf}" - end - - desc "Restart the app servers" - - remote_task :start_app, :roles => :app do - run thin("restart -s #{thin_servers}") - end - - desc "Stop the app servers" - - remote_task :stop_app, :roles => :app do - run thin("stop -s #{thin_servers}") - end -end diff --git a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/Makefile b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/Makefile deleted file mode 100644 index 83ac357d..00000000 --- a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/Makefile +++ /dev/null @@ -1,181 +0,0 @@ - -SHELL = /bin/sh - -#### Start of system configuration section. #### - -srcdir = . 
-topdir = /Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/include/ruby-1.9.1 -hdrdir = /Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/include/ruby-1.9.1 -arch_hdrdir = /Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/include/ruby-1.9.1/$(arch) -VPATH = $(srcdir):$(arch_hdrdir)/ruby:$(hdrdir)/ruby -prefix = $(DESTDIR)/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243 -exec_prefix = $(prefix) -vendorhdrdir = $(rubyhdrdir)/vendor_ruby -sitehdrdir = $(rubyhdrdir)/site_ruby -rubyhdrdir = $(includedir)/$(RUBY_INSTALL_NAME)-$(ruby_version) -vendordir = $(libdir)/$(RUBY_INSTALL_NAME)/vendor_ruby -sitedir = $(libdir)/$(RUBY_INSTALL_NAME)/site_ruby -mandir = $(datarootdir)/man -localedir = $(datarootdir)/locale -libdir = $(exec_prefix)/lib -psdir = $(docdir) -pdfdir = $(docdir) -dvidir = $(docdir) -htmldir = $(docdir) -infodir = $(datarootdir)/info -docdir = $(datarootdir)/doc/$(PACKAGE) -oldincludedir = $(DESTDIR)/usr/include -includedir = $(prefix)/include -localstatedir = $(prefix)/var -sharedstatedir = $(prefix)/com -sysconfdir = $(prefix)/etc -datadir = $(datarootdir) -datarootdir = $(prefix)/share -libexecdir = $(exec_prefix)/libexec -sbindir = $(exec_prefix)/sbin -bindir = $(exec_prefix)/bin -rubylibdir = $(libdir)/$(ruby_install_name)/$(ruby_version) -archdir = $(rubylibdir)/$(arch) -sitelibdir = $(sitedir)/$(ruby_version) -sitearchdir = $(sitelibdir)/$(sitearch) -vendorlibdir = $(vendordir)/$(ruby_version) -vendorarchdir = $(vendorlibdir)/$(sitearch) - -CC = gcc -CXX = g++ -LIBRUBY = $(LIBRUBY_SO) -LIBRUBY_A = lib$(RUBY_SO_NAME)-static.a -LIBRUBYARG_SHARED = -l$(RUBY_SO_NAME) -LIBRUBYARG_STATIC = -l$(RUBY_SO_NAME)-static -OUTFLAG = -o -COUTFLAG = -o - -RUBY_EXTCONF_H = -cflags = $(optflags) $(debugflags) $(warnflags) -optflags = -O2 -debugflags = -g -warnflags = -Wall -Wno-parentheses -CFLAGS = -fno-common -O3 -march=core2 -m64 -mmmx -msse4.1 -w -pipe -fomit-frame-pointer -mmacosx-version-min=10.6 $(cflags) -fno-common -pipe -fno-common -INCFLAGS = -I. -I$(arch_hdrdir) -I$(hdrdir)/ruby/backward -I$(hdrdir) -I$(srcdir) -DEFS = -CPPFLAGS = -I/Users/tdreyno/homebrew/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE $(DEFS) $(cppflags) -CXXFLAGS = $(CFLAGS) -O3 -march=core2 -m64 -mmmx -msse4.1 -w -pipe -fomit-frame-pointer -mmacosx-version-min=10.6 $(cxxflags) -ldflags = -L. -L/Users/tdreyno/homebrew/lib -L/usr/local/lib -dldflags = -archflag = -DLDFLAGS = $(ldflags) $(dldflags) $(archflag) -LDSHARED = cc -dynamic -bundle -undefined suppress -flat_namespace -LDSHAREDXX = $(LDSHARED) -AR = ar -EXEEXT = - -RUBY_INSTALL_NAME = ruby -RUBY_SO_NAME = ruby -arch = i386-darwin10.0.0 -sitearch = i386-darwin10.0.0 -ruby_version = 1.9.1 -ruby = /Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/bin/ruby -RUBY = $(ruby) -RM = rm -f -RM_RF = $(RUBY) -run -e rm -- -rf -RMDIRS = $(RUBY) -run -e rmdir -- -p -MAKEDIRS = mkdir -p -INSTALL = /usr/bin/install -c -INSTALL_PROG = $(INSTALL) -m 0755 -INSTALL_DATA = $(INSTALL) -m 644 -COPY = cp - -#### End of system configuration section. #### - -preload = - -libpath = . $(libdir) -LIBPATH = -L. 
-L$(libdir) -DEFFILE = - -CLEANFILES = mkmf.log -DISTCLEANFILES = -DISTCLEANDIRS = - -extout = -extout_prefix = -target_prefix = -LOCAL_LIBS = -LIBS = $(LIBRUBYARG_SHARED) -lc -lpthread -ldl -lobjc -SRCS = parser.c thin.c -OBJS = parser.o thin.o -TARGET = thin_parser -DLLIB = $(TARGET).bundle -EXTSTATIC = -STATIC_LIB = - -BINDIR = $(bindir) -RUBYCOMMONDIR = $(sitedir)$(target_prefix) -RUBYLIBDIR = /Users/tdreyno/Dropbox/Sites/middleman/vendor/gems/gems/thin-1.2.5/lib$(target_prefix) -RUBYARCHDIR = /Users/tdreyno/Dropbox/Sites/middleman/vendor/gems/gems/thin-1.2.5/lib$(target_prefix) -HDRDIR = $(rubyhdrdir)/ruby$(target_prefix) -ARCHHDRDIR = $(rubyhdrdir)/$(arch)/ruby$(target_prefix) - -TARGET_SO = $(DLLIB) -CLEANLIBS = $(TARGET).bundle -CLEANOBJS = *.o *.bak - -all: $(DLLIB) -static: $(STATIC_LIB) - -clean-rb-default:: -clean-rb:: -clean-so:: -clean: clean-so clean-rb-default clean-rb - @-$(RM) $(CLEANLIBS) $(CLEANOBJS) $(CLEANFILES) - -distclean-rb-default:: -distclean-rb:: -distclean-so:: -distclean: clean distclean-so distclean-rb-default distclean-rb - @-$(RM) Makefile $(RUBY_EXTCONF_H) conftest.* mkmf.log - @-$(RM) core ruby$(EXEEXT) *~ $(DISTCLEANFILES) - @-$(RMDIRS) $(DISTCLEANDIRS) - -realclean: distclean -install: install-so install-rb - -install-so: $(RUBYARCHDIR) -install-so: $(RUBYARCHDIR)/$(DLLIB) -$(RUBYARCHDIR)/$(DLLIB): $(DLLIB) - $(INSTALL_PROG) $(DLLIB) $(RUBYARCHDIR) -install-rb: pre-install-rb install-rb-default -install-rb-default: pre-install-rb-default -pre-install-rb: Makefile -pre-install-rb-default: Makefile -$(RUBYARCHDIR): - $(MAKEDIRS) $@ - -site-install: site-install-so site-install-rb -site-install-so: install-so -site-install-rb: install-rb - -.SUFFIXES: .c .m .cc .cxx .cpp .C .o - -.cc.o: - $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $< - -.cxx.o: - $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $< - -.cpp.o: - $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $< - -.C.o: - $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) $(COUTFLAG)$@ -c $< - -.c.o: - $(CC) $(INCFLAGS) $(CPPFLAGS) $(CFLAGS) $(COUTFLAG)$@ -c $< - -$(DLLIB): $(OBJS) Makefile - @-$(RM) $(@) - $(LDSHARED) -o $@ $(OBJS) $(LIBPATH) $(DLDFLAGS) $(LOCAL_LIBS) $(LIBS) - - - -$(OBJS): $(hdrdir)/ruby.h $(hdrdir)/ruby/defines.h $(arch_hdrdir)/ruby/config.h diff --git a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/common.rl b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/common.rl deleted file mode 100644 index 46fe2293..00000000 --- a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/common.rl +++ /dev/null @@ -1,55 +0,0 @@ -%%{ - - machine http_parser_common; - -#### HTTP PROTOCOL GRAMMAR -# line endings - CRLF = "\r\n"; - -# character types - CTL = (cntrl | 127); - safe = ("$" | "-" | "_" | "."); - extra = ("!" | "*" | "'" | "(" | ")" | ","); - reserved = (";" | "/" | "?" | ":" | "@" | "&" | "=" | "+"); - sorta_safe = ("\"" | "<" | ">"); - unsafe = (CTL | " " | "#" | "%" | sorta_safe); - national = any -- (alpha | digit | reserved | extra | safe | unsafe); - unreserved = (alpha | digit | safe | extra | national); - escape = ("%" xdigit xdigit); - uchar = (unreserved | escape | sorta_safe); - pchar = (uchar | ":" | "@" | "&" | "=" | "+"); - tspecials = ("(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\\" | "\"" | "/" | "[" | "]" | "?" | "=" | "{" | "}" | " " | "\t"); - -# elements - token = (ascii -- (CTL | tspecials)); - -# URI schemes and absolute paths - scheme = ( alpha | digit | "+" | "-" | "." 
)* ; - absolute_uri = (scheme ":" (uchar | reserved )*); - - path = ( pchar+ ( "/" pchar* )* ) ; - query = ( uchar | reserved )* %query_string ; - param = ( pchar | "/" )* ; - params = ( param ( ";" param )* ) ; - rel_path = ( path? %request_path (";" params)? ) ("?" %start_query query)?; - absolute_path = ( "/"+ rel_path ); - - Request_URI = ( "*" | absolute_uri | absolute_path ) >mark %request_uri; - Fragment = ( uchar | reserved )* >mark %fragment; - Method = ( upper | digit | safe ){1,20} >mark %request_method; - - http_number = ( digit+ "." digit+ ) ; - HTTP_Version = ( "HTTP/" http_number ) >mark %http_version ; - Request_Line = ( Method " " Request_URI ("#" Fragment){0,1} " " HTTP_Version CRLF ) ; - - field_name = ( token -- ":" )+ >start_field %write_field; - - field_value = any* >start_value %write_value; - - message_header = field_name ":" " "* field_value :> CRLF; - - Request = Request_Line ( message_header )* ( CRLF @done ); - -main := Request; - -}%% diff --git a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/ext_help.h b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/ext_help.h deleted file mode 100644 index 8b4d754c..00000000 --- a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/ext_help.h +++ /dev/null @@ -1,14 +0,0 @@ -#ifndef ext_help_h -#define ext_help_h - -#define RAISE_NOT_NULL(T) if(T == NULL) rb_raise(rb_eArgError, "NULL found for " # T " when shouldn't be."); -#define DATA_GET(from,type,name) Data_Get_Struct(from,type,name); RAISE_NOT_NULL(name); -#define REQUIRE_TYPE(V, T) if(TYPE(V) != T) rb_raise(rb_eTypeError, "Wrong argument type for " # V " required " # T); - -#ifdef DEBUG -#define TRACE() fprintf(stderr, "> %s:%d:%s\n", __FILE__, __LINE__, __FUNCTION__) -#else -#define TRACE() -#endif - -#endif diff --git a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/extconf.rb b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/extconf.rb deleted file mode 100644 index f83a75ac..00000000 --- a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/extconf.rb +++ /dev/null @@ -1,6 +0,0 @@ -require 'mkmf' - -dir_config("thin_parser") -have_library("c", "main") - -create_makefile("thin_parser") diff --git a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/mkmf.log b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/mkmf.log deleted file mode 100644 index 9a3a3657..00000000 --- a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/mkmf.log +++ /dev/null @@ -1,22 +0,0 @@ -have_library: checking for main() in -lc... -------------------- yes - -"gcc -o conftest -I/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/include/ruby-1.9.1/i386-darwin10.0.0 -I/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/include/ruby-1.9.1/ruby/backward -I/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/include/ruby-1.9.1 -I. -I/Users/tdreyno/homebrew/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -I/Users/tdreyno/homebrew/include -O3 -march=core2 -m64 -mmmx -msse4.1 -w -pipe -fomit-frame-pointer -mmacosx-version-min=10.6 -O2 -g -Wall -Wno-parentheses -fno-common -pipe -fno-common conftest.c -L. -L/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/lib -L. -L/Users/tdreyno/homebrew/lib -L/usr/local/lib -lruby-static -lpthread -ldl -lobjc " -checked program was: -/* begin */ -1: #include "ruby.h" -2: -3: int main() {return 0;} -/* end */ - -"gcc -o conftest -I/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/include/ruby-1.9.1/i386-darwin10.0.0 -I/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/include/ruby-1.9.1/ruby/backward -I/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/include/ruby-1.9.1 -I. 
-I/Users/tdreyno/homebrew/include -D_XOPEN_SOURCE -D_DARWIN_C_SOURCE -I/Users/tdreyno/homebrew/include -O3 -march=core2 -m64 -mmmx -msse4.1 -w -pipe -fomit-frame-pointer -mmacosx-version-min=10.6 -O2 -g -Wall -Wno-parentheses -fno-common -pipe -fno-common conftest.c -L. -L/Users/tdreyno/homebrew/Cellar/ruby/1.9.1-p243/lib -L. -L/Users/tdreyno/homebrew/lib -L/usr/local/lib -lruby-static -lc -lpthread -ldl -lobjc " -checked program was: -/* begin */ -1: #include "ruby.h" -2: -3: /*top*/ -4: int main() {return 0;} -5: int t() { void ((*volatile p)()); p = (void ((*)()))main; return 0; } -/* end */ - --------------------- - diff --git a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.c b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.c deleted file mode 100644 index 60f6720f..00000000 --- a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.c +++ /dev/null @@ -1,452 +0,0 @@ -#line 1 "parser.rl" -/** - * Copyright (c) 2005 Zed A. Shaw - * You can redistribute it and/or modify it under the same terms as Ruby. - */ -#include "parser.h" -#include -#include -#include -#include -#include - -#define LEN(AT, FPC) (FPC - buffer - parser->AT) -#define MARK(M,FPC) (parser->M = (FPC) - buffer) -#define PTR_TO(F) (buffer + parser->F) - -/** Machine **/ - -#line 81 "parser.rl" - - -/** Data **/ - -#line 25 "parser.c" -static const char _http_parser_actions[] = { - 0, 1, 0, 1, 1, 1, 2, 1, - 3, 1, 4, 1, 5, 1, 6, 1, - 7, 1, 8, 1, 10, 1, 11, 1, - 12, 2, 0, 7, 2, 3, 4, 2, - 9, 6, 2, 11, 6, 3, 8, 9, - 6 -}; - -static const short _http_parser_key_offsets[] = { - 0, 0, 8, 17, 27, 29, 30, 31, - 32, 33, 34, 36, 39, 41, 44, 45, - 61, 62, 78, 80, 81, 87, 93, 99, - 105, 115, 121, 127, 133, 141, 147, 153, - 160, 166, 172, 178, 184, 190, 196, 205, - 214, 223, 232, 241, 250, 259, 268, 277, - 286, 295, 304, 313, 322, 331, 340, 349, - 358, 359 -}; - -static const char _http_parser_trans_keys[] = { - 36, 95, 45, 46, 48, 57, 65, 90, - 32, 36, 95, 45, 46, 48, 57, 65, - 90, 42, 43, 47, 58, 45, 57, 65, - 90, 97, 122, 32, 35, 72, 84, 84, - 80, 47, 48, 57, 46, 48, 57, 48, - 57, 13, 48, 57, 10, 13, 33, 124, - 126, 35, 39, 42, 43, 45, 46, 48, - 57, 65, 90, 94, 122, 10, 33, 58, - 124, 126, 35, 39, 42, 43, 45, 46, - 48, 57, 65, 90, 94, 122, 13, 32, - 13, 32, 35, 37, 127, 0, 31, 32, - 35, 37, 127, 0, 31, 48, 57, 65, - 70, 97, 102, 48, 57, 65, 70, 97, - 102, 43, 58, 45, 46, 48, 57, 65, - 90, 97, 122, 32, 35, 37, 127, 0, - 31, 48, 57, 65, 70, 97, 102, 48, - 57, 65, 70, 97, 102, 32, 35, 37, - 59, 63, 127, 0, 31, 48, 57, 65, - 70, 97, 102, 48, 57, 65, 70, 97, - 102, 32, 35, 37, 63, 127, 0, 31, - 48, 57, 65, 70, 97, 102, 48, 57, - 65, 70, 97, 102, 32, 35, 37, 127, - 0, 31, 32, 35, 37, 127, 0, 31, - 48, 57, 65, 70, 97, 102, 48, 57, - 65, 70, 97, 102, 32, 36, 95, 45, - 46, 48, 57, 65, 90, 32, 36, 95, - 45, 46, 48, 57, 65, 90, 32, 36, - 95, 45, 46, 48, 57, 65, 90, 32, - 36, 95, 45, 46, 48, 57, 65, 90, - 32, 36, 95, 45, 46, 48, 57, 65, - 90, 32, 36, 95, 45, 46, 48, 57, - 65, 90, 32, 36, 95, 45, 46, 48, - 57, 65, 90, 32, 36, 95, 45, 46, - 48, 57, 65, 90, 32, 36, 95, 45, - 46, 48, 57, 65, 90, 32, 36, 95, - 45, 46, 48, 57, 65, 90, 32, 36, - 95, 45, 46, 48, 57, 65, 90, 32, - 36, 95, 45, 46, 48, 57, 65, 90, - 32, 36, 95, 45, 46, 48, 57, 65, - 90, 32, 36, 95, 45, 46, 48, 57, - 65, 90, 32, 36, 95, 45, 46, 48, - 57, 65, 90, 32, 36, 95, 45, 46, - 48, 57, 65, 90, 32, 36, 95, 45, - 46, 48, 57, 65, 90, 32, 36, 95, - 45, 46, 48, 57, 65, 90, 32, 0 -}; - -static const char _http_parser_single_lengths[] = { - 0, 2, 3, 4, 2, 1, 1, 1, - 1, 1, 0, 1, 0, 1, 
1, 4, - 1, 4, 2, 1, 4, 4, 0, 0, - 2, 4, 0, 0, 6, 0, 0, 5, - 0, 0, 4, 4, 0, 0, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, - 1, 0 -}; - -static const char _http_parser_range_lengths[] = { - 0, 3, 3, 3, 0, 0, 0, 0, - 0, 0, 1, 1, 1, 1, 0, 6, - 0, 6, 0, 0, 1, 1, 3, 3, - 4, 1, 3, 3, 1, 3, 3, 1, - 3, 3, 1, 1, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, - 0, 0 -}; - -static const short _http_parser_index_offsets[] = { - 0, 0, 6, 13, 21, 24, 26, 28, - 30, 32, 34, 36, 39, 41, 44, 46, - 57, 59, 70, 73, 75, 81, 87, 91, - 95, 102, 108, 112, 116, 124, 128, 132, - 139, 143, 147, 153, 159, 163, 167, 174, - 181, 188, 195, 202, 209, 216, 223, 230, - 237, 244, 251, 258, 265, 272, 279, 286, - 293, 295 -}; - -static const char _http_parser_indicies[] = { - 0, 0, 0, 0, 0, 1, 2, 3, - 3, 3, 3, 3, 1, 4, 5, 6, - 7, 5, 5, 5, 1, 8, 9, 1, - 10, 1, 11, 1, 12, 1, 13, 1, - 14, 1, 15, 1, 16, 15, 1, 17, - 1, 18, 17, 1, 19, 1, 20, 21, - 21, 21, 21, 21, 21, 21, 21, 21, - 1, 22, 1, 23, 24, 23, 23, 23, - 23, 23, 23, 23, 23, 1, 26, 27, - 25, 29, 28, 31, 1, 32, 1, 1, - 30, 34, 1, 35, 1, 1, 33, 36, - 36, 36, 1, 33, 33, 33, 1, 37, - 38, 37, 37, 37, 37, 1, 8, 9, - 39, 1, 1, 38, 40, 40, 40, 1, - 38, 38, 38, 1, 42, 43, 44, 45, - 46, 1, 1, 41, 47, 47, 47, 1, - 41, 41, 41, 1, 8, 9, 49, 50, - 1, 1, 48, 51, 51, 51, 1, 48, - 48, 48, 1, 53, 54, 55, 1, 1, - 52, 57, 58, 59, 1, 1, 56, 60, - 60, 60, 1, 56, 56, 56, 1, 2, - 61, 61, 61, 61, 61, 1, 2, 62, - 62, 62, 62, 62, 1, 2, 63, 63, - 63, 63, 63, 1, 2, 64, 64, 64, - 64, 64, 1, 2, 65, 65, 65, 65, - 65, 1, 2, 66, 66, 66, 66, 66, - 1, 2, 67, 67, 67, 67, 67, 1, - 2, 68, 68, 68, 68, 68, 1, 2, - 69, 69, 69, 69, 69, 1, 2, 70, - 70, 70, 70, 70, 1, 2, 71, 71, - 71, 71, 71, 1, 2, 72, 72, 72, - 72, 72, 1, 2, 73, 73, 73, 73, - 73, 1, 2, 74, 74, 74, 74, 74, - 1, 2, 75, 75, 75, 75, 75, 1, - 2, 76, 76, 76, 76, 76, 1, 2, - 77, 77, 77, 77, 77, 1, 2, 78, - 78, 78, 78, 78, 1, 2, 1, 1, - 0 -}; - -static const char _http_parser_trans_targs[] = { - 2, 0, 3, 38, 4, 24, 28, 25, - 5, 20, 6, 7, 8, 9, 10, 11, - 12, 13, 14, 15, 16, 17, 57, 17, - 18, 19, 14, 18, 19, 14, 21, 5, - 22, 21, 5, 22, 23, 24, 25, 26, - 27, 28, 5, 20, 29, 31, 34, 30, - 31, 32, 34, 33, 35, 5, 20, 36, - 35, 5, 20, 36, 37, 39, 40, 41, - 42, 43, 44, 45, 46, 47, 48, 49, - 50, 51, 52, 53, 54, 55, 56 -}; - -static const char _http_parser_trans_actions[] = { - 1, 0, 11, 0, 1, 1, 1, 1, - 13, 13, 1, 0, 0, 0, 0, 0, - 0, 0, 19, 0, 0, 3, 23, 0, - 5, 7, 28, 7, 0, 9, 1, 25, - 1, 0, 15, 0, 0, 0, 0, 0, - 0, 0, 34, 34, 0, 21, 21, 0, - 0, 0, 0, 0, 17, 37, 37, 17, - 0, 31, 31, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0 -}; - -static const int http_parser_start = 1; -static const int http_parser_first_final = 57; -static const int http_parser_error = 0; - -static const int http_parser_en_main = 1; - -#line 85 "parser.rl" - -int thin_http_parser_init(http_parser *parser) { - int cs = 0; - -#line 205 "parser.c" - { - cs = http_parser_start; - } -#line 89 "parser.rl" - parser->cs = cs; - parser->body_start = 0; - parser->content_len = 0; - parser->mark = 0; - parser->nread = 0; - parser->field_len = 0; - parser->field_start = 0; - - return(1); -} - - -/** exec **/ -size_t thin_http_parser_execute(http_parser *parser, const char *buffer, size_t len, size_t off) { - const char *p, *pe; - int cs = parser->cs; - - assert(off <= len && "offset past end of buffer"); - - p = buffer+off; - pe = buffer+len; - - assert(*pe == '\0' && "pointer does not end on NUL"); - assert(pe - p == len - off && "pointers aren't same 
distance"); - - - -#line 237 "parser.c" - { - int _klen; - unsigned int _trans; - const char *_acts; - unsigned int _nacts; - const char *_keys; - - if ( p == pe ) - goto _test_eof; - if ( cs == 0 ) - goto _out; -_resume: - _keys = _http_parser_trans_keys + _http_parser_key_offsets[cs]; - _trans = _http_parser_index_offsets[cs]; - - _klen = _http_parser_single_lengths[cs]; - if ( _klen > 0 ) { - const char *_lower = _keys; - const char *_mid; - const char *_upper = _keys + _klen - 1; - while (1) { - if ( _upper < _lower ) - break; - - _mid = _lower + ((_upper-_lower) >> 1); - if ( (*p) < *_mid ) - _upper = _mid - 1; - else if ( (*p) > *_mid ) - _lower = _mid + 1; - else { - _trans += (_mid - _keys); - goto _match; - } - } - _keys += _klen; - _trans += _klen; - } - - _klen = _http_parser_range_lengths[cs]; - if ( _klen > 0 ) { - const char *_lower = _keys; - const char *_mid; - const char *_upper = _keys + (_klen<<1) - 2; - while (1) { - if ( _upper < _lower ) - break; - - _mid = _lower + (((_upper-_lower) >> 1) & ~1); - if ( (*p) < _mid[0] ) - _upper = _mid - 2; - else if ( (*p) > _mid[1] ) - _lower = _mid + 2; - else { - _trans += ((_mid - _keys)>>1); - goto _match; - } - } - _trans += _klen; - } - -_match: - _trans = _http_parser_indicies[_trans]; - cs = _http_parser_trans_targs[_trans]; - - if ( _http_parser_trans_actions[_trans] == 0 ) - goto _again; - - _acts = _http_parser_actions + _http_parser_trans_actions[_trans]; - _nacts = (unsigned int) *_acts++; - while ( _nacts-- > 0 ) - { - switch ( *_acts++ ) - { - case 0: -#line 22 "parser.rl" - {MARK(mark, p); } - break; - case 1: -#line 25 "parser.rl" - { MARK(field_start, p); } - break; - case 2: -#line 26 "parser.rl" - { - parser->field_len = LEN(field_start, p); - } - break; - case 3: -#line 30 "parser.rl" - { MARK(mark, p); } - break; - case 4: -#line 31 "parser.rl" - { - if (parser->http_field != NULL) { - parser->http_field(parser->data, PTR_TO(field_start), parser->field_len, PTR_TO(mark), LEN(mark, p)); - } - } - break; - case 5: -#line 36 "parser.rl" - { - if (parser->request_method != NULL) { - parser->request_method(parser->data, PTR_TO(mark), LEN(mark, p)); - } - } - break; - case 6: -#line 41 "parser.rl" - { - if (parser->request_uri != NULL) { - parser->request_uri(parser->data, PTR_TO(mark), LEN(mark, p)); - } - } - break; - case 7: -#line 46 "parser.rl" - { - if (parser->fragment != NULL) { - parser->fragment(parser->data, PTR_TO(mark), LEN(mark, p)); - } - } - break; - case 8: -#line 52 "parser.rl" - {MARK(query_start, p); } - break; - case 9: -#line 53 "parser.rl" - { - if (parser->query_string != NULL) { - parser->query_string(parser->data, PTR_TO(query_start), LEN(query_start, p)); - } - } - break; - case 10: -#line 59 "parser.rl" - { - if (parser->http_version != NULL) { - parser->http_version(parser->data, PTR_TO(mark), LEN(mark, p)); - } - } - break; - case 11: -#line 65 "parser.rl" - { - if (parser->request_path != NULL) { - parser->request_path(parser->data, PTR_TO(mark), LEN(mark,p)); - } - } - break; - case 12: -#line 71 "parser.rl" - { - parser->body_start = p - buffer + 1; - if (parser->header_done != NULL) { - parser->header_done(parser->data, p + 1, pe - p - 1); - } - {p++; goto _out; } - } - break; -#line 399 "parser.c" - } - } - -_again: - if ( cs == 0 ) - goto _out; - if ( ++p != pe ) - goto _resume; - _test_eof: {} - _out: {} - } -#line 116 "parser.rl" - - parser->cs = cs; - parser->nread += p - (buffer + off); - - assert(p <= pe && "buffer overflow after parsing execute"); - assert(parser->nread <= 
len && "nread longer than length"); - assert(parser->body_start <= len && "body starts after buffer end"); - assert(parser->mark < len && "mark is after buffer end"); - assert(parser->field_len <= len && "field has length longer than whole buffer"); - assert(parser->field_start < len && "field starts after buffer end"); - - if(parser->body_start) { - /* final \r\n combo encountered so stop right here */ - parser->nread++; - } - - return(parser->nread); -} - -int thin_http_parser_finish(http_parser *parser) -{ - int cs = parser->cs; - - - parser->cs = cs; - - if (thin_http_parser_has_error(parser) ) { - return -1; - } else if (thin_http_parser_is_finished(parser) ) { - return 1; - } else { - return 0; - } -} - -int thin_http_parser_has_error(http_parser *parser) { - return parser->cs == http_parser_error; -} - -int thin_http_parser_is_finished(http_parser *parser) { - return parser->cs == http_parser_first_final; -} diff --git a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.h b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.h deleted file mode 100644 index 8d074bab..00000000 --- a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.h +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright (c) 2005 Zed A. Shaw - * You can redistribute it and/or modify it under the same terms as Ruby. - */ - -#ifndef http11_parser_h -#define http11_parser_h - -#include - -#if defined(_WIN32) -#include -#endif - -typedef void (*element_cb)(void *data, const char *at, size_t length); -typedef void (*field_cb)(void *data, const char *field, size_t flen, const char *value, size_t vlen); - -typedef struct http_parser { - int cs; - size_t body_start; - int content_len; - size_t nread; - size_t mark; - size_t field_start; - size_t field_len; - size_t query_start; - - void *data; - - field_cb http_field; - element_cb request_method; - element_cb request_uri; - element_cb fragment; - element_cb request_path; - element_cb query_string; - element_cb http_version; - element_cb header_done; - -} http_parser; - -int http_parser_init(http_parser *parser); -int http_parser_finish(http_parser *parser); -size_t http_parser_execute(http_parser *parser, const char *data, size_t len, size_t off); -int http_parser_has_error(http_parser *parser); -int http_parser_is_finished(http_parser *parser); - -#define http_parser_nread(parser) (parser)->nread - -#endif diff --git a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.o b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.o deleted file mode 100644 index 4fddcde9..00000000 Binary files a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.o and /dev/null differ diff --git a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.rl b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.rl deleted file mode 100644 index d32f81d9..00000000 --- a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/parser.rl +++ /dev/null @@ -1,157 +0,0 @@ -/** - * Copyright (c) 2005 Zed A. Shaw - * You can redistribute it and/or modify it under the same terms as Ruby. 
- */ -#include "parser.h" -#include -#include -#include -#include -#include - -#define LEN(AT, FPC) (FPC - buffer - parser->AT) -#define MARK(M,FPC) (parser->M = (FPC) - buffer) -#define PTR_TO(F) (buffer + parser->F) - -/** Machine **/ - -%%{ - - machine http_parser; - - action mark {MARK(mark, fpc); } - - - action start_field { MARK(field_start, fpc); } - action write_field { - parser->field_len = LEN(field_start, fpc); - } - - action start_value { MARK(mark, fpc); } - action write_value { - if (parser->http_field != NULL) { - parser->http_field(parser->data, PTR_TO(field_start), parser->field_len, PTR_TO(mark), LEN(mark, fpc)); - } - } - action request_method { - if (parser->request_method != NULL) { - parser->request_method(parser->data, PTR_TO(mark), LEN(mark, fpc)); - } - } - action request_uri { - if (parser->request_uri != NULL) { - parser->request_uri(parser->data, PTR_TO(mark), LEN(mark, fpc)); - } - } - action fragment { - if (parser->fragment != NULL) { - parser->fragment(parser->data, PTR_TO(mark), LEN(mark, fpc)); - } - } - - action start_query {MARK(query_start, fpc); } - action query_string { - if (parser->query_string != NULL) { - parser->query_string(parser->data, PTR_TO(query_start), LEN(query_start, fpc)); - } - } - - action http_version { - if (parser->http_version != NULL) { - parser->http_version(parser->data, PTR_TO(mark), LEN(mark, fpc)); - } - } - - action request_path { - if (parser->request_path != NULL) { - parser->request_path(parser->data, PTR_TO(mark), LEN(mark,fpc)); - } - } - - action done { - parser->body_start = fpc - buffer + 1; - if (parser->header_done != NULL) { - parser->header_done(parser->data, fpc + 1, pe - fpc - 1); - } - fbreak; - } - - include http_parser_common "common.rl"; - -}%% - -/** Data **/ -%% write data; - -int thin_http_parser_init(http_parser *parser) { - int cs = 0; - %% write init; - parser->cs = cs; - parser->body_start = 0; - parser->content_len = 0; - parser->mark = 0; - parser->nread = 0; - parser->field_len = 0; - parser->field_start = 0; - - return(1); -} - - -/** exec **/ -size_t thin_http_parser_execute(http_parser *parser, const char *buffer, size_t len, size_t off) { - const char *p, *pe; - int cs = parser->cs; - - assert(off <= len && "offset past end of buffer"); - - p = buffer+off; - pe = buffer+len; - - assert(*pe == '\0' && "pointer does not end on NUL"); - assert(pe - p == len - off && "pointers aren't same distance"); - - - %% write exec; - - parser->cs = cs; - parser->nread += p - (buffer + off); - - assert(p <= pe && "buffer overflow after parsing execute"); - assert(parser->nread <= len && "nread longer than length"); - assert(parser->body_start <= len && "body starts after buffer end"); - assert(parser->mark < len && "mark is after buffer end"); - assert(parser->field_len <= len && "field has length longer than whole buffer"); - assert(parser->field_start < len && "field starts after buffer end"); - - if(parser->body_start) { - /* final \r\n combo encountered so stop right here */ - parser->nread++; - } - - return(parser->nread); -} - -int thin_http_parser_finish(http_parser *parser) -{ - int cs = parser->cs; - - - parser->cs = cs; - - if (thin_http_parser_has_error(parser) ) { - return -1; - } else if (thin_http_parser_is_finished(parser) ) { - return 1; - } else { - return 0; - } -} - -int thin_http_parser_has_error(http_parser *parser) { - return parser->cs == http_parser_error; -} - -int thin_http_parser_is_finished(http_parser *parser) { - return parser->cs == http_parser_first_final; -} diff --git 
a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/thin.c b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/thin.c deleted file mode 100644 index 04754bc9..00000000 --- a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/thin.c +++ /dev/null @@ -1,433 +0,0 @@ -/** - * Mongrel Parser adpated to Thin and to play more nicely with Rack specs. - * - * Orignal version Copyright (c) 2005 Zed A. Shaw - * You can redistribute it and/or modify it under the same terms as Ruby. - */ -#include "ruby.h" -#include "ext_help.h" -#include -#include -#include "parser.h" -#include - -static VALUE mThin; -static VALUE cHttpParser; -static VALUE eHttpParserError; - -static VALUE global_empty; -static VALUE global_http_prefix; -static VALUE global_request_method; -static VALUE global_request_uri; -static VALUE global_fragment; -static VALUE global_query_string; -static VALUE global_http_version; -static VALUE global_content_length; -static VALUE global_http_content_length; -static VALUE global_request_path; -static VALUE global_content_type; -static VALUE global_http_content_type; -static VALUE global_gateway_interface; -static VALUE global_gateway_interface_value; -static VALUE global_server_name; -static VALUE global_server_port; -static VALUE global_server_protocol; -static VALUE global_server_protocol_value; -static VALUE global_http_host; -static VALUE global_port_80; -static VALUE global_http_body; -static VALUE global_url_scheme; -static VALUE global_url_scheme_value; -static VALUE global_script_name; -static VALUE global_path_info; - -#define TRIE_INCREASE 30 - -/** Defines common length and error messages for input length validation. */ -#define DEF_MAX_LENGTH(N,length) const size_t MAX_##N##_LENGTH = length; const char *MAX_##N##_LENGTH_ERR = "HTTP element " # N " is longer than the " # length " allowed length." - -/** Validates the max length of given input and throws an HttpParserError exception if over. */ -#define VALIDATE_MAX_LENGTH(len, N) if(len > MAX_##N##_LENGTH) { rb_raise(eHttpParserError, MAX_##N##_LENGTH_ERR); } - -/** Defines global strings in the init method. 
*/ -#define DEF_GLOBAL(N, val) global_##N = rb_obj_freeze(rb_str_new2(val)); rb_global_variable(&global_##N) - -/* for compatibility with Ruby 1.8.5, which doesn't declare RSTRING_PTR */ -#ifndef RSTRING_PTR -#define RSTRING_PTR(s) (RSTRING(s)->ptr) -#endif - -/* for compatibility with Ruby 1.8.5, which doesn't declare RSTRING_LEN */ -#ifndef RSTRING_LEN -#define RSTRING_LEN(s) (RSTRING(s)->len) -#endif - -/* Defines the maximum allowed lengths for various input elements.*/ -DEF_MAX_LENGTH(FIELD_NAME, 256); -DEF_MAX_LENGTH(FIELD_VALUE, 80 * 1024); -DEF_MAX_LENGTH(REQUEST_URI, 1024 * 12); -DEF_MAX_LENGTH(FRAGMENT, 1024); /* Don't know if this length is specified somewhere or not */ -DEF_MAX_LENGTH(REQUEST_PATH, 1024); -DEF_MAX_LENGTH(QUERY_STRING, (1024 * 10)); -DEF_MAX_LENGTH(HEADER, (1024 * (80 + 32))); - - -static void http_field(void *data, const char *field, size_t flen, const char *value, size_t vlen) -{ - char *ch, *end; - VALUE req = (VALUE)data; - VALUE v = Qnil; - VALUE f = Qnil; - - VALIDATE_MAX_LENGTH(flen, FIELD_NAME); - VALIDATE_MAX_LENGTH(vlen, FIELD_VALUE); - - v = rb_str_new(value, vlen); - f = rb_str_dup(global_http_prefix); - f = rb_str_buf_cat(f, field, flen); - - for(ch = RSTRING_PTR(f) + RSTRING_LEN(global_http_prefix), end = RSTRING_PTR(f) + RSTRING_LEN(f); ch < end; ch++) { - if(*ch == '-') { - *ch = '_'; - } else { - *ch = toupper(*ch); - } - } - - rb_hash_aset(req, f, v); -} - -static void request_method(void *data, const char *at, size_t length) -{ - VALUE req = (VALUE)data; - VALUE val = Qnil; - - val = rb_str_new(at, length); - rb_hash_aset(req, global_request_method, val); -} - -static void request_uri(void *data, const char *at, size_t length) -{ - VALUE req = (VALUE)data; - VALUE val = Qnil; - - VALIDATE_MAX_LENGTH(length, REQUEST_URI); - - val = rb_str_new(at, length); - rb_hash_aset(req, global_request_uri, val); -} - -static void fragment(void *data, const char *at, size_t length) -{ - VALUE req = (VALUE)data; - VALUE val = Qnil; - - VALIDATE_MAX_LENGTH(length, FRAGMENT); - - val = rb_str_new(at, length); - rb_hash_aset(req, global_fragment, val); -} - -static void request_path(void *data, const char *at, size_t length) -{ - VALUE req = (VALUE)data; - VALUE val = Qnil; - - VALIDATE_MAX_LENGTH(length, REQUEST_PATH); - - val = rb_str_new(at, length); - rb_hash_aset(req, global_request_path, val); - rb_hash_aset(req, global_path_info, val); -} - -static void query_string(void *data, const char *at, size_t length) -{ - VALUE req = (VALUE)data; - VALUE val = Qnil; - - VALIDATE_MAX_LENGTH(length, QUERY_STRING); - - val = rb_str_new(at, length); - rb_hash_aset(req, global_query_string, val); -} - -static void http_version(void *data, const char *at, size_t length) -{ - VALUE req = (VALUE)data; - VALUE val = rb_str_new(at, length); - rb_hash_aset(req, global_http_version, val); -} - -/** Finalizes the request header to have a bunch of stuff that's - needed. 
*/ - -static void header_done(void *data, const char *at, size_t length) -{ - VALUE req = (VALUE)data; - VALUE temp = Qnil; - VALUE ctype = Qnil; - VALUE clen = Qnil; - VALUE body = Qnil; - char *colon = NULL; - - clen = rb_hash_aref(req, global_http_content_length); - if(clen != Qnil) { - rb_hash_aset(req, global_content_length, clen); - rb_hash_delete(req, global_http_content_length); - } - - ctype = rb_hash_aref(req, global_http_content_type); - if(ctype != Qnil) { - rb_hash_aset(req, global_content_type, ctype); - rb_hash_delete(req, global_http_content_type); - } - - rb_hash_aset(req, global_gateway_interface, global_gateway_interface_value); - if((temp = rb_hash_aref(req, global_http_host)) != Qnil) { - /* ruby better close strings off with a '\0' dammit */ - colon = strchr(RSTRING_PTR(temp), ':'); - if(colon != NULL) { - rb_hash_aset(req, global_server_name, rb_str_substr(temp, 0, colon - RSTRING_PTR(temp))); - rb_hash_aset(req, global_server_port, - rb_str_substr(temp, colon - RSTRING_PTR(temp)+1, - RSTRING_LEN(temp))); - } else { - rb_hash_aset(req, global_server_name, temp); - rb_hash_aset(req, global_server_port, global_port_80); - } - } - - /* grab the initial body and stuff it into the hash */ - if(length > 0) { - body = rb_hash_aref(req, global_http_body); - rb_io_write(body, rb_str_new(at, length)); - } - - /* according to Rack specs, query string must be empty string if none */ - if (rb_hash_aref(req, global_query_string) == Qnil) { - rb_hash_aset(req, global_query_string, global_empty); - } - - /* set some constants */ - rb_hash_aset(req, global_server_protocol, global_server_protocol_value); - rb_hash_aset(req, global_url_scheme, global_url_scheme_value); - rb_hash_aset(req, global_script_name, global_empty); -} - - -void Thin_HttpParser_free(void *data) { - TRACE(); - - if(data) { - free(data); - } -} - - -VALUE Thin_HttpParser_alloc(VALUE klass) -{ - VALUE obj; - http_parser *hp = ALLOC_N(http_parser, 1); - TRACE(); - hp->http_field = http_field; - hp->request_method = request_method; - hp->request_uri = request_uri; - hp->fragment = fragment; - hp->request_path = request_path; - hp->query_string = query_string; - hp->http_version = http_version; - hp->header_done = header_done; - thin_http_parser_init(hp); - - obj = Data_Wrap_Struct(klass, NULL, Thin_HttpParser_free, hp); - - return obj; -} - - -/** - * call-seq: - * parser.new -> parser - * - * Creates a new parser. - */ -VALUE Thin_HttpParser_init(VALUE self) -{ - http_parser *http = NULL; - DATA_GET(self, http_parser, http); - thin_http_parser_init(http); - - return self; -} - - -/** - * call-seq: - * parser.reset -> nil - * - * Resets the parser to it's initial state so that you can reuse it - * rather than making new ones. - */ -VALUE Thin_HttpParser_reset(VALUE self) -{ - http_parser *http = NULL; - DATA_GET(self, http_parser, http); - thin_http_parser_init(http); - - return Qnil; -} - - -/** - * call-seq: - * parser.finish -> true/false - * - * Finishes a parser early which could put in a "good" or bad state. - * You should call reset after finish it or bad things will happen. - */ -VALUE Thin_HttpParser_finish(VALUE self) -{ - http_parser *http = NULL; - DATA_GET(self, http_parser, http); - thin_http_parser_finish(http); - - return thin_http_parser_is_finished(http) ? 
Qtrue : Qfalse; -} - - -/** - * call-seq: - * parser.execute(req_hash, data, start) -> Integer - * - * Takes a Hash and a String of data, parses the String of data filling in the Hash - * returning an Integer to indicate how much of the data has been read. No matter - * what the return value, you should call HttpParser#finished? and HttpParser#error? - * to figure out if it's done parsing or there was an error. - * - * This function now throws an exception when there is a parsing error. This makes - * the logic for working with the parser much easier. You can still test for an - * error, but now you need to wrap the parser with an exception handling block. - * - * The third argument allows for parsing a partial request and then continuing - * the parsing from that position. It needs all of the original data as well - * so you have to append to the data buffer as you read. - */ -VALUE Thin_HttpParser_execute(VALUE self, VALUE req_hash, VALUE data, VALUE start) -{ - http_parser *http = NULL; - int from = 0; - char *dptr = NULL; - long dlen = 0; - - DATA_GET(self, http_parser, http); - - from = FIX2INT(start); - dptr = RSTRING_PTR(data); - dlen = RSTRING_LEN(data); - - if(from >= dlen) { - rb_raise(eHttpParserError, "Requested start is after data buffer end."); - } else { - http->data = (void *)req_hash; - thin_http_parser_execute(http, dptr, dlen, from); - - VALIDATE_MAX_LENGTH(http_parser_nread(http), HEADER); - - if(thin_http_parser_has_error(http)) { - rb_raise(eHttpParserError, "Invalid HTTP format, parsing fails."); - } else { - return INT2FIX(http_parser_nread(http)); - } - } -} - - - -/** - * call-seq: - * parser.error? -> true/false - * - * Tells you whether the parser is in an error state. - */ -VALUE Thin_HttpParser_has_error(VALUE self) -{ - http_parser *http = NULL; - DATA_GET(self, http_parser, http); - - return thin_http_parser_has_error(http) ? Qtrue : Qfalse; -} - - -/** - * call-seq: - * parser.finished? -> true/false - * - * Tells you whether the parser is finished or not and in a good state. - */ -VALUE Thin_HttpParser_is_finished(VALUE self) -{ - http_parser *http = NULL; - DATA_GET(self, http_parser, http); - - return thin_http_parser_is_finished(http) ? Qtrue : Qfalse; -} - - -/** - * call-seq: - * parser.nread -> Integer - * - * Returns the amount of data processed so far during this processing cycle. It is - * set to 0 on initialize or reset calls and is incremented each time execute is called. 
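The call-seq comments in this file describe the whole Ruby-facing surface of the parser: execute takes a Hash, a data String, and a start offset, fills the Hash with CGI-style keys (REQUEST_METHOD, QUERY_STRING, the HTTP_* headers) and returns how many bytes were read, while finished? and error? report the parser state. A short usage sketch, assuming the compiled thin_parser extension is on the load path (the Makefile above installs the bundle into the gem's lib/); the request string is only an example:

    require 'thin_parser'   # assumption: the compiled extension from ext/thin_parser is loadable

    env    = {}
    parser = Thin::HttpParser.new
    data   = "GET /index.html?foo=bar HTTP/1.1\r\nHost: example.com\r\n\r\n"

    bytes = parser.execute(env, data, 0)   # Integer: how much of `data` was consumed
    parser.finished?                       # => true once the blank line ending the headers is parsed
    parser.error?                          # => false here; malformed input raises Thin::InvalidRequest
    env['REQUEST_METHOD']                  # => "GET"
    env['QUERY_STRING']                    # => "foo=bar"
    env['HTTP_HOST']                       # => "example.com" (also split into SERVER_NAME / SERVER_PORT)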
- */ -VALUE Thin_HttpParser_nread(VALUE self) -{ - http_parser *http = NULL; - DATA_GET(self, http_parser, http); - - return INT2FIX(http->nread); -} - -void Init_thin_parser() -{ - - mThin = rb_define_module("Thin"); - - DEF_GLOBAL(empty, ""); - DEF_GLOBAL(http_prefix, "HTTP_"); - DEF_GLOBAL(request_method, "REQUEST_METHOD"); - DEF_GLOBAL(request_uri, "REQUEST_URI"); - DEF_GLOBAL(fragment, "FRAGMENT"); - DEF_GLOBAL(query_string, "QUERY_STRING"); - DEF_GLOBAL(http_version, "HTTP_VERSION"); - DEF_GLOBAL(request_path, "REQUEST_PATH"); - DEF_GLOBAL(content_length, "CONTENT_LENGTH"); - DEF_GLOBAL(http_content_length, "HTTP_CONTENT_LENGTH"); - DEF_GLOBAL(content_type, "CONTENT_TYPE"); - DEF_GLOBAL(http_content_type, "HTTP_CONTENT_TYPE"); - DEF_GLOBAL(gateway_interface, "GATEWAY_INTERFACE"); - DEF_GLOBAL(gateway_interface_value, "CGI/1.2"); - DEF_GLOBAL(server_name, "SERVER_NAME"); - DEF_GLOBAL(server_port, "SERVER_PORT"); - DEF_GLOBAL(server_protocol, "SERVER_PROTOCOL"); - DEF_GLOBAL(server_protocol_value, "HTTP/1.1"); - DEF_GLOBAL(http_host, "HTTP_HOST"); - DEF_GLOBAL(port_80, "80"); - DEF_GLOBAL(http_body, "rack.input"); - DEF_GLOBAL(url_scheme, "rack.url_scheme"); - DEF_GLOBAL(url_scheme_value, "http"); - DEF_GLOBAL(script_name, "SCRIPT_NAME"); - DEF_GLOBAL(path_info, "PATH_INFO"); - - eHttpParserError = rb_define_class_under(mThin, "InvalidRequest", rb_eIOError); - - cHttpParser = rb_define_class_under(mThin, "HttpParser", rb_cObject); - rb_define_alloc_func(cHttpParser, Thin_HttpParser_alloc); - rb_define_method(cHttpParser, "initialize", Thin_HttpParser_init,0); - rb_define_method(cHttpParser, "reset", Thin_HttpParser_reset,0); - rb_define_method(cHttpParser, "finish", Thin_HttpParser_finish,0); - rb_define_method(cHttpParser, "execute", Thin_HttpParser_execute,3); - rb_define_method(cHttpParser, "error?", Thin_HttpParser_has_error,0); - rb_define_method(cHttpParser, "finished?", Thin_HttpParser_is_finished,0); - rb_define_method(cHttpParser, "nread", Thin_HttpParser_nread,0); -} diff --git a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/thin.o b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/thin.o deleted file mode 100644 index f411e4b2..00000000 Binary files a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/thin.o and /dev/null differ diff --git a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/thin_parser.bundle b/vendor/gems/gems/thin-1.2.5/ext/thin_parser/thin_parser.bundle deleted file mode 100755 index ca9bb01e..00000000 Binary files a/vendor/gems/gems/thin-1.2.5/ext/thin_parser/thin_parser.bundle and /dev/null differ diff --git a/vendor/gems/gems/thin-1.2.5/lib/rack/adapter/loader.rb b/vendor/gems/gems/thin-1.2.5/lib/rack/adapter/loader.rb deleted file mode 100644 index 44b04a97..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/rack/adapter/loader.rb +++ /dev/null @@ -1,79 +0,0 @@ -module Rack - class AdapterNotFound < RuntimeError; end - - # Mapping used to guess which adapter to use in Adapter.for. - # Framework => in order they will - # be tested. - # +nil+ for value to never guess. - # NOTE: If a framework has a file that is not unique, make sure to place - # it at the end. - ADAPTERS = [ - [:rails, 'config/environment.rb'], - [:ramaze, 'start.rb'], - [:halcyon, 'runner.ru'], - [:merb, 'config/init.rb'], - [:mack, 'config/app_config/default.yml'], - [:mack, 'config/configatron/default.rb'], - [:file, nil] - ] - - module Adapter - # Guess which adapter to use based on the directory structure - # or file content. 
- # Returns a symbol representing the name of the adapter to use - # to load the application under dir/. - def self.guess(dir) - ADAPTERS.each do |adapter, file| - return adapter if file && ::File.exist?(::File.join(dir, file)) - end - raise AdapterNotFound, "No adapter found for #{dir}" - end - - # Loads an adapter identified by +name+ using +options+ hash. - def self.for(name, options={}) - case name.to_sym - when :rails - return Rails.new(options.merge(:root => options[:chdir])) - - when :ramaze - require "#{options[:chdir]}/start" - - Ramaze.trait[:essentials].delete Ramaze::Adapter - Ramaze.start :force => true - - return Ramaze::Adapter::Base - - when :merb - require 'merb-core' - - Merb::Config.setup(:merb_root => options[:chdir], - :environment => options[:environment]) - Merb.environment = Merb::Config[:environment] - Merb.root = Merb::Config[:merb_root] - Merb::BootLoader.run - - return Merb::Rack::Application.new - - when :halcyon - require 'halcyon' - - $:.unshift(Halcyon.root/'lib') - - return Halcyon::Runner.new - - when :mack - ENV["MACK_ENV"] = options[:environment] - load(::File.join(options[:chdir], "Rakefile")) - require 'mack' - return Mack::Utils::Server.build_app - - when :file - return Rack::File.new(options[:chdir]) - - else - raise AdapterNotFound, "Adapter not found: #{name}" - - end - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/rack/adapter/rails.rb b/vendor/gems/gems/thin-1.2.5/lib/rack/adapter/rails.rb deleted file mode 100644 index 34196d84..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/rack/adapter/rails.rb +++ /dev/null @@ -1,180 +0,0 @@ -require 'cgi' - -# Adapter to run a Rails app with any supported Rack handler. -# By default it will try to load the Rails application in the -# current directory in the development environment. -# -# Options: -# root: Root directory of the Rails app -# environment: Rails environment to run in (development [default], production or test) -# prefix: Set the relative URL root. -# -# Based on http://fuzed.rubyforge.org/ Rails adapter -module Rack - module Adapter - class Rails - FILE_METHODS = %w(GET HEAD).freeze - - def initialize(options={}) - @root = options[:root] || Dir.pwd - @env = options[:environment] || 'development' - @prefix = options[:prefix] - - load_application - - @rails_app = if rack_based? - ActionController::Dispatcher.new - else - CgiApp.new - end - - @file_app = Rack::File.new(::File.join(RAILS_ROOT, "public")) - end - - def rack_based? - rails_version = ::Rails::VERSION - rails_version::MAJOR >= 2 && rails_version::MINOR >= 2 && rails_version::TINY >= 3 - end - - def load_application - ENV['RAILS_ENV'] = @env - - require "#{@root}/config/environment" - require 'dispatcher' - - if @prefix - if ActionController::Base.respond_to?(:relative_url_root=) - ActionController::Base.relative_url_root = @prefix # Rails 2.1.1 - else - ActionController::AbstractRequest.relative_url_root = @prefix - end - end - end - - def file_exist?(path) - full_path = ::File.join(@file_app.root, Utils.unescape(path)) - ::File.file?(full_path) && ::File.readable_real?(full_path) - end - - def call(env) - path = env['PATH_INFO'].chomp('/') - method = env['REQUEST_METHOD'] - cached_path = (path.empty? ? 
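Adapter.guess and Adapter.for above are what let the thin CLI boot an application without an explicit rackup file: guess walks the ADAPTERS table against the target directory, and for builds the matching Rack application. A hedged sketch with a hypothetical application root:

    require 'thin'   # pulls in rack/adapter/loader

    root = '/apps/blog'                              # hypothetical
    name = Rack::Adapter.guess(root)                 # => :rails when config/environment.rb exists
    app  = Rack::Adapter.for(name, :chdir => root, :environment => 'production')
    # app now responds to #call(env) like any other Rack application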
'index' : path) + ActionController::Base.page_cache_extension - - if FILE_METHODS.include?(method) - if file_exist?(path) # Serve the file if it's there - return @file_app.call(env) - elsif file_exist?(cached_path) # Serve the page cache if it's there - env['PATH_INFO'] = cached_path - return @file_app.call(env) - end - end - - # No static file, let Rails handle it - @rails_app.call(env) - end - - protected - # For Rails pre Rack (2.3) - class CgiApp - def call(env) - request = Request.new(env) - response = Response.new - session_options = ActionController::CgiRequest::DEFAULT_SESSION_OPTIONS - cgi = CGIWrapper.new(request, response) - - Dispatcher.dispatch(cgi, session_options, response) - - response.finish - end - end - - class CGIWrapper < ::CGI - def initialize(request, response, *args) - @request = request - @response = response - @args = *args - @input = request.body - - super *args - end - - def header(options = "text/html") - if options.is_a?(String) - @response['Content-Type'] = options unless @response['Content-Type'] - else - @response['Content-Length'] = options.delete('Content-Length').to_s if options['Content-Length'] - - @response['Content-Type'] = options.delete('type') || "text/html" - @response['Content-Type'] += "; charset=" + options.delete('charset') if options['charset'] - - @response['Content-Language'] = options.delete('language') if options['language'] - @response['Expires'] = options.delete('expires') if options['expires'] - - @response.status = options.delete('Status') if options['Status'] - - # Convert 'cookie' header to 'Set-Cookie' headers. - # Because Set-Cookie header can appear more the once in the response body, - # we store it in a line break seperated string that will be translated to - # multiple Set-Cookie header by the handler. - if cookie = options.delete('cookie') - cookies = [] - - case cookie - when Array then cookie.each { |c| cookies << c.to_s } - when Hash then cookie.each { |_, c| cookies << c.to_s } - else cookies << cookie.to_s - end - - @output_cookies.each { |c| cookies << c.to_s } if @output_cookies - - @response['Set-Cookie'] = [@response['Set-Cookie'], cookies].compact - # See http://groups.google.com/group/rack-devel/browse_thread/thread/e8759b91a82c5a10/a8dbd4574fe97d69?#a8dbd4574fe97d69 - if Thin.ruby_18? - @response['Set-Cookie'].flatten! - else - @response['Set-Cookie'] = @response['Set-Cookie'].join("\n") - end - end - - options.each { |k,v| @response[k] = v } - end - - "" - end - - def params - @params ||= @request.params - end - - def cookies - @request.cookies - end - - def query_string - @request.query_string - end - - # Used to wrap the normal args variable used inside CGI. - def args - @args - end - - # Used to wrap the normal env_table variable used inside CGI. - def env_table - @request.env - end - - # Used to wrap the normal stdinput variable used inside CGI. - def stdinput - @input - end - - def stdoutput - STDERR.puts "stdoutput should not be used." 
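The GET/HEAD branch of Rails#call above tries a literal static file first, then the page cache, and only then the Rails dispatcher; the cached path is simply the request path plus the page-cache extension. A small worked example, assuming the default ".html" extension:

    path        = '/articles'
    cached_path = (path.empty? ? 'index' : path) + '.html'   # => "/articles.html"
    # 1. serve public/articles if it exists
    # 2. otherwise serve public/articles.html (the page cache)
    # 3. otherwise hand the env to the Rails dispatcher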
- @response.body - end - end - end - end -end diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin.rb b/vendor/gems/gems/thin-1.2.5/lib/thin.rb deleted file mode 100644 index 93c83e37..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin.rb +++ /dev/null @@ -1,46 +0,0 @@ -require 'fileutils' -require 'timeout' -require 'stringio' -require 'time' -require 'forwardable' - -require 'openssl' -require 'eventmachine' - -require 'thin/version' -require 'thin/statuses' - -module Thin - autoload :Command, 'thin/command' - autoload :Connection, 'thin/connection' - autoload :Daemonizable, 'thin/daemonizing' - autoload :Logging, 'thin/logging' - autoload :Headers, 'thin/headers' - autoload :Request, 'thin/request' - autoload :Response, 'thin/response' - autoload :Runner, 'thin/runner' - autoload :Server, 'thin/server' - autoload :Stats, 'thin/stats' - - module Backends - autoload :Base, 'thin/backends/base' - autoload :SwiftiplyClient, 'thin/backends/swiftiply_client' - autoload :TcpServer, 'thin/backends/tcp_server' - autoload :UnixServer, 'thin/backends/unix_server' - end - - module Controllers - autoload :Cluster, 'thin/controllers/cluster' - autoload :Controller, 'thin/controllers/controller' - autoload :Service, 'thin/controllers/service' - end -end - -require 'rack' -require 'rack/adapter/loader' - -module Rack - module Adapter - autoload :Rails, 'rack/adapter/rails' - end -end diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/backends/base.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/backends/base.rb deleted file mode 100644 index ee7f3e46..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/backends/base.rb +++ /dev/null @@ -1,141 +0,0 @@ -module Thin - module Backends - # A Backend connects the server to the client. It handles: - # * connection/disconnection to the server - # * initialization of the connections - # * manitoring of the active connections. - # - # == Implementing your own backend - # You can create your own minimal backend by inheriting this class and - # defining the +connect+ and +disconnect+ method. - # If your backend is not based on EventMachine you also need to redefine - # the +start+, +stop+, stop! and +config+ methods. - class Base - # Server serving the connections throught the backend - attr_accessor :server - - # Maximum time for incoming data to arrive - attr_accessor :timeout - - # Maximum number of file or socket descriptors that the server may open. - attr_accessor :maximum_connections - - # Maximum number of connections that can be persistent - attr_accessor :maximum_persistent_connections - - # Allow using threads in the backend. - attr_writer :threaded - def threaded?; @threaded end - - # Number of persistent connections currently opened - attr_accessor :persistent_connection_count - - # Disable the use of epoll under Linux - attr_accessor :no_epoll - - def initialize - @connections = [] - @timeout = Server::DEFAULT_TIMEOUT - @persistent_connection_count = 0 - @maximum_connections = Server::DEFAULT_MAXIMUM_CONNECTIONS - @maximum_persistent_connections = Server::DEFAULT_MAXIMUM_PERSISTENT_CONNECTIONS - @no_epoll = false - end - - # Start the backend and connect it. - def start - @stopping = false - starter = proc do - connect - @running = true - end - - # Allow for early run up of eventmachine. - if EventMachine.reactor_running? - starter.call - else - EventMachine.run(&starter) - end - end - - # Stop of the backend from accepting new connections. - def stop - @running = false - @stopping = true - - # Do not accept anymore connection - disconnect - stop! 
if @connections.empty? - end - - # Force stop of the backend NOW, too bad for the current connections. - def stop! - @running = false - @stopping = false - - EventMachine.stop if EventMachine.reactor_running? - @connections.each { |connection| connection.close_connection } - close - end - - # Configure the backend. This method will be called before droping superuser privileges, - # so you can do crazy stuff that require godlike powers here. - def config - # See http://rubyeventmachine.com/pub/rdoc/files/EPOLL.html - EventMachine.epoll unless @no_epoll - - # Set the maximum number of socket descriptors that the server may open. - # The process needs to have required privilege to set it higher the 1024 on - # some systems. - @maximum_connections = EventMachine.set_descriptor_table_size(@maximum_connections) unless Thin.win? - end - - # Free up resources used by the backend. - def close - end - - # Returns +true+ if the backend is connected and running. - def running? - @running - end - - # Called by a connection when it's unbinded. - def connection_finished(connection) - @persistent_connection_count -= 1 if connection.can_persist? - @connections.delete(connection) - - # Finalize gracefull stop if there's no more active connection. - stop! if @stopping && @connections.empty? - end - - # Returns +true+ if no active connection. - def empty? - @connections.empty? - end - - # Number of active connections. - def size - @connections.size - end - - protected - # Initialize a new connection to a client. - def initialize_connection(connection) - connection.backend = self - connection.app = @server.app - connection.comm_inactivity_timeout = @timeout - connection.threaded = @threaded - - # We control the number of persistent connections by keeping - # a count of the total one allowed yet. - if @persistent_connection_count < @maximum_persistent_connections - connection.can_persist! - @persistent_connection_count += 1 - end - - @connections << connection - end - - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/backends/swiftiply_client.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/backends/swiftiply_client.rb deleted file mode 100644 index 506a84c3..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/backends/swiftiply_client.rb +++ /dev/null @@ -1,56 +0,0 @@ -module Thin - module Backends - # Backend to act as a Swiftiply client (http://swiftiply.swiftcore.org). - class SwiftiplyClient < Base - attr_accessor :key - - attr_accessor :host, :port - - def initialize(host, port, options={}) - @host = host - @port = port.to_i - @key = options[:swiftiply].to_s - super() - end - - # Connect the server - def connect - EventMachine.connect(@host, @port, SwiftiplyConnection, &method(:initialize_connection)) - end - - # Stops the server - def disconnect - EventMachine.stop - end - - def to_s - "#{@host}:#{@port} swiftiply" - end - end - end - - class SwiftiplyConnection < Connection - def connection_completed - send_data swiftiply_handshake(@backend.key) - end - - def persistent? - true - end - - def unbind - super - EventMachine.add_timer(rand(2)) { reconnect(@backend.host, @backend.port) } if @backend.running? 
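As the Backends::Base comment above notes, an EventMachine-based backend only has to supply connect and disconnect; Base takes care of starting the reactor, counting connections and enforcing the persistence limits. A minimal hedged sketch (class name and port are made up):

    require 'thin'

    class LoopbackBackend < Thin::Backends::Base
      # Accept connections on the loopback interface only.
      def connect
        @signature = EventMachine.start_server('127.0.0.1', 8080,
                                               Thin::Connection,
                                               &method(:initialize_connection))
      end

      def disconnect
        EventMachine.stop_server(@signature)
      end
    end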
- end - - protected - def swiftiply_handshake(key) - 'swiftclient' << host_ip.collect { |x| sprintf('%02x', x.to_i)}.join << sprintf('%04x', @backend.port) << sprintf('%02x', key.length) << key - end - - # For some reason Swiftiply request the current host - def host_ip - Socket.gethostbyname(@backend.host)[3].unpack('CCCC') rescue [0,0,0,0] - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/backends/tcp_server.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/backends/tcp_server.rb deleted file mode 100644 index 0f2b38ee..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/backends/tcp_server.rb +++ /dev/null @@ -1,29 +0,0 @@ -module Thin - module Backends - # Backend to act as a TCP socket server. - class TcpServer < Base - # Address and port on which the server is listening for connections. - attr_accessor :host, :port - - def initialize(host, port) - @host = host - @port = port - super() - end - - # Connect the server - def connect - @signature = EventMachine.start_server(@host, @port, Connection, &method(:initialize_connection)) - end - - # Stops the server - def disconnect - EventMachine.stop_server(@signature) - end - - def to_s - "#{@host}:#{@port}" - end - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/backends/unix_server.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/backends/unix_server.rb deleted file mode 100644 index bf81b875..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/backends/unix_server.rb +++ /dev/null @@ -1,51 +0,0 @@ -module Thin - module Backends - # Backend to act as a UNIX domain socket server. - class UnixServer < Base - # UNIX domain socket on which the server is listening for connections. - attr_accessor :socket - - def initialize(socket) - raise PlatformNotSupported, 'UNIX domain sockets not available on Windows' if Thin.win? - @socket = socket - super() - end - - # Connect the server - def connect - at_exit { remove_socket_file } # In case it crashes - EventMachine.start_unix_domain_server(@socket, UnixConnection, &method(:initialize_connection)) - # HACK EventMachine.start_unix_domain_server doesn't return the connection signature - # so we have to go in the internal stuff to find it. - @signature = EventMachine.instance_eval{@acceptors.keys.first} - end - - # Stops the server - def disconnect - EventMachine.stop_server(@signature) - end - - # Free up resources used by the backend. - def close - remove_socket_file - end - - def to_s - @socket - end - - protected - def remove_socket_file - File.delete(@socket) if @socket && File.exist?(@socket) - end - end - end - - # Connection through a UNIX domain socket. - class UnixConnection < Connection - protected - def socket_address - '127.0.0.1' # Unix domain sockets can only be local - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/command.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/command.rb deleted file mode 100644 index 798b1fca..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/command.rb +++ /dev/null @@ -1,53 +0,0 @@ -require 'open3' - -module Thin - # Run a command through the +thin+ command-line script. - class Command - include Logging - - class << self - # Path to the +thin+ script used to control the servers. - # Leave this to default to use the one in the path. 
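swiftiply_handshake above packs the client's identity into a fixed layout: the literal "swiftclient", the host IP as four hex octets, the backend port as four hex digits, the key length as two hex digits, then the key itself. Worked through with made-up values:

    key = 'secret'
    ip  = [192, 168, 0, 10]
    'swiftclient' <<
      ip.collect { |x| sprintf('%02x', x) }.join <<   # "c0a8000a"
      sprintf('%04x', 3000)                      <<   # "0bb8"
      sprintf('%02x', key.length)                <<   # "06"
      key
    # => "swiftclientc0a8000a0bb806secret"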
- attr_accessor :script - end - - def initialize(name, options={}) - @name = name - @options = options - end - - def self.run(*args) - new(*args).run - end - - # Send the command to the +thin+ script - def run - shell_cmd = shellify - trace shell_cmd - trap('INT') {} # Ignore INT signal to pass CTRL+C to subprocess - Open3.popen3(shell_cmd) do |stdin, stdout, stderr| - log stdout.gets until stdout.eof? - log stderr.gets until stderr.eof? - end - end - - # Turn into a runnable shell command - def shellify - shellified_options = @options.inject([]) do |args, (name, value)| - option_name = name.to_s.tr("_", "-") - case value - when NilClass, - TrueClass then args << "--#{option_name}" - when FalseClass - when Array then value.each { |v| args << "--#{option_name}=#{v.inspect}" } - else args << "--#{option_name}=#{value.inspect}" - end - args - end - - raise ArgumentError, "Path to thin script can't be found, set Command.script" unless self.class.script - - "#{self.class.script} #{@name} #{shellified_options.compact.join(' ')}" - end - end -end diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/connection.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/connection.rb deleted file mode 100644 index 7ead989a..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/connection.rb +++ /dev/null @@ -1,222 +0,0 @@ -require 'socket' - -module Thin - # Connection between the server and client. - # This class is instanciated by EventMachine on each new connection - # that is opened. - class Connection < EventMachine::Connection - CONTENT_LENGTH = 'Content-Length'.freeze - TRANSFER_ENCODING = 'Transfer-Encoding'.freeze - CHUNKED_REGEXP = /\bchunked\b/i.freeze - - include Logging - - # This is a template async response. N.B. Can't use string for body on 1.9 - AsyncResponse = [-1, {}, []].freeze - - # Rack application (adapter) served by this connection. - attr_accessor :app - - # Backend to the server - attr_accessor :backend - - # Current request served by the connection - attr_accessor :request - - # Next response sent through the connection - attr_accessor :response - - # Calling the application in a threaded allowing - # concurrent processing of requests. - attr_writer :threaded - - # Get the connection ready to process a request. - def post_init - @request = Request.new - @response = Response.new - end - - # Called when data is received from the client. - def receive_data(data) - trace { data } - process if @request.parse(data) - rescue InvalidRequest => e - log "!! Invalid request" - log_error e - close_connection - end - - # Called when all data was received and the request - # is ready to be processed. - def process - if threaded? - @request.threaded = true - EventMachine.defer(method(:pre_process), method(:post_process)) - else - @request.threaded = false - post_process(pre_process) - end - end - - def pre_process - # Add client info to the request env - @request.remote_address = remote_address - - # Connection may be closed unless the App#call response was a [-1, ...] - # It should be noted that connection objects will linger until this - # callback is no longer referenced, so be tidy! - @request.async_callback = method(:post_process) - - # When we're under a non-async framework like rails, we can still spawn - # off async responses using the callback info, so there's little point - # in removing this. 
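shellify above is how every daemonized or clustered instance gets spawned: the options hash is serialised back into thin's own flags, with true/nil becoming bare switches, arrays repeating the flag, and everything else passed through inspect. An illustration with hypothetical options:

    require 'thin'

    Thin::Command.script = '/usr/local/bin/thin'   # normally set by the runner
    cmd = Thin::Command.new(:start, :port => 5000, :daemonize => true,
                                    :log  => 'log/thin.5000.log')
    cmd.shellify
    # => something like: /usr/local/bin/thin start --port=5000 --daemonize --log="log/thin.5000.log"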
- response = AsyncResponse - catch(:async) do - # Process the request calling the Rack adapter - response = @app.call(@request.env) - end - response - rescue Exception - handle_error - terminate_request - nil # Signal to post_process that the request could not be processed - end - - def post_process(result) - return unless result - result = result.to_a - - # Status code -1 indicates that we're going to respond later (async). - return if result.first == AsyncResponse.first - - # Set the Content-Length header if possible - set_content_length(result) if need_content_length?(result) - - @response.status, @response.headers, @response.body = *result - - log "!! Rack application returned nil body. Probably you wanted it to be an empty string?" if @response.body.nil? - - # Make the response persistent if requested by the client - @response.persistent! if @request.persistent? - - # Send the response - @response.each do |chunk| - trace { chunk } - send_data chunk - end - - rescue Exception - handle_error - ensure - # If the body is being deferred, then terminate afterward. - if @response.body.respond_to?(:callback) && @response.body.respond_to?(:errback) - @response.body.callback { terminate_request } - @response.body.errback { terminate_request } - else - # Don't terminate the response if we're going async. - terminate_request unless result && result.first == AsyncResponse.first - end - end - - # Logs catched exception and closes the connection. - def handle_error - log "!! Unexpected error while processing request: #{$!.message}" - log_error - close_connection rescue nil - end - - def close_request_response - @request.async_close.succeed if @request.async_close - @request.close rescue nil - @response.close rescue nil - end - - # Does request and response cleanup (closes open IO streams and - # deletes created temporary files). - # Re-initializes response and request if client supports persistent - # connection. - def terminate_request - unless persistent? - close_connection_after_writing rescue nil - close_request_response - else - close_request_response - # Prepare the connection for another request if the client - # supports HTTP pipelining (persistent connection). - post_init - end - end - - # Called when the connection is unbinded from the socket - # and can no longer be used to process requests. - def unbind - @request.async_close.succeed if @request.async_close - @response.body.fail if @response.body.respond_to?(:fail) - @backend.connection_finished(self) - end - - # Allows this connection to be persistent. - def can_persist! - @can_persist = true - end - - # Return +true+ if this connection is allowed to stay open and be persistent. - def can_persist? - @can_persist - end - - # Return +true+ if the connection must be left open - # and ready to be reused for another request. - def persistent? - @can_persist && @response.persistent? - end - - # +true+ if app.call will be called inside a thread. - # You can set all requests as threaded setting Connection#threaded=true - # or on a per-request case returning +true+ in app.deferred?. - def threaded? - @threaded || (@app.respond_to?(:deferred?) && @app.deferred?(@request.env)) - end - - # IP Address of the remote client. - def remote_address - socket_address - rescue Exception - log_error - nil - end - - protected - - # Returns IP address of peer as a string. 
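pre_process and post_process above also implement thin's async extension: an application may return the AsyncResponse sentinel (status -1) to keep the connection open and later push the real response through env['async.callback'], which is wired to post_process. A minimal illustrative app using that contract:

    require 'eventmachine'

    class DeferredHello
      def call(env)
        # Answer one second later, from thin's EventMachine reactor.
        EventMachine.add_timer(1) do
          env['async.callback'].call(
            [200, { 'Content-Type' => 'text/plain' }, ['hello, later']])
        end
        [-1, {}, []]   # AsyncResponse: "I will respond via the callback"
      end
    end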
- def socket_address - Socket.unpack_sockaddr_in(get_peername)[1] - end - - private - def need_content_length?(result) - status, headers, body = result - return false if status == -1 - return false if headers.has_key?(CONTENT_LENGTH) - return false if (100..199).include?(status) || status == 204 || status == 304 - return false if headers.has_key?(TRANSFER_ENCODING) && headers[TRANSFER_ENCODING] =~ CHUNKED_REGEXP - return false unless body.kind_of?(String) || body.kind_of?(Array) - true - end - - def set_content_length(result) - headers, body = result[1..2] - case body - when String - # See http://redmine.ruby-lang.org/issues/show/203 - headers[CONTENT_LENGTH] = (body.respond_to?(:bytesize) ? body.bytesize : body.size).to_s - when Array - bytes = 0 - body.each do |p| - bytes += p.respond_to?(:bytesize) ? p.bytesize : p.size - end - headers[CONTENT_LENGTH] = bytes.to_s - end - end - end -end diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/cluster.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/cluster.rb deleted file mode 100644 index f8749f04..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/cluster.rb +++ /dev/null @@ -1,178 +0,0 @@ -require 'socket' - -module Thin - # An exception class to handle the event that server didn't start on time - class RestartTimeout < RuntimeError; end - - module Controllers - # Control a set of servers. - # * Generate start and stop commands and run them. - # * Inject the port or socket number in the pid and log filenames. - # Servers are started throught the +thin+ command-line script. - class Cluster < Controller - # Cluster only options that should not be passed in the command sent - # to the indiviual servers. - CLUSTER_OPTIONS = [:servers, :only, :onebyone, :wait] - - # Maximum wait time for the server to be restarted - DEFAULT_WAIT_TIME = 30 # seconds - - # Create a new cluster of servers launched using +options+. - def initialize(options) - super - # Cluster can only contain daemonized servers - @options.merge!(:daemonize => true) - end - - def first_port; @options[:port] end - def address; @options[:address] end - def socket; @options[:socket] end - def pid_file; @options[:pid] end - def log_file; @options[:log] end - def size; @options[:servers] end - def only; @options[:only] end - def onebyone; @options[:onebyone] end - def wait; @options[:wait] end - - def swiftiply? - @options.has_key?(:swiftiply) - end - - # Start the servers - def start - with_each_server { |n| start_server n } - end - - # Start a single server - def start_server(number) - log "Starting server on #{server_id(number)} ... " - - run :start, number - end - - # Stop the servers - def stop - with_each_server { |n| stop_server n } - end - - # Stop a single server - def stop_server(number) - log "Stopping server on #{server_id(number)} ... " - - run :stop, number - end - - # Stop and start the servers. - def restart - unless onebyone - # Let's do a normal restart by defaults - stop - sleep 0.1 # Let's breath a bit shall we ? - start - else - with_each_server do |n| - stop_server(n) - sleep 0.1 # Let's breath a bit shall we ? - start_server(n) - wait_until_server_started(n) - end - end - end - - def test_socket(number) - if socket - UNIXSocket.new(socket_for(number)) - else - TCPSocket.new(address, number) - end - rescue - nil - end - - # Make sure the server is running before moving on to the next one. - def wait_until_server_started(number) - log "Waiting for server to start ..." 
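set_content_length above sums bytes rather than characters so that multi-byte bodies get a correct Content-Length on Ruby 1.9. The same arithmetic by hand:

    body  = ['héllo', ' ', 'world']   # "é" is two bytes in UTF-8
    bytes = body.inject(0) do |sum, part|
      sum + (part.respond_to?(:bytesize) ? part.bytesize : part.size)
    end
    bytes   # => 12, so the response gets "Content-Length: 12"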
- STDOUT.flush # Need this to make sure user got the message - - tries = 0 - loop do - if test_socket = test_socket(number) - test_socket.close - break - elsif tries < wait - sleep 1 - tries += 1 - else - raise RestartTimeout, "The server didn't start in time. Please look at server's log file " + - "for more information, or set the value of 'wait' in your config " + - "file to be higher (defaults: 30)." - end - end - end - - def server_id(number) - if socket - socket_for(number) - elsif swiftiply? - [address, first_port, number].join(':') - else - [address, number].join(':') - end - end - - def log_file_for(number) - include_server_number log_file, number - end - - def pid_file_for(number) - include_server_number pid_file, number - end - - def socket_for(number) - include_server_number socket, number - end - - def pid_for(number) - File.read(pid_file_for(number)).chomp.to_i - end - - private - # Send the command to the +thin+ script - def run(cmd, number) - cmd_options = @options.reject { |option, value| CLUSTER_OPTIONS.include?(option) } - cmd_options.merge!(:pid => pid_file_for(number), :log => log_file_for(number)) - if socket - cmd_options.merge!(:socket => socket_for(number)) - elsif swiftiply? - cmd_options.merge!(:port => first_port) - else - cmd_options.merge!(:port => number) - end - Command.run(cmd, cmd_options) - end - - def with_each_server - if only - if first_port && only < 80 - # interpret +only+ as a sequence number - yield first_port + only - else - # interpret +only+ as an absolute port number - yield only - end - elsif socket || swiftiply? - size.times { |n| yield n } - else - size.times { |n| yield first_port + n } - end - end - - # Add the server port or number in the filename - # so each instance get its own file - def include_server_number(path, number) - ext = File.extname(path) - path.gsub(/#{ext}$/, ".#{number}#{ext}") - end - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/controller.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/controller.rb deleted file mode 100644 index 6cece408..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/controller.rb +++ /dev/null @@ -1,183 +0,0 @@ -require 'yaml' - -module Thin - # Error raised that will abort the process and print not backtrace. - class RunnerError < RuntimeError; end - - # Raised when a mandatory option is missing to run a command. - class OptionRequired < RunnerError - def initialize(option) - super("#{option} option required") - end - end - - # Raised when an option is not valid. - class InvalidOption < RunnerError; end - - # Build and control Thin servers. - # Hey Controller pattern is not only for web apps yo! - module Controllers - # Controls one Thin server. - # Allow to start, stop, restart and configure a single thin server. 
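include_server_number above is what keeps clustered servers from clobbering each other's files: the port (or sequence number) is spliced into the shared pid/log/socket names. For example:

    def include_server_number(path, number)   # as defined in the hunk above
      ext = File.extname(path)
      path.gsub(/#{ext}$/, ".#{number}#{ext}")
    end

    include_server_number('log/thin.log', 5000)        # => "log/thin.5000.log"
    include_server_number('tmp/pids/thin.pid', 5001)   # => "tmp/pids/thin.5001.pid"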
- class Controller - include Logging - - # Command line options passed to the thin script - attr_accessor :options - - def initialize(options) - @options = options - - if @options[:socket] - @options.delete(:address) - @options.delete(:port) - end - end - - def start - # Constantize backend class - @options[:backend] = eval(@options[:backend], TOPLEVEL_BINDING) if @options[:backend] - - server = Server.new(@options[:socket] || @options[:address], # Server detects kind of socket - @options[:port], # Port ignored on UNIX socket - @options) - - # Set options - server.pid_file = @options[:pid] - server.log_file = @options[:log] - server.timeout = @options[:timeout] - server.maximum_connections = @options[:max_conns] - server.maximum_persistent_connections = @options[:max_persistent_conns] - server.threaded = @options[:threaded] - server.no_epoll = @options[:no_epoll] if server.backend.respond_to?(:no_epoll=) - - # Detach the process, after this line the current process returns - server.daemonize if @options[:daemonize] - - # +config+ must be called before changing privileges since it might require superuser power. - server.config - - server.change_privilege @options[:user], @options[:group] if @options[:user] && @options[:group] - - # If a Rack config file is specified we eval it inside a Rack::Builder block to create - # a Rack adapter from it. Or else we guess which adapter to use and load it. - if @options[:rackup] - server.app = load_rackup_config - else - server.app = load_adapter - end - - # If a prefix is required, wrap in Rack URL mapper - server.app = Rack::URLMap.new(@options[:prefix] => server.app) if @options[:prefix] - - # If a stats URL is specified, wrap in Stats adapter - server.app = Stats::Adapter.new(server.app, @options[:stats]) if @options[:stats] - - # Register restart procedure which just start another process with same options, - # so that's why this is done here. - server.on_restart { Command.run(:start, @options) } - - server.start - end - - def stop - raise OptionRequired, :pid unless @options[:pid] - - tail_log(@options[:log]) do - if Server.kill(@options[:pid], @options[:force] ? 0 : (@options[:timeout] || 60)) - wait_for_file :deletion, @options[:pid] - end - end - end - - def restart - raise OptionRequired, :pid unless @options[:pid] - - tail_log(@options[:log]) do - if Server.restart(@options[:pid]) - wait_for_file :creation, @options[:pid] - end - end - end - - def config - config_file = @options.delete(:config) || raise(OptionRequired, :config) - - # Stringify keys - @options.keys.each { |o| @options[o.to_s] = @options.delete(o) } - - File.open(config_file, 'w') { |f| f << @options.to_yaml } - log ">> Wrote configuration to #{config_file}" - end - - protected - # Wait for a pid file to either be created or deleted. - def wait_for_file(state, file) - Timeout.timeout(@options[:timeout] || 30) do - case state - when :creation then sleep 0.1 until File.exist?(file) - when :deletion then sleep 0.1 while File.exist?(file) - end - end - end - - # Tail the log file of server +number+ during the execution of the block. - def tail_log(log_file) - if log_file - tail_thread = tail(log_file) - yield - tail_thread.kill - else - yield - end - end - - # Acts like GNU tail command. Taken from Rails. - def tail(file) - cursor = File.exist?(file) ? 
File.size(file) : 0 - last_checked = Time.now - tail_thread = Thread.new do - Thread.pass until File.exist?(file) - File.open(file, 'r') do |f| - loop do - f.seek cursor - if f.mtime > last_checked - last_checked = f.mtime - contents = f.read - cursor += contents.length - print contents - STDOUT.flush - end - sleep 0.1 - end - end - end - sleep 1 if File.exist?(file) # HACK Give the thread a little time to open the file - tail_thread - end - - private - def load_adapter - adapter = @options[:adapter] || Rack::Adapter.guess(@options[:chdir]) - log ">> Using #{adapter} adapter" - Rack::Adapter.for(adapter, @options) - rescue Rack::AdapterNotFound => e - raise InvalidOption, e.message - end - - def load_rackup_config - ENV['RACK_ENV'] = @options[:environment] - case @options[:rackup] - when /\.rb$/ - Kernel.load(@options[:rackup]) - Object.const_get(File.basename(@options[:rackup], '.rb').capitalize.to_sym) - when /\.ru$/ - rackup_code = File.read(@options[:rackup]) - eval("Rack::Builder.new {( #{rackup_code}\n )}.to_app", TOPLEVEL_BINDING, @options[:rackup]) - else - raise "Invalid rackup file. please specify either a .ru or .rb file" - end - end - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/service.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/service.rb deleted file mode 100644 index 540e6595..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/service.rb +++ /dev/null @@ -1,75 +0,0 @@ -require 'erb' - -module Thin - module Controllers - # System service controller to launch all servers which - # config files are in a directory. - class Service < Controller - INITD_PATH = '/etc/init.d/thin' - DEFAULT_CONFIG_PATH = '/etc/thin' - TEMPLATE = File.dirname(__FILE__) + '/service.sh.erb' - - def initialize(options) - super - - raise PlatformNotSupported, 'Running as a service only supported on Linux' unless Thin.linux? - end - - def config_path - @options[:all] || DEFAULT_CONFIG_PATH - end - - def start - run :start - end - - def stop - run :stop - end - - def restart - run :restart - end - - def install(config_files_path=DEFAULT_CONFIG_PATH) - if File.exist?(INITD_PATH) - log ">> Thin service already installed at #{INITD_PATH}" - else - log ">> Installing thin service at #{INITD_PATH} ..." - sh "mkdir -p #{File.dirname(INITD_PATH)}" - log "writing #{INITD_PATH}" - File.open(INITD_PATH, 'w') do |f| - f << ERB.new(File.read(TEMPLATE)).result(binding) - end - sh "chmod +x #{INITD_PATH}" # Make executable - end - - sh "mkdir -p #{config_files_path}" - - log '' - log "To configure thin to start at system boot:" - log "on RedHat like systems:" - log " sudo /sbin/chkconfig --level 345 #{NAME} on" - log "on Debian-like systems (Ubuntu):" - log " sudo /usr/sbin/update-rc.d -f #{NAME} defaults" - log "on Gentoo:" - log " sudo rc-update add #{NAME} default" - log '' - log "Then put your config files in #{config_files_path}" - end - - private - def run(command) - Dir[config_path + '/*'].each do |config| - log "[#{command}] #{config} ..." 
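load_rackup_config above accepts either a plain .rb file or a .ru file, the latter being evaluated inside Rack::Builder, which is why an ordinary rackup file works unchanged via `thin start -R config.ru`. A minimal, purely illustrative config.ru:

    # config.ru
    use Rack::CommonLogger
    run lambda { |env| [200, { 'Content-Type' => 'text/plain' }, ['ok']] }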
- Command.run(command, :config => config, :daemonize => true) - end - end - - def sh(cmd) - log cmd - system(cmd) - end - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/service.sh.erb b/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/service.sh.erb deleted file mode 100644 index 5b96548c..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/controllers/service.sh.erb +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/sh -### BEGIN INIT INFO -# Provides: thin -# Required-Start: $local_fs $remote_fs -# Required-Stop: $local_fs $remote_fs -# Default-Start: 2 3 4 5 -# Default-Stop: S 0 1 6 -# Short-Description: thin initscript -# Description: thin -### END INIT INFO - -# Original author: Forrest Robertson - -# Do NOT "set -e" - -DAEMON=<%= Command.script %> -SCRIPT_NAME=<%= INITD_PATH %> -CONFIG_PATH=<%= config_files_path %> - -# Exit if the package is not installed -[ -x "$DAEMON" ] || exit 0 - -case "$1" in - start) - $DAEMON start --all $CONFIG_PATH - ;; - stop) - $DAEMON stop --all $CONFIG_PATH - ;; - restart) - $DAEMON restart --all $CONFIG_PATH - ;; - *) - echo "Usage: $SCRIPT_NAME {start|stop|restart}" >&2 - exit 3 - ;; -esac - -: diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/daemonizing.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/daemonizing.rb deleted file mode 100644 index 2f127278..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/daemonizing.rb +++ /dev/null @@ -1,174 +0,0 @@ -require 'etc' -require 'daemons' unless Thin.win? - -module Process - # Returns +true+ the process identied by +pid+ is running. - def running?(pid) - Process.getpgid(pid) != -1 - rescue Errno::ESRCH - false - end - module_function :running? -end - -module Thin - # Raised when the pid file already exist starting as a daemon. - class PidFileExist < RuntimeError; end - - # Module included in classes that can be turned into a daemon. - # Handle stuff like: - # * storing the PID in a file - # * redirecting output to the log file - # * changing processs privileges - # * killing the process gracefully - module Daemonizable - attr_accessor :pid_file, :log_file - - def self.included(base) - base.extend ClassMethods - end - - def pid - File.exist?(pid_file) ? open(pid_file).read.to_i : nil - end - - # Turns the current script into a daemon process that detaches from the console. - def daemonize - raise PlatformNotSupported, 'Daemonizing is not supported on Windows' if Thin.win? - raise ArgumentError, 'You must specify a pid_file to daemonize' unless @pid_file - - remove_stale_pid_file - - pwd = Dir.pwd # Current directory is changed during daemonization, so store it - - Daemonize.daemonize(File.expand_path(@log_file), name) - - Dir.chdir(pwd) - - write_pid_file - - trap('HUP') { restart } - at_exit do - log ">> Exiting!" - remove_pid_file - end - end - - # Change privileges of the process - # to the specified user and group. - def change_privilege(user, group=user) - log ">> Changing process privilege to #{user}:#{group}" - - uid, gid = Process.euid, Process.egid - target_uid = Etc.getpwnam(user).uid - target_gid = Etc.getgrnam(group).gid - - if uid != target_uid || gid != target_gid - # Change process ownership - Process.initgroups(user, target_gid) - Process::GID.change_privilege(target_gid) - Process::UID.change_privilege(target_uid) - end - rescue Errno::EPERM => e - log "Couldn't change user and group to #{user}:#{group}: #{e}" - end - - # Register a proc to be called to restart the server. 
- def on_restart(&block) - @on_restart = block - end - - # Restart the server. - def restart - raise ArgumentError, "Can't restart, no 'on_restart' proc specified" unless @on_restart - log '>> Restarting ...' - stop - remove_pid_file - @on_restart.call - exit! - end - - module ClassMethods - # Send a QUIT or INT (if timeout is +0+) signal the process which - # PID is stored in +pid_file+. - # If the process is still running after +timeout+, KILL signal is - # sent. - def kill(pid_file, timeout=60) - if timeout == 0 - send_signal('INT', pid_file, timeout) - else - send_signal('QUIT', pid_file, timeout) - end - end - - # Restart the server by sending HUP signal. - def restart(pid_file) - send_signal('HUP', pid_file) - end - - # Send a +signal+ to the process which PID is stored in +pid_file+. - def send_signal(signal, pid_file, timeout=60) - if pid = read_pid_file(pid_file) - Logging.log "Sending #{signal} signal to process #{pid} ... " - Process.kill(signal, pid) - Timeout.timeout(timeout) do - sleep 0.1 while Process.running?(pid) - end - else - Logging.log "Can't stop process, no PID found in #{pid_file}" - end - rescue Timeout::Error - Logging.log "Timeout!" - force_kill pid_file - rescue Interrupt - force_kill pid_file - rescue Errno::ESRCH # No such process - Logging.log "process not found!" - force_kill pid_file - end - - def force_kill(pid_file) - if pid = read_pid_file(pid_file) - Logging.log "Sending KILL signal to process #{pid} ... " - Process.kill("KILL", pid) - File.delete(pid_file) if File.exist?(pid_file) - else - Logging.log "Can't stop process, no PID found in #{pid_file}" - end - end - - def read_pid_file(file) - if File.file?(file) && pid = File.read(file) - pid.to_i - else - nil - end - end - end - - protected - def remove_pid_file - File.delete(@pid_file) if @pid_file && File.exists?(@pid_file) - end - - def write_pid_file - log ">> Writing PID to #{@pid_file}" - FileUtils.mkdir_p File.dirname(@pid_file) - open(@pid_file,"w") { |f| f.write(Process.pid) } - File.chmod(0644, @pid_file) - end - - # If PID file is stale, remove it. - def remove_stale_pid_file - if File.exist?(@pid_file) - if pid && Process.running?(pid) - raise PidFileExist, "#{@pid_file} already exists, seems like it's already running (process ID: #{pid}). " + - "Stop the process or delete #{@pid_file}." - else - log ">> Deleting stale PID file #{@pid_file}" - remove_pid_file - end - end - end - end -end diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/headers.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/headers.rb deleted file mode 100644 index 3e1b9cc3..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/headers.rb +++ /dev/null @@ -1,39 +0,0 @@ -module Thin - # Store HTTP header name-value pairs direcly to a string - # and allow duplicated entries on some names. - class Headers - HEADER_FORMAT = "%s: %s\r\n".freeze - ALLOWED_DUPLICATES = %w(Set-Cookie Set-Cookie2 Warning WWW-Authenticate).freeze - - def initialize - @sent = {} - @out = [] - end - - # Add key: value pair to the headers. - # Ignore if already sent and no duplicates are allowed - # for this +key+. 
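The ClassMethods above give the CLI its stop/restart semantics: QUIT requests a graceful stop, INT (timeout 0) forces one, HUP restarts via the on_restart proc, and KILL is the fallback once the timeout expires. Because Thin::Server includes Daemonizable, that surfaces as (paths hypothetical):

    require 'thin'

    Thin::Server.kill('tmp/pids/thin.pid', 60)    # QUIT, escalating to KILL after 60 s
    Thin::Server.kill('tmp/pids/thin.pid', 0)     # INT: force an immediate stop
    Thin::Server.restart('tmp/pids/thin.pid')     # HUP, handled by the daemon's trap('HUP')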
- def []=(key, value) - if !@sent.has_key?(key) || ALLOWED_DUPLICATES.include?(key) - @sent[key] = true - value = case value - when Time - value.httpdate - when NilClass - return - else - value.to_s - end - @out << HEADER_FORMAT % [key, value] - end - end - - def has_key?(key) - @sent[key] - end - - def to_s - @out.join - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/logging.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/logging.rb deleted file mode 100644 index 2a998274..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/logging.rb +++ /dev/null @@ -1,54 +0,0 @@ -module Thin - # To be included in classes to allow some basic logging - # that can be silenced (Logging.silent=) or made - # more verbose. - # Logging.debug=: log all error backtrace and messages - # logged with +debug+. - # Logging.trace=: log all raw request and response and - # messages logged with +trace+. - module Logging - class << self - attr_writer :trace, :debug, :silent - - def trace?; !@silent && @trace end - def debug?; !@silent && @debug end - def silent?; @silent end - end - - # Global silencer methods - def silent - Logging.silent? - end - def silent=(value) - Logging.silent = value - end - - # Log a message to the console - def log(msg) - puts msg unless Logging.silent? - end - module_function :log - public :log - - # Log a message to the console if tracing is activated - def trace(msg=nil) - log msg || yield if Logging.trace? - end - module_function :trace - public :trace - - # Log a message to the console if debugging is activated - def debug(msg=nil) - log msg || yield if Logging.debug? - end - module_function :debug - public :debug - - # Log an error backtrace if debugging is activated - def log_error(e=$!) - debug "#{e}\n\t" + e.backtrace.join("\n\t") - end - module_function :log_error - public :log_error - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/request.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/request.rb deleted file mode 100644 index 7b982dae..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/request.rb +++ /dev/null @@ -1,157 +0,0 @@ -require 'thin_parser' -require 'tempfile' - -module Thin - # Raised when an incoming request is not valid - # and the server can not process it. - class InvalidRequest < IOError; end - - # A request sent by the client to the server. - class Request - # Maximum request body size before it is moved out of memory - # and into a tempfile for reading. 
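Headers#[]= above silently drops a repeated header unless its name is in ALLOWED_DUPLICATES, which is how several Set-Cookie lines survive while a second Content-Type does not. For instance:

    require 'thin'

    h = Thin::Headers.new
    h['Content-Type'] = 'text/html'
    h['Content-Type'] = 'text/plain'   # ignored: duplicate of a non-repeatable header
    h['Set-Cookie']   = 'a=1'
    h['Set-Cookie']   = 'b=2'          # kept: Set-Cookie allows duplicates
    h.to_s
    # => "Content-Type: text/html\r\nSet-Cookie: a=1\r\nSet-Cookie: b=2\r\n"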
- MAX_BODY = 1024 * (80 + 32) - BODY_TMPFILE = 'thin-body'.freeze - MAX_HEADER = 1024 * (80 + 32) - - INITIAL_BODY = '' - # Force external_encoding of request's body to ASCII_8BIT - INITIAL_BODY.encode!(Encoding::ASCII_8BIT) if INITIAL_BODY.respond_to?(:encode) - - # Freeze some HTTP header names & values - SERVER_SOFTWARE = 'SERVER_SOFTWARE'.freeze - SERVER_NAME = 'SERVER_NAME'.freeze - LOCALHOST = 'localhost'.freeze - HTTP_VERSION = 'HTTP_VERSION'.freeze - HTTP_1_0 = 'HTTP/1.0'.freeze - REMOTE_ADDR = 'REMOTE_ADDR'.freeze - CONTENT_LENGTH = 'CONTENT_LENGTH'.freeze - CONNECTION = 'HTTP_CONNECTION'.freeze - KEEP_ALIVE_REGEXP = /\bkeep-alive\b/i.freeze - CLOSE_REGEXP = /\bclose\b/i.freeze - - # Freeze some Rack header names - RACK_INPUT = 'rack.input'.freeze - RACK_VERSION = 'rack.version'.freeze - RACK_ERRORS = 'rack.errors'.freeze - RACK_MULTITHREAD = 'rack.multithread'.freeze - RACK_MULTIPROCESS = 'rack.multiprocess'.freeze - RACK_RUN_ONCE = 'rack.run_once'.freeze - ASYNC_CALLBACK = 'async.callback'.freeze - ASYNC_CLOSE = 'async.close'.freeze - - # CGI-like request environment variables - attr_reader :env - - # Unparsed data of the request - attr_reader :data - - # Request body - attr_reader :body - - def initialize - @parser = Thin::HttpParser.new - @data = '' - @nparsed = 0 - @body = StringIO.new(INITIAL_BODY.dup) - @env = { - SERVER_SOFTWARE => SERVER, - SERVER_NAME => LOCALHOST, - - # Rack stuff - RACK_INPUT => @body, - - RACK_VERSION => VERSION::RACK, - RACK_ERRORS => STDERR, - - RACK_MULTITHREAD => false, - RACK_MULTIPROCESS => false, - RACK_RUN_ONCE => false - } - end - - # Parse a chunk of data into the request environment - # Raises a +InvalidRequest+ if invalid. - # Returns +true+ if the parsing is complete. - def parse(data) - if @parser.finished? # Header finished, can only be some more body - body << data - else # Parse more header using the super parser - @data << data - raise InvalidRequest, 'Header longer than allowed' if @data.size > MAX_HEADER - - @nparsed = @parser.execute(@env, @data, @nparsed) - - # Transfert to a tempfile if body is very big - move_body_to_tempfile if @parser.finished? && content_length > MAX_BODY - end - - - if finished? # Check if header and body are complete - @data = nil - @body.rewind - true # Request is fully parsed - else - false # Not finished, need more data - end - end - - # +true+ if headers and body are finished parsing - def finished? - @parser.finished? && @body.size >= content_length - end - - # Expected size of the body - def content_length - @env[CONTENT_LENGTH].to_i - end - - # Returns +true+ if the client expect the connection to be persistent. - def persistent? - # Clients and servers SHOULD NOT assume that a persistent connection - # is maintained for HTTP versions less than 1.1 unless it is explicitly - # signaled. (http://www.w3.org/Protocols/rfc2616/rfc2616-sec8.html) - if @env[HTTP_VERSION] == HTTP_1_0 - @env[CONNECTION] =~ KEEP_ALIVE_REGEXP - - # HTTP/1.1 client intends to maintain a persistent connection unless - # a Connection header including the connection-token "close" was sent - # in the request - else - @env[CONNECTION].nil? 
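Request#parse above is incremental: it returns false until both the header block and Content-Length bytes of body have arrived, spilling large bodies into a Tempfile along the way. A hedged sketch of feeding it in two chunks:

    require 'thin'

    req = Thin::Request.new
    req.parse("POST /upload HTTP/1.1\r\nContent-Length: 5\r\n\r\n")   # => false, body incomplete
    req.parse("hello")                                                # => true, request complete
    req.env['CONTENT_LENGTH']   # => "5"
    req.body.read               # => "hello" (the body is rewound once parsing finishes)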
|| @env[CONNECTION] !~ CLOSE_REGEXP - end - end - - def remote_address=(address) - @env[REMOTE_ADDR] = address - end - - def threaded=(value) - @env[RACK_MULTITHREAD] = value - end - - def async_callback=(callback) - @env[ASYNC_CALLBACK] = callback - @env[ASYNC_CLOSE] = EventMachine::DefaultDeferrable.new - end - - def async_close - @async_close ||= @env[ASYNC_CLOSE] - end - - # Close any resource used by the request - def close - @body.delete if @body.class == Tempfile - end - - private - def move_body_to_tempfile - current_body = @body - current_body.rewind - @body = Tempfile.new(BODY_TMPFILE) - @body.binmode - @body << current_body.read - @env[RACK_INPUT] = @body - end - end -end diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/response.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/response.rb deleted file mode 100644 index 0ff95784..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/response.rb +++ /dev/null @@ -1,101 +0,0 @@ -module Thin - # A response sent to the client. - class Response - CONNECTION = 'Connection'.freeze - CLOSE = 'close'.freeze - KEEP_ALIVE = 'keep-alive'.freeze - SERVER = 'Server'.freeze - CONTENT_LENGTH = 'Content-Length'.freeze - - # Status code - attr_accessor :status - - # Response body, must respond to +each+. - attr_accessor :body - - # Headers key-value hash - attr_reader :headers - - def initialize - @headers = Headers.new - @status = 200 - @persistent = false - end - - # String representation of the headers - # to be sent in the response. - def headers_output - # Set default headers - @headers[CONNECTION] = persistent? ? KEEP_ALIVE : CLOSE - @headers[SERVER] = Thin::SERVER - - @headers.to_s - end - - # Top header of the response, - # containing the status code and response headers. - def head - "HTTP/1.1 #{@status} #{HTTP_STATUS_CODES[@status.to_i]}\r\n#{headers_output}\r\n" - end - - if Thin.ruby_18? - - # Ruby 1.8 implementation. - # Respects Rack specs. - # - # See http://rack.rubyforge.org/doc/files/SPEC.html - def headers=(key_value_pairs) - key_value_pairs.each do |k, vs| - vs.each { |v| @headers[k] = v.chomp } if vs - end if key_value_pairs - end - - else - - # Ruby 1.9 doesn't have a String#each anymore. - # Rack spec doesn't take care of that yet, for now we just use - # +each+ but fallback to +each_line+ on strings. - # I wish we could remove that condition. - # To be reviewed when a new Rack spec comes out. - def headers=(key_value_pairs) - key_value_pairs.each do |k, vs| - next unless vs - if vs.is_a?(String) - vs.each_line { |v| @headers[k] = v.chomp } - else - vs.each { |v| @headers[k] = v.chomp } - end - end if key_value_pairs - end - - end - - # Close any resource used by the response - def close - @body.close if @body.respond_to?(:close) - end - - # Yields each chunk of the response. - # To control the size of each chunk - # define your own +each+ method on +body+. - def each - yield head - if @body.is_a?(String) - yield @body - else - @body.each { |chunk| yield chunk } - end - end - - # Tell the client the connection should stay open - def persistent! - @persistent = true - end - - # Persistent connection must be requested as keep-alive - # from the server and have a Content-Length. - def persistent? 
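Response above builds the status line and header block itself and streams them, followed by the body, through each; the connection stays persistent only when persistent! was called and a Content-Length is present. A short hedged example (the exact Server header value depends on the thin version):

    require 'thin'

    res = Thin::Response.new
    res.status  = 200
    res.headers = { 'Content-Type' => 'text/plain', 'Content-Length' => '2' }
    res.body    = 'hi'
    res.persistent!
    res.each { |chunk| print chunk }
    # prints "HTTP/1.1 200 OK", the headers (including "Connection: keep-alive"
    # and the Server header), a blank line, then "hi"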
- @persistent && @headers.has_key?(CONTENT_LENGTH) - end - end -end diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/runner.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/runner.rb deleted file mode 100644 index c91994c5..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/runner.rb +++ /dev/null @@ -1,212 +0,0 @@ -require 'optparse' -require 'yaml' - -module Thin - # CLI runner. - # Parse options and send command to the correct Controller. - class Runner - COMMANDS = %w(start stop restart config) - LINUX_ONLY_COMMANDS = %w(install) - - # Commands that wont load options from the config file - CONFIGLESS_COMMANDS = %w(config install) - - # Parsed options - attr_accessor :options - - # Name of the command to be runned. - attr_accessor :command - - # Arguments to be passed to the command. - attr_accessor :arguments - - # Return all available commands - def self.commands - commands = COMMANDS - commands += LINUX_ONLY_COMMANDS if Thin.linux? - commands - end - - def initialize(argv) - @argv = argv - - # Default options values - @options = { - :chdir => Dir.pwd, - :environment => 'development', - :address => '0.0.0.0', - :port => Server::DEFAULT_PORT, - :timeout => Server::DEFAULT_TIMEOUT, - :log => 'log/thin.log', - :pid => 'tmp/pids/thin.pid', - :max_conns => Server::DEFAULT_MAXIMUM_CONNECTIONS, - :max_persistent_conns => Server::DEFAULT_MAXIMUM_PERSISTENT_CONNECTIONS, - :require => [], - :wait => Controllers::Cluster::DEFAULT_WAIT_TIME - } - - parse! - end - - def parser - # NOTE: If you add an option here make sure the key in the +options+ hash is the - # same as the name of the command line option. - # +option+ keys are used to build the command line to launch other processes, - # see lib/thin/command.rb. - @parser ||= OptionParser.new do |opts| - opts.banner = "Usage: thin [options] #{self.class.commands.join('|')}" - - opts.separator "" - opts.separator "Server options:" - - opts.on("-a", "--address HOST", "bind to HOST address " + - "(default: #{@options[:address]})") { |host| @options[:address] = host } - opts.on("-p", "--port PORT", "use PORT (default: #{@options[:port]})") { |port| @options[:port] = port.to_i } - opts.on("-S", "--socket FILE", "bind to unix domain socket") { |file| @options[:socket] = file } - opts.on("-y", "--swiftiply [KEY]", "Run using swiftiply") { |key| @options[:swiftiply] = key } - opts.on("-A", "--adapter NAME", "Rack adapter to use (default: autodetect)", - "(#{Rack::ADAPTERS.map{|(a,b)|a}.join(', ')})") { |name| @options[:adapter] = name } - opts.on("-R", "--rackup FILE", "Load a Rack config file instead of " + - "Rack adapter") { |file| @options[:rackup] = file } - opts.on("-c", "--chdir DIR", "Change to dir before starting") { |dir| @options[:chdir] = File.expand_path(dir) } - opts.on( "--stats PATH", "Mount the Stats adapter under PATH") { |path| @options[:stats] = path } - - opts.separator "" - opts.separator "Adapter options:" - opts.on("-e", "--environment ENV", "Framework environment " + - "(default: #{@options[:environment]})") { |env| @options[:environment] = env } - opts.on( "--prefix PATH", "Mount the app under PATH (start with /)") { |path| @options[:prefix] = path } - - unless Thin.win? 
# Daemonizing not supported on Windows - opts.separator "" - opts.separator "Daemon options:" - - opts.on("-d", "--daemonize", "Run daemonized in the background") { @options[:daemonize] = true } - opts.on("-l", "--log FILE", "File to redirect output " + - "(default: #{@options[:log]})") { |file| @options[:log] = file } - opts.on("-P", "--pid FILE", "File to store PID " + - "(default: #{@options[:pid]})") { |file| @options[:pid] = file } - opts.on("-u", "--user NAME", "User to run daemon as (use with -g)") { |user| @options[:user] = user } - opts.on("-g", "--group NAME", "Group to run daemon as (use with -u)") { |group| @options[:group] = group } - opts.on( "--tag NAME", "Additional text to display in process listing") { |tag| @options[:tag] = tag } - - opts.separator "" - opts.separator "Cluster options:" - - opts.on("-s", "--servers NUM", "Number of servers to start") { |num| @options[:servers] = num.to_i } - opts.on("-o", "--only NUM", "Send command to only one server of the cluster") { |only| @options[:only] = only.to_i } - opts.on("-C", "--config FILE", "Load options from config file") { |file| @options[:config] = file } - opts.on( "--all [DIR]", "Send command to each config files in DIR") { |dir| @options[:all] = dir } if Thin.linux? - opts.on("-O", "--onebyone", "Restart the cluster one by one (only works with restart command)") { @options[:onebyone] = true } - opts.on("-w", "--wait NUM", "Maximum wait time for server to be started in seconds (use with -O)") { |time| @options[:wait] = time.to_i } - end - - opts.separator "" - opts.separator "Tuning options:" - - opts.on("-b", "--backend CLASS", "Backend to use, full classname") { |name| @options[:backend] = name } - opts.on("-t", "--timeout SEC", "Request or command timeout in sec " + - "(default: #{@options[:timeout]})") { |sec| @options[:timeout] = sec.to_i } - opts.on("-f", "--force", "Force the execution of the command") { @options[:force] = true } - opts.on( "--max-conns NUM", "Maximum number of open file descriptors " + - "(default: #{@options[:max_conns]})", - "Might require sudo to set higher then 1024") { |num| @options[:max_conns] = num.to_i } unless Thin.win? - opts.on( "--max-persistent-conns NUM", - "Maximum number of persistent connections", - "(default: #{@options[:max_persistent_conns]})") { |num| @options[:max_persistent_conns] = num.to_i } - opts.on( "--threaded", "Call the Rack application in threads " + - "[experimental]") { @options[:threaded] = true } - opts.on( "--no-epoll", "Disable the use of epoll") { @options[:no_epoll] = true } if Thin.linux? - - opts.separator "" - opts.separator "Common options:" - - opts.on_tail("-r", "--require FILE", "require the library") { |file| @options[:require] << file } - opts.on_tail("-D", "--debug", "Set debbuging on") { @options[:debug] = true } - opts.on_tail("-V", "--trace", "Set tracing on (log raw request/response)") { @options[:trace] = true } - opts.on_tail("-h", "--help", "Show this message") { puts opts; exit } - opts.on_tail('-v', '--version', "Show version") { puts Thin::SERVER; exit } - end - end - - # Parse the options. - def parse! - parser.parse! @argv - @command = @argv.shift - @arguments = @argv - end - - # Parse the current shell arguments and run the command. - # Exits on error. - def run! - if self.class.commands.include?(@command) - run_command - elsif @command.nil? - puts "Command required" - puts @parser - exit 1 - else - abort "Unknown command: #{@command}. 
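The OptionParser block above is the single source of truth for thin's command line; every switch writes straight into the options hash that Command#shellify later re-serialises for cluster members. A hedged sketch of what parsing produces:

    require 'thin'

    runner = Thin::Runner.new(%w(start --port 5000 --daemonize --servers 3))
    runner.command            # => "start"
    runner.options[:port]     # => 5000
    runner.options[:servers]  # => 3
    runner.cluster?           # => truthy, so run! would dispatch to Controllers::Cluster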
Use one of #{self.class.commands.join(', ')}" - end - end - - # Send the command to the controller: single instance or cluster. - def run_command - load_options_from_config_file! unless CONFIGLESS_COMMANDS.include?(@command) - - # PROGRAM_NAME is relative to the current directory, so make sure - # we store and expand it before changing directory. - Command.script = File.expand_path($PROGRAM_NAME) - - # Change the current directory ASAP so that all relative paths are - # relative to this one. - Dir.chdir(@options[:chdir]) unless CONFIGLESS_COMMANDS.include?(@command) - - @options[:require].each { |r| ruby_require r } - Logging.debug = @options[:debug] - Logging.trace = @options[:trace] - - controller = case - when cluster? then Controllers::Cluster.new(@options) - when service? then Controllers::Service.new(@options) - else Controllers::Controller.new(@options) - end - - if controller.respond_to?(@command) - begin - controller.send(@command, *@arguments) - rescue RunnerError => e - abort e.message - end - else - abort "Invalid options for command: #{@command}" - end - end - - # +true+ if we're controlling a cluster. - def cluster? - @options[:only] || @options[:servers] || @options[:config] - end - - # +true+ if we're acting a as system service. - def service? - @options.has_key?(:all) || @command == 'install' - end - - private - def load_options_from_config_file! - if file = @options.delete(:config) - YAML.load_file(file).each { |key, value| @options[key.to_sym] = value } - end - end - - def ruby_require(file) - if File.extname(file) == '.ru' - warn 'WARNING: Use the -R option to load a Rack config file' - @options[:rackup] = file - else - require file - end - end - end -end diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/server.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/server.rb deleted file mode 100644 index 852ee02c..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/server.rb +++ /dev/null @@ -1,247 +0,0 @@ -module Thin - # The uterly famous Thin HTTP server. - # It listen for incoming request through a given +backend+ - # and forward all request to +app+. - # - # == TCP server - # Create a new TCP server on bound to host:port by specifiying +host+ - # and +port+ as the first 2 arguments. - # - # Thin::Server.start('0.0.0.0', 3000, app) - # - # == UNIX domain server - # Create a new UNIX domain socket bound to +socket+ file by specifiying a filename - # as the first argument. Eg.: /tmp/thin.sock. If the first argument contains a / - # it will be assumed to be a UNIX socket. - # - # Thin::Server.start('/tmp/thin.sock', app) - # - # == Using a custom backend - # You can implement your own way to connect the server to its client by creating your - # own Backend class and pass it as the :backend option. - # - # Thin::Server.start('galaxy://faraway', 1345, app, :backend => Thin::Backends::MyFancyBackend) - # - # == Rack application (+app+) - # All requests will be processed through +app+ that must be a valid Rack adapter. - # A valid Rack adapter (application) must respond to call(env#Hash) and - # return an array of [status, headers, body]. - # - # == Building an app in place - # If a block is passed, a Rack::Builder instance - # will be passed to build the +app+. 
So you can do cool stuff like this: - # - # Thin::Server.start('0.0.0.0', 3000) do - # use Rack::CommonLogger - # use Rack::ShowExceptions - # map "/lobster" do - # use Rack::Lint - # run Rack::Lobster.new - # end - # end - # - # == Controlling with signals - # * QUIT: Gracefull shutdown (see Server#stop) - # * INT and TERM: Force shutdown (see Server#stop!) - # Disable signals by passing :signals => false - # - class Server - include Logging - include Daemonizable - extend Forwardable - - # Default values - DEFAULT_TIMEOUT = 30 #sec - DEFAULT_HOST = '0.0.0.0' - DEFAULT_PORT = 3000 - DEFAULT_MAXIMUM_CONNECTIONS = 1024 - DEFAULT_MAXIMUM_PERSISTENT_CONNECTIONS = 512 - - # Application (Rack adapter) called with the request that produces the response. - attr_accessor :app - - # A tag that will show in the process listing - attr_accessor :tag - - # Backend handling the connections to the clients. - attr_accessor :backend - - # Maximum number of seconds for incoming data to arrive before the connection - # is dropped. - def_delegators :backend, :timeout, :timeout= - - # Maximum number of file or socket descriptors that the server may open. - def_delegators :backend, :maximum_connections, :maximum_connections= - - # Maximum number of connection that can be persistent at the same time. - # Most browser never close the connection so most of the time they are closed - # when the timeout occur. If we don't control the number of persistent connection, - # if would be very easy to overflow the server for a DoS attack. - def_delegators :backend, :maximum_persistent_connections, :maximum_persistent_connections= - - # Allow using threads in the backend. - def_delegators :backend, :threaded?, :threaded= - - # Address and port on which the server is listening for connections. - def_delegators :backend, :host, :port - - # UNIX domain socket on which the server is listening for connections. - def_delegator :backend, :socket - - # Disable the use of epoll under Linux - def_delegators :backend, :no_epoll, :no_epoll= - - def initialize(*args, &block) - host, port, options = DEFAULT_HOST, DEFAULT_PORT, {} - - # Guess each parameter by its type so they can be - # received in any order. - args.each do |arg| - case arg - when Fixnum, /^\d+$/ then port = arg.to_i - when String then host = arg - when Hash then options = arg - else - @app = arg if arg.respond_to?(:call) - end - end - - # Set tag if needed - self.tag = options[:tag] - - # Try to intelligently select which backend to use. - @backend = select_backend(host, port, options) - - load_cgi_multipart_eof_fix - - @backend.server = self - - # Set defaults - @backend.maximum_connections = DEFAULT_MAXIMUM_CONNECTIONS - @backend.maximum_persistent_connections = DEFAULT_MAXIMUM_PERSISTENT_CONNECTIONS - @backend.timeout = DEFAULT_TIMEOUT - - # Allow using Rack builder as a block - @app = Rack::Builder.new(&block).to_app if block - - # If in debug mode, wrap in logger adapter - @app = Rack::CommonLogger.new(@app) if Logging.debug? - - setup_signals unless options[:signals].class == FalseClass - end - - # Lil' shortcut to turn this: - # - # Server.new(...).start - # - # into this: - # - # Server.start(...) - # - def self.start(*args, &block) - new(*args, &block).start! - end - - # Start the server and listen for connections. 
- def start - raise ArgumentError, 'app required' unless @app - - log ">> Thin web server (v#{VERSION::STRING} codename #{VERSION::CODENAME})" - debug ">> Debugging ON" - trace ">> Tracing ON" - - log ">> Maximum connections set to #{@backend.maximum_connections}" - log ">> Listening on #{@backend}, CTRL+C to stop" - - @backend.start - end - alias :start! :start - - # == Gracefull shutdown - # Stops the server after processing all current connections. - # As soon as this method is called, the server stops accepting - # new requests and wait for all current connections to finish. - # Calling twice is the equivalent of calling stop!. - def stop - if running? - @backend.stop - unless @backend.empty? - log ">> Waiting for #{@backend.size} connection(s) to finish, " + - "can take up to #{timeout} sec, CTRL+C to stop now" - end - else - stop! - end - end - - # == Force shutdown - # Stops the server closing all current connections right away. - # This doesn't wait for connection to finish their work and send data. - # All current requests will be dropped. - def stop! - log ">> Stopping ..." - - @backend.stop! - end - - # == Configure the server - # The process might need to have superuser privilege to configure - # server with optimal options. - def config - @backend.config - end - - # Name of the server and type of backend used. - # This is also the name of the process in which Thin is running as a daemon. - def name - "thin server (#{@backend})" + (tag ? " [#{tag}]" : "") - end - alias :to_s :name - - # Return +true+ if the server is running and ready to receive requests. - # Note that the server might still be running and return +false+ when - # shuting down and waiting for active connections to complete. - def running? - @backend.running? - end - - protected - # Register signals: - # * INT calls +stop+ to shutdown gracefully. - # * TERM calls stop! to force shutdown. - def setup_signals - trap('QUIT') { stop } unless Thin.win? - trap('INT') { stop! } - trap('TERM') { stop! } - end - - def select_backend(host, port, options) - case - when options.has_key?(:backend) - raise ArgumentError, ":backend must be a class" unless options[:backend].is_a?(Class) - options[:backend].new(host, port, options) - when options.has_key?(:swiftiply) - Backends::SwiftiplyClient.new(host, port, options) - when host.include?('/') - Backends::UnixServer.new(host) - else - Backends::TcpServer.new(host, port) - end - end - - # Taken from Mongrel cgi_multipart_eof_fix - # Ruby 1.8.5 has a security bug in cgi.rb, we need to patch it. - def load_cgi_multipart_eof_fix - version = RUBY_VERSION.split('.').map { |i| i.to_i } - - if version[0] <= 1 && version[1] <= 8 && version[2] <= 5 && RUBY_PLATFORM !~ /java/ - begin - require 'cgi_multipart_eof_fix' - rescue LoadError - log "!! Ruby 1.8.5 is not secure please install cgi_multipart_eof_fix:" - log " gem install cgi_multipart_eof_fix" - end - end - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/stats.html.erb b/vendor/gems/gems/thin-1.2.5/lib/thin/stats.html.erb deleted file mode 100644 index 14338bf1..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/stats.html.erb +++ /dev/null @@ -1,216 +0,0 @@ -<%# -# Taken from Rack::ShowException -# adapted from Django -# Copyright (c) 2005, the Lawrence Journal-World -# Used under the modified BSD license: -# http://www.xfree86.org/3.3.6/COPYRIGHT2.html#5 -%> - - - - - - Thin Stats - - - - -
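As a quick orientation before the stats template below: the server.rb removed above documents the Thin::Server API (TCP vs. UNIX-socket backends, tuning knobs delegated to the backend, and the Rack::Builder block form). A minimal, hedged sketch of that usage; the inline Rack app and header values are illustrative assumptions, everything else follows the class docs and def_delegators shown in the diff:

    require 'thin'

    # Any object responding to #call works as the Rack app.
    app = proc { |env| [200, { 'Content-Type' => 'text/plain' }, ['hello from thin']] }

    server = Thin::Server.new('0.0.0.0', 3000, app)  # host/port selects the TCP backend
    server.timeout             = 30                  # delegated to the backend
    server.maximum_connections = 1024                # see the def_delegators above
    server.start                                     # same as Thin::Server.start('0.0.0.0', 3000, app)

    # A first argument containing a '/' selects the UNIX-socket backend instead:
    # Thin::Server.start('/tmp/thin.sock', app)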
[Body of the deleted lib/thin/stats.html.erb template; its HTML markup was lost in extraction. The page rendered a "Server stats" header (<%= Thin::SERVER %>, uptime, PID), a "Requests" table with counters (requests, finished, errors, last request time) and, when @last_request was set, tables listing the last request's GET, POST, cookie and Rack ENV data, ending with the note "You're seeing this page because you use Thin::Stats."]
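The stats page summarized above is normally enabled through the --stats PATH option listed in the deleted runner.rb ("Mount the Stats adapter under PATH"). As a hedged sketch, it can also be wired up directly in a rackup file; the config.ru wrapper and the inner app below are assumptions, only Thin::Stats::Adapter.new(app, path) comes from the stats.rb deleted next:

    # config.ru (hypothetical)
    require 'thin'
    require 'thin/stats'  # explicit require in case thin does not load it by default (assumption)

    inner = proc { |env| [200, { 'Content-Type' => 'text/plain' }, ['hello']] }

    # Requests under /stats render the stats page; everything else is passed
    # through to (and counted for) the wrapped app.
    run Thin::Stats::Adapter.new(inner, '/stats')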
- - - \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/stats.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/stats.rb deleted file mode 100644 index 146baec5..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/stats.rb +++ /dev/null @@ -1,52 +0,0 @@ -require 'erb' - -module Thin - module Stats - # Rack adapter to log stats about a Rack application. - class Adapter - include ERB::Util - - def initialize(app, path='/stats') - @app = app - @path = path - - @template = ERB.new(File.read(File.dirname(__FILE__) + '/stats.html.erb')) - - @requests = 0 - @requests_finished = 0 - @start_time = Time.now - end - - def call(env) - if env['PATH_INFO'].index(@path) == 0 - serve(env) - else - log(env) { @app.call(env) } - end - end - - def log(env) - @requests += 1 - @last_request = Rack::Request.new(env) - request_started_at = Time.now - - response = yield - - @requests_finished += 1 - @last_request_time = Time.now - request_started_at - - response - end - - def serve(env) - body = @template.result(binding) - - [ - 200, - { 'Content-Type' => 'text/html' }, - [body] - ] - end - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/statuses.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/statuses.rb deleted file mode 100644 index 8aa3614d..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/statuses.rb +++ /dev/null @@ -1,43 +0,0 @@ -module Thin - # Every standard HTTP code mapped to the appropriate message. - # Stolent from Mongrel. - HTTP_STATUS_CODES = { - 100 => 'Continue', - 101 => 'Switching Protocols', - 200 => 'OK', - 201 => 'Created', - 202 => 'Accepted', - 203 => 'Non-Authoritative Information', - 204 => 'No Content', - 205 => 'Reset Content', - 206 => 'Partial Content', - 300 => 'Multiple Choices', - 301 => 'Moved Permanently', - 302 => 'Moved Temporarily', - 303 => 'See Other', - 304 => 'Not Modified', - 305 => 'Use Proxy', - 400 => 'Bad Request', - 401 => 'Unauthorized', - 402 => 'Payment Required', - 403 => 'Forbidden', - 404 => 'Not Found', - 405 => 'Method Not Allowed', - 406 => 'Not Acceptable', - 407 => 'Proxy Authentication Required', - 408 => 'Request Time-out', - 409 => 'Conflict', - 410 => 'Gone', - 411 => 'Length Required', - 412 => 'Precondition Failed', - 413 => 'Request Entity Too Large', - 414 => 'Request-URI Too Large', - 415 => 'Unsupported Media Type', - 500 => 'Internal Server Error', - 501 => 'Not Implemented', - 502 => 'Bad Gateway', - 503 => 'Service Unavailable', - 504 => 'Gateway Time-out', - 505 => 'HTTP Version not supported' - } -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin/version.rb b/vendor/gems/gems/thin-1.2.5/lib/thin/version.rb deleted file mode 100644 index fbaf79e6..00000000 --- a/vendor/gems/gems/thin-1.2.5/lib/thin/version.rb +++ /dev/null @@ -1,32 +0,0 @@ -module Thin - # Raised when a feature is not supported on the - # current platform. - class PlatformNotSupported < RuntimeError; end - - module VERSION #:nodoc: - MAJOR = 1 - MINOR = 2 - TINY = 5 - - STRING = [MAJOR, MINOR, TINY].join('.') - - CODENAME = "This Is Not A Web Server".freeze - - RACK = [1, 0].freeze # Rack protocol version - end - - NAME = 'thin'.freeze - SERVER = "#{NAME} #{VERSION::STRING} codename #{VERSION::CODENAME}".freeze - - def self.win? - RUBY_PLATFORM =~ /mswin|mingw/ - end - - def self.linux? - RUBY_PLATFORM =~ /linux/ - end - - def self.ruby_18? 
- RUBY_VERSION =~ /^1\.8/ - end -end diff --git a/vendor/gems/gems/thin-1.2.5/lib/thin_parser.bundle b/vendor/gems/gems/thin-1.2.5/lib/thin_parser.bundle deleted file mode 100755 index ca9bb01e..00000000 Binary files a/vendor/gems/gems/thin-1.2.5/lib/thin_parser.bundle and /dev/null differ diff --git a/vendor/gems/gems/thin-1.2.5/spec/backends/swiftiply_client_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/backends/swiftiply_client_spec.rb deleted file mode 100644 index baebd6fe..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/backends/swiftiply_client_spec.rb +++ /dev/null @@ -1,66 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Backends::SwiftiplyClient do - before do - @backend = Backends::SwiftiplyClient.new('0.0.0.0', 3333) - @backend.server = mock('server', :null_object => true) - end - - it "should connect" do - EventMachine.run do - @backend.connect - EventMachine.stop - end - end - - it "should disconnect" do - EventMachine.run do - @backend.connect - @backend.disconnect - EventMachine.stop - end - end -end - -describe SwiftiplyConnection do - before do - @connection = SwiftiplyConnection.new(nil) - @connection.backend = Backends::SwiftiplyClient.new('0.0.0.0', 3333) - @connection.backend.server = mock('server', :null_object => true) - end - - it do - @connection.should be_persistent - end - - it "should send handshake on connection_completed" do - @connection.should_receive(:send_data).with('swiftclient000000000d0500') - @connection.connection_completed - end - - it "should reconnect on unbind" do - @connection.backend.stub!(:running?).and_return(true) - @connection.stub!(:rand).and_return(0) # Make sure we don't wait - - @connection.should_receive(:reconnect).with('0.0.0.0', 3333) - - EventMachine.run do - @connection.unbind - EventMachine.add_timer(0) { EventMachine.stop } - end - end - - it "should not reconnect when not running" do - @connection.backend.stub!(:running?).and_return(false) - EventMachine.should_not_receive(:add_timer) - @connection.unbind - end - - it "should have a host_ip" do - @connection.send(:host_ip).should == [0, 0, 0, 0] - end - - it "should generate swiftiply_handshake based on key" do - @connection.send(:swiftiply_handshake, 'key').should == 'swiftclient000000000d0503key' - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/backends/tcp_server_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/backends/tcp_server_spec.rb deleted file mode 100644 index f358e328..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/backends/tcp_server_spec.rb +++ /dev/null @@ -1,33 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Backends::TcpServer do - before do - @backend = Backends::TcpServer.new('0.0.0.0', 3333) - end - - it "should not use epoll" do - @backend.no_epoll = true - EventMachine.should_not_receive(:epoll) - @backend.config - end - - it "should use epoll" do - EventMachine.should_receive(:epoll) - @backend.config - end - - it "should connect" do - EventMachine.run do - @backend.connect - EventMachine.stop - end - end - - it "should disconnect" do - EventMachine.run do - @backend.connect - @backend.disconnect - EventMachine.stop - end - end -end diff --git a/vendor/gems/gems/thin-1.2.5/spec/backends/unix_server_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/backends/unix_server_spec.rb deleted file mode 100644 index 1abcdb69..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/backends/unix_server_spec.rb +++ /dev/null @@ -1,37 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe 
Backends::UnixServer do - before do - @backend = Backends::UnixServer.new('/tmp/thin-test.sock') - end - - it "should connect" do - EventMachine.run do - @backend.connect - EventMachine.stop - end - end - - it "should disconnect" do - EventMachine.run do - @backend.connect - @backend.disconnect - EventMachine.stop - end - end - - it "should remove socket file on close" do - @backend.close - File.exist?('/tmp/thin-test.sock').should be_false - end -end - -describe UnixConnection do - before do - @connection = UnixConnection.new(nil) - end - - it "should return 127.0.0.1 as remote_address" do - @connection.remote_address.should == '127.0.0.1' - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/command_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/command_spec.rb deleted file mode 100644 index 751d11f7..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/command_spec.rb +++ /dev/null @@ -1,25 +0,0 @@ -require File.dirname(__FILE__) + '/spec_helper' - -describe Command do - before do - Command.script = 'thin' - @command = Command.new(:start, :port => 3000, :daemonize => true, :log => 'hi.log', - :require => %w(rubygems thin), :no_epoll => true) - end - - it 'should shellify command' do - out = @command.shellify - out.should include('--port=3000', '--daemonize', '--log="hi.log"', 'thin start --') - out.should_not include('--pid') - end - - it 'should shellify Array argument to multiple parameters' do - out = @command.shellify - out.should include('--require="rubygems"', '--require="thin"') - end - - it 'should convert _ to - in option name' do - out = @command.shellify - out.should include('--no-epoll') - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/configs/cluster.yml b/vendor/gems/gems/thin-1.2.5/spec/configs/cluster.yml deleted file mode 100644 index 5243635b..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/configs/cluster.yml +++ /dev/null @@ -1,9 +0,0 @@ ---- -pid: tmp/pids/thin.pid -log: log/thin.log -timeout: 60 -port: 5000 -chdir: spec/rails_app -environment: production -servers: 3 -address: 127.0.0.1 diff --git a/vendor/gems/gems/thin-1.2.5/spec/configs/single.yml b/vendor/gems/gems/thin-1.2.5/spec/configs/single.yml deleted file mode 100644 index 29c3d611..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/configs/single.yml +++ /dev/null @@ -1,9 +0,0 @@ ---- -pid: tmp/pids/thin.pid -log: log/thin.log -timeout: 60 -port: 6000 -chdir: spec/rails_app -environment: production -daemonize: true -address: 127.0.0.1 diff --git a/vendor/gems/gems/thin-1.2.5/spec/connection_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/connection_spec.rb deleted file mode 100644 index 11672117..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/connection_spec.rb +++ /dev/null @@ -1,106 +0,0 @@ -require File.dirname(__FILE__) + '/spec_helper' - -describe Connection do - before do - @connection = Connection.new(mock('EM', :null_object => true)) - @connection.post_init - @connection.app = proc do |env| - [200, {}, ['']] - end - end - - it "should parse on receive_data" do - @connection.request.should_receive(:parse).with('GET') - @connection.receive_data('GET') - end - - it "should close connection on InvalidRequest error in receive_data" do - @connection.request.stub!(:parse).and_raise(InvalidRequest) - @connection.should_receive(:close_connection) - @connection.receive_data('') - end - - it "should process when parsing complete" do - @connection.request.should_receive(:parse).and_return(true) - @connection.should_receive(:process) - 
@connection.receive_data('GET') - end - - it "should process" do - @connection.process - end - - it "should rescue error in process" do - @connection.app.should_receive(:call).and_raise(StandardError) - @connection.process - end - - it "should rescue Timeout error in process" do - @connection.app.should_receive(:call).and_raise(Timeout::Error.new("timeout error not rescued")) - @connection.process - end - - it "should not return HTTP_X_FORWARDED_FOR as remote_address" do - @connection.request.env['HTTP_X_FORWARDED_FOR'] = '1.2.3.4' - @connection.stub!(:socket_address).and_return("127.0.0.1") - @connection.remote_address.should == "127.0.0.1" - end - - it "should return nil on error retreiving remote_address" do - @connection.stub!(:get_peername).and_raise(RuntimeError) - @connection.remote_address.should be_nil - end - - it "should return nil on nil get_peername" do - @connection.stub!(:get_peername).and_return(nil) - @connection.remote_address.should be_nil - end - - it "should return nil on empty get_peername" do - @connection.stub!(:get_peername).and_return('') - @connection.remote_address.should be_nil - end - - it "should return remote_address" do - @connection.stub!(:get_peername).and_return(Socket.pack_sockaddr_in(3000, '127.0.0.1')) - @connection.remote_address.should == '127.0.0.1' - end - - it "should not be persistent" do - @connection.should_not be_persistent - end - - it "should be persistent when response is and allowed" do - @connection.response.stub!(:persistent?).and_return(true) - @connection.can_persist! - @connection.should be_persistent - end - - it "should not be persistent when response is but not allowed" do - @connection.response.persistent! - @connection.should_not be_persistent - end - - it "should set request env as rack.multithread" do - EventMachine.should_receive(:defer) - - @connection.threaded = true - @connection.process - - @connection.request.env["rack.multithread"].should == true - end - - it "should set as threaded when app.deferred? is true" do - @connection.app.should_receive(:deferred?).and_return(true) - @connection.should be_threaded - end - - it "should not set as threaded when app.deferred? is false" do - @connection.app.should_receive(:deferred?).and_return(false) - @connection.should_not be_threaded - end - - it "should not set as threaded when app do not respond to deferred?" 
do - @connection.should_not be_threaded - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/controllers/cluster_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/controllers/cluster_spec.rb deleted file mode 100644 index 81246642..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/controllers/cluster_spec.rb +++ /dev/null @@ -1,267 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' -include Controllers - -describe Cluster, "with host and port" do - before do - @cluster = Cluster.new(:chdir => '/rails_app', - :address => '0.0.0.0', - :port => 3000, - :servers => 3, - :timeout => 10, - :log => 'thin.log', - :pid => 'thin.pid' - ) - end - - it 'should include port number in file names' do - @cluster.send(:include_server_number, 'thin.log', 3000).should == 'thin.3000.log' - @cluster.send(:include_server_number, 'thin.pid', 3000).should == 'thin.3000.pid' - end - - it 'should call each server' do - calls = [] - @cluster.send(:with_each_server) do |port| - calls << port - end - calls.should == [3000, 3001, 3002] - end - - it 'should start on each port' do - Command.should_receive(:run).with(:start, options_for_port(3000)) - Command.should_receive(:run).with(:start, options_for_port(3001)) - Command.should_receive(:run).with(:start, options_for_port(3002)) - - @cluster.start - end - - it 'should stop on each port' do - Command.should_receive(:run).with(:stop, options_for_port(3000)) - Command.should_receive(:run).with(:stop, options_for_port(3001)) - Command.should_receive(:run).with(:stop, options_for_port(3002)) - - @cluster.stop - end - - private - def options_for_port(port) - { :daemonize => true, :log => "thin.#{port}.log", :timeout => 10, :address => "0.0.0.0", :port => port, :pid => "thin.#{port}.pid", :chdir => "/rails_app" } - end -end - -describe Cluster, "with UNIX socket" do - before do - @cluster = Cluster.new(:chdir => '/rails_app', - :socket => '/tmp/thin.sock', - :address => '0.0.0.0', - :port => 3000, - :servers => 3, - :timeout => 10, - :log => 'thin.log', - :pid => 'thin.pid' - ) - end - - it 'should include socket number in file names' do - @cluster.send(:include_server_number, 'thin.sock', 0).should == 'thin.0.sock' - @cluster.send(:include_server_number, 'thin', 0).should == 'thin.0' - end - - it "should exclude :address and :port options" do - @cluster.options.should_not have_key(:address) - @cluster.options.should_not have_key(:port) - end - - it 'should call each server' do - calls = [] - @cluster.send(:with_each_server) do |n| - calls << n - end - calls.should == [0, 1, 2] - end - - it 'should start each server' do - Command.should_receive(:run).with(:start, options_for_socket(0)) - Command.should_receive(:run).with(:start, options_for_socket(1)) - Command.should_receive(:run).with(:start, options_for_socket(2)) - - @cluster.start - end - - it 'should stop each server' do - Command.should_receive(:run).with(:stop, options_for_socket(0)) - Command.should_receive(:run).with(:stop, options_for_socket(1)) - Command.should_receive(:run).with(:stop, options_for_socket(2)) - - @cluster.stop - end - - - private - def options_for_socket(number) - { :daemonize => true, :log => "thin.#{number}.log", :timeout => 10, :socket => "/tmp/thin.#{number}.sock", :pid => "thin.#{number}.pid", :chdir => "/rails_app" } - end -end - -describe Cluster, "controlling only one server" do - before do - @cluster = Cluster.new(:chdir => '/rails_app', - :address => '0.0.0.0', - :port => 3000, - :servers => 3, - :timeout => 10, - :log => 'thin.log', - :pid => 'thin.pid', 
- :only => 3001 - ) - end - - it 'should call only specified server' do - calls = [] - @cluster.send(:with_each_server) do |n| - calls << n - end - calls.should == [3001] - end - - it "should start only specified server" do - Command.should_receive(:run).with(:start, options_for_port(3001)) - - @cluster.start - end - - private - def options_for_port(port) - { :daemonize => true, :log => "thin.#{port}.log", :timeout => 10, :address => "0.0.0.0", :port => port, :pid => "thin.#{port}.pid", :chdir => "/rails_app" } - end -end - -describe Cluster, "controlling only one server with UNIX socket" do - before do - @cluster = Cluster.new(:chdir => '/rails_app', - :socket => '/tmp/thin.sock', - :address => '0.0.0.0', - :port => 3000, - :servers => 3, - :timeout => 10, - :log => 'thin.log', - :pid => 'thin.pid', - :only => 1 - ) - end - - it 'should call only specified server' do - calls = [] - @cluster.send(:with_each_server) do |n| - calls << n - end - calls.should == [1] - end -end - -describe Cluster, "controlling only one server, by sequence number" do - before do - @cluster = Cluster.new(:chdir => '/rails_app', - :address => '0.0.0.0', - :port => 3000, - :servers => 3, - :timeout => 10, - :log => 'thin.log', - :pid => 'thin.pid', - :only => 1 - ) - end - - it 'should call only specified server' do - calls = [] - @cluster.send(:with_each_server) do |n| - calls << n - end - calls.should == [3001] - end - - it "should start only specified server" do - Command.should_receive(:run).with(:start, options_for_port(3001)) - - @cluster.start - end - - private - def options_for_port(port) - { :daemonize => true, :log => "thin.#{port}.log", :timeout => 10, :address => "0.0.0.0", :port => port, :pid => "thin.#{port}.pid", :chdir => "/rails_app" } - end -end - -describe Cluster, "with Swiftiply" do - before do - @cluster = Cluster.new(:chdir => '/rails_app', - :address => '0.0.0.0', - :port => 3000, - :servers => 3, - :timeout => 10, - :log => 'thin.log', - :pid => 'thin.pid', - :swiftiply => true - ) - end - - it 'should call each server' do - calls = [] - @cluster.send(:with_each_server) do |n| - calls << n - end - calls.should == [0, 1, 2] - end - - it 'should start each server' do - Command.should_receive(:run).with(:start, options_for_swiftiply(0)) - Command.should_receive(:run).with(:start, options_for_swiftiply(1)) - Command.should_receive(:run).with(:start, options_for_swiftiply(2)) - - @cluster.start - end - - it 'should stop each server' do - Command.should_receive(:run).with(:stop, options_for_swiftiply(0)) - Command.should_receive(:run).with(:stop, options_for_swiftiply(1)) - Command.should_receive(:run).with(:stop, options_for_swiftiply(2)) - - @cluster.stop - end - - private - def options_for_swiftiply(number) - { :address => '0.0.0.0', :port => 3000, :daemonize => true, :log => "thin.#{number}.log", :timeout => 10, :pid => "thin.#{number}.pid", :chdir => "/rails_app", :swiftiply => true } - end -end - -describe Cluster, "rolling restart" do - before do - @cluster = Cluster.new(:chdir => '/rails_app', - :address => '0.0.0.0', - :port => 3000, - :servers => 2, - :timeout => 10, - :log => 'thin.log', - :pid => 'thin.pid', - :onebyone => true, - :wait => 30 - ) - end - - it "should restart servers one by one" do - Command.should_receive(:run).with(:stop, options_for_port(3000)) - Command.should_receive(:run).with(:start, options_for_port(3000)) - @cluster.should_receive(:wait_until_server_started).with(3000) - - Command.should_receive(:run).with(:stop, options_for_port(3001)) - 
Command.should_receive(:run).with(:start, options_for_port(3001)) - @cluster.should_receive(:wait_until_server_started).with(3001) - - @cluster.restart - end - - private - def options_for_port(port) - { :daemonize => true, :log => "thin.#{port}.log", :timeout => 10, :address => "0.0.0.0", :port => port, :pid => "thin.#{port}.pid", :chdir => "/rails_app" } - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/controllers/controller_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/controllers/controller_spec.rb deleted file mode 100644 index d98a011d..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/controllers/controller_spec.rb +++ /dev/null @@ -1,129 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' -require 'ostruct' -include Controllers - -describe Controller, 'start' do - before do - @controller = Controller.new(:address => '0.0.0.0', - :port => 3000, - :pid => 'thin.pid', - :log => 'thin.log', - :timeout => 60, - :max_conns => 2000, - :max_persistent_conns => 1000, - :adapter => 'rails') - - @server = OpenStruct.new - @adapter = OpenStruct.new - - Server.should_receive(:new).with('0.0.0.0', 3000, @controller.options).and_return(@server) - @server.should_receive(:config) - Rack::Adapter::Rails.stub!(:new).and_return(@adapter) - end - - it "should configure server" do - @controller.start - - @server.app.should == @adapter - @server.pid_file.should == 'thin.pid' - @server.log_file.should == 'thin.log' - @server.maximum_connections.should == 2000 - @server.maximum_persistent_connections.should == 1000 - end - - it "should start as daemon" do - @controller.options[:daemonize] = true - @controller.options[:user] = true - @controller.options[:group] = true - - @server.should_receive(:daemonize) - @server.should_receive(:change_privilege) - - @controller.start - end - - it "should configure Rails adapter" do - Rack::Adapter::Rails.should_receive(:new).with(@controller.options.merge(:root => nil)) - - @controller.start - end - - it "should mount app under :prefix" do - @controller.options[:prefix] = '/app' - @controller.start - - @server.app.class.should == Rack::URLMap - end - - it "should mount Stats adapter under :stats" do - @controller.options[:stats] = '/stats' - @controller.start - - @server.app.class.should == Stats::Adapter - end - - it "should load app from Rack config" do - @controller.options[:rackup] = File.dirname(__FILE__) + '/../../example/config.ru' - @controller.start - - @server.app.class.should == Proc - end - - it "should load app from ruby file" do - @controller.options[:rackup] = File.dirname(__FILE__) + '/../../example/myapp.rb' - @controller.start - - @server.app.should == Myapp - end - - it "should throwup if rackup is not a .ru or .rb file" do - proc do - @controller.options[:rackup] = File.dirname(__FILE__) + '/../../example/myapp.foo' - @controller.start - end.should raise_error(RuntimeError, /please/) - end - - it "should set server as threaded" do - @controller.options[:threaded] = true - @controller.start - - @server.threaded.should be_true - end - - it "should set RACK_ENV" do - @controller.options[:rackup] = File.dirname(__FILE__) + '/../../example/config.ru' - @controller.options[:environment] = "lolcat" - @controller.start - - ENV['RACK_ENV'].should == "lolcat" - end - -end - -describe Controller do - before do - @controller = Controller.new(:pid => 'thin.pid', :timeout => 10) - @controller.stub!(:wait_for_file) - end - - it "should stop" do - Server.should_receive(:kill).with('thin.pid', 10) - @controller.stop - end - - it 
"should restart" do - Server.should_receive(:restart).with('thin.pid') - @controller.restart - end - - it "should write configuration file" do - silence_stream(STDOUT) do - Controller.new(:config => 'test.yml', :port => 5000, :address => '127.0.0.1').config - end - - File.read('test.yml').should include('port: 5000', 'address: 127.0.0.1') - File.read('test.yml').should_not include('config: ') - - File.delete('test.yml') - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/controllers/service_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/controllers/service_spec.rb deleted file mode 100644 index 6c6cd3ba..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/controllers/service_spec.rb +++ /dev/null @@ -1,50 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' -include Controllers - -describe Service do - before(:all) do - silence_stream(STDERR) do - Service::INITD_PATH = 'tmp/sandbox' + Service::INITD_PATH - Service::DEFAULT_CONFIG_PATH = 'tmp/sandbox' + Service::DEFAULT_CONFIG_PATH - end - end - - before do - Thin.stub!(:linux?).and_return(true) - FileUtils.mkdir_p 'tmp/sandbox' - - @service = Service.new(:all => 'spec/configs') - end - - it "should call command for each config file" do - Command.should_receive(:run).with(:start, :config => 'spec/configs/cluster.yml', :daemonize => true) - Command.should_receive(:run).with(:start, :config => 'spec/configs/single.yml', :daemonize => true) - - @service.start - end - - it "should create /etc/init.d/thin file when calling install" do - @service.install - - File.exist?(Service::INITD_PATH).should be_true - File.read(Service::INITD_PATH).should include('CONFIG_PATH=tmp/sandbox/etc/thin', - 'SCRIPT_NAME=tmp/sandbox/etc/init.d/thin', - 'DAEMON=' + Command.script) - end - - it "should create /etc/thin dir when calling install" do - @service.install - - File.directory?(Service::DEFAULT_CONFIG_PATH).should be_true - end - - it "should include specified path in /etc/init.d/thin script" do - @service.install('tmp/sandbox/usr/thin') - - File.read(Service::INITD_PATH).should include('CONFIG_PATH=tmp/sandbox/usr/thin') - end - - after do - FileUtils.rm_rf 'tmp/sandbox' - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/daemonizing_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/daemonizing_spec.rb deleted file mode 100644 index 365d8df1..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/daemonizing_spec.rb +++ /dev/null @@ -1,192 +0,0 @@ -require File.dirname(__FILE__) + '/spec_helper' - -class TestServer - include Logging # Daemonizable should include this? 
- include Daemonizable - - def stop - end - - def name - 'Thin test server' - end -end - -describe 'Daemonizing' do - before :all do - @logfile = File.dirname(__FILE__) + '/../log/daemonizing_test.log' - @pidfile = 'test.pid' - File.delete(@logfile) if File.exist?(@logfile) - File.delete(@pidfile) if File.exist?(@pidfile) - end - - before :each do - @server = TestServer.new - @server.log_file = @logfile - @server.pid_file = @pidfile - @pid = nil - end - - it 'should have a pid file' do - @server.should respond_to(:pid_file) - @server.should respond_to(:pid_file=) - end - - it 'should create a pid file' do - @pid = fork do - @server.daemonize - sleep 1 - end - - sleep 1 - Process.wait(@pid) - File.exist?(@server.pid_file).should be_true - @pid = @server.pid - - proc { sleep 0.1 while File.exist?(@server.pid_file) }.should take_less_then(5) - end - - it 'should redirect stdio to a log file' do - @pid = fork do - @server.log_file = 'daemon_test.log' - @server.daemonize - - puts "simple puts" - STDERR.puts "STDERR.puts" - STDOUT.puts "STDOUT.puts" - end - Process.wait(@pid) - # Wait for the file to close and magical stuff to happen - proc { sleep 0.1 until File.exist?('daemon_test.log') }.should take_less_then(3) - sleep 0.5 - - @pid = @server.pid - - log = File.read('daemon_test.log') - log.should include('simple puts', 'STDERR.puts', 'STDOUT.puts') - - File.delete 'daemon_test.log' - end - - it 'should change privilege' do - @pid = fork do - @server.daemonize - @server.change_privilege('root', 'admin') - end - Process.wait(@pid) - $?.should be_a_success - end - - it 'should kill process in pid file' do - @pid = fork do - @server.daemonize - loop { sleep 3 } - end - - server_should_start_in_less_then 3 - - @pid = @server.pid - - silence_stream STDOUT do - TestServer.kill(@server.pid_file, 1) - end - - File.exist?(@server.pid_file).should be_false - end - - it 'should force kill process in pid file' do - @pid = fork do - @server.daemonize - loop { sleep 3 } - end - - server_should_start_in_less_then 3 - - @pid = @server.pid - - silence_stream STDOUT do - TestServer.kill(@server.pid_file, 0) - end - - File.exist?(@server.pid_file).should be_false - end - - it 'should send kill signal if timeout' do - @pid = fork do - @server.should_receive(:stop) # pretend we cannot handle the INT signal - @server.daemonize - sleep 5 - end - - server_should_start_in_less_then 10 - - @pid = @server.pid - - silence_stream STDOUT do - TestServer.kill(@server.pid_file, 1) - end - - sleep 1 - - File.exist?(@server.pid_file).should be_false - Process.running?(@pid).should be_false - end - - it "should restart" do - @pid = fork do - @server.on_restart {} - @server.daemonize - sleep 5 - end - - server_should_start_in_less_then 10 - - @pid = @server.pid - - silence_stream STDOUT do - TestServer.restart(@server.pid_file) - end - - proc { sleep 0.1 while File.exist?(@server.pid_file) }.should take_less_then(10) - end - - it "should not restart when not running" do - silence_stream STDOUT do - TestServer.restart(@server.pid_file) - end - end - - it "should exit and raise if pid file already exist" do - @pid = fork do - @server.daemonize - sleep 5 - end - server_should_start_in_less_then 10 - - @pid = @server.pid - - proc { @server.daemonize }.should raise_error(PidFileExist) - - File.exist?(@server.pid_file).should be_true - end - - it "should should delete pid file if stale" do - # Create a file w/ a PID that does not exist - File.open(@server.pid_file, 'w') { |f| f << 999999999 } - - @server.send(:remove_stale_pid_file) 
- - File.exist?(@server.pid_file).should be_false - end - - after do - Process.kill(9, @pid.to_i) if @pid && Process.running?(@pid.to_i) - Process.kill(9, @server.pid) if @server.pid && Process.running?(@server.pid) - File.delete(@server.pid_file) rescue nil - end - - private - def server_should_start_in_less_then(sec=10) - proc { sleep 0.1 until File.exist?(@server.pid_file) }.should take_less_then(10) - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/headers_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/headers_spec.rb deleted file mode 100644 index 98ee4709..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/headers_spec.rb +++ /dev/null @@ -1,40 +0,0 @@ -require File.dirname(__FILE__) + '/spec_helper' - -describe Headers do - before do - @headers = Headers.new - end - - it 'should allow duplicate on some fields' do - @headers['Set-Cookie'] = 'twice' - @headers['Set-Cookie'] = 'is cooler the once' - - @headers.to_s.should == "Set-Cookie: twice\r\nSet-Cookie: is cooler the once\r\n" - end - - it 'should overwrite value on non duplicate fields' do - @headers['Host'] = 'this is unique' - @headers['Host'] = 'so is this' - - @headers.to_s.should == "Host: this is unique\r\n" - end - - it 'should output to string' do - @headers['Host'] = 'localhost:3000' - @headers['Set-Cookie'] = 'twice' - @headers['Set-Cookie'] = 'is cooler the once' - - @headers.to_s.should == "Host: localhost:3000\r\nSet-Cookie: twice\r\nSet-Cookie: is cooler the once\r\n" - end - - it 'should ignore nil values' do - @headers['Something'] = nil - @headers.to_s.should_not include('Something: ') - end - - it 'should format Time values correctly' do - time = Time.now - @headers['Modified-At'] = time - @headers.to_s.should include("Modified-At: #{time.httpdate}") - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/logging_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/logging_spec.rb deleted file mode 100644 index eeb160e6..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/logging_spec.rb +++ /dev/null @@ -1,46 +0,0 @@ -require File.dirname(__FILE__) + '/spec_helper' - -class TestLogging - include Logging -end - -describe Logging do - before do - Logging.silent = false - @object = TestLogging.new - end - - it "should output debug when set to true" do - Logging.debug = true - @object.should_receive(:puts) - @object.debug 'hi' - end - - it "should output trace when set to true" do - Logging.trace = true - @object.should_receive(:puts) - @object.trace 'hi' - end - - it "should not output when silenced" do - Logging.silent = true - @object.should_not_receive(:puts) - @object.log 'hi' - end - - it "should not output when silenced as instance method" do - @object.silent = true - - @object.should_not_receive(:puts) - @object.log 'hi' - end - - it "should be usable as module functions" do - Logging.silent = true - Logging.log "hi" - end - - after do - Logging.silent = true - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/perf/request_perf_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/perf/request_perf_spec.rb deleted file mode 100644 index bd1e6a4c..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/perf/request_perf_spec.rb +++ /dev/null @@ -1,50 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Request, 'performance' do - it "should be faster then #{max_parsing_time = 0.0002} RubySeconds" do - body = <<-EOS.chomp.gsub("\n", "\r\n") -POST /postit HTTP/1.1 -Host: localhost:3000 -User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac 
OS X; en-US; rv:1.8.1.9) Gecko/20071025 Firefox/2.0.0.9 -Accept: text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5 -Accept-Language: en-us,en;q=0.5 -Accept-Encoding: gzip,deflate -Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7 -Keep-Alive: 300 -Connection: keep-alive -Content-Type: text/html -Content-Length: 37 - -hi=there&name=marc&email=macournoyer@gmail.com -EOS - - proc { R(body) }.should be_faster_then(max_parsing_time) - end - - it 'should be comparable to Mongrel parser' do - require 'http11' - - body = <<-EOS.chomp.gsub("\n", "\r\n") -POST /postit HTTP/1.1 -Host: localhost:3000 -User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US; rv:1.8.1.9) Gecko/20071025 Firefox/2.0.0.9 -Accept: text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5 -Accept-Language: en-us,en;q=0.5 -Accept-Encoding: gzip,deflate -Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7 -Keep-Alive: 300 -Connection: keep-alive -Content-Type: text/html -Content-Length: 37 - -hi=there&name=marc&email=macournoyer@gmail.com -EOS - - tests = 10_000 - puts - Benchmark.bmbm(10) do |results| - results.report("mongrel:") { tests.times { Mongrel::HttpParser.new.execute({}, body.dup, 0) } } - results.report("thin:") { tests.times { Thin::HttpParser.new.execute({'rack.input' => StringIO.new}, body.dup, 0) } } - end - end if ENV['BM'] -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/perf/response_perf_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/perf/response_perf_spec.rb deleted file mode 100644 index 1456cae6..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/perf/response_perf_spec.rb +++ /dev/null @@ -1,19 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Response, 'performance' do - before do - @response = Response.new - @response.body = '' - end - - it "should be fast" do - @response.body << <<-EOS -Dir listing -

[Heredoc body from the deleted spec/perf/response_perf_spec.rb; its HTML markup was lost in extraction. It filled the response with a small "Dir listing" page (a "Listing stuff" heading and a "Hi!" list item repeated 100 times) to benchmark Response#each against.]
-EOS - - proc { @response.each { |l| l } }.should be_faster_then(0.00011) - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/perf/server_perf_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/perf/server_perf_spec.rb deleted file mode 100644 index 528ebc4f..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/perf/server_perf_spec.rb +++ /dev/null @@ -1,39 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Server, 'performance' do - before do - start_server do |env| - body = env.inspect + env['rack.input'].read - [200, { 'Content-Length' => body.size.to_s }, body] - end - end - - it "should handle GET in less then #{get_request_time = 0.0045} RubySecond" do - proc { get('/') }.should be_faster_then(get_request_time) - end - - it "should handle POST in less then #{post_request_time = 0.007} RubySecond" do - proc { post('/', :file => 'X' * 1000) }.should be_faster_then(post_request_time) - end - - after do - stop_server - end -end - -describe Server, 'UNIX socket performance' do - before do - start_server('/tmp/thin_test.sock') do |env| - body = env.inspect + env['rack.input'].read - [200, { 'Content-Length' => body.size.to_s }, body] - end - end - - it "should handle GET in less then #{get_request_time = 0.002} RubySecond" do - proc { get('/') }.should be_faster_then(get_request_time) - end - - after do - stop_server - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/rack/loader_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/rack/loader_spec.rb deleted file mode 100644 index 0cfb19da..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rack/loader_spec.rb +++ /dev/null @@ -1,29 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Rack::Adapter do - before do - @rails_path = File.dirname(__FILE__) + '/../rails_app' - end - - it "should guess rails app from dir" do - Rack::Adapter.guess(@rails_path).should == :rails - end - - it "should return nil when can't guess from dir" do - proc { Rack::Adapter.guess('.') }.should raise_error(Rack::AdapterNotFound) - end - - it "should load Rails adapter" do - Rack::Adapter::Rails.should_receive(:new) - Rack::Adapter.for(:rails, :chdir => @rails_path) - end - - it "should load File adapter" do - Rack::File.should_receive(:new) - Rack::Adapter.for(:file) - end - - it "should raise error when adapter can't be found" do - proc { Rack::Adapter.for(:fart, {}) }.should raise_error(Rack::AdapterNotFound) - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/rack/rails_adapter_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/rack/rails_adapter_spec.rb deleted file mode 100644 index 279c26d4..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rack/rails_adapter_spec.rb +++ /dev/null @@ -1,106 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' -require 'rack/mock' - -begin - gem 'rails', '= 2.0.2' # We could freeze Rails in the rails_app dir to remove this - - describe Rack::Adapter::Rails do - before do - @rails_app_path = File.dirname(__FILE__) + '/../rails_app' - @request = Rack::MockRequest.new(Rack::Adapter::Rails.new(:root => @rails_app_path)) - end - - it "should handle simple GET request" do - res = @request.get("/simple", :lint => true) - - res.should be_ok - res["Content-Type"].should include("text/html") - - res.body.should include('Simple#index') - end - - it "should handle POST parameters" do - data = "foo=bar" - res = @request.post("/simple/post_form", :input => data, 'CONTENT_LENGTH' => data.size.to_s, :lint => true) - - res.should 
be_ok - res["Content-Type"].should include("text/html") - res["Content-Length"].should_not be_nil - - res.body.should include('foo: bar') - end - - it "should serve static files" do - res = @request.get("/index.html", :lint => true) - - res.should be_ok - res["Content-Type"].should include("text/html") - end - - it "should serve root with index.html if present" do - res = @request.get("/", :lint => true) - - res.should be_ok - res["Content-Length"].to_i.should == File.size(@rails_app_path + '/public/index.html') - end - - it "should serve page cache if present" do - res = @request.get("/simple/cached?value=cached", :lint => true) - - res.should be_ok - res.body.should == 'cached' - - res = @request.get("/simple/cached?value=notcached") - - res.should be_ok - res.body.should == 'cached' - end - - it "should not serve page cache on POST request" do - res = @request.get("/simple/cached?value=cached", :lint => true) - - res.should be_ok - res.body.should == 'cached' - - res = @request.post("/simple/cached?value=notcached") - - res.should be_ok - res.body.should == 'notcached' - end - - it "handles multiple cookies" do - res = @request.get('/simple/set_cookie?name=a&value=1', :lint => true) - - res.should be_ok - res.original_headers['Set-Cookie'].size.should == 2 - res.original_headers['Set-Cookie'].first.should include('a=1; path=/') - res.original_headers['Set-Cookie'].last.should include('_rails_app_session') - end - - after do - FileUtils.rm_rf @rails_app_path + '/public/simple' - end - end - - describe Rack::Adapter::Rails, 'with prefix' do - before do - @rails_app_path = File.dirname(__FILE__) + '/../rails_app' - @prefix = '/nowhere' - @request = Rack::MockRequest.new( - Rack::URLMap.new( - @prefix => Rack::Adapter::Rails.new(:root => @rails_app_path, :prefix => @prefix))) - end - - it "should handle simple GET request" do - res = @request.get("#{@prefix}/simple", :lint => true) - - res.should be_ok - res["Content-Type"].should include("text/html") - - res.body.should include('Simple#index') - end - end - -rescue Gem::LoadError - warn 'Rails 2.0.2 is required to run the Rails adapter specs' -end diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/controllers/application.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/controllers/application.rb deleted file mode 100644 index cfdb724c..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/controllers/application.rb +++ /dev/null @@ -1,10 +0,0 @@ -# Filters added to this controller apply to all controllers in the application. -# Likewise, all the methods added will be available for all controllers. 
- -class ApplicationController < ActionController::Base - helper :all # include all helpers, all the time - - # See ActionController::RequestForgeryProtection for details - # Uncomment the :secret if you're not using the cookie session store - # protect_from_forgery # :secret => 'a8af303b8dabf2d2d8f1a7912ac04d7d' -end diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/controllers/simple_controller.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/controllers/simple_controller.rb deleted file mode 100644 index 2c17427e..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/controllers/simple_controller.rb +++ /dev/null @@ -1,19 +0,0 @@ -class SimpleController < ApplicationController - caches_page :cached - - def index - end - - def post_form - render :text => params.to_yaml - end - - def set_cookie - cookies[params[:name]] = params[:value] if params[:name] - render :text => cookies.to_yaml - end - - def cached - render :text => params[:value] - end -end diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/helpers/application_helper.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/helpers/application_helper.rb deleted file mode 100644 index 22a7940e..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/helpers/application_helper.rb +++ /dev/null @@ -1,3 +0,0 @@ -# Methods added to this helper will be available to all templates in the application. -module ApplicationHelper -end diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/views/simple/index.html.erb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/views/simple/index.html.erb deleted file mode 100644 index 7717b8ec..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/app/views/simple/index.html.erb +++ /dev/null @@ -1,15 +0,0 @@ -

[Body of the deleted spec/rails_app/app/views/simple/index.html.erb; its markup was lost in extraction. The view rendered a "Simple#index" heading followed by "ENV" (<%= request.env.to_yaml %>), "Cookies" (<%= request.cookies.to_yaml %>) and "Params" sections, the last of which continues below:]
-<%= params.to_yaml %> - -<% form_tag '/simple' do %> - <%= text_field_tag :a %> - <%= submit_tag 'Submit' %> -<% end %> \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/boot.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/boot.rb deleted file mode 100644 index 5697cc1b..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/boot.rb +++ /dev/null @@ -1,109 +0,0 @@ -# Don't change this file! -# Configure your app in config/environment.rb and config/environments/*.rb - -RAILS_ROOT = "#{File.dirname(__FILE__)}/.." unless defined?(RAILS_ROOT) - -module Rails - class << self - def boot! - unless booted? - preinitialize - pick_boot.run - end - end - - def booted? - defined? Rails::Initializer - end - - def pick_boot - (vendor_rails? ? VendorBoot : GemBoot).new - end - - def vendor_rails? - File.exist?("#{RAILS_ROOT}/vendor/rails") - end - - # FIXME : Ruby 1.9 - def preinitialize - load(preinitializer_path) if File.exists?(preinitializer_path) - end - - def preinitializer_path - "#{RAILS_ROOT}/config/preinitializer.rb" - end - end - - class Boot - def run - load_initializer - Rails::Initializer.run(:set_load_path) - end - end - - class VendorBoot < Boot - def load_initializer - require "#{RAILS_ROOT}/vendor/rails/railties/lib/initializer" - end - end - - class GemBoot < Boot - def load_initializer - self.class.load_rubygems - load_rails_gem - require 'initializer' - end - - def load_rails_gem - if version = self.class.gem_version - gem 'rails', version - else - gem 'rails' - end - rescue Gem::LoadError => load_error - $stderr.puts %(Missing the Rails #{version} gem. Please `gem install -v=#{version} rails`, update your RAILS_GEM_VERSION setting in config/environment.rb for the Rails version you do have installed, or comment out RAILS_GEM_VERSION to use the latest version installed.) - exit 1 - end - - class << self - def rubygems_version - Gem::RubyGemsVersion if defined? Gem::RubyGemsVersion - end - - def gem_version - if defined? RAILS_GEM_VERSION - RAILS_GEM_VERSION - elsif ENV.include?('RAILS_GEM_VERSION') - ENV['RAILS_GEM_VERSION'] - else - parse_gem_version(read_environment_rb) - end - end - - def load_rubygems - require 'rubygems' - - unless rubygems_version >= '0.9.4' - $stderr.puts %(Rails requires RubyGems >= 0.9.4 (you have #{rubygems_version}). Please `gem update --system` and try again.) - exit 1 - end - - rescue LoadError - $stderr.puts %(Rails requires RubyGems >= 0.9.4. Please install RubyGems and try again: http://rubygems.rubyforge.org) - exit 1 - end - - def parse_gem_version(text) - $1 if text =~ /^[^#]*RAILS_GEM_VERSION\s*=\s*["']([!~<>=]*\s*[\d.]+)["']/ - end - - private - def read_environment_rb - File.read("#{RAILS_ROOT}/config/environment.rb") - end - end - end -end - -# All that for this: -Rails.boot! diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environment.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environment.rb deleted file mode 100644 index 2af27f14..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environment.rb +++ /dev/null @@ -1,64 +0,0 @@ -# Be sure to restart your server when you modify this file - -# Uncomment below to force Rails into production mode when -# you don't control web/app server and can't set it the proper way -# ENV['RAILS_ENV'] ||= 'production' - -# Specifies gem version of Rails to use when vendor/rails is not present -RAILS_GEM_VERSION = '2.0.2' unless defined? 
RAILS_GEM_VERSION - -# Bootstrap the Rails environment, frameworks, and default configuration -require File.join(File.dirname(__FILE__), 'boot') - -Rails::Initializer.run do |config| - # Settings in config/environments/* take precedence over those specified here. - # Application configuration should go into files in config/initializers - # -- all .rb files in that directory are automatically loaded. - # See Rails::Configuration for more options. - - # Skip frameworks you're not going to use (only works if using vendor/rails). - # To use Rails without a database, you must remove the Active Record framework - config.frameworks -= [ :active_record, :active_resource, :action_mailer ] - - # Only load the plugins named here, in the order given. By default, all plugins - # in vendor/plugins are loaded in alphabetical order. - # :all can be used as a placeholder for all plugins not explicitly named - # config.plugins = [ :exception_notification, :ssl_requirement, :all ] - - # Add additional load paths for your own custom dirs - # config.load_paths += %W( #{RAILS_ROOT}/extras ) - - # No need for log files - config.logger = Logger.new(nil) - - # Force all environments to use the same logger level - # (by default production uses :info, the others :debug) - # config.log_level = :debug - - # Your secret key for verifying cookie session data integrity. - # If you change this key, all old sessions will become invalid! - # Make sure the secret is at least 30 characters and all random, - # no regular words or you'll be exposed to dictionary attacks. - config.action_controller.session = { - :session_key => '_rails_app_session', - :secret => 'cb7141365b4443eff37e7122473e704ceae95146a4028930b21300965fe6abec51e3e93b2670a914b3b65d06058b81aadfe6b240d63e7d7713db044b42a6e1c1' - } - - config.action_controller.allow_forgery_protection = false - - # Use the database for sessions instead of the cookie-based default, - # which shouldn't be used to store highly confidential information - # (create the session table with 'rake db:sessions:create') - # config.action_controller.session_store = :active_record_store - - # Use SQL instead of Active Record's schema dumper when creating the test database. - # This is necessary if your schema can't be completely dumped by the schema dumper, - # like if you have constraints or database-specific column types - # config.active_record.schema_format = :sql - - # Activate observers that should always be running - # config.active_record.observers = :cacher, :garbage_collector - - # Make Active Record use UTC-base instead of local time - # config.active_record.default_timezone = :utc -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environments/development.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environments/development.rb deleted file mode 100644 index 191f39cb..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environments/development.rb +++ /dev/null @@ -1,18 +0,0 @@ -# Settings specified here will take precedence over those in config/environment.rb - -# In the development environment your application's code is reloaded on -# every request. This slows down response time but is perfect for development -# since you don't have to restart the webserver when you make code changes. -config.cache_classes = false - -# Log error messages when you accidentally call methods on nil. 
-config.whiny_nils = true - -# Show full error reports and disable caching -config.action_controller.consider_all_requests_local = true -config.action_view.debug_rjs = true -config.action_controller.perform_caching = true -config.action_view.cache_template_extensions = false - -# Don't care if the mailer can't send -config.action_mailer.raise_delivery_errors = false \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environments/production.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environments/production.rb deleted file mode 100644 index 91f541c4..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environments/production.rb +++ /dev/null @@ -1,19 +0,0 @@ -# Settings specified here will take precedence over those in config/environment.rb - -# The production environment is meant for finished, "live" apps. -# Code is not reloaded between requests -config.cache_classes = true - -# Use a different logger for distributed setups -# config.logger = SyslogLogger.new - -# Full error reports are disabled and caching is turned on -config.action_controller.consider_all_requests_local = false -config.action_controller.perform_caching = true -config.action_view.cache_template_loading = true - -# Enable serving of images, stylesheets, and javascripts from an asset server -# config.action_controller.asset_host = "http://assets.example.com" - -# Disable delivery errors, bad email addresses will be ignored -# config.action_mailer.raise_delivery_errors = false diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environments/test.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environments/test.rb deleted file mode 100644 index 58850a79..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/environments/test.rb +++ /dev/null @@ -1,22 +0,0 @@ -# Settings specified here will take precedence over those in config/environment.rb - -# The test environment is used exclusively to run your application's -# test suite. You never need to work with it otherwise. Remember that -# your test database is "scratch space" for the test suite and is wiped -# and recreated between test runs. Don't rely on the data there! -config.cache_classes = true - -# Log error messages when you accidentally call methods on nil. -config.whiny_nils = true - -# Show full error reports and disable caching -config.action_controller.consider_all_requests_local = true -config.action_controller.perform_caching = false - -# Disable request forgery protection in test environment -config.action_controller.allow_forgery_protection = false - -# Tell ActionMailer not to deliver emails to the real world. -# The :test delivery method accumulates sent emails in the -# ActionMailer::Base.deliveries array. -config.action_mailer.delivery_method = :test diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/initializers/inflections.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/initializers/inflections.rb deleted file mode 100644 index 09158b86..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/initializers/inflections.rb +++ /dev/null @@ -1,10 +0,0 @@ -# Be sure to restart your server when you modify this file. 
- -# Add new inflection rules using the following format -# (all these examples are active by default): -# Inflector.inflections do |inflect| -# inflect.plural /^(ox)$/i, '\1en' -# inflect.singular /^(ox)en/i, '\1' -# inflect.irregular 'person', 'people' -# inflect.uncountable %w( fish sheep ) -# end diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/initializers/mime_types.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/initializers/mime_types.rb deleted file mode 100644 index 72aca7e4..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/initializers/mime_types.rb +++ /dev/null @@ -1,5 +0,0 @@ -# Be sure to restart your server when you modify this file. - -# Add new mime types for use in respond_to blocks: -# Mime::Type.register "text/richtext", :rtf -# Mime::Type.register_alias "text/html", :iphone diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/routes.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/routes.rb deleted file mode 100644 index d94afa1b..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/config/routes.rb +++ /dev/null @@ -1,35 +0,0 @@ -ActionController::Routing::Routes.draw do |map| - # The priority is based upon order of creation: first created -> highest priority. - - # Sample of regular route: - # map.connect 'products/:id', :controller => 'catalog', :action => 'view' - # Keep in mind you can assign values other than :controller and :action - - # Sample of named route: - # map.purchase 'products/:id/purchase', :controller => 'catalog', :action => 'purchase' - # This route can be invoked with purchase_url(:id => product.id) - - # Sample resource route (maps HTTP verbs to controller actions automatically): - # map.resources :products - - # Sample resource route with options: - # map.resources :products, :member => { :short => :get, :toggle => :post }, :collection => { :sold => :get } - - # Sample resource route with sub-resources: - # map.resources :products, :has_many => [ :comments, :sales ], :has_one => :seller - - # Sample resource route within a namespace: - # map.namespace :admin do |admin| - # # Directs /admin/products/* to Admin::ProductsController (app/controllers/admin/products_controller.rb) - # admin.resources :products - # end - - # You can have the root of your site routed with map.root -- just remember to delete public/index.html. - # map.root :controller => "welcome" - - # See how all your routes lay out with "rake routes" - - # Install the default routes as the lowest priority. - map.connect ':controller/:action/:id' - map.connect ':controller/:action/:id.:format' -end diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/404.html b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/404.html deleted file mode 100644 index eff660b9..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/404.html +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - The page you were looking for doesn't exist (404) - - - - - -
-  <div class="dialog">
-    <h1>The page you were looking for doesn't exist.</h1>
-    <p>You may have mistyped the address or the page may have moved.</p>
-  </div>
- - \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/422.html b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/422.html deleted file mode 100644 index b54e4a3c..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/422.html +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - The change you wanted was rejected (422) - - - - - -
-  <div class="dialog">
-    <h1>The change you wanted was rejected.</h1>
-    <p>Maybe you tried to change something you didn't have access to.</p>
-  </div>
- - \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/500.html b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/500.html deleted file mode 100644 index 0e9c14f4..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/500.html +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - We're sorry, but something went wrong (500) - - - - - -
-  <div class="dialog">
-    <h1>We're sorry, but something went wrong.</h1>
-    <p>We've been notified about this issue and we'll take a look at it shortly.</p>
-  </div>
- - \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/dispatch.cgi b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/dispatch.cgi deleted file mode 100755 index 9b5ae760..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/dispatch.cgi +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/local/bin/ruby - -require File.dirname(__FILE__) + "/../config/environment" unless defined?(RAILS_ROOT) - -# If you're using RubyGems and mod_ruby, this require should be changed to an absolute path one, like: -# "/usr/local/lib/ruby/gems/1.8/gems/rails-0.8.0/lib/dispatcher" -- otherwise performance is severely impaired -require "dispatcher" - -ADDITIONAL_LOAD_PATHS.reverse.each { |dir| $:.unshift(dir) if File.directory?(dir) } if defined?(Apache::RubyRun) -Dispatcher.dispatch \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/dispatch.fcgi b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/dispatch.fcgi deleted file mode 100755 index 65188f38..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/dispatch.fcgi +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/local/bin/ruby -# -# You may specify the path to the FastCGI crash log (a log of unhandled -# exceptions which forced the FastCGI instance to exit, great for debugging) -# and the number of requests to process before running garbage collection. -# -# By default, the FastCGI crash log is RAILS_ROOT/log/fastcgi.crash.log -# and the GC period is nil (turned off). A reasonable number of requests -# could range from 10-100 depending on the memory footprint of your app. -# -# Example: -# # Default log path, normal GC behavior. -# RailsFCGIHandler.process! -# -# # Default log path, 50 requests between GC. -# RailsFCGIHandler.process! nil, 50 -# -# # Custom log path, normal GC behavior. -# RailsFCGIHandler.process! '/var/log/myapp_fcgi_crash.log' -# -require File.dirname(__FILE__) + "/../config/environment" -require 'fcgi_handler' - -RailsFCGIHandler.process! diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/dispatch.rb b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/dispatch.rb deleted file mode 100755 index 9b5ae760..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/dispatch.rb +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/local/bin/ruby - -require File.dirname(__FILE__) + "/../config/environment" unless defined?(RAILS_ROOT) - -# If you're using RubyGems and mod_ruby, this require should be changed to an absolute path one, like: -# "/usr/local/lib/ruby/gems/1.8/gems/rails-0.8.0/lib/dispatcher" -- otherwise performance is severely impaired -require "dispatcher" - -ADDITIONAL_LOAD_PATHS.reverse.each { |dir| $:.unshift(dir) if File.directory?(dir) } if defined?(Apache::RubyRun) -Dispatcher.dispatch \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/images/rails.png b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/images/rails.png deleted file mode 100644 index b8441f18..00000000 Binary files a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/images/rails.png and /dev/null differ diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/index.html b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/index.html deleted file mode 100644 index 84b7b57c..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/index.html +++ /dev/null @@ -1,277 +0,0 @@ - - - - - Ruby on Rails: Welcome aboard - - - - - - -
-  <div id="getting-started">
-    <h1>Getting started</h1>
-    <h2>Here’s how to get rolling:</h2>
-    <ol>
-      <li>
-        <h2>Create your databases and edit config/database.yml</h2>
-        <p>Rails needs to know your login and password.</p>
-      </li>
-      <li>
-        <h2>Use script/generate to create your models and controllers</h2>
-        <p>To see all available options, run it without parameters.</p>
-      </li>
-      <li>
-        <h2>Set up a default route and remove or rename this file</h2>
-        <p>Routes are set up in config/routes.rb.</p>
-      </li>
-    </ol>
-  </div>
- - \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/application.js b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/application.js deleted file mode 100644 index fe457769..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/application.js +++ /dev/null @@ -1,2 +0,0 @@ -// Place your application-specific JavaScript functions and classes here -// This file is automatically included by javascript_include_tag :defaults diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/controls.js b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/controls.js deleted file mode 100644 index fbc4418b..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/controls.js +++ /dev/null @@ -1,963 +0,0 @@ -// Copyright (c) 2005-2007 Thomas Fuchs (http://script.aculo.us, http://mir.aculo.us) -// (c) 2005-2007 Ivan Krstic (http://blogs.law.harvard.edu/ivan) -// (c) 2005-2007 Jon Tirsen (http://www.tirsen.com) -// Contributors: -// Richard Livsey -// Rahul Bhargava -// Rob Wills -// -// script.aculo.us is freely distributable under the terms of an MIT-style license. -// For details, see the script.aculo.us web site: http://script.aculo.us/ - -// Autocompleter.Base handles all the autocompletion functionality -// that's independent of the data source for autocompletion. This -// includes drawing the autocompletion menu, observing keyboard -// and mouse events, and similar. -// -// Specific autocompleters need to provide, at the very least, -// a getUpdatedChoices function that will be invoked every time -// the text inside the monitored textbox changes. This method -// should get the text for which to provide autocompletion by -// invoking this.getToken(), NOT by directly accessing -// this.element.value. This is to allow incremental tokenized -// autocompletion. Specific auto-completion logic (AJAX, etc) -// belongs in getUpdatedChoices. -// -// Tokenized incremental autocompletion is enabled automatically -// when an autocompleter is instantiated with the 'tokens' option -// in the options parameter, e.g.: -// new Ajax.Autocompleter('id','upd', '/url/', { tokens: ',' }); -// will incrementally autocomplete with a comma as the token. -// Additionally, ',' in the above example can be replaced with -// a token array, e.g. { tokens: [',', '\n'] } which -// enables autocompletion on multiple tokens. This is most -// useful when one of the tokens is \n (a newline), as it -// allows smart autocompletion after linebreaks. 
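A minimal usage sketch of the remote autocompleter described in the comment above, assuming a hypothetical text input with id 'search', an update div with id 'search_choices', and an '/autocomplete' endpoint:

// Hypothetical ids and URL, for illustration only.
new Ajax.Autocompleter('search', 'search_choices', '/autocomplete', {
  paramName: 'q',        // the current token is sent as the "q" parameter
  minChars: 2,           // wait for at least two characters before querying
  frequency: 0.4,        // seconds of idle time before a request fires
  tokens: [',', '\n']    // tokenized completion after each comma or newline
});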
- -if(typeof Effect == 'undefined') - throw("controls.js requires including script.aculo.us' effects.js library"); - -var Autocompleter = { } -Autocompleter.Base = Class.create({ - baseInitialize: function(element, update, options) { - element = $(element) - this.element = element; - this.update = $(update); - this.hasFocus = false; - this.changed = false; - this.active = false; - this.index = 0; - this.entryCount = 0; - this.oldElementValue = this.element.value; - - if(this.setOptions) - this.setOptions(options); - else - this.options = options || { }; - - this.options.paramName = this.options.paramName || this.element.name; - this.options.tokens = this.options.tokens || []; - this.options.frequency = this.options.frequency || 0.4; - this.options.minChars = this.options.minChars || 1; - this.options.onShow = this.options.onShow || - function(element, update){ - if(!update.style.position || update.style.position=='absolute') { - update.style.position = 'absolute'; - Position.clone(element, update, { - setHeight: false, - offsetTop: element.offsetHeight - }); - } - Effect.Appear(update,{duration:0.15}); - }; - this.options.onHide = this.options.onHide || - function(element, update){ new Effect.Fade(update,{duration:0.15}) }; - - if(typeof(this.options.tokens) == 'string') - this.options.tokens = new Array(this.options.tokens); - // Force carriage returns as token delimiters anyway - if (!this.options.tokens.include('\n')) - this.options.tokens.push('\n'); - - this.observer = null; - - this.element.setAttribute('autocomplete','off'); - - Element.hide(this.update); - - Event.observe(this.element, 'blur', this.onBlur.bindAsEventListener(this)); - Event.observe(this.element, 'keydown', this.onKeyPress.bindAsEventListener(this)); - }, - - show: function() { - if(Element.getStyle(this.update, 'display')=='none') this.options.onShow(this.element, this.update); - if(!this.iefix && - (Prototype.Browser.IE) && - (Element.getStyle(this.update, 'position')=='absolute')) { - new Insertion.After(this.update, - ''); - this.iefix = $(this.update.id+'_iefix'); - } - if(this.iefix) setTimeout(this.fixIEOverlapping.bind(this), 50); - }, - - fixIEOverlapping: function() { - Position.clone(this.update, this.iefix, {setTop:(!this.update.style.height)}); - this.iefix.style.zIndex = 1; - this.update.style.zIndex = 2; - Element.show(this.iefix); - }, - - hide: function() { - this.stopIndicator(); - if(Element.getStyle(this.update, 'display')!='none') this.options.onHide(this.element, this.update); - if(this.iefix) Element.hide(this.iefix); - }, - - startIndicator: function() { - if(this.options.indicator) Element.show(this.options.indicator); - }, - - stopIndicator: function() { - if(this.options.indicator) Element.hide(this.options.indicator); - }, - - onKeyPress: function(event) { - if(this.active) - switch(event.keyCode) { - case Event.KEY_TAB: - case Event.KEY_RETURN: - this.selectEntry(); - Event.stop(event); - case Event.KEY_ESC: - this.hide(); - this.active = false; - Event.stop(event); - return; - case Event.KEY_LEFT: - case Event.KEY_RIGHT: - return; - case Event.KEY_UP: - this.markPrevious(); - this.render(); - Event.stop(event); - return; - case Event.KEY_DOWN: - this.markNext(); - this.render(); - Event.stop(event); - return; - } - else - if(event.keyCode==Event.KEY_TAB || event.keyCode==Event.KEY_RETURN || - (Prototype.Browser.WebKit > 0 && event.keyCode == 0)) return; - - this.changed = true; - this.hasFocus = true; - - if(this.observer) clearTimeout(this.observer); - this.observer = - 
setTimeout(this.onObserverEvent.bind(this), this.options.frequency*1000); - }, - - activate: function() { - this.changed = false; - this.hasFocus = true; - this.getUpdatedChoices(); - }, - - onHover: function(event) { - var element = Event.findElement(event, 'LI'); - if(this.index != element.autocompleteIndex) - { - this.index = element.autocompleteIndex; - this.render(); - } - Event.stop(event); - }, - - onClick: function(event) { - var element = Event.findElement(event, 'LI'); - this.index = element.autocompleteIndex; - this.selectEntry(); - this.hide(); - }, - - onBlur: function(event) { - // needed to make click events working - setTimeout(this.hide.bind(this), 250); - this.hasFocus = false; - this.active = false; - }, - - render: function() { - if(this.entryCount > 0) { - for (var i = 0; i < this.entryCount; i++) - this.index==i ? - Element.addClassName(this.getEntry(i),"selected") : - Element.removeClassName(this.getEntry(i),"selected"); - if(this.hasFocus) { - this.show(); - this.active = true; - } - } else { - this.active = false; - this.hide(); - } - }, - - markPrevious: function() { - if(this.index > 0) this.index-- - else this.index = this.entryCount-1; - this.getEntry(this.index).scrollIntoView(true); - }, - - markNext: function() { - if(this.index < this.entryCount-1) this.index++ - else this.index = 0; - this.getEntry(this.index).scrollIntoView(false); - }, - - getEntry: function(index) { - return this.update.firstChild.childNodes[index]; - }, - - getCurrentEntry: function() { - return this.getEntry(this.index); - }, - - selectEntry: function() { - this.active = false; - this.updateElement(this.getCurrentEntry()); - }, - - updateElement: function(selectedElement) { - if (this.options.updateElement) { - this.options.updateElement(selectedElement); - return; - } - var value = ''; - if (this.options.select) { - var nodes = $(selectedElement).select('.' 
+ this.options.select) || []; - if(nodes.length>0) value = Element.collectTextNodes(nodes[0], this.options.select); - } else - value = Element.collectTextNodesIgnoreClass(selectedElement, 'informal'); - - var bounds = this.getTokenBounds(); - if (bounds[0] != -1) { - var newValue = this.element.value.substr(0, bounds[0]); - var whitespace = this.element.value.substr(bounds[0]).match(/^\s+/); - if (whitespace) - newValue += whitespace[0]; - this.element.value = newValue + value + this.element.value.substr(bounds[1]); - } else { - this.element.value = value; - } - this.oldElementValue = this.element.value; - this.element.focus(); - - if (this.options.afterUpdateElement) - this.options.afterUpdateElement(this.element, selectedElement); - }, - - updateChoices: function(choices) { - if(!this.changed && this.hasFocus) { - this.update.innerHTML = choices; - Element.cleanWhitespace(this.update); - Element.cleanWhitespace(this.update.down()); - - if(this.update.firstChild && this.update.down().childNodes) { - this.entryCount = - this.update.down().childNodes.length; - for (var i = 0; i < this.entryCount; i++) { - var entry = this.getEntry(i); - entry.autocompleteIndex = i; - this.addObservers(entry); - } - } else { - this.entryCount = 0; - } - - this.stopIndicator(); - this.index = 0; - - if(this.entryCount==1 && this.options.autoSelect) { - this.selectEntry(); - this.hide(); - } else { - this.render(); - } - } - }, - - addObservers: function(element) { - Event.observe(element, "mouseover", this.onHover.bindAsEventListener(this)); - Event.observe(element, "click", this.onClick.bindAsEventListener(this)); - }, - - onObserverEvent: function() { - this.changed = false; - this.tokenBounds = null; - if(this.getToken().length>=this.options.minChars) { - this.getUpdatedChoices(); - } else { - this.active = false; - this.hide(); - } - this.oldElementValue = this.element.value; - }, - - getToken: function() { - var bounds = this.getTokenBounds(); - return this.element.value.substring(bounds[0], bounds[1]).strip(); - }, - - getTokenBounds: function() { - if (null != this.tokenBounds) return this.tokenBounds; - var value = this.element.value; - if (value.strip().empty()) return [-1, 0]; - var diff = arguments.callee.getFirstDifferencePos(value, this.oldElementValue); - var offset = (diff == this.oldElementValue.length ? 
1 : 0); - var prevTokenPos = -1, nextTokenPos = value.length; - var tp; - for (var index = 0, l = this.options.tokens.length; index < l; ++index) { - tp = value.lastIndexOf(this.options.tokens[index], diff + offset - 1); - if (tp > prevTokenPos) prevTokenPos = tp; - tp = value.indexOf(this.options.tokens[index], diff + offset); - if (-1 != tp && tp < nextTokenPos) nextTokenPos = tp; - } - return (this.tokenBounds = [prevTokenPos + 1, nextTokenPos]); - } -}); - -Autocompleter.Base.prototype.getTokenBounds.getFirstDifferencePos = function(newS, oldS) { - var boundary = Math.min(newS.length, oldS.length); - for (var index = 0; index < boundary; ++index) - if (newS[index] != oldS[index]) - return index; - return boundary; -}; - -Ajax.Autocompleter = Class.create(Autocompleter.Base, { - initialize: function(element, update, url, options) { - this.baseInitialize(element, update, options); - this.options.asynchronous = true; - this.options.onComplete = this.onComplete.bind(this); - this.options.defaultParams = this.options.parameters || null; - this.url = url; - }, - - getUpdatedChoices: function() { - this.startIndicator(); - - var entry = encodeURIComponent(this.options.paramName) + '=' + - encodeURIComponent(this.getToken()); - - this.options.parameters = this.options.callback ? - this.options.callback(this.element, entry) : entry; - - if(this.options.defaultParams) - this.options.parameters += '&' + this.options.defaultParams; - - new Ajax.Request(this.url, this.options); - }, - - onComplete: function(request) { - this.updateChoices(request.responseText); - } -}); - -// The local array autocompleter. Used when you'd prefer to -// inject an array of autocompletion options into the page, rather -// than sending out Ajax queries, which can be quite slow sometimes. -// -// The constructor takes four parameters. The first two are, as usual, -// the id of the monitored textbox, and id of the autocompletion menu. -// The third is the array you want to autocomplete from, and the fourth -// is the options block. -// -// Extra local autocompletion options: -// - choices - How many autocompletion choices to offer -// -// - partialSearch - If false, the autocompleter will match entered -// text only at the beginning of strings in the -// autocomplete array. Defaults to true, which will -// match text at the beginning of any *word* in the -// strings in the autocomplete array. If you want to -// search anywhere in the string, additionally set -// the option fullSearch to true (default: off). -// -// - fullSsearch - Search anywhere in autocomplete array strings. -// -// - partialChars - How many characters to enter before triggering -// a partial match (unlike minChars, which defines -// how many characters are required to do any match -// at all). Defaults to 2. -// -// - ignoreCase - Whether to ignore case when autocompleting. -// Defaults to true. -// -// It's possible to pass in a custom function as the 'selector' -// option, if you prefer to write your own autocompletion logic. -// In that case, the other options above will not apply unless -// you support them. 
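A corresponding sketch for the local-array variant described above, assuming the same hypothetical element ids and a small in-page word list:

// Completes against an in-page array instead of an Ajax endpoint.
var languages = ['Ruby', 'Rails', 'Rack', 'RSpec', 'Rake'];
new Autocompleter.Local('search', 'search_choices', languages, {
  choices: 5,           // offer at most five suggestions
  partialChars: 2,      // characters required before in-word (partial) matching
  fullSearch: false,    // set true to match anywhere in each string
  ignoreCase: true
});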
- -Autocompleter.Local = Class.create(Autocompleter.Base, { - initialize: function(element, update, array, options) { - this.baseInitialize(element, update, options); - this.options.array = array; - }, - - getUpdatedChoices: function() { - this.updateChoices(this.options.selector(this)); - }, - - setOptions: function(options) { - this.options = Object.extend({ - choices: 10, - partialSearch: true, - partialChars: 2, - ignoreCase: true, - fullSearch: false, - selector: function(instance) { - var ret = []; // Beginning matches - var partial = []; // Inside matches - var entry = instance.getToken(); - var count = 0; - - for (var i = 0; i < instance.options.array.length && - ret.length < instance.options.choices ; i++) { - - var elem = instance.options.array[i]; - var foundPos = instance.options.ignoreCase ? - elem.toLowerCase().indexOf(entry.toLowerCase()) : - elem.indexOf(entry); - - while (foundPos != -1) { - if (foundPos == 0 && elem.length != entry.length) { - ret.push("
  • " + elem.substr(0, entry.length) + "" + - elem.substr(entry.length) + "
  • "); - break; - } else if (entry.length >= instance.options.partialChars && - instance.options.partialSearch && foundPos != -1) { - if (instance.options.fullSearch || /\s/.test(elem.substr(foundPos-1,1))) { - partial.push("
  • " + elem.substr(0, foundPos) + "" + - elem.substr(foundPos, entry.length) + "" + elem.substr( - foundPos + entry.length) + "
  • "); - break; - } - } - - foundPos = instance.options.ignoreCase ? - elem.toLowerCase().indexOf(entry.toLowerCase(), foundPos + 1) : - elem.indexOf(entry, foundPos + 1); - - } - } - if (partial.length) - ret = ret.concat(partial.slice(0, instance.options.choices - ret.length)) - return "
      " + ret.join('') + "
    "; - } - }, options || { }); - } -}); - -// AJAX in-place editor and collection editor -// Full rewrite by Christophe Porteneuve (April 2007). - -// Use this if you notice weird scrolling problems on some browsers, -// the DOM might be a bit confused when this gets called so do this -// waits 1 ms (with setTimeout) until it does the activation -Field.scrollFreeActivate = function(field) { - setTimeout(function() { - Field.activate(field); - }, 1); -} - -Ajax.InPlaceEditor = Class.create({ - initialize: function(element, url, options) { - this.url = url; - this.element = element = $(element); - this.prepareOptions(); - this._controls = { }; - arguments.callee.dealWithDeprecatedOptions(options); // DEPRECATION LAYER!!! - Object.extend(this.options, options || { }); - if (!this.options.formId && this.element.id) { - this.options.formId = this.element.id + '-inplaceeditor'; - if ($(this.options.formId)) - this.options.formId = ''; - } - if (this.options.externalControl) - this.options.externalControl = $(this.options.externalControl); - if (!this.options.externalControl) - this.options.externalControlOnly = false; - this._originalBackground = this.element.getStyle('background-color') || 'transparent'; - this.element.title = this.options.clickToEditText; - this._boundCancelHandler = this.handleFormCancellation.bind(this); - this._boundComplete = (this.options.onComplete || Prototype.emptyFunction).bind(this); - this._boundFailureHandler = this.handleAJAXFailure.bind(this); - this._boundSubmitHandler = this.handleFormSubmission.bind(this); - this._boundWrapperHandler = this.wrapUp.bind(this); - this.registerListeners(); - }, - checkForEscapeOrReturn: function(e) { - if (!this._editing || e.ctrlKey || e.altKey || e.shiftKey) return; - if (Event.KEY_ESC == e.keyCode) - this.handleFormCancellation(e); - else if (Event.KEY_RETURN == e.keyCode) - this.handleFormSubmission(e); - }, - createControl: function(mode, handler, extraClasses) { - var control = this.options[mode + 'Control']; - var text = this.options[mode + 'Text']; - if ('button' == control) { - var btn = document.createElement('input'); - btn.type = 'submit'; - btn.value = text; - btn.className = 'editor_' + mode + '_button'; - if ('cancel' == mode) - btn.onclick = this._boundCancelHandler; - this._form.appendChild(btn); - this._controls[mode] = btn; - } else if ('link' == control) { - var link = document.createElement('a'); - link.href = '#'; - link.appendChild(document.createTextNode(text)); - link.onclick = 'cancel' == mode ? this._boundCancelHandler : this._boundSubmitHandler; - link.className = 'editor_' + mode + '_link'; - if (extraClasses) - link.className += ' ' + extraClasses; - this._form.appendChild(link); - this._controls[mode] = link; - } - }, - createEditField: function() { - var text = (this.options.loadTextURL ? this.options.loadingText : this.getText()); - var fld; - if (1 >= this.options.rows && !/\r|\n/.test(this.getText())) { - fld = document.createElement('input'); - fld.type = 'text'; - var size = this.options.size || this.options.cols || 0; - if (0 < size) fld.size = size; - } else { - fld = document.createElement('textarea'); - fld.rows = (1 >= this.options.rows ? 
this.options.autoRows : this.options.rows); - fld.cols = this.options.cols || 40; - } - fld.name = this.options.paramName; - fld.value = text; // No HTML breaks conversion anymore - fld.className = 'editor_field'; - if (this.options.submitOnBlur) - fld.onblur = this._boundSubmitHandler; - this._controls.editor = fld; - if (this.options.loadTextURL) - this.loadExternalText(); - this._form.appendChild(this._controls.editor); - }, - createForm: function() { - var ipe = this; - function addText(mode, condition) { - var text = ipe.options['text' + mode + 'Controls']; - if (!text || condition === false) return; - ipe._form.appendChild(document.createTextNode(text)); - }; - this._form = $(document.createElement('form')); - this._form.id = this.options.formId; - this._form.addClassName(this.options.formClassName); - this._form.onsubmit = this._boundSubmitHandler; - this.createEditField(); - if ('textarea' == this._controls.editor.tagName.toLowerCase()) - this._form.appendChild(document.createElement('br')); - if (this.options.onFormCustomization) - this.options.onFormCustomization(this, this._form); - addText('Before', this.options.okControl || this.options.cancelControl); - this.createControl('ok', this._boundSubmitHandler); - addText('Between', this.options.okControl && this.options.cancelControl); - this.createControl('cancel', this._boundCancelHandler, 'editor_cancel'); - addText('After', this.options.okControl || this.options.cancelControl); - }, - destroy: function() { - if (this._oldInnerHTML) - this.element.innerHTML = this._oldInnerHTML; - this.leaveEditMode(); - this.unregisterListeners(); - }, - enterEditMode: function(e) { - if (this._saving || this._editing) return; - this._editing = true; - this.triggerCallback('onEnterEditMode'); - if (this.options.externalControl) - this.options.externalControl.hide(); - this.element.hide(); - this.createForm(); - this.element.parentNode.insertBefore(this._form, this.element); - if (!this.options.loadTextURL) - this.postProcessEditField(); - if (e) Event.stop(e); - }, - enterHover: function(e) { - if (this.options.hoverClassName) - this.element.addClassName(this.options.hoverClassName); - if (this._saving) return; - this.triggerCallback('onEnterHover'); - }, - getText: function() { - return this.element.innerHTML; - }, - handleAJAXFailure: function(transport) { - this.triggerCallback('onFailure', transport); - if (this._oldInnerHTML) { - this.element.innerHTML = this._oldInnerHTML; - this._oldInnerHTML = null; - } - }, - handleFormCancellation: function(e) { - this.wrapUp(); - if (e) Event.stop(e); - }, - handleFormSubmission: function(e) { - var form = this._form; - var value = $F(this._controls.editor); - this.prepareSubmission(); - var params = this.options.callback(form, value) || ''; - if (Object.isString(params)) - params = params.toQueryParams(); - params.editorId = this.element.id; - if (this.options.htmlResponse) { - var options = Object.extend({ evalScripts: true }, this.options.ajaxOptions); - Object.extend(options, { - parameters: params, - onComplete: this._boundWrapperHandler, - onFailure: this._boundFailureHandler - }); - new Ajax.Updater({ success: this.element }, this.url, options); - } else { - var options = Object.extend({ method: 'get' }, this.options.ajaxOptions); - Object.extend(options, { - parameters: params, - onComplete: this._boundWrapperHandler, - onFailure: this._boundFailureHandler - }); - new Ajax.Request(this.url, options); - } - if (e) Event.stop(e); - }, - leaveEditMode: function() { - 
this.element.removeClassName(this.options.savingClassName); - this.removeForm(); - this.leaveHover(); - this.element.style.backgroundColor = this._originalBackground; - this.element.show(); - if (this.options.externalControl) - this.options.externalControl.show(); - this._saving = false; - this._editing = false; - this._oldInnerHTML = null; - this.triggerCallback('onLeaveEditMode'); - }, - leaveHover: function(e) { - if (this.options.hoverClassName) - this.element.removeClassName(this.options.hoverClassName); - if (this._saving) return; - this.triggerCallback('onLeaveHover'); - }, - loadExternalText: function() { - this._form.addClassName(this.options.loadingClassName); - this._controls.editor.disabled = true; - var options = Object.extend({ method: 'get' }, this.options.ajaxOptions); - Object.extend(options, { - parameters: 'editorId=' + encodeURIComponent(this.element.id), - onComplete: Prototype.emptyFunction, - onSuccess: function(transport) { - this._form.removeClassName(this.options.loadingClassName); - var text = transport.responseText; - if (this.options.stripLoadedTextTags) - text = text.stripTags(); - this._controls.editor.value = text; - this._controls.editor.disabled = false; - this.postProcessEditField(); - }.bind(this), - onFailure: this._boundFailureHandler - }); - new Ajax.Request(this.options.loadTextURL, options); - }, - postProcessEditField: function() { - var fpc = this.options.fieldPostCreation; - if (fpc) - $(this._controls.editor)['focus' == fpc ? 'focus' : 'activate'](); - }, - prepareOptions: function() { - this.options = Object.clone(Ajax.InPlaceEditor.DefaultOptions); - Object.extend(this.options, Ajax.InPlaceEditor.DefaultCallbacks); - [this._extraDefaultOptions].flatten().compact().each(function(defs) { - Object.extend(this.options, defs); - }.bind(this)); - }, - prepareSubmission: function() { - this._saving = true; - this.removeForm(); - this.leaveHover(); - this.showSaving(); - }, - registerListeners: function() { - this._listeners = { }; - var listener; - $H(Ajax.InPlaceEditor.Listeners).each(function(pair) { - listener = this[pair.value].bind(this); - this._listeners[pair.key] = listener; - if (!this.options.externalControlOnly) - this.element.observe(pair.key, listener); - if (this.options.externalControl) - this.options.externalControl.observe(pair.key, listener); - }.bind(this)); - }, - removeForm: function() { - if (!this._form) return; - this._form.remove(); - this._form = null; - this._controls = { }; - }, - showSaving: function() { - this._oldInnerHTML = this.element.innerHTML; - this.element.innerHTML = this.options.savingText; - this.element.addClassName(this.options.savingClassName); - this.element.style.backgroundColor = this._originalBackground; - this.element.show(); - }, - triggerCallback: function(cbName, arg) { - if ('function' == typeof this.options[cbName]) { - this.options[cbName](this, arg); - } - }, - unregisterListeners: function() { - $H(this._listeners).each(function(pair) { - if (!this.options.externalControlOnly) - this.element.stopObserving(pair.key, pair.value); - if (this.options.externalControl) - this.options.externalControl.stopObserving(pair.key, pair.value); - }.bind(this)); - }, - wrapUp: function(transport) { - this.leaveEditMode(); - // Can't use triggerCallback due to backward compatibility: requires - // binding + direct element - this._boundComplete(transport, this.element); - } -}); - -Object.extend(Ajax.InPlaceEditor.prototype, { - dispose: Ajax.InPlaceEditor.prototype.destroy -}); - -Ajax.InPlaceCollectionEditor = 
Class.create(Ajax.InPlaceEditor, { - initialize: function($super, element, url, options) { - this._extraDefaultOptions = Ajax.InPlaceCollectionEditor.DefaultOptions; - $super(element, url, options); - }, - - createEditField: function() { - var list = document.createElement('select'); - list.name = this.options.paramName; - list.size = 1; - this._controls.editor = list; - this._collection = this.options.collection || []; - if (this.options.loadCollectionURL) - this.loadCollection(); - else - this.checkForExternalText(); - this._form.appendChild(this._controls.editor); - }, - - loadCollection: function() { - this._form.addClassName(this.options.loadingClassName); - this.showLoadingText(this.options.loadingCollectionText); - var options = Object.extend({ method: 'get' }, this.options.ajaxOptions); - Object.extend(options, { - parameters: 'editorId=' + encodeURIComponent(this.element.id), - onComplete: Prototype.emptyFunction, - onSuccess: function(transport) { - var js = transport.responseText.strip(); - if (!/^\[.*\]$/.test(js)) // TODO: improve sanity check - throw 'Server returned an invalid collection representation.'; - this._collection = eval(js); - this.checkForExternalText(); - }.bind(this), - onFailure: this.onFailure - }); - new Ajax.Request(this.options.loadCollectionURL, options); - }, - - showLoadingText: function(text) { - this._controls.editor.disabled = true; - var tempOption = this._controls.editor.firstChild; - if (!tempOption) { - tempOption = document.createElement('option'); - tempOption.value = ''; - this._controls.editor.appendChild(tempOption); - tempOption.selected = true; - } - tempOption.update((text || '').stripScripts().stripTags()); - }, - - checkForExternalText: function() { - this._text = this.getText(); - if (this.options.loadTextURL) - this.loadExternalText(); - else - this.buildOptionList(); - }, - - loadExternalText: function() { - this.showLoadingText(this.options.loadingText); - var options = Object.extend({ method: 'get' }, this.options.ajaxOptions); - Object.extend(options, { - parameters: 'editorId=' + encodeURIComponent(this.element.id), - onComplete: Prototype.emptyFunction, - onSuccess: function(transport) { - this._text = transport.responseText.strip(); - this.buildOptionList(); - }.bind(this), - onFailure: this.onFailure - }); - new Ajax.Request(this.options.loadTextURL, options); - }, - - buildOptionList: function() { - this._form.removeClassName(this.options.loadingClassName); - this._collection = this._collection.map(function(entry) { - return 2 === entry.length ? entry : [entry, entry].flatten(); - }); - var marker = ('value' in this.options) ? this.options.value : this._text; - var textFound = this._collection.any(function(entry) { - return entry[0] == marker; - }.bind(this)); - this._controls.editor.update(''); - var option; - this._collection.each(function(entry, index) { - option = document.createElement('option'); - option.value = entry[0]; - option.selected = textFound ? entry[0] == marker : 0 == index; - option.appendChild(document.createTextNode(entry[1])); - this._controls.editor.appendChild(option); - }.bind(this)); - this._controls.editor.disabled = false; - Field.scrollFreeActivate(this._controls.editor); - } -}); - -//**** DEPRECATION LAYER FOR InPlace[Collection]Editor! **** -//**** This only exists for a while, in order to let **** -//**** users adapt to the new API. Read up on the new **** -//**** API and convert your code to it ASAP! 
**** - -Ajax.InPlaceEditor.prototype.initialize.dealWithDeprecatedOptions = function(options) { - if (!options) return; - function fallback(name, expr) { - if (name in options || expr === undefined) return; - options[name] = expr; - }; - fallback('cancelControl', (options.cancelLink ? 'link' : (options.cancelButton ? 'button' : - options.cancelLink == options.cancelButton == false ? false : undefined))); - fallback('okControl', (options.okLink ? 'link' : (options.okButton ? 'button' : - options.okLink == options.okButton == false ? false : undefined))); - fallback('highlightColor', options.highlightcolor); - fallback('highlightEndColor', options.highlightendcolor); -}; - -Object.extend(Ajax.InPlaceEditor, { - DefaultOptions: { - ajaxOptions: { }, - autoRows: 3, // Use when multi-line w/ rows == 1 - cancelControl: 'link', // 'link'|'button'|false - cancelText: 'cancel', - clickToEditText: 'Click to edit', - externalControl: null, // id|elt - externalControlOnly: false, - fieldPostCreation: 'activate', // 'activate'|'focus'|false - formClassName: 'inplaceeditor-form', - formId: null, // id|elt - highlightColor: '#ffff99', - highlightEndColor: '#ffffff', - hoverClassName: '', - htmlResponse: true, - loadingClassName: 'inplaceeditor-loading', - loadingText: 'Loading...', - okControl: 'button', // 'link'|'button'|false - okText: 'ok', - paramName: 'value', - rows: 1, // If 1 and multi-line, uses autoRows - savingClassName: 'inplaceeditor-saving', - savingText: 'Saving...', - size: 0, - stripLoadedTextTags: false, - submitOnBlur: false, - textAfterControls: '', - textBeforeControls: '', - textBetweenControls: '' - }, - DefaultCallbacks: { - callback: function(form) { - return Form.serialize(form); - }, - onComplete: function(transport, element) { - // For backward compatibility, this one is bound to the IPE, and passes - // the element directly. It was too often customized, so we don't break it. - new Effect.Highlight(element, { - startcolor: this.options.highlightColor, keepBackgroundImage: true }); - }, - onEnterEditMode: null, - onEnterHover: function(ipe) { - ipe.element.style.backgroundColor = ipe.options.highlightColor; - if (ipe._effect) - ipe._effect.cancel(); - }, - onFailure: function(transport, ipe) { - alert('Error communication with the server: ' + transport.responseText.stripTags()); - }, - onFormCustomization: null, // Takes the IPE and its generated form, after editor, before controls. - onLeaveEditMode: null, - onLeaveHover: function(ipe) { - ipe._effect = new Effect.Highlight(ipe.element, { - startcolor: ipe.options.highlightColor, endcolor: ipe.options.highlightEndColor, - restorecolor: ipe._originalBackground, keepBackgroundImage: true - }); - } - }, - Listeners: { - click: 'enterEditMode', - keydown: 'checkForEscapeOrReturn', - mouseover: 'enterHover', - mouseout: 'leaveHover' - } -}); - -Ajax.InPlaceCollectionEditor.DefaultOptions = { - loadingCollectionText: 'Loading options...' 
-}; - -// Delayed observer, like Form.Element.Observer, -// but waits for delay after last key input -// Ideal for live-search fields - -Form.Element.DelayedObserver = Class.create({ - initialize: function(element, delay, callback) { - this.delay = delay || 0.5; - this.element = $(element); - this.callback = callback; - this.timer = null; - this.lastValue = $F(this.element); - Event.observe(this.element,'keyup',this.delayedListener.bindAsEventListener(this)); - }, - delayedListener: function(event) { - if(this.lastValue == $F(this.element)) return; - if(this.timer) clearTimeout(this.timer); - this.timer = setTimeout(this.onTimerEvent.bind(this), this.delay * 1000); - this.lastValue = $F(this.element); - }, - onTimerEvent: function() { - this.timer = null; - this.callback(this.element, $F(this.element)); - } -}); diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/dragdrop.js b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/dragdrop.js deleted file mode 100644 index ccf4a1e4..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/dragdrop.js +++ /dev/null @@ -1,972 +0,0 @@ -// Copyright (c) 2005-2007 Thomas Fuchs (http://script.aculo.us, http://mir.aculo.us) -// (c) 2005-2007 Sammi Williams (http://www.oriontransfer.co.nz, sammi@oriontransfer.co.nz) -// -// script.aculo.us is freely distributable under the terms of an MIT-style license. -// For details, see the script.aculo.us web site: http://script.aculo.us/ - -if(Object.isUndefined(Effect)) - throw("dragdrop.js requires including script.aculo.us' effects.js library"); - -var Droppables = { - drops: [], - - remove: function(element) { - this.drops = this.drops.reject(function(d) { return d.element==$(element) }); - }, - - add: function(element) { - element = $(element); - var options = Object.extend({ - greedy: true, - hoverclass: null, - tree: false - }, arguments[1] || { }); - - // cache containers - if(options.containment) { - options._containers = []; - var containment = options.containment; - if(Object.isArray(containment)) { - containment.each( function(c) { options._containers.push($(c)) }); - } else { - options._containers.push($(containment)); - } - } - - if(options.accept) options.accept = [options.accept].flatten(); - - Element.makePositioned(element); // fix IE - options.element = element; - - this.drops.push(options); - }, - - findDeepestChild: function(drops) { - deepest = drops[0]; - - for (i = 1; i < drops.length; ++i) - if (Element.isParent(drops[i].element, deepest.element)) - deepest = drops[i]; - - return deepest; - }, - - isContained: function(element, drop) { - var containmentNode; - if(drop.tree) { - containmentNode = element.treeNode; - } else { - containmentNode = element.parentNode; - } - return drop._containers.detect(function(c) { return containmentNode == c }); - }, - - isAffected: function(point, element, drop) { - return ( - (drop.element!=element) && - ((!drop._containers) || - this.isContained(element, drop)) && - ((!drop.accept) || - (Element.classNames(element).detect( - function(v) { return drop.accept.include(v) } ) )) && - Position.within(drop.element, point[0], point[1]) ); - }, - - deactivate: function(drop) { - if(drop.hoverclass) - Element.removeClassName(drop.element, drop.hoverclass); - this.last_active = null; - }, - - activate: function(drop) { - if(drop.hoverclass) - Element.addClassName(drop.element, drop.hoverclass); - this.last_active = drop; - }, - - show: function(point, element) { - if(!this.drops.length) return; - var drop, 
affected = []; - - this.drops.each( function(drop) { - if(Droppables.isAffected(point, element, drop)) - affected.push(drop); - }); - - if(affected.length>0) - drop = Droppables.findDeepestChild(affected); - - if(this.last_active && this.last_active != drop) this.deactivate(this.last_active); - if (drop) { - Position.within(drop.element, point[0], point[1]); - if(drop.onHover) - drop.onHover(element, drop.element, Position.overlap(drop.overlap, drop.element)); - - if (drop != this.last_active) Droppables.activate(drop); - } - }, - - fire: function(event, element) { - if(!this.last_active) return; - Position.prepare(); - - if (this.isAffected([Event.pointerX(event), Event.pointerY(event)], element, this.last_active)) - if (this.last_active.onDrop) { - this.last_active.onDrop(element, this.last_active.element, event); - return true; - } - }, - - reset: function() { - if(this.last_active) - this.deactivate(this.last_active); - } -} - -var Draggables = { - drags: [], - observers: [], - - register: function(draggable) { - if(this.drags.length == 0) { - this.eventMouseUp = this.endDrag.bindAsEventListener(this); - this.eventMouseMove = this.updateDrag.bindAsEventListener(this); - this.eventKeypress = this.keyPress.bindAsEventListener(this); - - Event.observe(document, "mouseup", this.eventMouseUp); - Event.observe(document, "mousemove", this.eventMouseMove); - Event.observe(document, "keypress", this.eventKeypress); - } - this.drags.push(draggable); - }, - - unregister: function(draggable) { - this.drags = this.drags.reject(function(d) { return d==draggable }); - if(this.drags.length == 0) { - Event.stopObserving(document, "mouseup", this.eventMouseUp); - Event.stopObserving(document, "mousemove", this.eventMouseMove); - Event.stopObserving(document, "keypress", this.eventKeypress); - } - }, - - activate: function(draggable) { - if(draggable.options.delay) { - this._timeout = setTimeout(function() { - Draggables._timeout = null; - window.focus(); - Draggables.activeDraggable = draggable; - }.bind(this), draggable.options.delay); - } else { - window.focus(); // allows keypress events if window isn't currently focused, fails for Safari - this.activeDraggable = draggable; - } - }, - - deactivate: function() { - this.activeDraggable = null; - }, - - updateDrag: function(event) { - if(!this.activeDraggable) return; - var pointer = [Event.pointerX(event), Event.pointerY(event)]; - // Mozilla-based browsers fire successive mousemove events with - // the same coordinates, prevent needless redrawing (moz bug?) 
- if(this._lastPointer && (this._lastPointer.inspect() == pointer.inspect())) return; - this._lastPointer = pointer; - - this.activeDraggable.updateDrag(event, pointer); - }, - - endDrag: function(event) { - if(this._timeout) { - clearTimeout(this._timeout); - this._timeout = null; - } - if(!this.activeDraggable) return; - this._lastPointer = null; - this.activeDraggable.endDrag(event); - this.activeDraggable = null; - }, - - keyPress: function(event) { - if(this.activeDraggable) - this.activeDraggable.keyPress(event); - }, - - addObserver: function(observer) { - this.observers.push(observer); - this._cacheObserverCallbacks(); - }, - - removeObserver: function(element) { // element instead of observer fixes mem leaks - this.observers = this.observers.reject( function(o) { return o.element==element }); - this._cacheObserverCallbacks(); - }, - - notify: function(eventName, draggable, event) { // 'onStart', 'onEnd', 'onDrag' - if(this[eventName+'Count'] > 0) - this.observers.each( function(o) { - if(o[eventName]) o[eventName](eventName, draggable, event); - }); - if(draggable.options[eventName]) draggable.options[eventName](draggable, event); - }, - - _cacheObserverCallbacks: function() { - ['onStart','onEnd','onDrag'].each( function(eventName) { - Draggables[eventName+'Count'] = Draggables.observers.select( - function(o) { return o[eventName]; } - ).length; - }); - } -} - -/*--------------------------------------------------------------------------*/ - -var Draggable = Class.create({ - initialize: function(element) { - var defaults = { - handle: false, - reverteffect: function(element, top_offset, left_offset) { - var dur = Math.sqrt(Math.abs(top_offset^2)+Math.abs(left_offset^2))*0.02; - new Effect.Move(element, { x: -left_offset, y: -top_offset, duration: dur, - queue: {scope:'_draggable', position:'end'} - }); - }, - endeffect: function(element) { - var toOpacity = Object.isNumber(element._opacity) ? 
element._opacity : 1.0; - new Effect.Opacity(element, {duration:0.2, from:0.7, to:toOpacity, - queue: {scope:'_draggable', position:'end'}, - afterFinish: function(){ - Draggable._dragging[element] = false - } - }); - }, - zindex: 1000, - revert: false, - quiet: false, - scroll: false, - scrollSensitivity: 20, - scrollSpeed: 15, - snap: false, // false, or xy or [x,y] or function(x,y){ return [x,y] } - delay: 0 - }; - - if(!arguments[1] || Object.isUndefined(arguments[1].endeffect)) - Object.extend(defaults, { - starteffect: function(element) { - element._opacity = Element.getOpacity(element); - Draggable._dragging[element] = true; - new Effect.Opacity(element, {duration:0.2, from:element._opacity, to:0.7}); - } - }); - - var options = Object.extend(defaults, arguments[1] || { }); - - this.element = $(element); - - if(options.handle && Object.isString(options.handle)) - this.handle = this.element.down('.'+options.handle, 0); - - if(!this.handle) this.handle = $(options.handle); - if(!this.handle) this.handle = this.element; - - if(options.scroll && !options.scroll.scrollTo && !options.scroll.outerHTML) { - options.scroll = $(options.scroll); - this._isScrollChild = Element.childOf(this.element, options.scroll); - } - - Element.makePositioned(this.element); // fix IE - - this.options = options; - this.dragging = false; - - this.eventMouseDown = this.initDrag.bindAsEventListener(this); - Event.observe(this.handle, "mousedown", this.eventMouseDown); - - Draggables.register(this); - }, - - destroy: function() { - Event.stopObserving(this.handle, "mousedown", this.eventMouseDown); - Draggables.unregister(this); - }, - - currentDelta: function() { - return([ - parseInt(Element.getStyle(this.element,'left') || '0'), - parseInt(Element.getStyle(this.element,'top') || '0')]); - }, - - initDrag: function(event) { - if(!Object.isUndefined(Draggable._dragging[this.element]) && - Draggable._dragging[this.element]) return; - if(Event.isLeftClick(event)) { - // abort on form elements, fixes a Firefox issue - var src = Event.element(event); - if((tag_name = src.tagName.toUpperCase()) && ( - tag_name=='INPUT' || - tag_name=='SELECT' || - tag_name=='OPTION' || - tag_name=='BUTTON' || - tag_name=='TEXTAREA')) return; - - var pointer = [Event.pointerX(event), Event.pointerY(event)]; - var pos = Position.cumulativeOffset(this.element); - this.offset = [0,1].map( function(i) { return (pointer[i] - pos[i]) }); - - Draggables.activate(this); - Event.stop(event); - } - }, - - startDrag: function(event) { - this.dragging = true; - if(!this.delta) - this.delta = this.currentDelta(); - - if(this.options.zindex) { - this.originalZ = parseInt(Element.getStyle(this.element,'z-index') || 0); - this.element.style.zIndex = this.options.zindex; - } - - if(this.options.ghosting) { - this._clone = this.element.cloneNode(true); - this.element._originallyAbsolute = (this.element.getStyle('position') == 'absolute'); - if (!this.element._originallyAbsolute) - Position.absolutize(this.element); - this.element.parentNode.insertBefore(this._clone, this.element); - } - - if(this.options.scroll) { - if (this.options.scroll == window) { - var where = this._getWindowScroll(this.options.scroll); - this.originalScrollLeft = where.left; - this.originalScrollTop = where.top; - } else { - this.originalScrollLeft = this.options.scroll.scrollLeft; - this.originalScrollTop = this.options.scroll.scrollTop; - } - } - - Draggables.notify('onStart', this, event); - - if(this.options.starteffect) this.options.starteffect(this.element); - }, - - 
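A minimal sketch of how the Draggable/Droppables pair defined in this file is typically wired up, assuming hypothetical 'card_1' and 'trash' elements:

// Make one element draggable and register a drop target for it.
new Draggable('card_1', { revert: true, snap: [10, 10] });
Droppables.add('trash', {
  accept: 'card',            // only accept elements carrying the "card" class
  hoverclass: 'trash-hover', // toggled while a draggable hovers over the target
  onDrop: function(dragged, target, event) {
    dragged.hide();          // Prototype's Element#hide on the dropped element
  }
});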
updateDrag: function(event, pointer) { - if(!this.dragging) this.startDrag(event); - - if(!this.options.quiet){ - Position.prepare(); - Droppables.show(pointer, this.element); - } - - Draggables.notify('onDrag', this, event); - - this.draw(pointer); - if(this.options.change) this.options.change(this); - - if(this.options.scroll) { - this.stopScrolling(); - - var p; - if (this.options.scroll == window) { - with(this._getWindowScroll(this.options.scroll)) { p = [ left, top, left+width, top+height ]; } - } else { - p = Position.page(this.options.scroll); - p[0] += this.options.scroll.scrollLeft + Position.deltaX; - p[1] += this.options.scroll.scrollTop + Position.deltaY; - p.push(p[0]+this.options.scroll.offsetWidth); - p.push(p[1]+this.options.scroll.offsetHeight); - } - var speed = [0,0]; - if(pointer[0] < (p[0]+this.options.scrollSensitivity)) speed[0] = pointer[0]-(p[0]+this.options.scrollSensitivity); - if(pointer[1] < (p[1]+this.options.scrollSensitivity)) speed[1] = pointer[1]-(p[1]+this.options.scrollSensitivity); - if(pointer[0] > (p[2]-this.options.scrollSensitivity)) speed[0] = pointer[0]-(p[2]-this.options.scrollSensitivity); - if(pointer[1] > (p[3]-this.options.scrollSensitivity)) speed[1] = pointer[1]-(p[3]-this.options.scrollSensitivity); - this.startScrolling(speed); - } - - // fix AppleWebKit rendering - if(Prototype.Browser.WebKit) window.scrollBy(0,0); - - Event.stop(event); - }, - - finishDrag: function(event, success) { - this.dragging = false; - - if(this.options.quiet){ - Position.prepare(); - var pointer = [Event.pointerX(event), Event.pointerY(event)]; - Droppables.show(pointer, this.element); - } - - if(this.options.ghosting) { - if (!this.element._originallyAbsolute) - Position.relativize(this.element); - delete this.element._originallyAbsolute; - Element.remove(this._clone); - this._clone = null; - } - - var dropped = false; - if(success) { - dropped = Droppables.fire(event, this.element); - if (!dropped) dropped = false; - } - if(dropped && this.options.onDropped) this.options.onDropped(this.element); - Draggables.notify('onEnd', this, event); - - var revert = this.options.revert; - if(revert && Object.isFunction(revert)) revert = revert(this.element); - - var d = this.currentDelta(); - if(revert && this.options.reverteffect) { - if (dropped == 0 || revert != 'failure') - this.options.reverteffect(this.element, - d[1]-this.delta[1], d[0]-this.delta[0]); - } else { - this.delta = d; - } - - if(this.options.zindex) - this.element.style.zIndex = this.originalZ; - - if(this.options.endeffect) - this.options.endeffect(this.element); - - Draggables.deactivate(this); - Droppables.reset(); - }, - - keyPress: function(event) { - if(event.keyCode!=Event.KEY_ESC) return; - this.finishDrag(event, false); - Event.stop(event); - }, - - endDrag: function(event) { - if(!this.dragging) return; - this.stopScrolling(); - this.finishDrag(event, true); - Event.stop(event); - }, - - draw: function(point) { - var pos = Position.cumulativeOffset(this.element); - if(this.options.ghosting) { - var r = Position.realOffset(this.element); - pos[0] += r[0] - Position.deltaX; pos[1] += r[1] - Position.deltaY; - } - - var d = this.currentDelta(); - pos[0] -= d[0]; pos[1] -= d[1]; - - if(this.options.scroll && (this.options.scroll != window && this._isScrollChild)) { - pos[0] -= this.options.scroll.scrollLeft-this.originalScrollLeft; - pos[1] -= this.options.scroll.scrollTop-this.originalScrollTop; - } - - var p = [0,1].map(function(i){ - return (point[i]-pos[i]-this.offset[i]) - }.bind(this)); - 
- if(this.options.snap) { - if(Object.isFunction(this.options.snap)) { - p = this.options.snap(p[0],p[1],this); - } else { - if(Object.isArray(this.options.snap)) { - p = p.map( function(v, i) { - return (v/this.options.snap[i]).round()*this.options.snap[i] }.bind(this)) - } else { - p = p.map( function(v) { - return (v/this.options.snap).round()*this.options.snap }.bind(this)) - } - }} - - var style = this.element.style; - if((!this.options.constraint) || (this.options.constraint=='horizontal')) - style.left = p[0] + "px"; - if((!this.options.constraint) || (this.options.constraint=='vertical')) - style.top = p[1] + "px"; - - if(style.visibility=="hidden") style.visibility = ""; // fix gecko rendering - }, - - stopScrolling: function() { - if(this.scrollInterval) { - clearInterval(this.scrollInterval); - this.scrollInterval = null; - Draggables._lastScrollPointer = null; - } - }, - - startScrolling: function(speed) { - if(!(speed[0] || speed[1])) return; - this.scrollSpeed = [speed[0]*this.options.scrollSpeed,speed[1]*this.options.scrollSpeed]; - this.lastScrolled = new Date(); - this.scrollInterval = setInterval(this.scroll.bind(this), 10); - }, - - scroll: function() { - var current = new Date(); - var delta = current - this.lastScrolled; - this.lastScrolled = current; - if(this.options.scroll == window) { - with (this._getWindowScroll(this.options.scroll)) { - if (this.scrollSpeed[0] || this.scrollSpeed[1]) { - var d = delta / 1000; - this.options.scroll.scrollTo( left + d*this.scrollSpeed[0], top + d*this.scrollSpeed[1] ); - } - } - } else { - this.options.scroll.scrollLeft += this.scrollSpeed[0] * delta / 1000; - this.options.scroll.scrollTop += this.scrollSpeed[1] * delta / 1000; - } - - Position.prepare(); - Droppables.show(Draggables._lastPointer, this.element); - Draggables.notify('onDrag', this); - if (this._isScrollChild) { - Draggables._lastScrollPointer = Draggables._lastScrollPointer || $A(Draggables._lastPointer); - Draggables._lastScrollPointer[0] += this.scrollSpeed[0] * delta / 1000; - Draggables._lastScrollPointer[1] += this.scrollSpeed[1] * delta / 1000; - if (Draggables._lastScrollPointer[0] < 0) - Draggables._lastScrollPointer[0] = 0; - if (Draggables._lastScrollPointer[1] < 0) - Draggables._lastScrollPointer[1] = 0; - this.draw(Draggables._lastScrollPointer); - } - - if(this.options.change) this.options.change(this); - }, - - _getWindowScroll: function(w) { - var T, L, W, H; - with (w.document) { - if (w.document.documentElement && documentElement.scrollTop) { - T = documentElement.scrollTop; - L = documentElement.scrollLeft; - } else if (w.document.body) { - T = body.scrollTop; - L = body.scrollLeft; - } - if (w.innerWidth) { - W = w.innerWidth; - H = w.innerHeight; - } else if (w.document.documentElement && documentElement.clientWidth) { - W = documentElement.clientWidth; - H = documentElement.clientHeight; - } else { - W = body.offsetWidth; - H = body.offsetHeight - } - } - return { top: T, left: L, width: W, height: H }; - } -}); - -Draggable._dragging = { }; - -/*--------------------------------------------------------------------------*/ - -var SortableObserver = Class.create({ - initialize: function(element, observer) { - this.element = $(element); - this.observer = observer; - this.lastValue = Sortable.serialize(this.element); - }, - - onStart: function() { - this.lastValue = Sortable.serialize(this.element); - }, - - onEnd: function() { - Sortable.unmark(); - if(this.lastValue != Sortable.serialize(this.element)) - this.observer(this.element) - } -}); - 
-var Sortable = { - SERIALIZE_RULE: /^[^_\-](?:[A-Za-z0-9\-\_]*)[_](.*)$/, - - sortables: { }, - - _findRootElement: function(element) { - while (element.tagName.toUpperCase() != "BODY") { - if(element.id && Sortable.sortables[element.id]) return element; - element = element.parentNode; - } - }, - - options: function(element) { - element = Sortable._findRootElement($(element)); - if(!element) return; - return Sortable.sortables[element.id]; - }, - - destroy: function(element){ - var s = Sortable.options(element); - - if(s) { - Draggables.removeObserver(s.element); - s.droppables.each(function(d){ Droppables.remove(d) }); - s.draggables.invoke('destroy'); - - delete Sortable.sortables[s.element.id]; - } - }, - - create: function(element) { - element = $(element); - var options = Object.extend({ - element: element, - tag: 'li', // assumes li children, override with tag: 'tagname' - dropOnEmpty: false, - tree: false, - treeTag: 'ul', - overlap: 'vertical', // one of 'vertical', 'horizontal' - constraint: 'vertical', // one of 'vertical', 'horizontal', false - containment: element, // also takes array of elements (or id's); or false - handle: false, // or a CSS class - only: false, - delay: 0, - hoverclass: null, - ghosting: false, - quiet: false, - scroll: false, - scrollSensitivity: 20, - scrollSpeed: 15, - format: this.SERIALIZE_RULE, - - // these take arrays of elements or ids and can be - // used for better initialization performance - elements: false, - handles: false, - - onChange: Prototype.emptyFunction, - onUpdate: Prototype.emptyFunction - }, arguments[1] || { }); - - // clear any old sortable with same element - this.destroy(element); - - // build options for the draggables - var options_for_draggable = { - revert: true, - quiet: options.quiet, - scroll: options.scroll, - scrollSpeed: options.scrollSpeed, - scrollSensitivity: options.scrollSensitivity, - delay: options.delay, - ghosting: options.ghosting, - constraint: options.constraint, - handle: options.handle }; - - if(options.starteffect) - options_for_draggable.starteffect = options.starteffect; - - if(options.reverteffect) - options_for_draggable.reverteffect = options.reverteffect; - else - if(options.ghosting) options_for_draggable.reverteffect = function(element) { - element.style.top = 0; - element.style.left = 0; - }; - - if(options.endeffect) - options_for_draggable.endeffect = options.endeffect; - - if(options.zindex) - options_for_draggable.zindex = options.zindex; - - // build options for the droppables - var options_for_droppable = { - overlap: options.overlap, - containment: options.containment, - tree: options.tree, - hoverclass: options.hoverclass, - onHover: Sortable.onHover - } - - var options_for_tree = { - onHover: Sortable.onEmptyHover, - overlap: options.overlap, - containment: options.containment, - hoverclass: options.hoverclass - } - - // fix for gecko engine - Element.cleanWhitespace(element); - - options.draggables = []; - options.droppables = []; - - // drop on empty handling - if(options.dropOnEmpty || options.tree) { - Droppables.add(element, options_for_tree); - options.droppables.push(element); - } - - (options.elements || this.findElements(element, options) || []).each( function(e,i) { - var handle = options.handles ? $(options.handles[i]) : - (options.handle ? $(e).select('.' 
+ options.handle)[0] : e); - options.draggables.push( - new Draggable(e, Object.extend(options_for_draggable, { handle: handle }))); - Droppables.add(e, options_for_droppable); - if(options.tree) e.treeNode = element; - options.droppables.push(e); - }); - - if(options.tree) { - (Sortable.findTreeElements(element, options) || []).each( function(e) { - Droppables.add(e, options_for_tree); - e.treeNode = element; - options.droppables.push(e); - }); - } - - // keep reference - this.sortables[element.id] = options; - - // for onupdate - Draggables.addObserver(new SortableObserver(element, options.onUpdate)); - - }, - - // return all suitable-for-sortable elements in a guaranteed order - findElements: function(element, options) { - return Element.findChildren( - element, options.only, options.tree ? true : false, options.tag); - }, - - findTreeElements: function(element, options) { - return Element.findChildren( - element, options.only, options.tree ? true : false, options.treeTag); - }, - - onHover: function(element, dropon, overlap) { - if(Element.isParent(dropon, element)) return; - - if(overlap > .33 && overlap < .66 && Sortable.options(dropon).tree) { - return; - } else if(overlap>0.5) { - Sortable.mark(dropon, 'before'); - if(dropon.previousSibling != element) { - var oldParentNode = element.parentNode; - element.style.visibility = "hidden"; // fix gecko rendering - dropon.parentNode.insertBefore(element, dropon); - if(dropon.parentNode!=oldParentNode) - Sortable.options(oldParentNode).onChange(element); - Sortable.options(dropon.parentNode).onChange(element); - } - } else { - Sortable.mark(dropon, 'after'); - var nextElement = dropon.nextSibling || null; - if(nextElement != element) { - var oldParentNode = element.parentNode; - element.style.visibility = "hidden"; // fix gecko rendering - dropon.parentNode.insertBefore(element, nextElement); - if(dropon.parentNode!=oldParentNode) - Sortable.options(oldParentNode).onChange(element); - Sortable.options(dropon.parentNode).onChange(element); - } - } - }, - - onEmptyHover: function(element, dropon, overlap) { - var oldParentNode = element.parentNode; - var droponOptions = Sortable.options(dropon); - - if(!Element.isParent(dropon, element)) { - var index; - - var children = Sortable.findElements(dropon, {tag: droponOptions.tag, only: droponOptions.only}); - var child = null; - - if(children) { - var offset = Element.offsetSize(dropon, droponOptions.overlap) * (1.0 - overlap); - - for (index = 0; index < children.length; index += 1) { - if (offset - Element.offsetSize (children[index], droponOptions.overlap) >= 0) { - offset -= Element.offsetSize (children[index], droponOptions.overlap); - } else if (offset - (Element.offsetSize (children[index], droponOptions.overlap) / 2) >= 0) { - child = index + 1 < children.length ? children[index + 1] : null; - break; - } else { - child = children[index]; - break; - } - } - } - - dropon.insertBefore(element, child); - - Sortable.options(oldParentNode).onChange(element); - droponOptions.onChange(element); - } - }, - - unmark: function() { - if(Sortable._marker) Sortable._marker.hide(); - }, - - mark: function(dropon, position) { - // mark on ghosting only - var sortable = Sortable.options(dropon.parentNode); - if(sortable && !sortable.ghosting) return; - - if(!Sortable._marker) { - Sortable._marker = - ($('dropmarker') || Element.extend(document.createElement('DIV'))). 
- hide().addClassName('dropmarker').setStyle({position:'absolute'}); - document.getElementsByTagName("body").item(0).appendChild(Sortable._marker); - } - var offsets = Position.cumulativeOffset(dropon); - Sortable._marker.setStyle({left: offsets[0]+'px', top: offsets[1] + 'px'}); - - if(position=='after') - if(sortable.overlap == 'horizontal') - Sortable._marker.setStyle({left: (offsets[0]+dropon.clientWidth) + 'px'}); - else - Sortable._marker.setStyle({top: (offsets[1]+dropon.clientHeight) + 'px'}); - - Sortable._marker.show(); - }, - - _tree: function(element, options, parent) { - var children = Sortable.findElements(element, options) || []; - - for (var i = 0; i < children.length; ++i) { - var match = children[i].id.match(options.format); - - if (!match) continue; - - var child = { - id: encodeURIComponent(match ? match[1] : null), - element: element, - parent: parent, - children: [], - position: parent.children.length, - container: $(children[i]).down(options.treeTag) - } - - /* Get the element containing the children and recurse over it */ - if (child.container) - this._tree(child.container, options, child) - - parent.children.push (child); - } - - return parent; - }, - - tree: function(element) { - element = $(element); - var sortableOptions = this.options(element); - var options = Object.extend({ - tag: sortableOptions.tag, - treeTag: sortableOptions.treeTag, - only: sortableOptions.only, - name: element.id, - format: sortableOptions.format - }, arguments[1] || { }); - - var root = { - id: null, - parent: null, - children: [], - container: element, - position: 0 - } - - return Sortable._tree(element, options, root); - }, - - /* Construct a [i] index for a particular node */ - _constructIndex: function(node) { - var index = ''; - do { - if (node.id) index = '[' + node.position + ']' + index; - } while ((node = node.parent) != null); - return index; - }, - - sequence: function(element) { - element = $(element); - var options = Object.extend(this.options(element), arguments[1] || { }); - - return $(this.findElements(element, options) || []).map( function(item) { - return item.id.match(options.format) ? item.id.match(options.format)[1] : ''; - }); - }, - - setSequence: function(element, new_sequence) { - element = $(element); - var options = Object.extend(this.options(element), arguments[2] || { }); - - var nodeMap = { }; - this.findElements(element, options).each( function(n) { - if (n.id.match(options.format)) - nodeMap[n.id.match(options.format)[1]] = [n, n.parentNode]; - n.parentNode.removeChild(n); - }); - - new_sequence.each(function(ident) { - var n = nodeMap[ident]; - if (n) { - n[1].appendChild(n[0]); - delete nodeMap[ident]; - } - }); - }, - - serialize: function(element) { - element = $(element); - var options = Object.extend(Sortable.options(element), arguments[1] || { }); - var name = encodeURIComponent( - (arguments[1] && arguments[1].name) ? 
arguments[1].name : element.id); - - if (options.tree) { - return Sortable.tree(element, arguments[1]).children.map( function (item) { - return [name + Sortable._constructIndex(item) + "[id]=" + - encodeURIComponent(item.id)].concat(item.children.map(arguments.callee)); - }).flatten().join('&'); - } else { - return Sortable.sequence(element, arguments[1]).map( function(item) { - return name + "[]=" + encodeURIComponent(item); - }).join('&'); - } - } -} - -// Returns true if child is contained within element -Element.isParent = function(child, element) { - if (!child.parentNode || child == element) return false; - if (child.parentNode == element) return true; - return Element.isParent(child.parentNode, element); -} - -Element.findChildren = function(element, only, recursive, tagName) { - if(!element.hasChildNodes()) return null; - tagName = tagName.toUpperCase(); - if(only) only = [only].flatten(); - var elements = []; - $A(element.childNodes).each( function(e) { - if(e.tagName && e.tagName.toUpperCase()==tagName && - (!only || (Element.classNames(e).detect(function(v) { return only.include(v) })))) - elements.push(e); - if(recursive) { - var grandchildren = Element.findChildren(e, only, recursive, tagName); - if(grandchildren) elements.push(grandchildren); - } - }); - - return (elements.length>0 ? elements.flatten() : []); -} - -Element.offsetSize = function (element, type) { - return element['offset' + ((type=='vertical' || type=='height') ? 'Height' : 'Width')]; -} diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/effects.js b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/effects.js deleted file mode 100644 index 65aed239..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/javascripts/effects.js +++ /dev/null @@ -1,1120 +0,0 @@ -// Copyright (c) 2005-2007 Thomas Fuchs (http://script.aculo.us, http://mir.aculo.us) -// Contributors: -// Justin Palmer (http://encytemedia.com/) -// Mark Pilgrim (http://diveintomark.org/) -// Martin Bialasinki -// -// script.aculo.us is freely distributable under the terms of an MIT-style license. -// For details, see the script.aculo.us web site: http://script.aculo.us/ - -// converts rgb() and #xxx to #xxxxxx format, -// returns self (or first argument) if not convertable -String.prototype.parseColor = function() { - var color = '#'; - if (this.slice(0,4) == 'rgb(') { - var cols = this.slice(4,this.length-1).split(','); - var i=0; do { color += parseInt(cols[i]).toColorPart() } while (++i<3); - } else { - if (this.slice(0,1) == '#') { - if (this.length==4) for(var i=1;i<4;i++) color += (this.charAt(i) + this.charAt(i)).toLowerCase(); - if (this.length==7) color = this.toLowerCase(); - } - } - return (color.length==7 ? color : (arguments[0] || this)); -}; - -/*--------------------------------------------------------------------------*/ - -Element.collectTextNodes = function(element) { - return $A($(element).childNodes).collect( function(node) { - return (node.nodeType==3 ? node.nodeValue : - (node.hasChildNodes() ? Element.collectTextNodes(node) : '')); - }).flatten().join(''); -}; - -Element.collectTextNodesIgnoreClass = function(element, className) { - return $A($(element).childNodes).collect( function(node) { - return (node.nodeType==3 ? node.nodeValue : - ((node.hasChildNodes() && !Element.hasClassName(node,className)) ? 
- Element.collectTextNodesIgnoreClass(node, className) : '')); - }).flatten().join(''); -}; - -Element.setContentZoom = function(element, percent) { - element = $(element); - element.setStyle({fontSize: (percent/100) + 'em'}); - if (Prototype.Browser.WebKit) window.scrollBy(0,0); - return element; -}; - -Element.getInlineOpacity = function(element){ - return $(element).style.opacity || ''; -}; - -Element.forceRerendering = function(element) { - try { - element = $(element); - var n = document.createTextNode(' '); - element.appendChild(n); - element.removeChild(n); - } catch(e) { } -}; - -/*--------------------------------------------------------------------------*/ - -var Effect = { - _elementDoesNotExistError: { - name: 'ElementDoesNotExistError', - message: 'The specified DOM element does not exist, but is required for this effect to operate' - }, - Transitions: { - linear: Prototype.K, - sinoidal: function(pos) { - return (-Math.cos(pos*Math.PI)/2) + 0.5; - }, - reverse: function(pos) { - return 1-pos; - }, - flicker: function(pos) { - var pos = ((-Math.cos(pos*Math.PI)/4) + 0.75) + Math.random()/4; - return pos > 1 ? 1 : pos; - }, - wobble: function(pos) { - return (-Math.cos(pos*Math.PI*(9*pos))/2) + 0.5; - }, - pulse: function(pos, pulses) { - pulses = pulses || 5; - return ( - ((pos % (1/pulses)) * pulses).round() == 0 ? - ((pos * pulses * 2) - (pos * pulses * 2).floor()) : - 1 - ((pos * pulses * 2) - (pos * pulses * 2).floor()) - ); - }, - spring: function(pos) { - return 1 - (Math.cos(pos * 4.5 * Math.PI) * Math.exp(-pos * 6)); - }, - none: function(pos) { - return 0; - }, - full: function(pos) { - return 1; - } - }, - DefaultOptions: { - duration: 1.0, // seconds - fps: 100, // 100= assume 66fps max. - sync: false, // true for combining - from: 0.0, - to: 1.0, - delay: 0.0, - queue: 'parallel' - }, - tagifyText: function(element) { - var tagifyStyle = 'position:relative'; - if (Prototype.Browser.IE) tagifyStyle += ';zoom:1'; - - element = $(element); - $A(element.childNodes).each( function(child) { - if (child.nodeType==3) { - child.nodeValue.toArray().each( function(character) { - element.insertBefore( - new Element('span', {style: tagifyStyle}).update( - character == ' ' ? String.fromCharCode(160) : character), - child); - }); - Element.remove(child); - } - }); - }, - multiple: function(element, effect) { - var elements; - if (((typeof element == 'object') || - Object.isFunction(element)) && - (element.length)) - elements = element; - else - elements = $(element).childNodes; - - var options = Object.extend({ - speed: 0.1, - delay: 0.0 - }, arguments[2] || { }); - var masterDelay = options.delay; - - $A(elements).each( function(element, index) { - new effect(element, Object.extend(options, { delay: index * options.speed + masterDelay })); - }); - }, - PAIRS: { - 'slide': ['SlideDown','SlideUp'], - 'blind': ['BlindDown','BlindUp'], - 'appear': ['Appear','Fade'] - }, - toggle: function(element, effect) { - element = $(element); - effect = (effect || 'appear').toLowerCase(); - var options = Object.extend({ - queue: { position:'end', scope:(element.id || 'global'), limit: 1 } - }, arguments[2] || { }); - Effect[element.visible() ? 
- Effect.PAIRS[effect][1] : Effect.PAIRS[effect][0]](element, options); - } -}; - -Effect.DefaultOptions.transition = Effect.Transitions.sinoidal; - -/* ------------- core effects ------------- */ - -Effect.ScopedQueue = Class.create(Enumerable, { - initialize: function() { - this.effects = []; - this.interval = null; - }, - _each: function(iterator) { - this.effects._each(iterator); - }, - add: function(effect) { - var timestamp = new Date().getTime(); - - var position = Object.isString(effect.options.queue) ? - effect.options.queue : effect.options.queue.position; - - switch(position) { - case 'front': - // move unstarted effects after this effect - this.effects.findAll(function(e){ return e.state=='idle' }).each( function(e) { - e.startOn += effect.finishOn; - e.finishOn += effect.finishOn; - }); - break; - case 'with-last': - timestamp = this.effects.pluck('startOn').max() || timestamp; - break; - case 'end': - // start effect after last queued effect has finished - timestamp = this.effects.pluck('finishOn').max() || timestamp; - break; - } - - effect.startOn += timestamp; - effect.finishOn += timestamp; - - if (!effect.options.queue.limit || (this.effects.length < effect.options.queue.limit)) - this.effects.push(effect); - - if (!this.interval) - this.interval = setInterval(this.loop.bind(this), 15); - }, - remove: function(effect) { - this.effects = this.effects.reject(function(e) { return e==effect }); - if (this.effects.length == 0) { - clearInterval(this.interval); - this.interval = null; - } - }, - loop: function() { - var timePos = new Date().getTime(); - for(var i=0, len=this.effects.length;i= this.startOn) { - if (timePos >= this.finishOn) { - this.render(1.0); - this.cancel(); - this.event('beforeFinish'); - if (this.finish) this.finish(); - this.event('afterFinish'); - return; - } - var pos = (timePos - this.startOn) / this.totalTime, - frame = (pos * this.totalFrames).round(); - if (frame > this.currentFrame) { - this.render(pos); - this.currentFrame = frame; - } - } - }, - cancel: function() { - if (!this.options.sync) - Effect.Queues.get(Object.isString(this.options.queue) ? - 'global' : this.options.queue.scope).remove(this); - this.state = 'finished'; - }, - event: function(eventName) { - if (this.options[eventName + 'Internal']) this.options[eventName + 'Internal'](this); - if (this.options[eventName]) this.options[eventName](this); - }, - inspect: function() { - var data = $H(); - for(property in this) - if (!Object.isFunction(this[property])) data.set(property, this[property]); - return '#'; - } -}); - -Effect.Parallel = Class.create(Effect.Base, { - initialize: function(effects) { - this.effects = effects || []; - this.start(arguments[1]); - }, - update: function(position) { - this.effects.invoke('render', position); - }, - finish: function(position) { - this.effects.each( function(effect) { - effect.render(1.0); - effect.cancel(); - effect.event('beforeFinish'); - if (effect.finish) effect.finish(position); - effect.event('afterFinish'); - }); - } -}); - -Effect.Tween = Class.create(Effect.Base, { - initialize: function(object, from, to) { - object = Object.isString(object) ? $(object) : object; - var args = $A(arguments), method = args.last(), - options = args.length == 5 ? args[3] : null; - this.method = Object.isFunction(method) ? method.bind(object) : - Object.isFunction(object[method]) ? 
object[method].bind(object) : - function(value) { object[method] = value }; - this.start(Object.extend({ from: from, to: to }, options || { })); - }, - update: function(position) { - this.method(position); - } -}); - -Effect.Event = Class.create(Effect.Base, { - initialize: function() { - this.start(Object.extend({ duration: 0 }, arguments[0] || { })); - }, - update: Prototype.emptyFunction -}); - -Effect.Opacity = Class.create(Effect.Base, { - initialize: function(element) { - this.element = $(element); - if (!this.element) throw(Effect._elementDoesNotExistError); - // make this work on IE on elements without 'layout' - if (Prototype.Browser.IE && (!this.element.currentStyle.hasLayout)) - this.element.setStyle({zoom: 1}); - var options = Object.extend({ - from: this.element.getOpacity() || 0.0, - to: 1.0 - }, arguments[1] || { }); - this.start(options); - }, - update: function(position) { - this.element.setOpacity(position); - } -}); - -Effect.Move = Class.create(Effect.Base, { - initialize: function(element) { - this.element = $(element); - if (!this.element) throw(Effect._elementDoesNotExistError); - var options = Object.extend({ - x: 0, - y: 0, - mode: 'relative' - }, arguments[1] || { }); - this.start(options); - }, - setup: function() { - this.element.makePositioned(); - this.originalLeft = parseFloat(this.element.getStyle('left') || '0'); - this.originalTop = parseFloat(this.element.getStyle('top') || '0'); - if (this.options.mode == 'absolute') { - this.options.x = this.options.x - this.originalLeft; - this.options.y = this.options.y - this.originalTop; - } - }, - update: function(position) { - this.element.setStyle({ - left: (this.options.x * position + this.originalLeft).round() + 'px', - top: (this.options.y * position + this.originalTop).round() + 'px' - }); - } -}); - -// for backwards compatibility -Effect.MoveBy = function(element, toTop, toLeft) { - return new Effect.Move(element, - Object.extend({ x: toLeft, y: toTop }, arguments[3] || { })); -}; - -Effect.Scale = Class.create(Effect.Base, { - initialize: function(element, percent) { - this.element = $(element); - if (!this.element) throw(Effect._elementDoesNotExistError); - var options = Object.extend({ - scaleX: true, - scaleY: true, - scaleContent: true, - scaleFromCenter: false, - scaleMode: 'box', // 'box' or 'contents' or { } with provided values - scaleFrom: 100.0, - scaleTo: percent - }, arguments[2] || { }); - this.start(options); - }, - setup: function() { - this.restoreAfterFinish = this.options.restoreAfterFinish || false; - this.elementPositioning = this.element.getStyle('position'); - - this.originalStyle = { }; - ['top','left','width','height','fontSize'].each( function(k) { - this.originalStyle[k] = this.element.style[k]; - }.bind(this)); - - this.originalTop = this.element.offsetTop; - this.originalLeft = this.element.offsetLeft; - - var fontSize = this.element.getStyle('font-size') || '100%'; - ['em','px','%','pt'].each( function(fontSizeType) { - if (fontSize.indexOf(fontSizeType)>0) { - this.fontSize = parseFloat(fontSize); - this.fontSizeType = fontSizeType; - } - }.bind(this)); - - this.factor = (this.options.scaleTo - this.options.scaleFrom)/100; - - this.dims = null; - if (this.options.scaleMode=='box') - this.dims = [this.element.offsetHeight, this.element.offsetWidth]; - if (/^content/.test(this.options.scaleMode)) - this.dims = [this.element.scrollHeight, this.element.scrollWidth]; - if (!this.dims) - this.dims = [this.options.scaleMode.originalHeight, - this.options.scaleMode.originalWidth]; - 
}, - update: function(position) { - var currentScale = (this.options.scaleFrom/100.0) + (this.factor * position); - if (this.options.scaleContent && this.fontSize) - this.element.setStyle({fontSize: this.fontSize * currentScale + this.fontSizeType }); - this.setDimensions(this.dims[0] * currentScale, this.dims[1] * currentScale); - }, - finish: function(position) { - if (this.restoreAfterFinish) this.element.setStyle(this.originalStyle); - }, - setDimensions: function(height, width) { - var d = { }; - if (this.options.scaleX) d.width = width.round() + 'px'; - if (this.options.scaleY) d.height = height.round() + 'px'; - if (this.options.scaleFromCenter) { - var topd = (height - this.dims[0])/2; - var leftd = (width - this.dims[1])/2; - if (this.elementPositioning == 'absolute') { - if (this.options.scaleY) d.top = this.originalTop-topd + 'px'; - if (this.options.scaleX) d.left = this.originalLeft-leftd + 'px'; - } else { - if (this.options.scaleY) d.top = -topd + 'px'; - if (this.options.scaleX) d.left = -leftd + 'px'; - } - } - this.element.setStyle(d); - } -}); - -Effect.Highlight = Class.create(Effect.Base, { - initialize: function(element) { - this.element = $(element); - if (!this.element) throw(Effect._elementDoesNotExistError); - var options = Object.extend({ startcolor: '#ffff99' }, arguments[1] || { }); - this.start(options); - }, - setup: function() { - // Prevent executing on elements not in the layout flow - if (this.element.getStyle('display')=='none') { this.cancel(); return; } - // Disable background image during the effect - this.oldStyle = { }; - if (!this.options.keepBackgroundImage) { - this.oldStyle.backgroundImage = this.element.getStyle('background-image'); - this.element.setStyle({backgroundImage: 'none'}); - } - if (!this.options.endcolor) - this.options.endcolor = this.element.getStyle('background-color').parseColor('#ffffff'); - if (!this.options.restorecolor) - this.options.restorecolor = this.element.getStyle('background-color'); - // init color calculations - this._base = $R(0,2).map(function(i){ return parseInt(this.options.startcolor.slice(i*2+1,i*2+3),16) }.bind(this)); - this._delta = $R(0,2).map(function(i){ return parseInt(this.options.endcolor.slice(i*2+1,i*2+3),16)-this._base[i] }.bind(this)); - }, - update: function(position) { - this.element.setStyle({backgroundColor: $R(0,2).inject('#',function(m,v,i){ - return m+((this._base[i]+(this._delta[i]*position)).round().toColorPart()); }.bind(this)) }); - }, - finish: function() { - this.element.setStyle(Object.extend(this.oldStyle, { - backgroundColor: this.options.restorecolor - })); - } -}); - -Effect.ScrollTo = function(element) { - var options = arguments[1] || { }, - scrollOffsets = document.viewport.getScrollOffsets(), - elementOffsets = $(element).cumulativeOffset(), - max = (window.height || document.body.scrollHeight) - document.viewport.getHeight(); - - if (options.offset) elementOffsets[1] += options.offset; - - return new Effect.Tween(null, - scrollOffsets.top, - elementOffsets[1] > max ? 
max : elementOffsets[1], - options, - function(p){ scrollTo(scrollOffsets.left, p.round()) } - ); -}; - -/* ------------- combination effects ------------- */ - -Effect.Fade = function(element) { - element = $(element); - var oldOpacity = element.getInlineOpacity(); - var options = Object.extend({ - from: element.getOpacity() || 1.0, - to: 0.0, - afterFinishInternal: function(effect) { - if (effect.options.to!=0) return; - effect.element.hide().setStyle({opacity: oldOpacity}); - } - }, arguments[1] || { }); - return new Effect.Opacity(element,options); -}; - -Effect.Appear = function(element) { - element = $(element); - var options = Object.extend({ - from: (element.getStyle('display') == 'none' ? 0.0 : element.getOpacity() || 0.0), - to: 1.0, - // force Safari to render floated elements properly - afterFinishInternal: function(effect) { - effect.element.forceRerendering(); - }, - beforeSetup: function(effect) { - effect.element.setOpacity(effect.options.from).show(); - }}, arguments[1] || { }); - return new Effect.Opacity(element,options); -}; - -Effect.Puff = function(element) { - element = $(element); - var oldStyle = { - opacity: element.getInlineOpacity(), - position: element.getStyle('position'), - top: element.style.top, - left: element.style.left, - width: element.style.width, - height: element.style.height - }; - return new Effect.Parallel( - [ new Effect.Scale(element, 200, - { sync: true, scaleFromCenter: true, scaleContent: true, restoreAfterFinish: true }), - new Effect.Opacity(element, { sync: true, to: 0.0 } ) ], - Object.extend({ duration: 1.0, - beforeSetupInternal: function(effect) { - Position.absolutize(effect.effects[0].element) - }, - afterFinishInternal: function(effect) { - effect.effects[0].element.hide().setStyle(oldStyle); } - }, arguments[1] || { }) - ); -}; - -Effect.BlindUp = function(element) { - element = $(element); - element.makeClipping(); - return new Effect.Scale(element, 0, - Object.extend({ scaleContent: false, - scaleX: false, - restoreAfterFinish: true, - afterFinishInternal: function(effect) { - effect.element.hide().undoClipping(); - } - }, arguments[1] || { }) - ); -}; - -Effect.BlindDown = function(element) { - element = $(element); - var elementDimensions = element.getDimensions(); - return new Effect.Scale(element, 100, Object.extend({ - scaleContent: false, - scaleX: false, - scaleFrom: 0, - scaleMode: {originalHeight: elementDimensions.height, originalWidth: elementDimensions.width}, - restoreAfterFinish: true, - afterSetup: function(effect) { - effect.element.makeClipping().setStyle({height: '0px'}).show(); - }, - afterFinishInternal: function(effect) { - effect.element.undoClipping(); - } - }, arguments[1] || { })); -}; - -Effect.SwitchOff = function(element) { - element = $(element); - var oldOpacity = element.getInlineOpacity(); - return new Effect.Appear(element, Object.extend({ - duration: 0.4, - from: 0, - transition: Effect.Transitions.flicker, - afterFinishInternal: function(effect) { - new Effect.Scale(effect.element, 1, { - duration: 0.3, scaleFromCenter: true, - scaleX: false, scaleContent: false, restoreAfterFinish: true, - beforeSetup: function(effect) { - effect.element.makePositioned().makeClipping(); - }, - afterFinishInternal: function(effect) { - effect.element.hide().undoClipping().undoPositioned().setStyle({opacity: oldOpacity}); - } - }) - } - }, arguments[1] || { })); -}; - -Effect.DropOut = function(element) { - element = $(element); - var oldStyle = { - top: element.getStyle('top'), - left: element.getStyle('left'), 
- opacity: element.getInlineOpacity() }; - return new Effect.Parallel( - [ new Effect.Move(element, {x: 0, y: 100, sync: true }), - new Effect.Opacity(element, { sync: true, to: 0.0 }) ], - Object.extend( - { duration: 0.5, - beforeSetup: function(effect) { - effect.effects[0].element.makePositioned(); - }, - afterFinishInternal: function(effect) { - effect.effects[0].element.hide().undoPositioned().setStyle(oldStyle); - } - }, arguments[1] || { })); -}; - -Effect.Shake = function(element) { - element = $(element); - var options = Object.extend({ - distance: 20, - duration: 0.5 - }, arguments[1] || {}); - var distance = parseFloat(options.distance); - var split = parseFloat(options.duration) / 10.0; - var oldStyle = { - top: element.getStyle('top'), - left: element.getStyle('left') }; - return new Effect.Move(element, - { x: distance, y: 0, duration: split, afterFinishInternal: function(effect) { - new Effect.Move(effect.element, - { x: -distance*2, y: 0, duration: split*2, afterFinishInternal: function(effect) { - new Effect.Move(effect.element, - { x: distance*2, y: 0, duration: split*2, afterFinishInternal: function(effect) { - new Effect.Move(effect.element, - { x: -distance*2, y: 0, duration: split*2, afterFinishInternal: function(effect) { - new Effect.Move(effect.element, - { x: distance*2, y: 0, duration: split*2, afterFinishInternal: function(effect) { - new Effect.Move(effect.element, - { x: -distance, y: 0, duration: split, afterFinishInternal: function(effect) { - effect.element.undoPositioned().setStyle(oldStyle); - }}) }}) }}) }}) }}) }}); -}; - -Effect.SlideDown = function(element) { - element = $(element).cleanWhitespace(); - // SlideDown need to have the content of the element wrapped in a container element with fixed height! - var oldInnerBottom = element.down().getStyle('bottom'); - var elementDimensions = element.getDimensions(); - return new Effect.Scale(element, 100, Object.extend({ - scaleContent: false, - scaleX: false, - scaleFrom: window.opera ? 0 : 1, - scaleMode: {originalHeight: elementDimensions.height, originalWidth: elementDimensions.width}, - restoreAfterFinish: true, - afterSetup: function(effect) { - effect.element.makePositioned(); - effect.element.down().makePositioned(); - if (window.opera) effect.element.setStyle({top: ''}); - effect.element.makeClipping().setStyle({height: '0px'}).show(); - }, - afterUpdateInternal: function(effect) { - effect.element.down().setStyle({bottom: - (effect.dims[0] - effect.element.clientHeight) + 'px' }); - }, - afterFinishInternal: function(effect) { - effect.element.undoClipping().undoPositioned(); - effect.element.down().undoPositioned().setStyle({bottom: oldInnerBottom}); } - }, arguments[1] || { }) - ); -}; - -Effect.SlideUp = function(element) { - element = $(element).cleanWhitespace(); - var oldInnerBottom = element.down().getStyle('bottom'); - var elementDimensions = element.getDimensions(); - return new Effect.Scale(element, window.opera ? 
0 : 1, - Object.extend({ scaleContent: false, - scaleX: false, - scaleMode: 'box', - scaleFrom: 100, - scaleMode: {originalHeight: elementDimensions.height, originalWidth: elementDimensions.width}, - restoreAfterFinish: true, - afterSetup: function(effect) { - effect.element.makePositioned(); - effect.element.down().makePositioned(); - if (window.opera) effect.element.setStyle({top: ''}); - effect.element.makeClipping().show(); - }, - afterUpdateInternal: function(effect) { - effect.element.down().setStyle({bottom: - (effect.dims[0] - effect.element.clientHeight) + 'px' }); - }, - afterFinishInternal: function(effect) { - effect.element.hide().undoClipping().undoPositioned(); - effect.element.down().undoPositioned().setStyle({bottom: oldInnerBottom}); - } - }, arguments[1] || { }) - ); -}; - -// Bug in opera makes the TD containing this element expand for a instance after finish -Effect.Squish = function(element) { - return new Effect.Scale(element, window.opera ? 1 : 0, { - restoreAfterFinish: true, - beforeSetup: function(effect) { - effect.element.makeClipping(); - }, - afterFinishInternal: function(effect) { - effect.element.hide().undoClipping(); - } - }); -}; - -Effect.Grow = function(element) { - element = $(element); - var options = Object.extend({ - direction: 'center', - moveTransition: Effect.Transitions.sinoidal, - scaleTransition: Effect.Transitions.sinoidal, - opacityTransition: Effect.Transitions.full - }, arguments[1] || { }); - var oldStyle = { - top: element.style.top, - left: element.style.left, - height: element.style.height, - width: element.style.width, - opacity: element.getInlineOpacity() }; - - var dims = element.getDimensions(); - var initialMoveX, initialMoveY; - var moveX, moveY; - - switch (options.direction) { - case 'top-left': - initialMoveX = initialMoveY = moveX = moveY = 0; - break; - case 'top-right': - initialMoveX = dims.width; - initialMoveY = moveY = 0; - moveX = -dims.width; - break; - case 'bottom-left': - initialMoveX = moveX = 0; - initialMoveY = dims.height; - moveY = -dims.height; - break; - case 'bottom-right': - initialMoveX = dims.width; - initialMoveY = dims.height; - moveX = -dims.width; - moveY = -dims.height; - break; - case 'center': - initialMoveX = dims.width / 2; - initialMoveY = dims.height / 2; - moveX = -dims.width / 2; - moveY = -dims.height / 2; - break; - } - - return new Effect.Move(element, { - x: initialMoveX, - y: initialMoveY, - duration: 0.01, - beforeSetup: function(effect) { - effect.element.hide().makeClipping().makePositioned(); - }, - afterFinishInternal: function(effect) { - new Effect.Parallel( - [ new Effect.Opacity(effect.element, { sync: true, to: 1.0, from: 0.0, transition: options.opacityTransition }), - new Effect.Move(effect.element, { x: moveX, y: moveY, sync: true, transition: options.moveTransition }), - new Effect.Scale(effect.element, 100, { - scaleMode: { originalHeight: dims.height, originalWidth: dims.width }, - sync: true, scaleFrom: window.opera ? 
1 : 0, transition: options.scaleTransition, restoreAfterFinish: true}) - ], Object.extend({ - beforeSetup: function(effect) { - effect.effects[0].element.setStyle({height: '0px'}).show(); - }, - afterFinishInternal: function(effect) { - effect.effects[0].element.undoClipping().undoPositioned().setStyle(oldStyle); - } - }, options) - ) - } - }); -}; - -Effect.Shrink = function(element) { - element = $(element); - var options = Object.extend({ - direction: 'center', - moveTransition: Effect.Transitions.sinoidal, - scaleTransition: Effect.Transitions.sinoidal, - opacityTransition: Effect.Transitions.none - }, arguments[1] || { }); - var oldStyle = { - top: element.style.top, - left: element.style.left, - height: element.style.height, - width: element.style.width, - opacity: element.getInlineOpacity() }; - - var dims = element.getDimensions(); - var moveX, moveY; - - switch (options.direction) { - case 'top-left': - moveX = moveY = 0; - break; - case 'top-right': - moveX = dims.width; - moveY = 0; - break; - case 'bottom-left': - moveX = 0; - moveY = dims.height; - break; - case 'bottom-right': - moveX = dims.width; - moveY = dims.height; - break; - case 'center': - moveX = dims.width / 2; - moveY = dims.height / 2; - break; - } - - return new Effect.Parallel( - [ new Effect.Opacity(element, { sync: true, to: 0.0, from: 1.0, transition: options.opacityTransition }), - new Effect.Scale(element, window.opera ? 1 : 0, { sync: true, transition: options.scaleTransition, restoreAfterFinish: true}), - new Effect.Move(element, { x: moveX, y: moveY, sync: true, transition: options.moveTransition }) - ], Object.extend({ - beforeStartInternal: function(effect) { - effect.effects[0].element.makePositioned().makeClipping(); - }, - afterFinishInternal: function(effect) { - effect.effects[0].element.hide().undoClipping().undoPositioned().setStyle(oldStyle); } - }, options) - ); -}; - -Effect.Pulsate = function(element) { - element = $(element); - var options = arguments[1] || { }; - var oldOpacity = element.getInlineOpacity(); - var transition = options.transition || Effect.Transitions.sinoidal; - var reverser = function(pos){ return transition(1-Effect.Transitions.pulse(pos, options.pulses)) }; - reverser.bind(transition); - return new Effect.Opacity(element, - Object.extend(Object.extend({ duration: 2.0, from: 0, - afterFinishInternal: function(effect) { effect.element.setStyle({opacity: oldOpacity}); } - }, options), {transition: reverser})); -}; - -Effect.Fold = function(element) { - element = $(element); - var oldStyle = { - top: element.style.top, - left: element.style.left, - width: element.style.width, - height: element.style.height }; - element.makeClipping(); - return new Effect.Scale(element, 5, Object.extend({ - scaleContent: false, - scaleX: false, - afterFinishInternal: function(effect) { - new Effect.Scale(element, 1, { - scaleContent: false, - scaleY: false, - afterFinishInternal: function(effect) { - effect.element.hide().undoClipping().setStyle(oldStyle); - } }); - }}, arguments[1] || { })); -}; - -Effect.Morph = Class.create(Effect.Base, { - initialize: function(element) { - this.element = $(element); - if (!this.element) throw(Effect._elementDoesNotExistError); - var options = Object.extend({ - style: { } - }, arguments[1] || { }); - - if (!Object.isString(options.style)) this.style = $H(options.style); - else { - if (options.style.include(':')) - this.style = options.style.parseStyle(); - else { - this.element.addClassName(options.style); - this.style = $H(this.element.getStyles()); - 
this.element.removeClassName(options.style); - var css = this.element.getStyles(); - this.style = this.style.reject(function(style) { - return style.value == css[style.key]; - }); - options.afterFinishInternal = function(effect) { - effect.element.addClassName(effect.options.style); - effect.transforms.each(function(transform) { - effect.element.style[transform.style] = ''; - }); - } - } - } - this.start(options); - }, - - setup: function(){ - function parseColor(color){ - if (!color || ['rgba(0, 0, 0, 0)','transparent'].include(color)) color = '#ffffff'; - color = color.parseColor(); - return $R(0,2).map(function(i){ - return parseInt( color.slice(i*2+1,i*2+3), 16 ) - }); - } - this.transforms = this.style.map(function(pair){ - var property = pair[0], value = pair[1], unit = null; - - if (value.parseColor('#zzzzzz') != '#zzzzzz') { - value = value.parseColor(); - unit = 'color'; - } else if (property == 'opacity') { - value = parseFloat(value); - if (Prototype.Browser.IE && (!this.element.currentStyle.hasLayout)) - this.element.setStyle({zoom: 1}); - } else if (Element.CSS_LENGTH.test(value)) { - var components = value.match(/^([\+\-]?[0-9\.]+)(.*)$/); - value = parseFloat(components[1]); - unit = (components.length == 3) ? components[2] : null; - } - - var originalValue = this.element.getStyle(property); - return { - style: property.camelize(), - originalValue: unit=='color' ? parseColor(originalValue) : parseFloat(originalValue || 0), - targetValue: unit=='color' ? parseColor(value) : value, - unit: unit - }; - }.bind(this)).reject(function(transform){ - return ( - (transform.originalValue == transform.targetValue) || - ( - transform.unit != 'color' && - (isNaN(transform.originalValue) || isNaN(transform.targetValue)) - ) - ) - }); - }, - update: function(position) { - var style = { }, transform, i = this.transforms.length; - while(i--) - style[(transform = this.transforms[i]).style] = - transform.unit=='color' ? '#'+ - (Math.round(transform.originalValue[0]+ - (transform.targetValue[0]-transform.originalValue[0])*position)).toColorPart() + - (Math.round(transform.originalValue[1]+ - (transform.targetValue[1]-transform.originalValue[1])*position)).toColorPart() + - (Math.round(transform.originalValue[2]+ - (transform.targetValue[2]-transform.originalValue[2])*position)).toColorPart() : - (transform.originalValue + - (transform.targetValue - transform.originalValue) * position).toFixed(3) + - (transform.unit === null ? 
'' : transform.unit); - this.element.setStyle(style, true); - } -}); - -Effect.Transform = Class.create({ - initialize: function(tracks){ - this.tracks = []; - this.options = arguments[1] || { }; - this.addTracks(tracks); - }, - addTracks: function(tracks){ - tracks.each(function(track){ - track = $H(track); - var data = track.values().first(); - this.tracks.push($H({ - ids: track.keys().first(), - effect: Effect.Morph, - options: { style: data } - })); - }.bind(this)); - return this; - }, - play: function(){ - return new Effect.Parallel( - this.tracks.map(function(track){ - var ids = track.get('ids'), effect = track.get('effect'), options = track.get('options'); - var elements = [$(ids) || $$(ids)].flatten(); - return elements.map(function(e){ return new effect(e, Object.extend({ sync:true }, options)) }); - }).flatten(), - this.options - ); - } -}); - -Element.CSS_PROPERTIES = $w( - 'backgroundColor backgroundPosition borderBottomColor borderBottomStyle ' + - 'borderBottomWidth borderLeftColor borderLeftStyle borderLeftWidth ' + - 'borderRightColor borderRightStyle borderRightWidth borderSpacing ' + - 'borderTopColor borderTopStyle borderTopWidth bottom clip color ' + - 'fontSize fontWeight height left letterSpacing lineHeight ' + - 'marginBottom marginLeft marginRight marginTop markerOffset maxHeight '+ - 'maxWidth minHeight minWidth opacity outlineColor outlineOffset ' + - 'outlineWidth paddingBottom paddingLeft paddingRight paddingTop ' + - 'right textIndent top width wordSpacing zIndex'); - -Element.CSS_LENGTH = /^(([\+\-]?[0-9\.]+)(em|ex|px|in|cm|mm|pt|pc|\%))|0$/; - -String.__parseStyleElement = document.createElement('div'); -String.prototype.parseStyle = function(){ - var style, styleRules = $H(); - if (Prototype.Browser.WebKit) - style = new Element('div',{style:this}).style; - else { - String.__parseStyleElement.innerHTML = '
<div style="' + this + '"></div>
    '; - style = String.__parseStyleElement.childNodes[0].style; - } - - Element.CSS_PROPERTIES.each(function(property){ - if (style[property]) styleRules.set(property, style[property]); - }); - - if (Prototype.Browser.IE && this.include('opacity')) - styleRules.set('opacity', this.match(/opacity:\s*((?:0|1)?(?:\.\d*)?)/)[1]); - - return styleRules; -}; - -if (document.defaultView && document.defaultView.getComputedStyle) { - Element.getStyles = function(element) { - var css = document.defaultView.getComputedStyle($(element), null); - return Element.CSS_PROPERTIES.inject({ }, function(styles, property) { - styles[property] = css[property]; - return styles; - }); - }; -} else { - Element.getStyles = function(element) { - element = $(element); - var css = element.currentStyle, styles; - styles = Element.CSS_PROPERTIES.inject({ }, function(hash, property) { - hash.set(property, css[property]); - return hash; - }); - if (!styles.opacity) styles.set('opacity', element.getOpacity()); - return styles; - }; -}; - -Effect.Methods = { - morph: function(element, style) { - element = $(element); - new Effect.Morph(element, Object.extend({ style: style }, arguments[2] || { })); - return element; - }, - visualEffect: function(element, effect, options) { - element = $(element) - var s = effect.dasherize().camelize(), klass = s.charAt(0).toUpperCase() + s.substring(1); - new Effect[klass](element, options); - return element; - }, - highlight: function(element, options) { - element = $(element); - new Effect.Highlight(element, options); - return element; - } -}; - -$w('fade appear grow shrink fold blindUp blindDown slideUp slideDown '+ - 'pulsate shake puff squish switchOff dropOut').each( - function(effect) { - Effect.Methods[effect] = function(element, options){ - element = $(element); - Effect[effect.charAt(0).toUpperCase() + effect.substring(1)](element, options); - return element; - } - } -); - -$w('getInlineOpacity forceRerendering setContentZoom collectTextNodes collectTextNodesIgnoreClass getStyles').each( - function(f) { Effect.Methods[f] = Element[f]; } -); - -Element.addMethods(Effect.Methods); diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/robots.txt b/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/robots.txt deleted file mode 100644 index 085187fa..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/public/robots.txt +++ /dev/null @@ -1,5 +0,0 @@ -# See http://www.robotstxt.org/wc/norobots.html for documentation on how to use the robots.txt file -# -# To ban all spiders from the entire site uncomment the next two lines: -# User-Agent: * -# Disallow: / diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/about b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/about deleted file mode 100755 index cd38a32a..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/about +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/about' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/console b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/console deleted file mode 100755 index 498077ab..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/console +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/console' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/destroy b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/destroy deleted file mode 100755 index a4df765a..00000000 --- 
a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/destroy +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/destroy' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/generate b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/generate deleted file mode 100755 index 173a9f14..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/generate +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/generate' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/performance/benchmarker b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/performance/benchmarker deleted file mode 100755 index c842d35d..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/performance/benchmarker +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/performance/benchmarker' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/performance/profiler b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/performance/profiler deleted file mode 100755 index d855ac8b..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/performance/profiler +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/performance/profiler' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/performance/request b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/performance/request deleted file mode 100755 index ae3f38c7..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/performance/request +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/performance/request' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/plugin b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/plugin deleted file mode 100755 index 87cd2070..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/plugin +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/plugin' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/process/inspector b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/process/inspector deleted file mode 100755 index bf25ad86..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/process/inspector +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/process/inspector' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/process/reaper b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/process/reaper deleted file mode 100755 index c77f0453..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/process/reaper +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/process/reaper' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/process/spawner b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/process/spawner deleted file mode 100755 index 7118f398..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/process/spawner +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/process/spawner' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/runner 
b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/runner deleted file mode 100755 index a4a7cb25..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/runner +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/runner' diff --git a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/server b/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/server deleted file mode 100755 index 3c67f39b..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/rails_app/script/server +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/server' diff --git a/vendor/gems/gems/thin-1.2.5/spec/request/mongrel_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/request/mongrel_spec.rb deleted file mode 100644 index 20cc3357..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/request/mongrel_spec.rb +++ /dev/null @@ -1,39 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' -require 'digest/sha1' - -describe Request, 'legacy Mongrel tests' do - it 'should raise error on large header names' do - proc { R("GET /#{rand_data(10,120)} HTTP/1.1\r\nX-#{rand_data(1024, 1024+(1024))}: Test\r\n\r\n") }. - should raise_error(InvalidRequest) - end - - it 'should raise error on large mangled field values' do - proc { R("GET /#{rand_data(10,120)} HTTP/1.1\r\nX-Test: #{rand_data(1024, 1024*1024, false)}\r\n\r\n") }. - should raise_error(InvalidRequest) - end - - it 'should raise error on big fat ugly headers' do - get = "GET /#{rand_data(10,120)} HTTP/1.1\r\n" - get << "X-Test: test\r\n" * (80 * 1024) - proc { R(get) }.should raise_error(InvalidRequest) - end - - it 'should raise error on random garbage' do - proc { R("GET #{rand_data(1024, 1024+(1024), false)} #{rand_data(1024, 1024+(1024), false)}\r\n\r\n") }. - should raise_error(InvalidRequest) - end - - private - def rand_data(min, max, readable=true) - count = min + ((rand(max)+1) *10).to_i - res = count.to_s + "/" - - if readable - res << Digest::SHA1.hexdigest(rand(count * 100).to_s) * (count / 40) - else - res << Digest::SHA1.digest(rand(count * 100).to_s) * (count / 20) - end - - return res - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/request/parser_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/request/parser_spec.rb deleted file mode 100644 index 17b44b45..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/request/parser_spec.rb +++ /dev/null @@ -1,215 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -# Require mongrel so we can test that Thin parser don't clash w/ Mongrel parser. 
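The deleted legacy-Mongrel examples above feed the parser oversized header names and values and expect InvalidRequest to be raised. Below is a minimal, pure-Ruby sketch of that kind of size guard; the real check lives inside Thin's Ragel/C parser, so the constant value and the exception's superclass here are assumptions taken from the figures the specs use, not Thin's actual code.

    class InvalidRequest < IOError; end

    # Assumed limit, mirroring the 1024 * (80 + 32) figure exercised by the specs.
    MAX_HEADER = 1024 * (80 + 32)

    # Reject a raw request whose header section exceeds MAX_HEADER bytes.
    def check_header_size!(raw_request)
      head = raw_request.split("\r\n\r\n", 2).first
      raise InvalidRequest, "header section too large" if head.bytesize > MAX_HEADER
      head
    end

    check_header_size!("GET / HTTP/1.1\r\nHost: localhost\r\n\r\n")                    # fine
    # check_header_size!("GET / HTTP/1.1\r\n" + "X-Test: test\r\n" * (80 * 1024))      # raises InvalidRequest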
-begin - require 'mongrel' -rescue LoadError - warn "Install mongrel to test compatibility w/ it" -end - -describe Request, 'parser' do - it 'should include basic headers' do - request = R("GET / HTTP/1.1\r\nHost: localhost\r\n\r\n") - request.env['SERVER_PROTOCOL'].should == 'HTTP/1.1' - request.env['REQUEST_PATH'].should == '/' - request.env['HTTP_VERSION'].should == 'HTTP/1.1' - request.env['REQUEST_URI'].should == '/' - request.env['GATEWAY_INTERFACE'].should == 'CGI/1.2' - request.env['REQUEST_METHOD'].should == 'GET' - request.env["rack.url_scheme"].should == 'http' - request.env['FRAGMENT'].to_s.should be_empty - request.env['QUERY_STRING'].to_s.should be_empty - - request.should validate_with_lint - end - - it 'should not prepend HTTP_ to Content-Type and Content-Length' do - request = R("POST / HTTP/1.1\r\nHost: localhost\r\nContent-Type: text/html\r\nContent-Length: 2\r\n\r\naa") - request.env.keys.should_not include('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH') - request.env.keys.should include('CONTENT_TYPE', 'CONTENT_LENGTH') - - request.should validate_with_lint - end - - it 'should raise error on invalid request line' do - proc { R("GET / SsUTF/1.1") }.should raise_error(InvalidRequest) - proc { R("GET / HTTP/1.1yousmelllikecheeze") }.should raise_error(InvalidRequest) - end - - it 'should support fragment in uri' do - request = R("GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\nHost: localhost\r\n\r\n") - - request.env['REQUEST_URI'].should == '/forums/1/topics/2375?page=1' - request.env['PATH_INFO'].should == '/forums/1/topics/2375' - request.env['QUERY_STRING'].should == 'page=1' - request.env['FRAGMENT'].should == 'posts-17408' - - request.should validate_with_lint - end - - it 'should parse path with query string' do - request = R("GET /index.html?234235 HTTP/1.1\r\nHost: localhost\r\n\r\n") - request.env['REQUEST_PATH'].should == '/index.html' - request.env['QUERY_STRING'].should == '234235' - request.env['FRAGMENT'].should be_nil - - request.should validate_with_lint - end - - it 'should parse headers from GET request' do - request = R(<<-EOS, true) -GET / HTTP/1.1 -Host: myhost.com:3000 -User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US; rv:1.8.1.9) Gecko/20071025 Firefox/2.0.0.9 -Accept: text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5 -Accept-Language: en-us,en;q=0.5 -Accept-Encoding: gzip,deflate -Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7 -Cookie: mium=7 -Keep-Alive: 300 -Connection: keep-alive - -EOS - request.env['HTTP_HOST'].should == 'myhost.com:3000' - request.env['SERVER_NAME'].should == 'myhost.com' - request.env['SERVER_PORT'].should == '3000' - request.env['HTTP_COOKIE'].should == 'mium=7' - - request.should validate_with_lint - end - - it 'should parse POST request with data' do - request = R(<<-EOS.chomp, true) -POST /postit HTTP/1.1 -Host: localhost:3000 -User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US; rv:1.8.1.9) Gecko/20071025 Firefox/2.0.0.9 -Accept: text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5 -Accept-Language: en-us,en;q=0.5 -Accept-Encoding: gzip,deflate -Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7 -Keep-Alive: 300 -Connection: keep-alive -Content-Type: text/html -Content-Length: 37 - -name=marc&email=macournoyer@gmail.com -EOS - - request.env['REQUEST_METHOD'].should == 'POST' - request.env['REQUEST_URI'].should == '/postit' - request.env['CONTENT_TYPE'].should == 'text/html' - 
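The parser examples above pin down how raw header lines end up in the Rack env: headers are upcased with dashes turned into underscores and given an HTTP_ prefix, except Content-Type and Content-Length, and SERVER_NAME/SERVER_PORT are derived from the Host header. Thin does this in its C parser; the pure-Ruby sketch below only illustrates the mapping the specs assert (the helper name and the "80" default port are my own):

    # Pure-Ruby illustration of the header-to-env mapping asserted above.
    def headers_to_env(raw_headers)
      env = {}
      raw_headers.split("\r\n").each do |line|
        name, value = line.split(/:\s*/, 2)
        key = name.upcase.tr('-', '_')
        key = "HTTP_#{key}" unless %w(CONTENT_TYPE CONTENT_LENGTH).include?(key)
        env[key] = value
      end
      if (host = env['HTTP_HOST'])
        server_name, server_port = host.split(':')
        env['SERVER_NAME'] = server_name
        env['SERVER_PORT'] = server_port || '80'   # assumed default when Host carries no port
      end
      env
    end

    env = headers_to_env("Host: myhost.com:3000\r\nCookie: mium=7\r\nContent-Length: 37")
    env['SERVER_NAME']    # => "myhost.com"
    env['SERVER_PORT']    # => "3000"
    env['HTTP_COOKIE']    # => "mium=7"
    env['CONTENT_LENGTH'] # => "37"  (no HTTP_ prefix)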
request.env['CONTENT_LENGTH'].should == '37' - request.env['HTTP_ACCEPT'].should == 'text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5' - request.env['HTTP_ACCEPT_LANGUAGE'].should == 'en-us,en;q=0.5' - - request.body.rewind - request.body.read.should == 'name=marc&email=macournoyer@gmail.com' - request.body.class.should == StringIO - - request.should validate_with_lint - end - - it 'should not fuck up on stupid fucked IE6 headers' do - body = <<-EOS -POST /codes/58-tracking-file-downloads-automatically-in-google-analytics-with-prototype/refactors HTTP/1.0 -X-Real-IP: 62.24.71.95 -X-Forwarded-For: 62.24.71.95 -Host: refactormycode.com -Connection: close -TE: deflate,gzip;q=0.3 -Accept: */* -Range: bytes=0-499999 -Referer: http://refactormycode.com/codes/58-tracking-file-downloads-automatically-in-google-analytics-with-prototype -User-Agent: Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1) -Content-Length: 1 -Content-Type: application/x-www-form-urlencoded -Cookie: _refactormycode_session_id=a1b2n3jk4k5; flash=%7B%7D -Cookie2: $Version="1" - -a -EOS - request = R(body, true) - request.env['HTTP_COOKIE2'].should == '$Version="1"' - - request.should validate_with_lint - end - - it 'shoud accept long query string' do - body = <<-EOS -GET /session?open_id_complete=1&nonce=ytPOcwni&nonce=ytPOcwni&openid.assoc_handle=%7BHMAC-SHA1%7D%7B473e38fe%7D%7BJTjJxA%3D%3D%7D&openid.identity=http%3A%2F%2Fmacournoyer.myopenid.com%2F&openid.mode=id_res&openid.op_endpoint=http%3A%2F%2Fwww.myopenid.com%2Fserver&openid.response_nonce=2007-11-29T01%3A19%3A35ZGA5FUU&openid.return_to=http%3A%2F%2Flocalhost%3A3000%2Fsession%3Fopen_id_complete%3D1%26nonce%3DytPOcwni%26nonce%3DytPOcwni&openid.sig=lPIRgwpfR6JAdGGnb0ZjcY%2FWjr8%3D&openid.signed=assoc_handle%2Cidentity%2Cmode%2Cop_endpoint%2Cresponse_nonce%2Creturn_to%2Csigned%2Csreg.email%2Csreg.nickname&openid.sreg.email=macournoyer%40yahoo.ca&openid.sreg.nickname=macournoyer HTTP/1.1 -Host: localhost:3000 - -EOS - request = R(body, true) - - request.env['QUERY_STRING'].should == 'open_id_complete=1&nonce=ytPOcwni&nonce=ytPOcwni&openid.assoc_handle=%7BHMAC-SHA1%7D%7B473e38fe%7D%7BJTjJxA%3D%3D%7D&openid.identity=http%3A%2F%2Fmacournoyer.myopenid.com%2F&openid.mode=id_res&openid.op_endpoint=http%3A%2F%2Fwww.myopenid.com%2Fserver&openid.response_nonce=2007-11-29T01%3A19%3A35ZGA5FUU&openid.return_to=http%3A%2F%2Flocalhost%3A3000%2Fsession%3Fopen_id_complete%3D1%26nonce%3DytPOcwni%26nonce%3DytPOcwni&openid.sig=lPIRgwpfR6JAdGGnb0ZjcY%2FWjr8%3D&openid.signed=assoc_handle%2Cidentity%2Cmode%2Cop_endpoint%2Cresponse_nonce%2Creturn_to%2Csigned%2Csreg.email%2Csreg.nickname&openid.sreg.email=macournoyer%40yahoo.ca&openid.sreg.nickname=macournoyer' - - request.should validate_with_lint - end - - it 'should parse even with stupid Content-Length' do - body = <<-EOS.chomp -POST / HTTP/1.1 -Host: localhost:3000 -Content-Length: 300 - -aye -EOS - request = R(body, true) - - request.body.rewind - request.body.read.should == 'aye' - end - - it "should parse ie6 urls" do - %w(/some/random/path" - /some/random/path> - /some/random/path< - /we/love/you/ie6?q=<""> - /url?<="&>=" - /mal"formed"? 
- ).each do |path| - parser = HttpParser.new - req = {} - sorta_safe = %(GET #{path} HTTP/1.1\r\n\r\n) - nread = parser.execute(req, sorta_safe, 0) - - sorta_safe.size.should == nread - 1 # Ragel 6 skips last linebreak - parser.should be_finished - parser.should_not be_error - end - end - - xit "should parse absolute request URI" do - request = R(<<-EOS, true) -GET http://localhost:3000/hi HTTP/1.1 -Host: localhost:3000 - -EOS - request.env['PATH_INFO'].should == '/hi' - - request.should validate_with_lint - end - - - it "should fails on heders larger then MAX_HEADER" do - proc { R("GET / HTTP/1.1\r\nFoo: #{'X' * Request::MAX_HEADER}\r\n\r\n") }.should raise_error(InvalidRequest) - end - - it "should default SERVER_NAME to localhost" do - request = R("GET / HTTP/1.1\r\n\r\n") - request.env['SERVER_NAME'].should == "localhost" - end - - it 'should normalize http_fields' do - [ "GET /index.html HTTP/1.1\r\nhos-t: localhost\r\n\r\n", - "GET /index.html HTTP/1.1\r\nhOs_t: localhost\r\n\r\n", - "GET /index.html HTTP/1.1\r\nhoS-T: localhost\r\n\r\n" - ].each { |req_str| - parser = HttpParser.new - req = {} - nread = parser.execute(req, req_str, 0) - req.should be_has_key('HTTP_HOS_T') - } - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/request/persistent_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/request/persistent_spec.rb deleted file mode 100644 index c6d05a6a..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/request/persistent_spec.rb +++ /dev/null @@ -1,35 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Request, 'persistent' do - before do - @request = Request.new - end - - it "should not assume that a persistent connection is maintained for HTTP version 1.0" do - @request.env['HTTP_VERSION'] = 'HTTP/1.0' - @request.should_not be_persistent - end - - it "should assume that a persistent connection is maintained for HTTP version 1.0 when specified" do - @request.env['HTTP_VERSION'] = 'HTTP/1.0' - @request.env['HTTP_CONNECTION'] = 'Keep-Alive' - @request.should be_persistent - end - - it "should maintain a persistent connection for HTTP/1.1 client" do - @request.env['HTTP_VERSION'] = 'HTTP/1.1' - @request.env['HTTP_CONNECTION'] = 'Keep-Alive' - @request.should be_persistent - end - - it "should maintain a persistent connection for HTTP/1.1 client by default" do - @request.env['HTTP_VERSION'] = 'HTTP/1.1' - @request.should be_persistent - end - - it "should not maintain a persistent connection for HTTP/1.1 client when Connection header include close" do - @request.env['HTTP_VERSION'] = 'HTTP/1.1' - @request.env['HTTP_CONNECTION'] = 'close' - @request.should_not be_persistent - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/request/processing_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/request/processing_spec.rb deleted file mode 100644 index 6a8c3935..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/request/processing_spec.rb +++ /dev/null @@ -1,49 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Request, 'processing' do - it 'should parse in chunks' do - request = Request.new - request.parse("POST / HTTP/1.1\r\n").should be_false - request.parse("Host: localhost\r\n").should be_false - request.parse("Content-Length: 9\r\n").should be_false - request.parse("\r\nvery ").should be_false - request.parse("cool").should be_true - - request.env['CONTENT_LENGTH'].should == '9' - request.body.read.should == 'very cool' - request.should validate_with_lint - end - - it "should move body to 
tempfile when too big" do - len = Request::MAX_BODY + 2 - request = Request.new - request.parse("POST /postit HTTP/1.1\r\nContent-Length: #{len}\r\n\r\n#{'X' * (len/2)}") - request.parse('X' * (len/2)) - - request.body.size.should == len - request.should be_finished - request.body.class.should == Tempfile - end - - it "should delete body tempfile when closing" do - body = 'X' * (Request::MAX_BODY + 1) - - request = Request.new - request.parse("POST /postit HTTP/1.1\r\n") - request.parse("Content-Length: #{body.size}\r\n\r\n") - request.parse(body) - - request.body.path.should_not be_nil - request.close - request.body.path.should be_nil - end - - it "should raise error when header is too big" do - big_headers = "X-Test: X\r\n" * (1024 * (80 + 32)) - proc { R("GET / HTTP/1.1\r\n#{big_headers}\r\n") }.should raise_error(InvalidRequest) - end - - it "should set body external encoding to ASCII_8BIT" do - Request.new.body.external_encoding.should == Encoding::ASCII_8BIT - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/response_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/response_spec.rb deleted file mode 100644 index 8e1c885d..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/response_spec.rb +++ /dev/null @@ -1,91 +0,0 @@ -require File.dirname(__FILE__) + '/spec_helper' - -describe Response do - before do - @response = Response.new - @response.headers['Content-Type'] = 'text/html' - @response.headers['Content-Length'] = '0' - @response.body = '' - end - - it 'should output headers' do - @response.headers_output.should include("Content-Type: text/html", "Content-Length: 0", "Connection: close") - end - - it 'should include server name header' do - @response.headers_output.should include("Server: thin") - end - - it 'should output head' do - @response.head.should include("HTTP/1.1 200 OK", "Content-Type: text/html", "Content-Length: 0", - "Connection: close", "\r\n\r\n") - end - - it 'should allow duplicates in headers' do - @response.headers['Set-Cookie'] = 'mium=7' - @response.headers['Set-Cookie'] = 'hi=there' - - @response.head.should include("Set-Cookie: mium=7", "Set-Cookie: hi=there") - end - - it 'should parse simple header values' do - @response.headers = { - 'Host' => 'localhost' - } - - @response.head.should include("Host: localhost") - end - - it 'should parse multiline header values in several headers' do - @response.headers = { - 'Set-Cookie' => "mium=7\nhi=there" - } - - @response.head.should include("Set-Cookie: mium=7", "Set-Cookie: hi=there") - end - - it 'should ignore nil headers' do - @response.headers = nil - @response.headers = { 'Host' => 'localhost' } - @response.headers = { 'Set-Cookie' => nil } - @response.head.should include('Host: localhost') - end - - it 'should output body' do - @response.body = ['', ''] - - out = '' - @response.each { |l| out << l } - out.should include("\r\n\r\n") - end - - it 'should output String body' do - @response.body = '' - - out = '' - @response.each { |l| out << l } - out.should include("\r\n\r\n") - end - - it "should not be persistent by default" do - @response.should_not be_persistent - end - - it "should not be persistent when no Content-Length" do - @response = Response.new - @response.headers['Content-Type'] = 'text/html' - @response.body = '' - - @response.persistent! - @response.should_not be_persistent - end - - it "should be persistent when specified" do - @response.persistent! 
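The persistence examples above, for requests and for responses, boil down to a few rules: HTTP/1.1 connections stay open unless the client sends Connection: close, HTTP/1.0 connections stay open only with an explicit Keep-Alive, and a response can only remain persistent when it carries a Content-Length. A small stand-alone predicate capturing those rules, as a sketch of the behaviour the specs describe rather than Thin's implementation:

    # Request-side rule from persistent_spec: HTTP/1.1 defaults to persistent
    # unless "Connection: close"; HTTP/1.0 needs an explicit "Keep-Alive".
    def persistent_request?(env)
      if env['HTTP_VERSION'] == 'HTTP/1.1'
        env['HTTP_CONNECTION'].to_s.downcase != 'close'
      else
        env['HTTP_CONNECTION'].to_s.downcase == 'keep-alive'
      end
    end

    # Response-side rule from response_spec: even when persistence is requested,
    # the connection can only stay open if the response has a Content-Length.
    def persistent_response?(headers, persistence_requested)
      persistence_requested && headers.key?('Content-Length')
    end

    persistent_request?('HTTP_VERSION' => 'HTTP/1.0')                                     # => false
    persistent_request?('HTTP_VERSION' => 'HTTP/1.0', 'HTTP_CONNECTION' => 'Keep-Alive')  # => true
    persistent_request?('HTTP_VERSION' => 'HTTP/1.1')                                     # => true
    persistent_request?('HTTP_VERSION' => 'HTTP/1.1', 'HTTP_CONNECTION' => 'close')       # => false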
- @response.should be_persistent - end - - it "should be closeable" do - @response.close - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/runner_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/runner_spec.rb deleted file mode 100644 index 57328915..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/runner_spec.rb +++ /dev/null @@ -1,168 +0,0 @@ -require File.dirname(__FILE__) + '/spec_helper' - -describe Runner do - it "should parse options" do - runner = Runner.new(%w(start --pid test.pid --port 5000 -o 3000)) - - runner.options[:pid].should == 'test.pid' - runner.options[:port].should == 5000 - runner.options[:only].should == 3000 - end - - it "should parse specified command" do - Runner.new(%w(start)).command.should == 'start' - Runner.new(%w(stop)).command.should == 'stop' - Runner.new(%w(restart)).command.should == 'restart' - end - - it "should abort on unknow command" do - runner = Runner.new(%w(poop)) - - runner.should_receive(:abort) - runner.run! - end - - it "should exit on empty command" do - runner = Runner.new([]) - - runner.should_receive(:exit).with(1) - - silence_stream(STDOUT) do - runner.run! - end - end - - it "should use Controller when controlling a single server" do - runner = Runner.new(%w(start)) - - controller = mock('controller') - controller.should_receive(:start) - Controllers::Controller.should_receive(:new).and_return(controller) - - runner.run! - end - - it "should use Cluster controller when controlling multiple servers" do - runner = Runner.new(%w(start --servers 3)) - - controller = mock('cluster') - controller.should_receive(:start) - Controllers::Cluster.should_receive(:new).and_return(controller) - - runner.run! - end - - it "should default to single server controller" do - Runner.new(%w(start)).should_not be_a_cluster - end - - it "should consider as a cluster with :servers option" do - Runner.new(%w(start --servers 3)).should be_a_cluster - end - - it "should consider as a cluster with :only option" do - Runner.new(%w(start --only 3000)).should be_a_cluster - end - - it "should warn when require a rack config file" do - STDERR.stub!(:write) - STDERR.should_receive(:write).with(/WARNING:/) - - runner = Runner.new(%w(start -r config.ru)) - runner.run! rescue nil - - runner.options[:rackup].should == 'config.ru' - end - - it "should require file" do - runner = Runner.new(%w(start -r unexisting)) - proc { runner.run! }.should raise_error(LoadError) - end - - it "should remember requires" do - runner = Runner.new(%w(start -r rubygems -r thin)) - runner.options[:require].should == %w(rubygems thin) - end - - it "should remember debug options" do - runner = Runner.new(%w(start -D -V)) - runner.options[:debug].should be_true - runner.options[:trace].should be_true - end - - it "should default debug and trace to false" do - runner = Runner.new(%w(start)) - runner.options[:debug].should_not be_true - runner.options[:trace].should_not be_true - end -end - -describe Runner, 'with config file' do - before do - @runner = Runner.new(%w(start --config spec/configs/cluster.yml)) - end - - it "should load options from file with :config option" do - @runner.send :load_options_from_config_file! 
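The Runner examples above parse a command word (start/stop/restart) plus options such as --pid, --port, -o/--only and --servers before deciding between a single Controller and a Cluster. Below is a rough stand-in built on Ruby's stdlib OptionParser; the option names follow the specs, while the defaults and overall structure are assumptions, not Thin's actual Runner:

    require 'optparse'

    options = { :pid => 'thin.pid', :port => 3000 }   # assumed defaults, for illustration only
    parser  = OptionParser.new do |opts|
      opts.on('--pid FILE', 'PID file')        { |v| options[:pid]     = v }
      opts.on('--port PORT', Integer, 'Port')  { |v| options[:port]    = v }
      opts.on('-o', '--only PORT', Integer)    { |v| options[:only]    = v }
      opts.on('-s', '--servers NUM', Integer)  { |v| options[:servers] = v }
    end

    argv    = %w(start --pid test.pid --port 5000 -o 3000)
    command = argv.shift                      # "start", "stop" or "restart"
    parser.parse(argv)

    p command   # => "start"
    p options   # => {:pid=>"test.pid", :port=>5000, :only=>3000}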
- - @runner.options[:environment].should == 'production' - @runner.options[:chdir].should == 'spec/rails_app' - @runner.options[:port].should == 5000 - @runner.options[:servers].should == 3 - end - - it "should change directory after loading config" do - @orig_dir = Dir.pwd - - controller = mock('controller') - controller.should_receive(:respond_to?).with('start').and_return(true) - controller.should_receive(:start) - Controllers::Cluster.should_receive(:new).and_return(controller) - expected_dir = File.expand_path('spec/rails_app') - - begin - silence_stream(STDERR) do - @runner.run! - end - - Dir.pwd.should == expected_dir - - ensure - # any other spec using relative paths should work as expected - Dir.chdir(@orig_dir) - end - end -end - -describe Runner, "service" do - before do - Thin.stub!(:linux?).and_return(true) - - @controller = mock('service') - Controllers::Service.stub!(:new).and_return(@controller) - end - - it "should use Service controller when controlling all servers" do - runner = Runner.new(%w(start --all)) - - @controller.should_receive(:start) - - runner.run! - end - - it "should call install with arguments" do - runner = Runner.new(%w(install /etc/cool)) - - @controller.should_receive(:install).with('/etc/cool') - - runner.run! - end - - it "should call install without arguments" do - runner = Runner.new(%w(install)) - - @controller.should_receive(:install).with() - - runner.run! - end -end diff --git a/vendor/gems/gems/thin-1.2.5/spec/server/builder_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/server/builder_spec.rb deleted file mode 100644 index 91645720..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/server/builder_spec.rb +++ /dev/null @@ -1,44 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Server, 'app builder' do - it "should build app from constructor" do - app = proc {} - server = Server.new('0.0.0.0', 3000, app) - - server.app.should == app - end - - it "should build app from builder block" do - server = Server.new '0.0.0.0', 3000 do - run(proc { |env| :works }) - end - - server.app.call({}).should == :works - end - - it "should use middlewares in builder block" do - server = Server.new '0.0.0.0', 3000 do - use Rack::ShowExceptions - run(proc { |env| :works }) - end - - server.app.class.should == Rack::ShowExceptions - server.app.call({}).should == :works - end - - it "should work with Rack url mapper" do - server = Server.new '0.0.0.0', 3000 do - map '/test' do - run(proc { |env| [200, {}, 'Found /test'] }) - end - end - - default_env = { 'SCRIPT_NAME' => '' } - - server.app.call(default_env.update('PATH_INFO' => '/'))[0].should == 404 - - status, headers, body = server.app.call(default_env.update('PATH_INFO' => '/test')) - status.should == 200 - body.should == 'Found /test' - end -end diff --git a/vendor/gems/gems/thin-1.2.5/spec/server/pipelining_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/server/pipelining_spec.rb deleted file mode 100644 index 7c02abaf..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/server/pipelining_spec.rb +++ /dev/null @@ -1,110 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Server, "HTTP pipelining" do - before do - calls = 0 - start_server do |env| - calls += 1 - body = env['PATH_INFO'] + '-' + calls.to_s - [200, { 'Content-Type' => 'text/html' }, body] - end - @server.maximum_persistent_connections = 1024 - end - - it "should pipeline request on same socket" do - socket = TCPSocket.new('0.0.0.0', 3333) - socket.write "GET /first HTTP/1.1\r\nConnection: keep-alive\r\n\r\n" - 
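The app-builder examples above hand a block of use/map/run declarations to Server.new; under the hood such a block is compiled by Rack::Builder into a single Rack app. A self-contained sketch of the same idea, driven directly through the rack gem (Rack::MockRequest.env_for is used here only to fabricate a request env):

    require 'rack'

    # use/map/run compiled into one Rack app, as in the builder block examples above.
    app = Rack::Builder.new do
      use Rack::ShowExceptions
      map '/test' do
        run lambda { |env| [200, { 'content-type' => 'text/plain' }, ['Found /test']] }
      end
    end.to_app

    env = Rack::MockRequest.env_for('/test')
    status, _headers, body = app.call(env)
    puts status                        # => 200
    body.each { |chunk| puts chunk }   # => "Found /test"

    # Paths outside the mapped prefix fall through to a 404, as the spec expects.
    puts app.call(Rack::MockRequest.env_for('/'))[0]   # => 404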
socket.flush - socket.write "GET /second HTTP/1.1\r\nConnection: close\r\n\r\n" - socket.flush - response = socket.read - socket.close - - wait_for_requests_to_complete! - - response.should include('/first-1', '/second-2') - end - - it "should pipeline requests by default on HTTP 1.1" do - socket = TCPSocket.new('0.0.0.0', 3333) - socket.write "GET /first HTTP/1.1\r\n\r\n" - socket.flush - socket.write "GET /second HTTP/1.1\r\nConnection: close\r\n\r\n" - socket.flush - response = socket.read - socket.close - - wait_for_requests_to_complete! - - response.should include('/first-1', '/second-2') - end - - it "should not pipeline request by default on HTTP 1.0" do - socket = TCPSocket.new('0.0.0.0', 3333) - socket.write "GET /first HTTP/1.0\r\n\r\n" - socket.flush - socket.write "GET /second HTTP/1.0\r\nConnection: close\r\n\r\n" - response = socket.read - socket.close - - wait_for_requests_to_complete! - - response.should include('/first-1') - response.should_not include('/second-2') - end - - it "should not pipeline request on same socket when connection is closed" do - socket = TCPSocket.new('0.0.0.0', 3333) - socket.write "GET /first HTTP/1.1\r\nConnection: close\r\n\r\n" - socket.flush - socket.write "GET /second HTTP/1.1\r\nConnection: close\r\n\r\n" - response = socket.read - socket.close - - wait_for_requests_to_complete! - - response.should include('/first-1') - response.should_not include('/second-2') - end - - it "should not allow more persistent connection then maximum" do - @server.maximum_persistent_connections = 1 - - socket1 = TCPSocket.new('0.0.0.0', 3333) - socket1.write "GET / HTTP/1.1\r\n\r\n" - socket1.flush - socket2 = TCPSocket.new('0.0.0.0', 3333) - socket2.write "GET / HTTP/1.1\r\n\r\n" - socket2.flush - - @server.backend.persistent_connection_count.should == 1 - @server.backend.size.should == 2 - - socket1.close - socket2.close - end - - it "should decrement persistent connection on close" do - socket = TCPSocket.new('0.0.0.0', 3333) - socket.write "GET / HTTP/1.1\r\n\r\n" - socket.flush - - @server.backend.persistent_connection_count.should == 1 - - socket.write "GET / HTTP/1.1\r\nConnection: close\r\n\r\n" - socket.close - - wait_for_requests_to_complete! - - @server.backend.persistent_connection_count.should == 0 - end - - after do - stop_server - end - - private - def wait_for_requests_to_complete! - sleep 0.1 until @server.backend.size == 0 - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/server/robustness_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/server/robustness_spec.rb deleted file mode 100644 index 8e99bb3d..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/server/robustness_spec.rb +++ /dev/null @@ -1,34 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Server, 'robustness' do - before do - start_server do |env| - body = 'hello!' - [200, { 'Content-Type' => 'text/html' }, body] - end - end - - it "should not crash when header too large" do - 100.times do - begin - socket = TCPSocket.new(DEFAULT_TEST_ADDRESS, DEFAULT_TEST_PORT) - socket.write("GET / HTTP/1.1\r\n") - socket.write("Host: localhost\r\n") - socket.write("Connection: close\r\n") - 10000.times do - socket.write("X-Foo: #{'x' * 100}\r\n") - socket.flush - end - socket.write("\r\n") - socket.read - socket.close - rescue Errno::EPIPE, Errno::ECONNRESET - # Ignore. 
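The pipelining examples above push several requests down one socket before reading anything back and expect the server to answer them in order. The self-contained sketch below reproduces that behaviour with nothing but the socket stdlib: a deliberately naive keep-alive server answers each buffered request on a single connection until it sees Connection: close. It is an illustration of the wire-level idea, not Thin's EventMachine backend.

    require 'socket'

    server = TCPServer.new('127.0.0.1', 0)   # 0 = pick any free port
    port   = server.addr[1]

    srv = Thread.new do
      client = server.accept
      buffer = String.new
      count  = 0
      loop do
        # Pull bytes until at least one complete header block is buffered.
        buffer << client.readpartial(4096) until buffer.include?("\r\n\r\n")
        request, buffer = buffer.split("\r\n\r\n", 2)
        count += 1
        body = "#{request[/\AGET (\S+)/, 1]}-#{count}"
        client.write "HTTP/1.1 200 OK\r\nContent-Length: #{body.bytesize}\r\n\r\n#{body}"
        break if request =~ /^Connection:\s*close/i
      end
      client.close
    end

    sock = TCPSocket.new('127.0.0.1', port)
    # Two GETs written back to back on the same socket -- pipelined.
    sock.write "GET /first HTTP/1.1\r\nHost: example\r\n\r\n"
    sock.write "GET /second HTTP/1.1\r\nHost: example\r\nConnection: close\r\n\r\n"
    puts sock.read    # both responses, bodies "/first-1" then "/second-2"
    sock.close
    srv.join
    server.close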
- end - end - end - - after do - stop_server - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/server/stopping_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/server/stopping_spec.rb deleted file mode 100644 index ad12c64b..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/server/stopping_spec.rb +++ /dev/null @@ -1,55 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Server, "stopping" do - before do - start_server do |env| - [200, { 'Content-Type' => 'text/html' }, ['ok']] - end - @done = false - end - - it "should wait for current requests before soft stopping" do - socket = TCPSocket.new('0.0.0.0', 3333) - socket.write("GET / HTTP/1.1") - EventMachine.next_tick do - @server.stop # Stop the server in the middle of a request - socket.write("\r\n\r\n") - @done = true - end - - timeout(2) do - Thread.pass until @done - end - - out = socket.read - socket.close - - out.should_not be_empty - end - - it "should not accept new requests when soft stopping" do - socket = TCPSocket.new('0.0.0.0', 3333) - socket.write("GET / HTTP/1.1") - @server.stop # Stop the server in the middle of a request - - EventMachine.next_tick do - proc { get('/') }.should raise_error(Errno::ECONNRESET) - end - - socket.close - end - - it "should drop current requests when hard stopping" do - socket = TCPSocket.new('0.0.0.0', 3333) - socket.write("GET / HTTP/1.1") - @server.stop! # Force stop the server in the middle of a request - - EventMachine.next_tick do - socket.should be_closed - end - end - - after do - stop_server - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/server/swiftiply.yml b/vendor/gems/gems/thin-1.2.5/spec/server/swiftiply.yml deleted file mode 100644 index f339acdf..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/server/swiftiply.yml +++ /dev/null @@ -1,6 +0,0 @@ -cluster_address: 0.0.0.0 -cluster_port: 3333 -map: - - incoming: 127.0.0.1 - outgoing: 127.0.0.1:5555 - default: true \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/server/swiftiply_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/server/swiftiply_spec.rb deleted file mode 100644 index 4272e5d1..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/server/swiftiply_spec.rb +++ /dev/null @@ -1,32 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -if SWIFTIPLY_PATH.empty? - warn "Ignoring Server on Swiftiply specs, gem install swiftiply to run" -else - describe Server, 'on Swiftiply' do - before do - @swiftiply = fork do - exec "#{SWIFTIPLY_PATH} -c #{File.dirname(__FILE__)}/swiftiply.yml" - end - wait_for_socket('0.0.0.0', 3333) - sleep 2 # HACK ooh boy, I wish I knew how to make those specs more stable... 
- start_server('0.0.0.0', 5555, :backend => Backends::SwiftiplyClient, :wait_for_socket => false) do |env| - body = env.inspect + env['rack.input'].read - [200, { 'Content-Type' => 'text/html' }, body] - end - end - - it 'should GET from Net::HTTP' do - Net::HTTP.get(URI.parse("http://0.0.0.0:3333/?cthis")).should include('cthis') - end - - it 'should POST from Net::HTTP' do - Net::HTTP.post_form(URI.parse("http://0.0.0.0:3333/"), :arg => 'pirate').body.should include('arg=pirate') - end - - after do - stop_server - Process.kill(9, @swiftiply) - end - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/server/tcp_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/server/tcp_spec.rb deleted file mode 100644 index 841f39ec..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/server/tcp_spec.rb +++ /dev/null @@ -1,57 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Server, 'on TCP socket' do - before do - start_server do |env| - body = env.inspect + env['rack.input'].read - [200, { 'Content-Type' => 'text/html' }, body] - end - end - - it 'should GET from Net::HTTP' do - get('/?cthis').should include('cthis') - end - - it 'should GET from TCPSocket' do - status, headers, body = parse_response(send_data("GET /?this HTTP/1.0\r\nConnection: close\r\n\r\n")) - status.should == 200 - headers['Content-Type'].should == 'text/html' - headers['Connection'].should == 'close' - body.should include('this') - end - - it "should add the Content-Length to the response when not present" do - status, headers, body = parse_response(send_data("GET / HTTP/1.0\r\nConnection: close\r\n\r\n")) - headers.should have_key('Content-Length') - end - - it 'should set the Content-Length to equal the body size in bytes' do - status, headers, body = parse_response(send_data("GET / HTTP/1.0\r\nConnection: close\r\n\r\n")) - headers['Content-Length'].should == (body.respond_to?(:bytesize) ? 
body.bytesize : body.size).to_s - end - - it 'should return empty string on incomplete headers' do - send_data("GET /?this HTTP/1.1\r\nHost:").should be_empty - end - - it 'should return empty string on incorrect Content-Length' do - send_data("POST / HTTP/1.1\r\nContent-Length: 300\r\nConnection: close\r\n\r\naye").should be_empty - end - - it 'should POST from Net::HTTP' do - post('/', :arg => 'pirate').should include('arg=pirate') - end - - it 'should handle big POST' do - big = 'X' * (20 * 1024) - post('/', :big => big).should include(big) - end - - it "should retreive remote address" do - get('/').should include('"REMOTE_ADDR"=>"127.0.0.1"') - end - - after do - stop_server - end -end diff --git a/vendor/gems/gems/thin-1.2.5/spec/server/threaded_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/server/threaded_spec.rb deleted file mode 100644 index 27894c4c..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/server/threaded_spec.rb +++ /dev/null @@ -1,27 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Server, 'with threads' do - before do - @requests = 0 - start_server DEFAULT_TEST_ADDRESS, DEFAULT_TEST_PORT, :threaded => true do |env| - sleep env['PATH_INFO'].delete('/').to_i - @requests += 1 - [200, { 'Content-Type' => 'text/html' }, 'hi'] - end - end - - it "should process request" do - get('/').should_not be_empty - end - - it "should process requests when blocked" do - slow_request = Thread.new { get('/3') } - get('/').should_not be_empty - @requests.should == 1 - slow_request.kill - end - - after do - stop_server - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/server/unix_socket_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/server/unix_socket_spec.rb deleted file mode 100644 index 326d48f9..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/server/unix_socket_spec.rb +++ /dev/null @@ -1,26 +0,0 @@ -require File.dirname(__FILE__) + '/../spec_helper' - -describe Server, "on UNIX domain socket" do - before do - start_server('/tmp/thin_test.sock') do |env| - [200, { 'Content-Type' => 'text/html' }, [env.inspect]] - end - end - - it "should accept GET request" do - get("/?this").should include('this') - end - - it "should retreive remote address" do - get('/').should include('"REMOTE_ADDR"=>"127.0.0.1"') - end - - it "should remove socket file after server stops" do - @server.stop! 
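Besides TCP, the deleted specs exercise the same request/response cycle over a UNIX domain socket. Here is a stripped-down, stdlib-only exchange over such a socket; the path is a throwaway placeholder, and the single readpartial assumes the whole request arrives in one read, which is fine for a local sketch:

    require 'socket'
    require 'tmpdir'

    path   = File.join(Dir.tmpdir, "example_#{Process.pid}.sock")
    server = UNIXServer.new(path)

    srv = Thread.new do
      client  = server.accept
      request = client.readpartial(4096)                   # e.g. "GET /?this HTTP/1.1..."
      body    = "you asked for #{request[/GET (\S+)/, 1]}"
      client.write "HTTP/1.1 200 OK\r\nContent-Length: #{body.bytesize}\r\nConnection: close\r\n\r\n#{body}"
      client.close
    end

    sock = UNIXSocket.new(path)
    sock.write "GET /?this HTTP/1.1\r\nConnection: close\r\n\r\n"
    puts sock.read                                          # response echoes "/?this"
    sock.close
    srv.join
    server.close
    File.delete(path)   # the spec above likewise expects the socket file to be removed on stop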
- File.exist?('/tmp/thin_test.sock').should be_false - end - - after do - stop_server - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/server_spec.rb b/vendor/gems/gems/thin-1.2.5/spec/server_spec.rb deleted file mode 100644 index 338f50e3..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/server_spec.rb +++ /dev/null @@ -1,96 +0,0 @@ -require File.dirname(__FILE__) + '/spec_helper' - -describe Server do - before do - @server = Server.new('0.0.0.0', 3000) - end - - it "should set maximum_connections size" do - @server.maximum_connections = 100 - @server.config - @server.maximum_connections.should == 100 - end - - it "should set lower maximum_connections size when too large" do - @server.maximum_connections = 100_000 - @server.config - @server.maximum_connections.should < 100_000 - end - - it "should default to non-threaded" do - @server.should_not be_threaded - end - - it "should set backend to threaded" do - @server.threaded = true - @server.backend.should be_threaded - end -end - -describe Server, "initialization" do - it "should set host and port" do - server = Server.new('192.168.1.1', 8080) - - server.host.should == '192.168.1.1' - server.port.should == 8080 - end - - it "should set socket" do - server = Server.new('/tmp/thin.sock') - - server.socket.should == '/tmp/thin.sock' - end - - it "should set host, port and app" do - app = proc {} - server = Server.new('192.168.1.1', 8080, app) - - server.host.should_not be_nil - server.app.should == app - end - - it "should set socket and app" do - app = proc {} - server = Server.new('/tmp/thin.sock', app) - - server.socket.should_not be_nil - server.app.should == app - end - - it "should set socket, nil and app" do - app = proc {} - server = Server.new('/tmp/thin.sock', nil, app) - - server.socket.should_not be_nil - server.app.should == app - end - - it "should set host, port and backend" do - server = Server.new('192.168.1.1', 8080, :backend => Thin::Backends::SwiftiplyClient) - - server.host.should_not be_nil - server.backend.should be_kind_of(Thin::Backends::SwiftiplyClient) - end - - it "should set host, port, app and backend" do - app = proc {} - server = Server.new('192.168.1.1', 8080, app, :backend => Thin::Backends::SwiftiplyClient) - - server.host.should_not be_nil - server.app.should == app - server.backend.should be_kind_of(Thin::Backends::SwiftiplyClient) - end - - it "should set port as string" do - app = proc {} - server = Server.new('192.168.1.1', '8080') - - server.host.should == '192.168.1.1' - server.port.should == 8080 - end - - it "should not register signals w/ :signals => false" do - Server.should_not_receive(:setup_signals) - Server.new(:signals => false) - end -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/spec/spec_helper.rb b/vendor/gems/gems/thin-1.2.5/spec/spec_helper.rb deleted file mode 100644 index bec8ac95..00000000 --- a/vendor/gems/gems/thin-1.2.5/spec/spec_helper.rb +++ /dev/null @@ -1,219 +0,0 @@ -require 'rubygems' -require 'thin' -require 'spec' -require 'benchmark' -require 'timeout' -require 'fileutils' -require 'net/http' -require 'socket' - -include Thin - -FileUtils.mkdir_p File.dirname(__FILE__) + '/../log' -Command.script = File.dirname(__FILE__) + '/../bin/thin' -Logging.silent = true - -unless Object.const_defined?(:SWIFTIPLY_PATH) - SWIFTIPLY_PATH = `which swiftiply`.chomp - DEFAULT_TEST_ADDRESS = '0.0.0.0' - DEFAULT_TEST_PORT = 3333 -end - -module Matchers - class BeFasterThen - def initialize(max_time) - require 'benchmark_unit' - 
@max_time = max_time - end - - # Base on benchmark_unit/assertions#compare_benchmarks - def matches?(proc) - @time, multiplier = 0, 1 - - while (@time < 0.01) do - @time = Benchmark::Unit.measure do - multiplier.times &proc - end - multiplier *= 10 - end - - multiplier /= 10 - - iterations = (Benchmark::Unit::CLOCK_TARGET / @time).to_i * multiplier - iterations = 1 if iterations < 1 - - total = Benchmark::Unit.measure do - iterations.times &proc - end - - @time = total / iterations - - @time < @max_time - end - - def failure_message(less_more=:less) - "took <#{@time.inspect} RubySeconds>, should take #{less_more} than #{@max_time} RubySeconds." - end - - def negative_failure_message - failure_message :more - end - end - - class ValidateWithLint - def matches?(request) - @request = request - Rack::Lint.new(proc{[200, {'Content-Type' => 'text/html', 'Content-Length' => '0'}, []]}).call(@request.env) - true - rescue Rack::Lint::LintError => e - @message = e.message - false - end - - def failure_message(negation=nil) - "should#{negation} validate with Rack Lint: #{@message}" - end - - def negative_failure_message - failure_message ' not' - end - end - - class TakeLessThen - def initialize(time) - @time = time - end - - def matches?(proc) - Timeout.timeout(@time) { proc.call } - true - rescue Timeout::Error - false - end - - def failure_message(negation=nil) - "should#{negation} take less then #{@time} sec to run" - end - - def negative_failure_message - failure_message ' not' - end - end - - # Actual matchers that are exposed. - - def be_faster_then(time) - BeFasterThen.new(time) - end - - def validate_with_lint - ValidateWithLint.new - end - - def take_less_then(time) - TakeLessThen.new(time) - end -end - -module Helpers - # Silences any stream for the duration of the block. - # - # silence_stream(STDOUT) do - # puts 'This will never be seen' - # end - # - # puts 'But this will' - # - # (Taken from ActiveSupport) - def silence_stream(stream) - old_stream = stream.dup - stream.reopen(RUBY_PLATFORM =~ /mswin/ ? 'NUL:' : '/dev/null') - stream.sync = true - yield - ensure - stream.reopen(old_stream) - end - - # Create and parse a request - def R(raw, convert_line_feed=false) - raw.gsub!("\n", "\r\n") if convert_line_feed - request = Thin::Request.new - request.parse(raw) - request - end - - def start_server(address=DEFAULT_TEST_ADDRESS, port=DEFAULT_TEST_PORT, options={}, &app) - @server = Thin::Server.new(address, port, options, app) - @server.threaded = options[:threaded] - @server.timeout = 3 - - @thread = Thread.new { @server.start } - if options[:wait_for_socket] - wait_for_socket(address, port) - else - # If we can't ping the address fallback to just wait for the server to run - sleep 1 until @server.running? - end - end - - def stop_server - @server.stop! - @thread.kill - raise "Reactor still running, wtf?" if EventMachine.reactor_running? 
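The validate_with_lint matcher above passes a parsed request's env through a Rack::Lint-wrapped dummy app to check it against the Rack spec. The same check can be driven directly from the rack gem: Rack::MockRequest fabricates a spec-compliant env and Lint raises when either side of the exchange violates the spec. The tiny app below is a placeholder, and this flips the matcher's direction to the more common "lint my app" usage:

    require 'rack'

    # A minimal Rack app wrapped in Rack::Lint, exercised via MockRequest.
    app = lambda { |env| [200, { 'content-type' => 'text/plain' }, ['ok']] }

    response = Rack::MockRequest.new(Rack::Lint.new(app)).get('/')
    puts response.status   # => 200
    puts response.body     # => "ok"
    # A non-compliant app (say, one returning a nil body) would raise
    # Rack::Lint::LintError here, which is what the matcher above rescues.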
- end - - def wait_for_socket(address=DEFAULT_TEST_ADDRESS, port=DEFAULT_TEST_PORT, timeout=5) - Timeout.timeout(timeout) do - loop do - begin - if address.include?('/') - UNIXSocket.new(address).close - else - TCPSocket.new(address, port).close - end - return true - rescue - end - end - end - end - - def send_data(data) - if @server.backend.class == Backends::UnixServer - socket = UNIXSocket.new(@server.socket) - else - socket = TCPSocket.new(@server.host, @server.port) - end - socket.write data - out = socket.read - socket.close - out - end - - def parse_response(response) - raw_headers, body = response.split("\r\n\r\n", 2) - raw_status, raw_headers = raw_headers.split("\r\n", 2) - - status = raw_status.match(%r{\AHTTP/1.1\s+(\d+)\b}).captures.first.to_i - headers = Hash[ *raw_headers.split("\r\n").map { |h| h.split(/:\s+/, 2) }.flatten ] - - [ status, headers, body ] - end - - def get(url) - if @server.backend.class == Backends::UnixServer - send_data("GET #{url} HTTP/1.1\r\nConnection: close\r\n\r\n") - else - Net::HTTP.get(URI.parse("http://#{@server.host}:#{@server.port}" + url)) - end - end - - def post(url, params={}) - Net::HTTP.post_form(URI.parse("http://#{@server.host}:#{@server.port}" + url), params).body - end -end - -Spec::Runner.configure do |config| - config.include Matchers - config.include Helpers -end \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/tasks/announce.rake b/vendor/gems/gems/thin-1.2.5/tasks/announce.rake deleted file mode 100644 index 1b6a44e3..00000000 --- a/vendor/gems/gems/thin-1.2.5/tasks/announce.rake +++ /dev/null @@ -1,22 +0,0 @@ -require 'erb' - -MSG_TEMPLATE = File.dirname(__FILE__) + '/email.erb' -SEND_TO = %w(thin-ruby@googlegroups.com ruby-talk@ruby-lang.org) - -desc 'Generate a template for the new version annoucement' -task :ann do - msg = ERB.new(File.read(MSG_TEMPLATE)).result(binding) - - body = < %w(site:upload rdoc:upload) - - desc 'Deploy on rubyforge' - task :gem => %w(gem:upload_rubyforge deploy:site) -end -desc 'Deploy on all servers' -task :deploy => "deploy:gem" - -def upload(file, to, options={}) - sh %{ssh macournoyer@code.macournoyer.com "rm -rf code.macournoyer.com/#{to}"} if options[:replace] - sh %{scp -rq #{file} macournoyer@code.macournoyer.com:code.macournoyer.com/#{to}} -end diff --git a/vendor/gems/gems/thin-1.2.5/tasks/email.erb b/vendor/gems/gems/thin-1.2.5/tasks/email.erb deleted file mode 100644 index 78877a88..00000000 --- a/vendor/gems/gems/thin-1.2.5/tasks/email.erb +++ /dev/null @@ -1,30 +0,0 @@ -Hey all, - -Thin version <%= Thin::VERSION::STRING %> (codename <%= Thin::VERSION::CODENAME %>) is out! - -== What's new? - -<%= changelog %> - -== Get it! - -Install Thin from RubyForge: - - gem install thin - -Or using my mirror: - - gem install thin --source http://code.macournoyer.com - -== Contribute - -Site: http://code.macournoyer.com/thin/ -Group: http://groups.google.com/group/thin-ruby/topics -Bugs: http://thin.lighthouseapp.com/projects/7212-thin -Code: http://github.com/macournoyer/thin -IRC: #thin on freenode - -Thanks to all the people who contributed to Thin, EventMachine, Rack and Mongrel. 
- -Marc-Andre Cournoyer -http://macournoyer.com/ \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/tasks/gem.rake b/vendor/gems/gems/thin-1.2.5/tasks/gem.rake deleted file mode 100644 index 1ab42291..00000000 --- a/vendor/gems/gems/thin-1.2.5/tasks/gem.rake +++ /dev/null @@ -1,74 +0,0 @@ -require 'rake/gempackagetask' -require 'yaml' - -WIN_SUFFIX = ENV['WIN_SUFFIX'] || 'x86-mswin32' - -task :clean => :clobber_package - -Thin::GemSpec = Gem::Specification.new do |s| - s.name = Thin::NAME - s.version = Thin::VERSION::STRING - s.platform = WIN ? Gem::Platform::CURRENT : Gem::Platform::RUBY - s.summary = - s.description = "A thin and fast web server" - s.author = "Marc-Andre Cournoyer" - s.email = 'macournoyer@gmail.com' - s.homepage = 'http://code.macournoyer.com/thin/' - s.rubyforge_project = 'thin' - s.has_rdoc = true - s.executables = %w(thin) - - s.required_ruby_version = '>= 1.8.5' - - s.add_dependency 'rack', '>= 1.0.0' - s.add_dependency 'eventmachine', '>= 0.12.6' - unless WIN - s.add_dependency 'daemons', '>= 1.0.9' - end - - s.files = %w(COPYING CHANGELOG README Rakefile) + - Dir.glob("{benchmark,bin,doc,example,lib,spec,tasks}/**/*") + - Dir.glob("ext/**/*.{h,c,rb,rl}") - - if WIN - s.files += ["lib/thin_parser.#{Config::CONFIG['DLEXT']}"] - else - s.extensions = FileList["ext/**/extconf.rb"].to_a - end - - s.require_path = "lib" - s.bindir = "bin" -end - -Rake::GemPackageTask.new(Thin::GemSpec) do |p| - p.gem_spec = Thin::GemSpec -end - -task :tag_warn do - puts "*" * 40 - puts "Don't forget to tag the release:" - puts - puts " git tag -m 'Tagging #{Thin::SERVER}' -a v#{Thin::VERSION::STRING}" - puts - puts "or run rake tag" - puts "*" * 40 -end -task :tag do - sh "git tag -m 'Tagging #{Thin::SERVER}' -a v#{Thin::VERSION::STRING}" -end -task :gem => :tag_warn - -namespace :gem do - desc "Update the gemspec for GitHub's gem server" - task :github do - File.open("thin.gemspec", 'w') { |f| f << YAML.dump(Thin::GemSpec) } - end - - desc 'Upload gems (ruby & win32) to rubyforge.org' - task :upload => :gem do - sh 'rubyforge login' - sh "rubyforge add_release thin thin #{Thin::VERSION::STRING} pkg/#{Thin::GemSpec.full_name}.gem" - sh "rubyforge add_file thin thin #{Thin::VERSION::STRING} pkg/#{Thin::GemSpec.full_name}.gem" - sh "rubyforge add_file thin thin #{Thin::VERSION::STRING} pkg/#{Thin::GemSpec.full_name}-#{WIN_SUFFIX}.gem" - end -end diff --git a/vendor/gems/gems/thin-1.2.5/tasks/rdoc.rake b/vendor/gems/gems/thin-1.2.5/tasks/rdoc.rake deleted file mode 100644 index 83efd803..00000000 --- a/vendor/gems/gems/thin-1.2.5/tasks/rdoc.rake +++ /dev/null @@ -1,25 +0,0 @@ -require 'rake/rdoctask' - -CLEAN.include %w(doc/rdoc) - -Rake::RDocTask.new do |rdoc| - rdoc.rdoc_dir = 'doc/rdoc' - rdoc.options += ['--quiet', '--title', Thin::NAME, - "--opname", "index.html", - "--line-numbers", - "--main", "README", - "--inline-source"] - rdoc.template = "site/rdoc.rb" - rdoc.main = "README" - rdoc.title = Thin::NAME - rdoc.rdoc_files.add %w(README) + - FileList['lib/**/*.rb'] + - FileList['bin/*'] -end - -namespace :rdoc do - desc 'Upload rdoc to code.macournoyer.com' - task :upload => :rdoc do - upload "doc/rdoc", 'thin/doc', :replace => true - end -end diff --git a/vendor/gems/gems/thin-1.2.5/tasks/site.rake b/vendor/gems/gems/thin-1.2.5/tasks/site.rake deleted file mode 100644 index 634565c0..00000000 --- a/vendor/gems/gems/thin-1.2.5/tasks/site.rake +++ /dev/null @@ -1,15 +0,0 @@ -namespace :site do - task :build do - mkdir_p 'tmp/site/images' - cd 'tmp/site' do - sh 
"SITE_ROOT='/thin' ruby ../../site/thin.rb --dump" - end - cp 'site/style.css', 'tmp/site' - cp_r Dir['site/images/*'], 'tmp/site/images' - end - - desc 'Upload website to code.macournoyer.com' - task :upload => 'site:build' do - upload 'tmp/site/*', 'thin' - end -end diff --git a/vendor/gems/gems/thin-1.2.5/tasks/spec.rake b/vendor/gems/gems/thin-1.2.5/tasks/spec.rake deleted file mode 100644 index ca61789f..00000000 --- a/vendor/gems/gems/thin-1.2.5/tasks/spec.rake +++ /dev/null @@ -1,43 +0,0 @@ -CLEAN.include %w(coverage tmp log) - -require 'spec/rake/spectask' - -PERF_SPECS = FileList['spec/perf/*_spec.rb'] -WIN_SPECS = %w( - spec/backends/unix_server_spec.rb - spec/controllers/service_spec.rb - spec/daemonizing_spec.rb - spec/server/unix_socket_spec.rb - spec/server/swiftiply_spec.rb -) -# HACK Event machine causes some problems when running multiple -# tests in the same VM so we split the specs in 2 before I find -# a better solution... -SPECS2 = %w(spec/server/threaded_spec.rb spec/server/tcp_spec.rb) -SPECS = FileList['spec/**/*_spec.rb'] - PERF_SPECS - SPECS2 - -def spec_task(name, specs) - Spec::Rake::SpecTask.new(name) do |t| - t.libs << 'lib' - t.spec_opts = %w(-fs -c) - t.spec_files = specs - end -end - -desc "Run all examples" -spec_task :spec, SPECS -spec_task :spec2, SPECS2 -task :spec => [:compile, :spec2] - -desc "Run all performance examples" -spec_task 'spec:perf', PERF_SPECS - -task :check_benchmark_unit_gem do - begin - require 'benchmark_unit' - rescue LoadError - abort "To run specs, install benchmark_unit gem" - end -end - -task 'spec:perf' => :check_benchmark_unit_gem \ No newline at end of file diff --git a/vendor/gems/gems/thin-1.2.5/tasks/stats.rake b/vendor/gems/gems/thin-1.2.5/tasks/stats.rake deleted file mode 100644 index cdae8439..00000000 --- a/vendor/gems/gems/thin-1.2.5/tasks/stats.rake +++ /dev/null @@ -1,28 +0,0 @@ -desc 'Show some stats about the code' -task :stats do - line_count = proc do |path| - Dir[path].collect { |f| File.open(f).readlines.reject { |l| l =~ /(^\s*(\#|\/\*))|^\s*$/ }.size }.inject(0){ |sum,n| sum += n } - end - comment_count = proc do |path| - Dir[path].collect { |f| File.open(f).readlines.select { |l| l =~ /^\s*\#/ }.size }.inject(0) { |sum,n| sum += n } - end - lib = line_count['lib/**/*.rb'] - comment = comment_count['lib/**/*.rb'] - ext = line_count['ext/**/*.{c,h}'] - spec = line_count['spec/**/*.rb'] - - comment_ratio = '%1.2f' % (comment.to_f / lib.to_f) - spec_ratio = '%1.2f' % (spec.to_f / lib.to_f) - - puts '/======================\\' - puts '| Part LOC |' - puts '|======================|' - puts "| lib #{lib.to_s.ljust(5)}|" - puts "| lib comments #{comment.to_s.ljust(5)}|" - puts "| ext #{ext.to_s.ljust(5)}|" - puts "| spec #{spec.to_s.ljust(5)}|" - puts '| ratios: |' - puts "| lib/comment #{comment_ratio.to_s.ljust(5)}|" - puts "| lib/spec #{spec_ratio.to_s.ljust(5)}|" - puts '\======================/' -end diff --git a/vendor/gems/specifications/compass-slickmap-0.2.1.gemspec b/vendor/gems/specifications/compass-slickmap-0.2.1.gemspec new file mode 100644 index 00000000..bc3a3a14 --- /dev/null +++ b/vendor/gems/specifications/compass-slickmap-0.2.1.gemspec @@ -0,0 +1,31 @@ +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{compass-slickmap} + s.version = "0.2.1" + + s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? 
:required_rubygems_version= + s.authors = ["Thomas Reynolds"] + s.date = %q{2009-10-09} + s.email = %q{tdreyno@gmail.com} + s.files = [".gitignore", "Rakefile", "VERSION", "compass-slickmap.gemspec", "lib/slickmap.rb", "lib/slickmap/compass_plugin.rb", "sass/_slickmap.sass", "templates/project/images/L1-center.png", "templates/project/images/L1-left.png", "templates/project/images/L1-right.png", "templates/project/images/L3-bottom.png", "templates/project/images/L3-center.png", "templates/project/images/L3-li-top.png", "templates/project/images/L3-ul-top.png", "templates/project/images/vertical-line.png", "templates/project/images/white-highlight.png", "templates/project/manifest.rb", "templates/project/sitemap.sass"] + s.homepage = %q{http://github.com/tdreyno/compass-slickmap} + s.rdoc_options = ["--charset=UTF-8"] + s.require_paths = ["lib"] + s.rubyforge_project = %q{compassslickmap} + s.rubygems_version = %q{1.3.5} + s.summary = %q{An implementation of SlickmapCSS sitemap in Sass} + + if s.respond_to? :specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 3 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + s.add_runtime_dependency(%q, [">= 0"]) + else + s.add_dependency(%q, [">= 0"]) + end + else + s.add_dependency(%q, [">= 0"]) + end +end diff --git a/vendor/gems/specifications/daemons-1.0.10.gemspec b/vendor/gems/specifications/daemons-1.0.10.gemspec deleted file mode 100644 index fb8b91e7..00000000 --- a/vendor/gems/specifications/daemons-1.0.10.gemspec +++ /dev/null @@ -1,30 +0,0 @@ -# -*- encoding: utf-8 -*- - -Gem::Specification.new do |s| - s.name = %q{daemons} - s.version = "1.0.10" - - s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= - s.authors = ["Thomas Uehlinger"] - s.autorequire = %q{daemons} - s.date = %q{2008-03-20} - s.description = %q{Daemons provides an easy way to wrap existing ruby scripts (for example a self-written server) to be run as a daemon and to be controlled by simple start/stop/restart commands. You can also call blocks as daemons and control them from the parent or just daemonize the current process. 
Besides this basic functionality, daemons offers many advanced features like exception backtracing and logging (in case your ruby script crashes) and monitoring and automatic restarting of your processes if they crash.} - s.email = %q{th.uehlinger@gmx.ch} - s.extra_rdoc_files = ["README", "Releases", "TODO"] - s.files = ["Rakefile", "Releases", "TODO", "README", "LICENSE", "setup.rb", "lib/daemons/application.rb", "lib/daemons/application_group.rb", "lib/daemons/cmdline.rb", "lib/daemons/controller.rb", "lib/daemons/daemonize.rb", "lib/daemons/exceptions.rb", "lib/daemons/monitor.rb", "lib/daemons/pid.rb", "lib/daemons/pidfile.rb", "lib/daemons/pidmem.rb", "lib/daemons.rb", "examples/call", "examples/call/call.rb", "examples/call/call_monitor.rb", "examples/daemonize", "examples/daemonize/daemonize.rb", "examples/run", "examples/run/ctrl_crash.rb", "examples/run/ctrl_exec.rb", "examples/run/ctrl_exit.rb", "examples/run/ctrl_keep_pid_files.rb", "examples/run/ctrl_monitor.rb", "examples/run/ctrl_multiple.rb", "examples/run/ctrl_normal.rb", "examples/run/ctrl_ontop.rb", "examples/run/ctrl_optionparser.rb", "examples/run/ctrl_proc.rb", "examples/run/ctrl_proc.rb.output", "examples/run/ctrl_proc_multiple.rb", "examples/run/ctrl_proc_multiple.rb.output", "examples/run/ctrl_proc_simple.rb", "examples/run/myserver.rb", "examples/run/myserver_crashing.rb", "examples/run/myserver_crashing.rb.output", "examples/run/myserver_exiting.rb"] - s.homepage = %q{http://daemons.rubyforge.org} - s.require_paths = ["lib"] - s.rubyforge_project = %q{daemons} - s.rubygems_version = %q{1.3.5} - s.summary = %q{A toolkit to create and control daemons in different ways} - - if s.respond_to? :specification_version then - current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION - s.specification_version = 2 - - if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then - else - end - else - end -end diff --git a/vendor/gems/specifications/eventmachine-0.12.10.gemspec b/vendor/gems/specifications/eventmachine-0.12.10.gemspec deleted file mode 100644 index e03b00da..00000000 --- a/vendor/gems/specifications/eventmachine-0.12.10.gemspec +++ /dev/null @@ -1,40 +0,0 @@ -# -*- encoding: utf-8 -*- - -Gem::Specification.new do |s| - s.name = %q{eventmachine} - s.version = "0.12.10" - - s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= - s.authors = ["Francis Cianfrocca"] - s.date = %q{2009-10-25} - s.description = %q{EventMachine implements a fast, single-threaded engine for arbitrary network -communications. It's extremely easy to use in Ruby. EventMachine wraps all -interactions with IP sockets, allowing programs to concentrate on the -implementation of network protocols. It can be used to create both network -servers and clients. To create a server or client, a Ruby program only needs -to specify the IP address and port, and provide a Module that implements the -communications protocol. Implementations of several standard network protocols -are provided with the package, primarily to serve as examples. The real goal -of EventMachine is to enable programs to easily interface with other programs -using TCP/IP, especially if custom protocols are required. 
-} - s.email = %q{garbagecat10@gmail.com} - s.extensions = ["ext/extconf.rb", "ext/fastfilereader/extconf.rb"] - s.files = [".gitignore", "README", "Rakefile", "docs/COPYING", "docs/ChangeLog", "docs/DEFERRABLES", "docs/EPOLL", "docs/GNU", "docs/INSTALL", "docs/KEYBOARD", "docs/LEGAL", "docs/LIGHTWEIGHT_CONCURRENCY", "docs/PURE_RUBY", "docs/RELEASE_NOTES", "docs/SMTP", "docs/SPAWNED_PROCESSES", "docs/TODO", "eventmachine.gemspec", "examples/ex_channel.rb", "examples/ex_queue.rb", "examples/helper.rb", "ext/binder.cpp", "ext/binder.h", "ext/cmain.cpp", "ext/cplusplus.cpp", "ext/ed.cpp", "ext/ed.h", "ext/em.cpp", "ext/em.h", "ext/emwin.cpp", "ext/emwin.h", "ext/epoll.cpp", "ext/epoll.h", "ext/eventmachine.h", "ext/eventmachine_cpp.h", "ext/extconf.rb", "ext/fastfilereader/extconf.rb", "ext/fastfilereader/mapper.cpp", "ext/fastfilereader/mapper.h", "ext/fastfilereader/rubymain.cpp", "ext/files.cpp", "ext/files.h", "ext/kb.cpp", "ext/page.cpp", "ext/page.h", "ext/pipe.cpp", "ext/project.h", "ext/rubymain.cpp", "ext/sigs.cpp", "ext/sigs.h", "ext/ssl.cpp", "ext/ssl.h", "java/.classpath", "java/.project", "java/src/com/rubyeventmachine/EmReactor.java", "java/src/com/rubyeventmachine/EmReactorException.java", "java/src/com/rubyeventmachine/EventableChannel.java", "java/src/com/rubyeventmachine/EventableDatagramChannel.java", "java/src/com/rubyeventmachine/EventableSocketChannel.java", "java/src/com/rubyeventmachine/application/Application.java", "java/src/com/rubyeventmachine/application/Connection.java", "java/src/com/rubyeventmachine/application/ConnectionFactory.java", "java/src/com/rubyeventmachine/application/DefaultConnectionFactory.java", "java/src/com/rubyeventmachine/application/PeriodicTimer.java", "java/src/com/rubyeventmachine/application/Timer.java", "java/src/com/rubyeventmachine/tests/ApplicationTest.java", "java/src/com/rubyeventmachine/tests/ConnectTest.java", "java/src/com/rubyeventmachine/tests/EMTest.java", "java/src/com/rubyeventmachine/tests/TestDatagrams.java", "java/src/com/rubyeventmachine/tests/TestServers.java", "java/src/com/rubyeventmachine/tests/TestTimers.java", "lib/em/buftok.rb", "lib/em/callback.rb", "lib/em/channel.rb", "lib/em/connection.rb", "lib/em/deferrable.rb", "lib/em/file_watch.rb", "lib/em/future.rb", "lib/em/messages.rb", "lib/em/process_watch.rb", "lib/em/processes.rb", "lib/em/protocols.rb", "lib/em/protocols/header_and_content.rb", "lib/em/protocols/httpclient.rb", "lib/em/protocols/httpclient2.rb", "lib/em/protocols/line_and_text.rb", "lib/em/protocols/linetext2.rb", "lib/em/protocols/memcache.rb", "lib/em/protocols/object_protocol.rb", "lib/em/protocols/postgres3.rb", "lib/em/protocols/saslauth.rb", "lib/em/protocols/smtpclient.rb", "lib/em/protocols/smtpserver.rb", "lib/em/protocols/socks4.rb", "lib/em/protocols/stomp.rb", "lib/em/protocols/tcptest.rb", "lib/em/queue.rb", "lib/em/spawnable.rb", "lib/em/streamer.rb", "lib/em/timers.rb", "lib/em/version.rb", "lib/eventmachine.rb", "lib/evma.rb", "lib/evma/callback.rb", "lib/evma/container.rb", "lib/evma/factory.rb", "lib/evma/protocol.rb", "lib/evma/reactor.rb", "lib/jeventmachine.rb", "lib/pr_eventmachine.rb", "setup.rb", "tasks/cpp.rake_example", "tests/client.crt", "tests/client.key", "tests/test_attach.rb", "tests/test_basic.rb", "tests/test_channel.rb", "tests/test_connection_count.rb", "tests/test_defer.rb", "tests/test_epoll.rb", "tests/test_error_handler.rb", "tests/test_errors.rb", "tests/test_exc.rb", "tests/test_file_watch.rb", "tests/test_futures.rb", "tests/test_get_sock_opt.rb", 
"tests/test_handler_check.rb", "tests/test_hc.rb", "tests/test_httpclient.rb", "tests/test_httpclient2.rb", "tests/test_inactivity_timeout.rb", "tests/test_kb.rb", "tests/test_ltp.rb", "tests/test_ltp2.rb", "tests/test_next_tick.rb", "tests/test_object_protocol.rb", "tests/test_pause.rb", "tests/test_pending_connect_timeout.rb", "tests/test_process_watch.rb", "tests/test_processes.rb", "tests/test_proxy_connection.rb", "tests/test_pure.rb", "tests/test_queue.rb", "tests/test_running.rb", "tests/test_sasl.rb", "tests/test_send_file.rb", "tests/test_servers.rb", "tests/test_smtpclient.rb", "tests/test_smtpserver.rb", "tests/test_spawn.rb", "tests/test_ssl_args.rb", "tests/test_ssl_methods.rb", "tests/test_ssl_verify.rb", "tests/test_timers.rb", "tests/test_ud.rb", "tests/testem.rb", "web/whatis"] - s.homepage = %q{http://rubyeventmachine.com} - s.rdoc_options = ["--title", "EventMachine", "--main", "README", "--line-numbers", "-x", "lib/em/version", "-x", "lib/emva", "-x", "lib/evma/", "-x", "lib/pr_eventmachine", "-x", "lib/jeventmachine"] - s.require_paths = ["lib"] - s.rubyforge_project = %q{eventmachine} - s.rubygems_version = %q{1.3.5} - s.summary = %q{Ruby/EventMachine library} - - if s.respond_to? :specification_version then - current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION - s.specification_version = 3 - - if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then - else - end - else - end -end diff --git a/vendor/gems/specifications/gemcutter-0.1.7.gemspec b/vendor/gems/specifications/gemcutter-0.1.7.gemspec new file mode 100644 index 00000000..fa20ee26 --- /dev/null +++ b/vendor/gems/specifications/gemcutter-0.1.7.gemspec @@ -0,0 +1,49 @@ +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{gemcutter} + s.version = "0.1.7" + + s.required_rubygems_version = Gem::Requirement.new(">= 1.3.5") if s.respond_to? :required_rubygems_version= + s.authors = ["Nick Quaranto"] + s.date = %q{2009-11-15} + s.description = %q{Adds several commands for using gemcutter.org, such as pushing new gems, migrating gems from RubyForge, and more.} + s.email = %q{nick@quaran.to} + s.files = ["lib/commands/abstract_command.rb", "lib/commands/migrate.rb", "lib/commands/owner.rb", "lib/commands/push.rb", "lib/commands/tumble.rb", "lib/rubygems_plugin.rb", "test/command_helper.rb"] + s.homepage = %q{http://github.com/qrush/gemcutter} + s.post_install_message = %q{ +======================================================================== + + Thanks for installing Gemcutter! You can now run: + + gem tumble use Gemcutter as your primary RubyGem source + gem push publish your gems for the world to use and enjoy + gem migrate take over your gem from RubyForge on Gemcutter + gem owner allow/disallow others to push to your gems + +======================================================================== + +} + s.rdoc_options = ["--charset=UTF-8"] + s.require_paths = ["lib"] + s.rubyforge_project = %q{gemcutter} + s.rubygems_version = %q{1.3.5} + s.summary = %q{Commands to interact with gemcutter.org} + s.test_files = ["test/command_helper.rb"] + + if s.respond_to? 
:specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 3 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + s.add_runtime_dependency(%q, [">= 0"]) + s.add_runtime_dependency(%q, [">= 0"]) + else + s.add_dependency(%q, [">= 0"]) + s.add_dependency(%q, [">= 0"]) + end + else + s.add_dependency(%q, [">= 0"]) + s.add_dependency(%q, [">= 0"]) + end +end diff --git a/vendor/gems/specifications/git-1.2.5.gemspec b/vendor/gems/specifications/git-1.2.5.gemspec new file mode 100644 index 00000000..89d211a4 --- /dev/null +++ b/vendor/gems/specifications/git-1.2.5.gemspec @@ -0,0 +1,30 @@ +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{git} + s.version = "1.2.5" + + s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= + s.authors = ["Scott Chacon"] + s.date = %q{2009-10-16} + s.email = %q{schacon@gmail.com} + s.extra_rdoc_files = ["README"] + s.files = ["lib/git.rb", "lib/git/author.rb", "lib/git/base.rb", "lib/git/branch.rb", "lib/git/branches.rb", "lib/git/diff.rb", "lib/git/index.rb", "lib/git/lib.rb", "lib/git/log.rb", "lib/git/object.rb", "lib/git/path.rb", "lib/git/remote.rb", "lib/git/repository.rb", "lib/git/stash.rb", "lib/git/stashes.rb", "lib/git/status.rb", "lib/git/working_directory.rb", "README"] + s.homepage = %q{http://github.com/schacon/ruby-git} + s.rdoc_options = ["--charset=UTF-8"] + s.require_paths = ["lib"] + s.requirements = ["git 1.6.0.0, or greater"] + s.rubyforge_project = %q{git} + s.rubygems_version = %q{1.3.5} + s.summary = %q{Ruby/Git is a Ruby library that can be used to create, read and manipulate Git repositories by wrapping system calls to the git binary} + + if s.respond_to? :specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 3 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + else + end + else + end +end diff --git a/vendor/gems/specifications/jeweler-1.3.0.gemspec b/vendor/gems/specifications/jeweler-1.3.0.gemspec new file mode 100644 index 00000000..a725a51d --- /dev/null +++ b/vendor/gems/specifications/jeweler-1.3.0.gemspec @@ -0,0 +1,57 @@ +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{jeweler} + s.version = "1.3.0" + + s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? 
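The git-1.2.5 gemspec added above wraps system calls to the git binary. A hedged usage sketch (the repository path and commit message are placeholders):

require 'rubygems'
require 'git'

g = Git.open('/path/to/working/copy')             # open an existing repository
g.log(5).each { |c| puts "#{c.sha[0, 8]} #{c.message}" }  # last five commits
g.add('README')                                   # stage a file
g.commit('Update README')                         # shells out to `git commit`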
:required_rubygems_version= + s.authors = ["Josh Nichols"] + s.date = %q{2009-10-17} + s.default_executable = %q{jeweler} + s.description = %q{Simple and opinionated helper for creating Rubygem projects on GitHub} + s.email = %q{josh@technicalpickles.com} + s.executables = ["jeweler"] + s.extra_rdoc_files = ["ChangeLog.markdown", "LICENSE", "README.markdown"] + s.files = [".gitignore", "ChangeLog.markdown", "LICENSE", "README.markdown", "Rakefile", "VERSION.yml", "bin/jeweler", "features/generator/cucumber.feature", "features/generator/directory_layout.feature", "features/generator/dotdocument.feature", "features/generator/env_options.feature", "features/generator/git.feature", "features/generator/license.feature", "features/generator/rakefile.feature", "features/generator/readme.feature", "features/generator/test.feature", "features/generator/test_helper.feature", "features/placeholder.feature", "features/step_definitions/debug_steps.rb", "features/step_definitions/filesystem_steps.rb", "features/step_definitions/generator_steps.rb", "features/step_definitions/task_steps.rb", "features/support/env.rb", "features/tasks/build_gem.feature", "features/tasks/version.feature", "features/tasks/version_bumping.feature", "jeweler.gemspec", "lib/jeweler.rb", "lib/jeweler/commands.rb", "lib/jeweler/commands/build_gem.rb", "lib/jeweler/commands/check_dependencies.rb", "lib/jeweler/commands/install_gem.rb", "lib/jeweler/commands/release_to_gemcutter.rb", "lib/jeweler/commands/release_to_git.rb", "lib/jeweler/commands/release_to_github.rb", "lib/jeweler/commands/release_to_rubyforge.rb", "lib/jeweler/commands/setup_rubyforge.rb", "lib/jeweler/commands/validate_gemspec.rb", "lib/jeweler/commands/version/base.rb", "lib/jeweler/commands/version/bump_major.rb", "lib/jeweler/commands/version/bump_minor.rb", "lib/jeweler/commands/version/bump_patch.rb", "lib/jeweler/commands/version/write.rb", "lib/jeweler/commands/write_gemspec.rb", "lib/jeweler/errors.rb", "lib/jeweler/gemcutter_tasks.rb", "lib/jeweler/gemspec_helper.rb", "lib/jeweler/generator.rb", "lib/jeweler/generator/application.rb", "lib/jeweler/generator/bacon_mixin.rb", "lib/jeweler/generator/github_mixin.rb", "lib/jeweler/generator/micronaut_mixin.rb", "lib/jeweler/generator/minitest_mixin.rb", "lib/jeweler/generator/options.rb", "lib/jeweler/generator/rdoc_mixin.rb", "lib/jeweler/generator/riot_mixin.rb", "lib/jeweler/generator/rspec_mixin.rb", "lib/jeweler/generator/shoulda_mixin.rb", "lib/jeweler/generator/testspec_mixin.rb", "lib/jeweler/generator/testunit_mixin.rb", "lib/jeweler/generator/yard_mixin.rb", "lib/jeweler/rubyforge_tasks.rb", "lib/jeweler/specification.rb", "lib/jeweler/tasks.rb", "lib/jeweler/templates/.document", "lib/jeweler/templates/.gitignore", "lib/jeweler/templates/LICENSE", "lib/jeweler/templates/README.rdoc", "lib/jeweler/templates/Rakefile", "lib/jeweler/templates/bacon/flunking.rb", "lib/jeweler/templates/bacon/helper.rb", "lib/jeweler/templates/features/default.feature", "lib/jeweler/templates/features/support/env.rb", "lib/jeweler/templates/micronaut/flunking.rb", "lib/jeweler/templates/micronaut/helper.rb", "lib/jeweler/templates/minitest/flunking.rb", "lib/jeweler/templates/minitest/helper.rb", "lib/jeweler/templates/riot/flunking.rb", "lib/jeweler/templates/riot/helper.rb", "lib/jeweler/templates/rspec/flunking.rb", "lib/jeweler/templates/rspec/helper.rb", "lib/jeweler/templates/rspec/spec.opts", "lib/jeweler/templates/shoulda/flunking.rb", "lib/jeweler/templates/shoulda/helper.rb", 
"lib/jeweler/templates/testspec/flunking.rb", "lib/jeweler/templates/testspec/helper.rb", "lib/jeweler/templates/testunit/flunking.rb", "lib/jeweler/templates/testunit/helper.rb", "lib/jeweler/version_helper.rb", "test/fixtures/bar/VERSION.yml", "test/fixtures/bar/bin/foo_the_ultimate_bin", "test/fixtures/bar/hey_include_me_in_gemspec", "test/fixtures/bar/lib/foo_the_ultimate_lib.rb", "test/fixtures/existing-project-with-version-constant/.document", "test/fixtures/existing-project-with-version-constant/.gitignore", "test/fixtures/existing-project-with-version-constant/LICENSE", "test/fixtures/existing-project-with-version-constant/README.rdoc", "test/fixtures/existing-project-with-version-constant/Rakefile", "test/fixtures/existing-project-with-version-constant/existing-project-with-version.gemspec", "test/fixtures/existing-project-with-version-constant/lib/existing_project_with_version.rb", "test/fixtures/existing-project-with-version-constant/test/existing_project_with_version_test.rb", "test/fixtures/existing-project-with-version-constant/test/test_helper.rb", "test/fixtures/existing-project-with-version-plaintext/.document", "test/fixtures/existing-project-with-version-plaintext/.gitignore", "test/fixtures/existing-project-with-version-plaintext/LICENSE", "test/fixtures/existing-project-with-version-plaintext/README.rdoc", "test/fixtures/existing-project-with-version-plaintext/Rakefile", "test/fixtures/existing-project-with-version-plaintext/VERSION", "test/fixtures/existing-project-with-version-plaintext/existing-project-with-version.gemspec", "test/fixtures/existing-project-with-version-plaintext/lib/existing_project_with_version.rb", "test/fixtures/existing-project-with-version-plaintext/test/existing_project_with_version_test.rb", "test/fixtures/existing-project-with-version-plaintext/test/test_helper.rb", "test/fixtures/existing-project-with-version-yaml/.document", "test/fixtures/existing-project-with-version-yaml/.gitignore", "test/fixtures/existing-project-with-version-yaml/LICENSE", "test/fixtures/existing-project-with-version-yaml/README.rdoc", "test/fixtures/existing-project-with-version-yaml/Rakefile", "test/fixtures/existing-project-with-version-yaml/VERSION.yml", "test/fixtures/existing-project-with-version-yaml/bin/foo_the_ultimate_bin", "test/fixtures/existing-project-with-version-yaml/existing-project-with-version.gemspec", "test/fixtures/existing-project-with-version-yaml/lib/existing_project_with_version.rb", "test/fixtures/existing-project-with-version-yaml/test/existing_project_with_version_test.rb", "test/fixtures/existing-project-with-version-yaml/test/test_helper.rb", "test/geminstaller.yml", "test/jeweler/commands/test_build_gem.rb", "test/jeweler/commands/test_install_gem.rb", "test/jeweler/commands/test_release_to_gemcutter.rb", "test/jeweler/commands/test_release_to_git.rb", "test/jeweler/commands/test_release_to_github.rb", "test/jeweler/commands/test_release_to_rubyforge.rb", "test/jeweler/commands/test_setup_rubyforge.rb", "test/jeweler/commands/test_validate_gemspec.rb", "test/jeweler/commands/test_write_gemspec.rb", "test/jeweler/commands/version/test_base.rb", "test/jeweler/commands/version/test_bump_major.rb", "test/jeweler/commands/version/test_bump_minor.rb", "test/jeweler/commands/version/test_bump_patch.rb", "test/jeweler/commands/version/test_write.rb", "test/jeweler/generator/test_application.rb", "test/jeweler/generator/test_options.rb", "test/jeweler/test_gemspec_helper.rb", "test/jeweler/test_generator.rb", 
"test/jeweler/test_generator_initialization.rb", "test/jeweler/test_generator_mixins.rb", "test/jeweler/test_specification.rb", "test/jeweler/test_tasks.rb", "test/jeweler/test_version_helper.rb", "test/shoulda_macros/jeweler_macros.rb", "test/test_helper.rb", "test/test_jeweler.rb"] + s.homepage = %q{http://github.com/technicalpickles/jeweler} + s.rdoc_options = ["--charset=UTF-8"] + s.require_paths = ["lib"] + s.rubyforge_project = %q{pickles} + s.rubygems_version = %q{1.3.5} + s.summary = %q{Simple and opinionated helper for creating Rubygem projects on GitHub} + s.test_files = ["test/fixtures/bar/lib/foo_the_ultimate_lib.rb", "test/fixtures/existing-project-with-version-constant/lib/existing_project_with_version.rb", "test/fixtures/existing-project-with-version-constant/test/existing_project_with_version_test.rb", "test/fixtures/existing-project-with-version-constant/test/test_helper.rb", "test/fixtures/existing-project-with-version-plaintext/lib/existing_project_with_version.rb", "test/fixtures/existing-project-with-version-plaintext/test/existing_project_with_version_test.rb", "test/fixtures/existing-project-with-version-plaintext/test/test_helper.rb", "test/fixtures/existing-project-with-version-yaml/lib/existing_project_with_version.rb", "test/fixtures/existing-project-with-version-yaml/test/existing_project_with_version_test.rb", "test/fixtures/existing-project-with-version-yaml/test/test_helper.rb", "test/jeweler/commands/test_build_gem.rb", "test/jeweler/commands/test_install_gem.rb", "test/jeweler/commands/test_release_to_gemcutter.rb", "test/jeweler/commands/test_release_to_git.rb", "test/jeweler/commands/test_release_to_github.rb", "test/jeweler/commands/test_release_to_rubyforge.rb", "test/jeweler/commands/test_setup_rubyforge.rb", "test/jeweler/commands/test_validate_gemspec.rb", "test/jeweler/commands/test_write_gemspec.rb", "test/jeweler/commands/version/test_base.rb", "test/jeweler/commands/version/test_bump_major.rb", "test/jeweler/commands/version/test_bump_minor.rb", "test/jeweler/commands/version/test_bump_patch.rb", "test/jeweler/commands/version/test_write.rb", "test/jeweler/generator/test_application.rb", "test/jeweler/generator/test_options.rb", "test/jeweler/test_gemspec_helper.rb", "test/jeweler/test_generator.rb", "test/jeweler/test_generator_initialization.rb", "test/jeweler/test_generator_mixins.rb", "test/jeweler/test_specification.rb", "test/jeweler/test_tasks.rb", "test/jeweler/test_version_helper.rb", "test/shoulda_macros/jeweler_macros.rb", "test/test_helper.rb", "test/test_jeweler.rb"] + + if s.respond_to? 
:specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 3 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + s.add_runtime_dependency(%q, [">= 1.2.5"]) + s.add_runtime_dependency(%q, [">= 2.0.0"]) + s.add_runtime_dependency(%q, [">= 0.1.0"]) + s.add_development_dependency(%q, [">= 0"]) + s.add_development_dependency(%q, [">= 0"]) + s.add_development_dependency(%q, [">= 0"]) + s.add_development_dependency(%q, [">= 0"]) + s.add_development_dependency(%q, [">= 0"]) + else + s.add_dependency(%q, [">= 1.2.5"]) + s.add_dependency(%q, [">= 2.0.0"]) + s.add_dependency(%q, [">= 0.1.0"]) + s.add_dependency(%q, [">= 0"]) + s.add_dependency(%q, [">= 0"]) + s.add_dependency(%q, [">= 0"]) + s.add_dependency(%q, [">= 0"]) + s.add_dependency(%q, [">= 0"]) + end + else + s.add_dependency(%q, [">= 1.2.5"]) + s.add_dependency(%q, [">= 2.0.0"]) + s.add_dependency(%q, [">= 0.1.0"]) + s.add_dependency(%q, [">= 0"]) + s.add_dependency(%q, [">= 0"]) + s.add_dependency(%q, [">= 0"]) + s.add_dependency(%q, [">= 0"]) + s.add_dependency(%q, [">= 0"]) + end +end diff --git a/vendor/gems/specifications/json_pure-1.2.0.gemspec b/vendor/gems/specifications/json_pure-1.2.0.gemspec new file mode 100644 index 00000000..d12f31fd --- /dev/null +++ b/vendor/gems/specifications/json_pure-1.2.0.gemspec @@ -0,0 +1,33 @@ +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{json_pure} + s.version = "1.2.0" + + s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= + s.authors = ["Florian Frank"] + s.date = %q{2009-11-07} + s.default_executable = %q{edit_json.rb} + s.description = %q{This is a JSON implementation in pure Ruby.} + s.email = %q{flori@ping.de} + s.executables = ["edit_json.rb", "prettify_json.rb"] + s.extra_rdoc_files = ["README"] + s.files = ["CHANGES", "bin/edit_json.rb", "bin/prettify_json.rb", "VERSION", "GPL", "TODO", "README", "benchmarks/parser_benchmark.rb", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkPure.log", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkComparison.log", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkYAML#parser.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkExt#generator_safe.dat", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkExt#parser.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkExt#generator_fast.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkExt#generator_fast-autocorrelation.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkPure.log", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkExt#parser-autocorrelation.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkRails#generator-autocorrelation.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkExt.log", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkPure#generator_fast-autocorrelation.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkPure#generator_fast.dat", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkRails#parser.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkPure#generator_pretty-autocorrelation.dat", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkPure#parser-autocorrelation.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkExt#generator_pretty.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkRails.log", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkExt.log", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkRails.log", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkComparison.log", 
"benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkPure#generator_safe.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkRails#generator.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkExt#generator_safe-autocorrelation.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkPure#generator_pretty.dat", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkYAML.log", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkYAML#parser-autocorrelation.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkExt#generator_pretty-autocorrelation.dat", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkRails#parser-autocorrelation.dat", "benchmarks/data-p4-3GHz-ruby18/ParserBenchmarkPure#parser.dat", "benchmarks/data-p4-3GHz-ruby18/GeneratorBenchmarkPure#generator_safe-autocorrelation.dat", "benchmarks/generator_benchmark.rb", "ext/json/ext/generator/extconf.rb", "ext/json/ext/generator/unicode.c", "ext/json/ext/generator/generator.c", "ext/json/ext/generator/unicode.h", "ext/json/ext/parser/extconf.rb", "ext/json/ext/parser/parser.rl", "ext/json/ext/parser/unicode.c", "ext/json/ext/parser/parser.c", "ext/json/ext/parser/unicode.h", "Rakefile", "tools/fuzz.rb", "tools/server.rb", "lib/json.rb", "lib/json/json.xpm", "lib/json/Key.xpm", "lib/json/String.xpm", "lib/json/Numeric.xpm", "lib/json/Hash.xpm", "lib/json/add/rails.rb", "lib/json/add/core.rb", "lib/json/common.rb", "lib/json/Array.xpm", "lib/json/FalseClass.xpm", "lib/json/pure/generator.rb", "lib/json/pure/parser.rb", "lib/json/TrueClass.xpm", "lib/json/pure.rb", "lib/json/version.rb", "lib/json/ext.rb", "lib/json/editor.rb", "lib/json/NilClass.xpm", "data/example.json", "data/index.html", "data/prototype.js", "tests/test_json_encoding.rb", "tests/test_json_addition.rb", "tests/fixtures/pass16.json", "tests/fixtures/fail4.json", "tests/fixtures/fail1.json", "tests/fixtures/fail28.json", "tests/fixtures/fail8.json", "tests/fixtures/fail19.json", "tests/fixtures/pass2.json", "tests/fixtures/pass26.json", "tests/fixtures/pass1.json", "tests/fixtures/fail3.json", "tests/fixtures/fail20.json", "tests/fixtures/pass3.json", "tests/fixtures/pass15.json", "tests/fixtures/fail12.json", "tests/fixtures/fail13.json", "tests/fixtures/fail22.json", "tests/fixtures/fail24.json", "tests/fixtures/fail9.json", "tests/fixtures/fail2.json", "tests/fixtures/fail14.json", "tests/fixtures/fail6.json", "tests/fixtures/fail21.json", "tests/fixtures/fail7.json", "tests/fixtures/pass17.json", "tests/fixtures/fail11.json", "tests/fixtures/fail25.json", "tests/fixtures/fail5.json", "tests/fixtures/fail18.json", "tests/fixtures/fail27.json", "tests/fixtures/fail10.json", "tests/fixtures/fail23.json", "tests/test_json_rails.rb", "tests/test_json.rb", "tests/test_json_generate.rb", "tests/test_json_unicode.rb", "tests/test_json_fixtures.rb", "COPYING", "install.rb"] + s.homepage = %q{http://json.rubyforge.org} + s.rdoc_options = ["--title", "JSON -- A JSON implemention", "--main", "README"] + s.require_paths = ["lib"] + s.rubyforge_project = %q{json} + s.rubygems_version = %q{1.3.5} + s.summary = %q{A JSON implementation in Ruby} + s.test_files = ["tests/test_json_encoding.rb", "tests/test_json_addition.rb", "tests/test_json_rails.rb", "tests/test_json.rb", "tests/test_json_generate.rb", "tests/test_json_unicode.rb", "tests/test_json_fixtures.rb"] + + if s.respond_to? 
:specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 3 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + else + end + else + end +end diff --git a/vendor/gems/specifications/markaby-0.5.gemspec b/vendor/gems/specifications/markaby-0.5.gemspec new file mode 100644 index 00000000..92648622 --- /dev/null +++ b/vendor/gems/specifications/markaby-0.5.gemspec @@ -0,0 +1,31 @@ +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{markaby} + s.version = "0.5" + + s.required_rubygems_version = nil if s.respond_to? :required_rubygems_version= + s.authors = ["Tim Fletcher and _why"] + s.cert_chain = nil + s.date = %q{2006-10-02} + s.extra_rdoc_files = ["README"] + s.files = ["README", "Rakefile", "setup.rb", "test/test_markaby.rb", "lib/markaby", "lib/markaby.rb", "lib/markaby/metaid.rb", "lib/markaby/tags.rb", "lib/markaby/builder.rb", "lib/markaby/cssproxy.rb", "lib/markaby/rails.rb", "lib/markaby/template.rb", "tools/rakehelp.rb"] + s.require_paths = ["lib"] + s.required_ruby_version = Gem::Requirement.new("> 0.0.0") + s.rubygems_version = %q{1.3.5} + s.summary = %q{Markup as Ruby, write HTML in your native Ruby tongue} + s.test_files = ["test/test_markaby.rb"] + + if s.respond_to? :specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 1 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + s.add_runtime_dependency(%q, [">= 2.0.0"]) + else + s.add_dependency(%q, [">= 2.0.0"]) + end + else + s.add_dependency(%q, [">= 2.0.0"]) + end +end diff --git a/vendor/gems/specifications/maruku-0.6.0.gemspec b/vendor/gems/specifications/maruku-0.6.0.gemspec new file mode 100644 index 00000000..182aed72 --- /dev/null +++ b/vendor/gems/specifications/maruku-0.6.0.gemspec @@ -0,0 +1,32 @@ +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{maruku} + s.version = "0.6.0" + + s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= + s.authors = ["Andrea Censi"] + s.autorequire = %q{maruku} + s.date = %q{2009-05-04} + s.description = %q{Maruku is a Markdown interpreter in Ruby. It features native export to HTML and PDF (via Latex). 
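The markaby-0.5 gemspec above is the "markup as Ruby" builder. A minimal sketch of its Builder API (titles and headings are placeholder text):

require 'rubygems'
require 'markaby'

mab = Markaby::Builder.new
mab.html do
  head { title 'Hello' }
  body { h1 'Markup as Ruby' }
end
puts mab.to_s                                     # emits the generated XHTML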
The output is really beautiful!} + s.email = %q{andrea@rubyforge.org} + s.executables = ["maruku", "marutex"] + s.files = ["lib/maruku/attributes.rb", "lib/maruku/defaults.rb", "lib/maruku/errors_management.rb", "lib/maruku/ext/div.rb", "lib/maruku/ext/math/elements.rb", "lib/maruku/ext/math/latex_fix.rb", "lib/maruku/ext/math/mathml_engines/blahtex.rb", "lib/maruku/ext/math/mathml_engines/itex2mml.rb", "lib/maruku/ext/math/mathml_engines/none.rb", "lib/maruku/ext/math/mathml_engines/ritex.rb", "lib/maruku/ext/math/parsing.rb", "lib/maruku/ext/math/to_html.rb", "lib/maruku/ext/math/to_latex.rb", "lib/maruku/ext/math.rb", "lib/maruku/helpers.rb", "lib/maruku/input/charsource.rb", "lib/maruku/input/extensions.rb", "lib/maruku/input/html_helper.rb", "lib/maruku/input/linesource.rb", "lib/maruku/input/parse_block.rb", "lib/maruku/input/parse_doc.rb", "lib/maruku/input/parse_span_better.rb", "lib/maruku/input/rubypants.rb", "lib/maruku/input/type_detection.rb", "lib/maruku/input_textile2/t2_parser.rb", "lib/maruku/maruku.rb", "lib/maruku/output/s5/fancy.rb", "lib/maruku/output/s5/to_s5.rb", "lib/maruku/output/to_html.rb", "lib/maruku/output/to_latex.rb", "lib/maruku/output/to_latex_entities.rb", "lib/maruku/output/to_latex_strings.rb", "lib/maruku/output/to_markdown.rb", "lib/maruku/output/to_s.rb", "lib/maruku/string_utils.rb", "lib/maruku/structures.rb", "lib/maruku/structures_inspect.rb", "lib/maruku/structures_iterators.rb", "lib/maruku/tests/benchmark.rb", "lib/maruku/tests/new_parser.rb", "lib/maruku/tests/tests.rb", "lib/maruku/textile2.rb", "lib/maruku/toc.rb", "lib/maruku/usage/example1.rb", "lib/maruku/version.rb", "lib/maruku.rb", "docs/changelog.md", "docs/div_syntax.md", "docs/entity_test.md", "docs/markdown_syntax.md", "docs/maruku.md", "docs/math.md", "docs/other_stuff.md", "docs/proposal.md", "tests/bugs/code_in_links.md", "tests/bugs/complex_escaping.md", "tests/math/syntax.md", "tests/math_usage/document.md", "tests/others/abbreviations.md", "tests/others/blank.md", "tests/others/code.md", "tests/others/code2.md", "tests/others/code3.md", "tests/others/email.md", "tests/others/entities.md", "tests/others/escaping.md", "tests/others/extra_dl.md", "tests/others/extra_header_id.md", "tests/others/extra_table1.md", "tests/others/footnotes.md", "tests/others/headers.md", "tests/others/hrule.md", "tests/others/images.md", "tests/others/inline_html.md", "tests/others/links.md", "tests/others/list1.md", "tests/others/list2.md", "tests/others/list3.md", "tests/others/lists.md", "tests/others/lists_after_paragraph.md", "tests/others/lists_ol.md", "tests/others/misc_sw.md", "tests/others/one.md", "tests/others/paragraphs.md", "tests/others/sss06.md", "tests/others/test.md", "tests/s5/s5profiling.md", "tests/unittest/abbreviations.md", "tests/unittest/alt.md", "tests/unittest/attributes/att2.md", "tests/unittest/attributes/att3.md", "tests/unittest/attributes/attributes.md", "tests/unittest/attributes/circular.md", "tests/unittest/attributes/default.md", "tests/unittest/blank.md", "tests/unittest/blanks_in_code.md", "tests/unittest/bug_def.md", "tests/unittest/bug_table.md", "tests/unittest/code.md", "tests/unittest/code2.md", "tests/unittest/code3.md", "tests/unittest/data_loss.md", "tests/unittest/divs/div1.md", "tests/unittest/divs/div2.md", "tests/unittest/divs/div3_nest.md", "tests/unittest/easy.md", "tests/unittest/email.md", "tests/unittest/encoding/iso-8859-1.md", "tests/unittest/encoding/utf-8.md", "tests/unittest/entities.md", "tests/unittest/escaping.md", 
"tests/unittest/extra_dl.md", "tests/unittest/extra_header_id.md", "tests/unittest/extra_table1.md", "tests/unittest/footnotes.md", "tests/unittest/hang.md", "tests/unittest/headers.md", "tests/unittest/hex_entities.md", "tests/unittest/hrule.md", "tests/unittest/html2.md", "tests/unittest/html3.md", "tests/unittest/html4.md", "tests/unittest/html5.md", "tests/unittest/ie.md", "tests/unittest/images.md", "tests/unittest/images2.md", "tests/unittest/inline_html.md", "tests/unittest/inline_html2.md", "tests/unittest/links.md", "tests/unittest/links2.md", "tests/unittest/list1.md", "tests/unittest/list12.md", "tests/unittest/list2.md", "tests/unittest/list3.md", "tests/unittest/list4.md", "tests/unittest/lists.md", "tests/unittest/lists10.md", "tests/unittest/lists11.md", "tests/unittest/lists6.md", "tests/unittest/lists9.md", "tests/unittest/lists_after_paragraph.md", "tests/unittest/lists_ol.md", "tests/unittest/loss.md", "tests/unittest/math/equations.md", "tests/unittest/math/inline.md", "tests/unittest/math/math2.md", "tests/unittest/math/notmath.md", "tests/unittest/math/table.md", "tests/unittest/math/table2.md", "tests/unittest/misc_sw.md", "tests/unittest/notyet/escape.md", "tests/unittest/notyet/header_after_par.md", "tests/unittest/notyet/ticks.md", "tests/unittest/notyet/triggering.md", "tests/unittest/olist.md", "tests/unittest/one.md", "tests/unittest/paragraph.md", "tests/unittest/paragraph_rules/dont_merge_ref.md", "tests/unittest/paragraph_rules/tab_is_blank.md", "tests/unittest/paragraphs.md", "tests/unittest/pending/amps.md", "tests/unittest/pending/empty_cells.md", "tests/unittest/pending/link.md", "tests/unittest/pending/ref.md", "tests/unittest/recover/recover_links.md", "tests/unittest/red_tests/abbrev.md", "tests/unittest/red_tests/lists7.md", "tests/unittest/red_tests/lists7b.md", "tests/unittest/red_tests/lists8.md", "tests/unittest/red_tests/xml.md", "tests/unittest/references/long_example.md", "tests/unittest/references/spaces_and_numbers.md", "tests/unittest/smartypants.md", "tests/unittest/syntax_hl.md", "tests/unittest/table_attributes.md", "tests/unittest/test.md", "tests/unittest/underscore_in_words.md", "tests/unittest/wrapping.md", "tests/unittest/xml2.md", "tests/unittest/xml3.md", "tests/unittest/xml_instruction.md", "tests/utf8-files/simple.md", "bin/marudown", "bin/maruku", "bin/marutest", "bin/marutex", "unit_test_block.sh", "unit_test_span.sh", "Rakefile", "maruku_gem.rb"] + s.homepage = %q{http://maruku.rubyforge.org} + s.require_paths = ["lib"] + s.rubygems_version = %q{1.3.5} + s.summary = %q{Maruku is a Markdown-superset interpreter written in Ruby.} + + if s.respond_to? :specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 2 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + s.add_runtime_dependency(%q, [">= 1.0.0"]) + else + s.add_dependency(%q, [">= 1.0.0"]) + end + else + s.add_dependency(%q, [">= 1.0.0"]) + end +end diff --git a/vendor/gems/specifications/net-scp-1.0.2.gemspec b/vendor/gems/specifications/net-scp-1.0.2.gemspec new file mode 100644 index 00000000..b7ca0aff --- /dev/null +++ b/vendor/gems/specifications/net-scp-1.0.2.gemspec @@ -0,0 +1,37 @@ +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{net-scp} + s.version = "1.0.2" + + s.required_rubygems_version = Gem::Requirement.new(">= 1.2") if s.respond_to? 
:required_rubygems_version= + s.authors = ["Jamis Buck"] + s.date = %q{2009-02-03} + s.description = %q{A pure Ruby implementation of the SCP client protocol} + s.email = %q{jamis@jamisbuck.org} + s.extra_rdoc_files = ["CHANGELOG.rdoc", "lib/net/scp/download.rb", "lib/net/scp/errors.rb", "lib/net/scp/upload.rb", "lib/net/scp/version.rb", "lib/net/scp.rb", "lib/uri/open-scp.rb", "lib/uri/scp.rb", "README.rdoc"] + s.files = ["CHANGELOG.rdoc", "lib/net/scp/download.rb", "lib/net/scp/errors.rb", "lib/net/scp/upload.rb", "lib/net/scp/version.rb", "lib/net/scp.rb", "lib/uri/open-scp.rb", "lib/uri/scp.rb", "Rakefile", "README.rdoc", "setup.rb", "test/common.rb", "test/test_all.rb", "test/test_download.rb", "test/test_scp.rb", "test/test_upload.rb", "Manifest", "net-scp.gemspec"] + s.homepage = %q{http://net-ssh.rubyforge.org/scp} + s.rdoc_options = ["--line-numbers", "--inline-source", "--title", "Net-scp", "--main", "README.rdoc"] + s.require_paths = ["lib"] + s.rubyforge_project = %q{net-ssh} + s.rubygems_version = %q{1.3.5} + s.summary = %q{A pure Ruby implementation of the SCP client protocol} + s.test_files = ["test/test_all.rb"] + + if s.respond_to? :specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 2 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + s.add_runtime_dependency(%q, [">= 1.99.1"]) + s.add_development_dependency(%q, [">= 0"]) + else + s.add_dependency(%q, [">= 1.99.1"]) + s.add_dependency(%q, [">= 0"]) + end + else + s.add_dependency(%q, [">= 1.99.1"]) + s.add_dependency(%q, [">= 0"]) + end +end diff --git a/vendor/gems/specifications/net-ssh-2.0.15.gemspec b/vendor/gems/specifications/net-ssh-2.0.15.gemspec new file mode 100644 index 00000000..473de335 --- /dev/null +++ b/vendor/gems/specifications/net-ssh-2.0.15.gemspec @@ -0,0 +1,30 @@ +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{net-ssh} + s.version = "2.0.15" + + s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? 
:required_rubygems_version= + s.authors = ["Jamis Buck", "Delano Mandelbaum"] + s.date = %q{2009-09-02} + s.description = %q{Net::SSH: a pure-Ruby implementation of the SSH2 client protocol.} + s.email = ["net-ssh@solutious.com", "net-ssh@solutious.com"] + s.extra_rdoc_files = ["README.rdoc", "THANKS.rdoc", "CHANGELOG.rdoc"] + s.files = ["CHANGELOG.rdoc", "Manifest", "README.rdoc", "Rakefile", "Rudyfile", "THANKS.rdoc", "lib/net/ssh.rb", "lib/net/ssh/authentication/agent.rb", "lib/net/ssh/authentication/constants.rb", "lib/net/ssh/authentication/key_manager.rb", "lib/net/ssh/authentication/methods/abstract.rb", "lib/net/ssh/authentication/methods/hostbased.rb", "lib/net/ssh/authentication/methods/keyboard_interactive.rb", "lib/net/ssh/authentication/methods/password.rb", "lib/net/ssh/authentication/methods/publickey.rb", "lib/net/ssh/authentication/pageant.rb", "lib/net/ssh/authentication/session.rb", "lib/net/ssh/buffer.rb", "lib/net/ssh/buffered_io.rb", "lib/net/ssh/config.rb", "lib/net/ssh/connection/channel.rb", "lib/net/ssh/connection/constants.rb", "lib/net/ssh/connection/session.rb", "lib/net/ssh/connection/term.rb", "lib/net/ssh/errors.rb", "lib/net/ssh/key_factory.rb", "lib/net/ssh/known_hosts.rb", "lib/net/ssh/loggable.rb", "lib/net/ssh/packet.rb", "lib/net/ssh/prompt.rb", "lib/net/ssh/proxy/errors.rb", "lib/net/ssh/proxy/http.rb", "lib/net/ssh/proxy/socks4.rb", "lib/net/ssh/proxy/socks5.rb", "lib/net/ssh/ruby_compat.rb", "lib/net/ssh/service/forward.rb", "lib/net/ssh/test.rb", "lib/net/ssh/test/channel.rb", "lib/net/ssh/test/extensions.rb", "lib/net/ssh/test/kex.rb", "lib/net/ssh/test/local_packet.rb", "lib/net/ssh/test/packet.rb", "lib/net/ssh/test/remote_packet.rb", "lib/net/ssh/test/script.rb", "lib/net/ssh/test/socket.rb", "lib/net/ssh/transport/algorithms.rb", "lib/net/ssh/transport/cipher_factory.rb", "lib/net/ssh/transport/constants.rb", "lib/net/ssh/transport/hmac.rb", "lib/net/ssh/transport/hmac/abstract.rb", "lib/net/ssh/transport/hmac/md5.rb", "lib/net/ssh/transport/hmac/md5_96.rb", "lib/net/ssh/transport/hmac/none.rb", "lib/net/ssh/transport/hmac/sha1.rb", "lib/net/ssh/transport/hmac/sha1_96.rb", "lib/net/ssh/transport/identity_cipher.rb", "lib/net/ssh/transport/kex.rb", "lib/net/ssh/transport/kex/diffie_hellman_group1_sha1.rb", "lib/net/ssh/transport/kex/diffie_hellman_group_exchange_sha1.rb", "lib/net/ssh/transport/openssl.rb", "lib/net/ssh/transport/packet_stream.rb", "lib/net/ssh/transport/server_version.rb", "lib/net/ssh/transport/session.rb", "lib/net/ssh/transport/state.rb", "lib/net/ssh/verifiers/lenient.rb", "lib/net/ssh/verifiers/null.rb", "lib/net/ssh/verifiers/strict.rb", "lib/net/ssh/version.rb", "net-ssh.gemspec", "setup.rb", "support/arcfour_check.rb", "test/authentication/methods/common.rb", "test/authentication/methods/test_abstract.rb", "test/authentication/methods/test_hostbased.rb", "test/authentication/methods/test_keyboard_interactive.rb", "test/authentication/methods/test_password.rb", "test/authentication/methods/test_publickey.rb", "test/authentication/test_agent.rb", "test/authentication/test_key_manager.rb", "test/authentication/test_session.rb", "test/common.rb", "test/configs/eqsign", "test/configs/exact_match", "test/configs/multihost", "test/configs/wild_cards", "test/connection/test_channel.rb", "test/connection/test_session.rb", "test/test_all.rb", "test/test_buffer.rb", "test/test_buffered_io.rb", "test/test_config.rb", "test/test_key_factory.rb", "test/transport/hmac/test_md5.rb", "test/transport/hmac/test_md5_96.rb", 
"test/transport/hmac/test_none.rb", "test/transport/hmac/test_sha1.rb", "test/transport/hmac/test_sha1_96.rb", "test/transport/kex/test_diffie_hellman_group1_sha1.rb", "test/transport/kex/test_diffie_hellman_group_exchange_sha1.rb", "test/transport/test_algorithms.rb", "test/transport/test_cipher_factory.rb", "test/transport/test_hmac.rb", "test/transport/test_identity_cipher.rb", "test/transport/test_packet_stream.rb", "test/transport/test_server_version.rb", "test/transport/test_session.rb", "test/transport/test_state.rb"] + s.homepage = %q{http://rubyforge.org/projects/net-ssh/} + s.rdoc_options = ["--line-numbers", "--title", "Net::SSH: a pure-Ruby implementation of the SSH2 client protocol.", "--main", "README.rdoc"] + s.require_paths = ["lib"] + s.rubyforge_project = %q{net-ssh} + s.rubygems_version = %q{1.3.5} + s.summary = %q{Net::SSH: a pure-Ruby implementation of the SSH2 client protocol.} + + if s.respond_to? :specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 3 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + else + end + else + end +end diff --git a/vendor/gems/specifications/rubyforge-2.0.3.gemspec b/vendor/gems/specifications/rubyforge-2.0.3.gemspec new file mode 100644 index 00000000..fb9470ce --- /dev/null +++ b/vendor/gems/specifications/rubyforge-2.0.3.gemspec @@ -0,0 +1,43 @@ +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{rubyforge} + s.version = "2.0.3" + + s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= + s.authors = ["Ryan Davis", "Eric Hodel", "Ara T Howard", "Tom Copeland"] + s.date = %q{2009-10-10} + s.default_executable = %q{rubyforge} + s.description = %q{A script which automates a limited set of rubyforge operations. + +* Run 'rubyforge help' for complete usage. +* Setup: For first time users AND upgrades to 0.4.0: + * rubyforge setup (deletes your username and password, so run sparingly!) + * edit ~/.rubyforge/user-config.yml + * rubyforge config +* For all rubyforge upgrades, run 'rubyforge config' to ensure you have latest.} + s.email = ["ryand-ruby@zenspider.com", "drbrain@segment7.net", "ara.t.howard@gmail.com", "tom@infoether.com"] + s.executables = ["rubyforge"] + s.extra_rdoc_files = ["History.txt", "Manifest.txt", "README.txt"] + s.files = ["History.txt", "Manifest.txt", "README.txt", "Rakefile", "bin/rubyforge", "lib/rubyforge.rb", "lib/rubyforge/client.rb", "test/test_rubyforge.rb", "test/test_rubyforge_client.rb"] + s.homepage = %q{http://codeforpeople.rubyforge.org/rubyforge/} + s.rdoc_options = ["--main", "README.txt"] + s.require_paths = ["lib"] + s.rubyforge_project = %q{codeforpeople} + s.rubygems_version = %q{1.3.5} + s.summary = %q{A script which automates a limited set of rubyforge operations} + s.test_files = ["test/test_rubyforge.rb", "test/test_rubyforge_client.rb"] + + if s.respond_to? 
:specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 3 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + s.add_runtime_dependency(%q, [">= 1.1.7"]) + else + s.add_dependency(%q, [">= 1.1.7"]) + end + else + s.add_dependency(%q, [">= 1.1.7"]) + end +end diff --git a/vendor/gems/specifications/syntax-1.0.0.gemspec b/vendor/gems/specifications/syntax-1.0.0.gemspec new file mode 100644 index 00000000..b45b277d --- /dev/null +++ b/vendor/gems/specifications/syntax-1.0.0.gemspec @@ -0,0 +1,29 @@ +# -*- encoding: utf-8 -*- + +Gem::Specification.new do |s| + s.name = %q{syntax} + s.version = "1.0.0" + + s.required_rubygems_version = nil if s.respond_to? :required_rubygems_version= + s.authors = ["Jamis Buck"] + s.autorequire = %q{syntax} + s.cert_chain = nil + s.date = %q{2005-06-18} + s.email = %q{jamis@jamisbuck.org} + s.files = ["data/ruby.css", "data/xml.css", "data/yaml.css", "lib/syntax", "lib/syntax.rb", "lib/syntax/common.rb", "lib/syntax/convertors", "lib/syntax/lang", "lib/syntax/version.rb", "lib/syntax/convertors/abstract.rb", "lib/syntax/convertors/html.rb", "lib/syntax/lang/ruby.rb", "lib/syntax/lang/xml.rb", "lib/syntax/lang/yaml.rb", "test/ALL-TESTS.rb", "test/syntax", "test/tc_syntax.rb", "test/syntax/tc_ruby.rb", "test/syntax/tc_xml.rb", "test/syntax/tc_yaml.rb", "test/syntax/tokenizer_testcase.rb"] + s.require_paths = ["lib"] + s.required_ruby_version = Gem::Requirement.new("> 0.0.0") + s.rubygems_version = %q{1.3.5} + s.summary = %q{Syntax is Ruby library for performing simple syntax highlighting.} + s.test_files = ["test/ALL-TESTS.rb"] + + if s.respond_to? :specification_version then + current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION + s.specification_version = 1 + + if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then + else + end + else + end +end diff --git a/vendor/gems/specifications/thin-1.2.5.gemspec b/vendor/gems/specifications/thin-1.2.5.gemspec deleted file mode 100644 index 7fe3fed5..00000000 --- a/vendor/gems/specifications/thin-1.2.5.gemspec +++ /dev/null @@ -1,41 +0,0 @@ -# -*- encoding: utf-8 -*- - -Gem::Specification.new do |s| - s.name = %q{thin} - s.version = "1.2.5" - - s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? 
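The syntax-1.0.0 gemspec above is a small highlighting library (it ships CSS for Ruby, XML and YAML). A sketch of its HTML convertor (the code string is arbitrary):

require 'rubygems'
require 'syntax/convertors/html'

convertor = Syntax::Convertors::HTML.for_syntax('ruby')
puts convertor.convert('def hello; puts "hi"; end')   # tokens wrapped in <span> tags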
:required_rubygems_version= - s.authors = ["Marc-Andre Cournoyer"] - s.date = %q{2009-11-03} - s.default_executable = %q{thin} - s.description = %q{A thin and fast web server} - s.email = %q{macournoyer@gmail.com} - s.executables = ["thin"] - s.extensions = ["ext/thin_parser/extconf.rb"] - s.files = ["COPYING", "CHANGELOG", "README", "Rakefile", "benchmark/abc", "benchmark/benchmarker.rb", "benchmark/runner", "bin/thin", "example/adapter.rb", "example/async_app.ru", "example/async_chat.ru", "example/async_tailer.ru", "example/config.ru", "example/monit_sockets", "example/monit_unixsock", "example/myapp.rb", "example/ramaze.ru", "example/thin.god", "example/thin_solaris_smf.erb", "example/thin_solaris_smf.readme.txt", "example/vlad.rake", "lib/rack/adapter/loader.rb", "lib/rack/adapter/rails.rb", "lib/thin/backends/base.rb", "lib/thin/backends/swiftiply_client.rb", "lib/thin/backends/tcp_server.rb", "lib/thin/backends/unix_server.rb", "lib/thin/command.rb", "lib/thin/connection.rb", "lib/thin/controllers/cluster.rb", "lib/thin/controllers/controller.rb", "lib/thin/controllers/service.rb", "lib/thin/controllers/service.sh.erb", "lib/thin/daemonizing.rb", "lib/thin/headers.rb", "lib/thin/logging.rb", "lib/thin/request.rb", "lib/thin/response.rb", "lib/thin/runner.rb", "lib/thin/server.rb", "lib/thin/stats.html.erb", "lib/thin/stats.rb", "lib/thin/statuses.rb", "lib/thin/version.rb", "lib/thin.rb", "lib/thin_parser.bundle", "spec/backends/swiftiply_client_spec.rb", "spec/backends/tcp_server_spec.rb", "spec/backends/unix_server_spec.rb", "spec/command_spec.rb", "spec/configs/cluster.yml", "spec/configs/single.yml", "spec/connection_spec.rb", "spec/controllers/cluster_spec.rb", "spec/controllers/controller_spec.rb", "spec/controllers/service_spec.rb", "spec/daemonizing_spec.rb", "spec/headers_spec.rb", "spec/logging_spec.rb", "spec/perf/request_perf_spec.rb", "spec/perf/response_perf_spec.rb", "spec/perf/server_perf_spec.rb", "spec/rack/loader_spec.rb", "spec/rack/rails_adapter_spec.rb", "spec/rails_app/app/controllers/application.rb", "spec/rails_app/app/controllers/simple_controller.rb", "spec/rails_app/app/helpers/application_helper.rb", "spec/rails_app/app/views/simple/index.html.erb", "spec/rails_app/config/boot.rb", "spec/rails_app/config/environment.rb", "spec/rails_app/config/environments/development.rb", "spec/rails_app/config/environments/production.rb", "spec/rails_app/config/environments/test.rb", "spec/rails_app/config/initializers/inflections.rb", "spec/rails_app/config/initializers/mime_types.rb", "spec/rails_app/config/routes.rb", "spec/rails_app/public/404.html", "spec/rails_app/public/422.html", "spec/rails_app/public/500.html", "spec/rails_app/public/dispatch.cgi", "spec/rails_app/public/dispatch.fcgi", "spec/rails_app/public/dispatch.rb", "spec/rails_app/public/favicon.ico", "spec/rails_app/public/images/rails.png", "spec/rails_app/public/index.html", "spec/rails_app/public/javascripts/application.js", "spec/rails_app/public/javascripts/controls.js", "spec/rails_app/public/javascripts/dragdrop.js", "spec/rails_app/public/javascripts/effects.js", "spec/rails_app/public/javascripts/prototype.js", "spec/rails_app/public/robots.txt", "spec/rails_app/script/about", "spec/rails_app/script/console", "spec/rails_app/script/destroy", "spec/rails_app/script/generate", "spec/rails_app/script/performance/benchmarker", "spec/rails_app/script/performance/profiler", "spec/rails_app/script/performance/request", "spec/rails_app/script/plugin", "spec/rails_app/script/process/inspector", 
"spec/rails_app/script/process/reaper", "spec/rails_app/script/process/spawner", "spec/rails_app/script/runner", "spec/rails_app/script/server", "spec/request/mongrel_spec.rb", "spec/request/parser_spec.rb", "spec/request/persistent_spec.rb", "spec/request/processing_spec.rb", "spec/response_spec.rb", "spec/runner_spec.rb", "spec/server/builder_spec.rb", "spec/server/pipelining_spec.rb", "spec/server/robustness_spec.rb", "spec/server/stopping_spec.rb", "spec/server/swiftiply.yml", "spec/server/swiftiply_spec.rb", "spec/server/tcp_spec.rb", "spec/server/threaded_spec.rb", "spec/server/unix_socket_spec.rb", "spec/server_spec.rb", "spec/spec_helper.rb", "tasks/announce.rake", "tasks/deploy.rake", "tasks/email.erb", "tasks/gem.rake", "tasks/rdoc.rake", "tasks/site.rake", "tasks/spec.rake", "tasks/stats.rake", "ext/thin_parser/ext_help.h", "ext/thin_parser/parser.h", "ext/thin_parser/parser.c", "ext/thin_parser/thin.c", "ext/thin_parser/extconf.rb", "ext/thin_parser/common.rl", "ext/thin_parser/parser.rl"] - s.homepage = %q{http://code.macournoyer.com/thin/} - s.require_paths = ["lib"] - s.required_ruby_version = Gem::Requirement.new(">= 1.8.5") - s.rubyforge_project = %q{thin} - s.rubygems_version = %q{1.3.5} - s.summary = %q{A thin and fast web server} - - if s.respond_to? :specification_version then - current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION - s.specification_version = 3 - - if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then - s.add_runtime_dependency(%q, [">= 1.0.0"]) - s.add_runtime_dependency(%q, [">= 0.12.6"]) - s.add_runtime_dependency(%q, [">= 1.0.9"]) - else - s.add_dependency(%q, [">= 1.0.0"]) - s.add_dependency(%q, [">= 0.12.6"]) - s.add_dependency(%q, [">= 1.0.9"]) - end - else - s.add_dependency(%q, [">= 1.0.0"]) - s.add_dependency(%q, [">= 0.12.6"]) - s.add_dependency(%q, [">= 1.0.9"]) - end -end