diff --git a/.gitignore b/.gitignore index cf2883f..0fd4dd9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +.rvmrc .DS_Store db/*.db db/*.sqlite3 diff --git a/Gemfile b/Gemfile new file mode 100644 index 0000000..0651920 --- /dev/null +++ b/Gemfile @@ -0,0 +1,21 @@ +source 'http://rubygems.org' + +gem 'rails', '3.0.7' +gem 'rake', '~> 0.8.7' +gem 'will_paginate', '~> 2.3.15' +gem 'colored' +gem 'youtube-g', '~> 0.5.0' +gem 'uuidtools', '~> 2.1.2' +gem 'gravatarify' +gem 'paperclip', '~> 2.3.11' +gem 'thinking-sphinx', '~> 2.0.3' +gem 'ts-delayed-delta', '~> 1.1.2', :require => 'thinking_sphinx/deltas/delayed_delta' +gem 'mysql', '~> 2.8.1' +gem 'flickr' + + +group :development, :test do + gem 'hpricot' + gem 'mocha' + gem 'shoulda', '3.0.0.beta2' +end diff --git a/Gemfile.lock b/Gemfile.lock new file mode 100644 index 0000000..965b63e --- /dev/null +++ b/Gemfile.lock @@ -0,0 +1,115 @@ +GEM + remote: http://rubygems.org/ + specs: + abstract (1.0.0) + actionmailer (3.0.7) + actionpack (= 3.0.7) + mail (~> 2.2.15) + actionpack (3.0.7) + activemodel (= 3.0.7) + activesupport (= 3.0.7) + builder (~> 2.1.2) + erubis (~> 2.6.6) + i18n (~> 0.5.0) + rack (~> 1.2.1) + rack-mount (~> 0.6.14) + rack-test (~> 0.5.7) + tzinfo (~> 0.3.23) + activemodel (3.0.7) + activesupport (= 3.0.7) + builder (~> 2.1.2) + i18n (~> 0.5.0) + activerecord (3.0.7) + activemodel (= 3.0.7) + activesupport (= 3.0.7) + arel (~> 2.0.2) + tzinfo (~> 0.3.23) + activeresource (3.0.7) + activemodel (= 3.0.7) + activesupport (= 3.0.7) + activesupport (3.0.7) + arel (2.0.10) + builder (2.1.2) + colored (1.2) + daemons (1.1.3) + delayed_job (2.1.4) + activesupport (~> 3.0) + daemons + erubis (2.6.6) + abstract (>= 1.0.0) + flickr (1.0.2) + xml-simple (>= 1.0.7) + gravatarify (2.2.2) + hpricot (0.8.4) + i18n (0.5.0) + mail (2.2.19) + activesupport (>= 2.3.6) + i18n (>= 0.4.0) + mime-types (~> 1.16) + treetop (~> 1.4.8) + mime-types (1.16) + mocha (0.9.12) + mysql (2.8.1) + paperclip (2.3.11) + activerecord (>= 2.3.0) + activesupport (>= 2.3.2) + polyglot (0.3.1) + rack (1.2.3) + rack-mount (0.6.14) + rack (>= 1.0.0) + rack-test (0.5.7) + rack (>= 1.0) + rails (3.0.7) + actionmailer (= 3.0.7) + actionpack (= 3.0.7) + activerecord (= 3.0.7) + activeresource (= 3.0.7) + activesupport (= 3.0.7) + bundler (~> 1.0) + railties (= 3.0.7) + railties (3.0.7) + actionpack (= 3.0.7) + activesupport (= 3.0.7) + rake (>= 0.8.7) + thor (~> 0.14.4) + rake (0.8.7) + riddle (1.3.3) + shoulda (3.0.0.beta2) + shoulda-context (~> 1.0.0.beta1) + shoulda-matchers (~> 1.0.0.beta1) + shoulda-context (1.0.0.beta1) + shoulda-matchers (1.0.0.beta2) + thinking-sphinx (2.0.5) + activerecord (>= 3.0.3) + riddle (>= 1.3.3) + thor (0.14.6) + treetop (1.4.9) + polyglot (>= 0.3.1) + ts-delayed-delta (1.1.2) + delayed_job (>= 2.1.0) + thinking-sphinx (>= 1.3.6) + tzinfo (0.3.27) + uuidtools (2.1.2) + will_paginate (2.3.15) + xml-simple (1.0.15) + youtube-g (0.5.0) + +PLATFORMS + ruby + +DEPENDENCIES + colored + flickr + gravatarify + hpricot + mocha + mysql (~> 2.8.1) + paperclip (~> 2.3.11) + rails (= 3.0.7) + rake (~> 0.8.7) + shoulda (= 3.0.0.beta2) + thinking-sphinx (~> 2.0.3) + ts-delayed-delta (~> 1.1.2) + uuidtools (~> 2.1.2) + will_paginate (~> 2.3.15) + youtube-g (~> 0.5.0) diff --git a/Rakefile b/Rakefile index 3bb0e85..20773b7 100644 --- a/Rakefile +++ b/Rakefile @@ -1,10 +1,7 @@ # Add your own tasks in files placed in lib/tasks ending in .rake, # for example lib/tasks/capistrano.rake, and they will automatically be available to Rake. 
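The new Gemfile above takes over from the config.gem declarations that the old config/environment.rb carried (they reappear, commented out, later in this diff). One detail to watch, noted here as a sketch rather than a change to the patch: Bundler.require loads each gem under its gem name, so a gem whose library file is named differently — the old declaration used :lib => 'youtube_g' — may need an explicit :require, as is already done for ts-delayed-delta. An illustrative entry:

    # Hypothetical Gemfile line, not part of the patch; only needed if
    # `require 'youtube-g'` fails because the gem ships lib/youtube_g.rb,
    # as the old :lib => 'youtube_g' option suggests.
    gem 'youtube-g', '~> 0.5.0', :require => 'youtube_g'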
-require(File.join(File.dirname(__FILE__), 'config', 'boot')) - +require File.expand_path('../config/application', __FILE__) require 'rake' -require 'rake/testtask' -require 'rake/rdoctask' -require 'tasks/rails' +LovdByLess::Application.load_tasks diff --git a/app/controllers/application.rb b/app/controllers/application_controller.rb similarity index 98% rename from app/controllers/application.rb rename to app/controllers/application_controller.rb index 42e7a16..125e5ff 100644 --- a/app/controllers/application.rb +++ b/app/controllers/application_controller.rb @@ -1,7 +1,8 @@ class ApplicationController < ActionController::Base + protect_from_forgery + helper :all include ExceptionNotifiable - filter_parameter_logging "password" before_filter :allow_to, :check_user, :set_profile, :login_from_cookie, :login_required, :check_permissions, :pagination_defaults diff --git a/app/helpers/profiles_helper.rb b/app/helpers/profiles_helper.rb index 140b726..c899ebf 100644 --- a/app/helpers/profiles_helper.rb +++ b/app/helpers/profiles_helper.rb @@ -1,7 +1,9 @@ -require 'avatar/view/action_view_support' +# FIXME: gravatarify +#require 'avatar/view/action_view_support' module ProfilesHelper - include Avatar::View::ActionViewSupport + # FIXME: gravatarify + # include Avatar::View::ActionViewSupport def icon profile, size = :small, img_opts = {} return "" if profile.nil? diff --git a/config.ru b/config.ru new file mode 100644 index 0000000..04e99ca --- /dev/null +++ b/config.ru @@ -0,0 +1,4 @@ +# This file is used by Rack-based servers to start the application. + +require ::File.expand_path('../config/environment', __FILE__) +run LovdByLess::Application diff --git a/config/application.rb b/config/application.rb new file mode 100644 index 0000000..4bd6312 --- /dev/null +++ b/config/application.rb @@ -0,0 +1,43 @@ +require File.expand_path('../boot', __FILE__) + +require 'rails/all' + +# If you have a Gemfile, require the gems listed there, including any gems +# you've limited to :test, :development, or :production. +Bundler.require(:default, Rails.env) if defined?(Bundler) + +module LovdByLess + class Application < Rails::Application + # Settings in config/environments/* take precedence over those specified here. + # Application configuration should go into files in config/initializers + # -- all .rb files in that directory are automatically loaded. + + # Custom directories with classes and modules you want to be autoloadable. + # config.autoload_paths += %W(#{config.root}/extras) + + # Only load the plugins named here, in the order given (default is alphabetical). + # :all can be used as a placeholder for all plugins not explicitly named. + # config.plugins = [ :exception_notification, :ssl_requirement, :all ] + + # Activate observers that should always be running. + # config.active_record.observers = :cacher, :garbage_collector, :forum_observer + + # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone. + # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC. + config.time_zone = 'UTC' + + # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded. + # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s] + # config.i18n.default_locale = :de + + # JavaScript files you want as :defaults (application.js is always included). + # config.action_view.javascript_expansions[:defaults] = %w(jquery rails) + + # Configure the default encoding used in templates for Ruby 1.9. 
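The ProfilesHelper hunk above comments out the old avatar plugin and leaves a gravatarify FIXME. A minimal sketch of how the icon helper could sit on top of gravatarify instead — the gravatar_tag call, its options, and the size map are assumptions about that gem's API, not the app's actual implementation:

    # Sketch only, not part of the patch. Assumes gravatarify's view helper
    # accepts an email plus a :size option; the size map mirrors the old
    # SizedGravatarSource mapping that is commented out later in this diff.
    module ProfilesHelper
      GRAVATAR_SIZES = { :small => 50, :medium => 100, :large => 150, :big => 150 }

      def icon(profile, size = :small, img_opts = {})
        return "" if profile.nil?
        gravatar_tag(profile.email, { :size => GRAVATAR_SIZES[size] || 50 }.merge(img_opts))
      end
    end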
+ config.encoding = "utf-8" + + # Configure sensitive parameters which will be filtered from the log file. + config.filter_parameters += [:password] + + end +end diff --git a/config/boot.rb b/config/boot.rb index 6a30b54..4489e58 100644 --- a/config/boot.rb +++ b/config/boot.rb @@ -1,109 +1,6 @@ -# Don't change this file! -# Configure your app in config/environment.rb and config/environments/*.rb +require 'rubygems' -RAILS_ROOT = "#{File.dirname(__FILE__)}/.." unless defined?(RAILS_ROOT) +# Set up gems listed in the Gemfile. +ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__) -module Rails - class << self - def boot! - unless booted? - preinitialize - pick_boot.run - end - end - - def booted? - defined? Rails::Initializer - end - - def pick_boot - (vendor_rails? ? VendorBoot : GemBoot).new - end - - def vendor_rails? - File.exist?("#{RAILS_ROOT}/vendor/rails") - end - - def preinitialize - load(preinitializer_path) if File.exist?(preinitializer_path) - end - - def preinitializer_path - "#{RAILS_ROOT}/config/preinitializer.rb" - end - end - - class Boot - def run - load_initializer - Rails::Initializer.run(:set_load_path) - end - end - - class VendorBoot < Boot - def load_initializer - require "#{RAILS_ROOT}/vendor/rails/railties/lib/initializer" - Rails::Initializer.run(:install_gem_spec_stubs) - end - end - - class GemBoot < Boot - def load_initializer - self.class.load_rubygems - load_rails_gem - require 'initializer' - end - - def load_rails_gem - if version = self.class.gem_version - gem 'rails', version - else - gem 'rails' - end - rescue Gem::LoadError => load_error - $stderr.puts %(Missing the Rails #{version} gem. Please `gem install -v=#{version} rails`, update your RAILS_GEM_VERSION setting in config/environment.rb for the Rails version you do have installed, or comment out RAILS_GEM_VERSION to use the latest version installed.) - exit 1 - end - - class << self - def rubygems_version - Gem::RubyGemsVersion if defined? Gem::RubyGemsVersion - end - - def gem_version - if defined? RAILS_GEM_VERSION - RAILS_GEM_VERSION - elsif ENV.include?('RAILS_GEM_VERSION') - ENV['RAILS_GEM_VERSION'] - else - parse_gem_version(read_environment_rb) - end - end - - def load_rubygems - require 'rubygems' - min_version = '1.1.1' - unless rubygems_version >= min_version - $stderr.puts %Q(Rails requires RubyGems >= #{min_version} (you have #{rubygems_version}). Please `gem update --system` and try again.) - exit 1 - end - - rescue LoadError - $stderr.puts %Q(Rails requires RubyGems >= #{min_version}. Please install RubyGems and try again: http://rubygems.rubyforge.org) - exit 1 - end - - def parse_gem_version(text) - $1 if text =~ /^[^#]*RAILS_GEM_VERSION\s*=\s*["']([!~<>=]*\s*[\d.]+)["']/ - end - - private - def read_environment_rb - File.read("#{RAILS_ROOT}/config/environment.rb") - end - end - end -end - -# All that for this: -Rails.boot! +require 'bundler/setup' if File.exists?(ENV['BUNDLE_GEMFILE']) diff --git a/config/environment.rb b/config/environment.rb index 6b775cb..671a79c 100644 --- a/config/environment.rb +++ b/config/environment.rb @@ -1,44 +1,49 @@ +# Load the rails application +require File.expand_path('../application', __FILE__) -RAILS_GEM_VERSION = '2.2.2' unless defined? RAILS_GEM_VERSION +# Initialize the rails application +LovdByLess::Application.initialize! +#Less::JsRoutes.generate! 
-# Bootstrap the Rails environment, frameworks, and default configuration -require File.join(File.dirname(__FILE__), 'boot') -Rails::Initializer.run do |config| - - # Cookie sessions (limit = 4K) - # WARNING: You MUST generate a new secret (use "rake secret") and add it below! - config.action_controller.session = { - :session_key => '_your_app_name', - :secret => '0677a17f4e94869409e7aecb29a00fd9' # <- New secret key goes here - } - config.action_controller.session_store = :active_record_store - - # Use SQL instead of Active Record's schema dumper when creating the test database. - # This is necessary if your schema can't be completely dumped by the schema dumper, - # like if you have constraints or database-specific column types - # config.active_record.schema_format = :sql - - # Activate observers that should always be running - # config.active_record.observers = :cacher, :garbage_collector - - # Make Active Record use UTC-base instead of local time - config.time_zone = 'UTC' - - # Gem dependencies - config.gem 'will_paginate', :version => '~> 2.2.2' - config.gem 'colored', :version=> '1.1' - config.gem 'youtube-g', :version=> '0.4.9.9', :lib=>'youtube_g' - config.gem 'uuidtools', :version=> '1.0.4' - config.gem 'hpricot', :version=> '0.6.164' - config.gem 'mocha', :version=> '0.9.3' - config.gem 'redgreen', :version=> '1.2.2' unless ENV['TM_MODE'] - config.gem 'gcnovus-avatar', :version=> '0.0.7', :lib => 'avatar' - config.gem 'paperclip', :version=> '2.1.2' - - - - -end - -Less::JsRoutes.generate! +# RAILS_GEM_VERSION = '2.2.2' unless defined? RAILS_GEM_VERSION +# +# # Bootstrap the Rails environment, frameworks, and default configuration +# require File.join(File.dirname(__FILE__), 'boot') +# +# Rails::Initializer.run do |config| +# +# # Cookie sessions (limit = 4K) +# # WARNING: You MUST generate a new secret (use "rake secret") and add it below! +# config.action_controller.session = { +# :session_key => '_your_app_name', +# :secret => '0677a17f4e94869409e7aecb29a00fd9' # <- New secret key goes here +# } +# config.action_controller.session_store = :active_record_store +# +# # Use SQL instead of Active Record's schema dumper when creating the test database. 
+# # This is necessary if your schema can't be completely dumped by the schema dumper, +# # like if you have constraints or database-specific column types +# # config.active_record.schema_format = :sql +# +# # Activate observers that should always be running +# # config.active_record.observers = :cacher, :garbage_collector +# +# # Make Active Record use UTC-base instead of local time +# config.time_zone = 'UTC' +# +# # Gem dependencies +# config.gem 'will_paginate', :version => '~> 2.2.2' +# config.gem 'colored', :version=> '1.1' +# config.gem 'youtube-g', :version=> '0.4.9.9', :lib=>'youtube_g' +# config.gem 'uuidtools', :version=> '1.0.4' +# config.gem 'hpricot', :version=> '0.6.164' +# config.gem 'mocha', :version=> '0.9.3' +# config.gem 'redgreen', :version=> '1.2.2' unless ENV['TM_MODE'] +# config.gem 'gcnovus-avatar', :version=> '0.0.7', :lib => 'avatar' +# config.gem 'paperclip', :version=> '2.1.2' +# +# +# +# +# end diff --git a/config/environments/development.rb b/config/environments/development.rb index 02706be..8749cd3 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -1,22 +1,26 @@ -# Settings specified here will take precedence over those in config/environment.rb +LovdByLess::Application.configure do + # Settings specified here will take precedence over those in config/application.rb -# In the development environment your application's code is reloaded on -# every request. This slows down response time but is perfect for development -# since you don't have to restart the webserver when you make code changes. -config.cache_classes = false + # In the development environment your application's code is reloaded on + # every request. This slows down response time but is perfect for development + # since you don't have to restart the webserver when you make code changes. + config.cache_classes = false -# Log error messages when you accidentally call methods on nil. -config.whiny_nils = true + # Log error messages when you accidentally call methods on nil. + config.whiny_nils = true -# Show full error reports and disable caching -config.action_controller.consider_all_requests_local = true -config.action_controller.perform_caching = false -config.action_view.debug_rjs = true -config.action_controller.asset_host = "http://localhost:3000" + # Show full error reports and disable caching + config.consider_all_requests_local = true + config.action_view.debug_rjs = true + config.action_controller.perform_caching = false -# needed for Avatar::Source::RailsAssetSource -config.action_controller.asset_host = "http://localhost:3000" + # Don't care if the mailer can't send + config.action_mailer.raise_delivery_errors = false + + # Print deprecation notices to the Rails logger + config.active_support.deprecation = :log + + # Only use best-standards-support built into browsers + config.action_dispatch.best_standards_support = :builtin +end -# Don't care if the mailer can't send -config.action_mailer.raise_delivery_errors = true -config.action_mailer.delivery_method = :test diff --git a/config/environments/production.rb b/config/environments/production.rb index 9f3bb78..6fceafe 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -1,18 +1,49 @@ -# Settings specified here will take precedence over those in config/environment.rb +LovdByLess::Application.configure do + # Settings specified here will take precedence over those in config/application.rb -# The production environment is meant for finished, "live" apps. 
-# Code is not reloaded between requests -config.cache_classes = true + # The production environment is meant for finished, "live" apps. + # Code is not reloaded between requests + config.cache_classes = true -# Use a different logger for distributed setups -# config.logger = SyslogLogger.new + # Full error reports are disabled and caching is turned on + config.consider_all_requests_local = false + config.action_controller.perform_caching = true -# Full error reports are disabled and caching is turned on -config.action_controller.consider_all_requests_local = false -config.action_controller.perform_caching = true + # Specifies the header that your server uses for sending files + config.action_dispatch.x_sendfile_header = "X-Sendfile" -# needed for Avatar::Source::RailsAssetSource -config.action_controller.asset_host = "http://CHANGE THIS VALUE.com" + # For nginx: + # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' -# Disable delivery errors, bad email addresses will be ignored -# config.action_mailer.raise_delivery_errors = false + # If you have no front-end server that supports something like X-Sendfile, + # just comment this out and Rails will serve the files + + # See everything in the log (default is :info) + # config.log_level = :debug + + # Use a different logger for distributed setups + # config.logger = SyslogLogger.new + + # Use a different cache store in production + # config.cache_store = :mem_cache_store + + # Disable Rails's static asset server + # In production, Apache or nginx will already do this + config.serve_static_assets = false + + # Enable serving of images, stylesheets, and javascripts from an asset server + # config.action_controller.asset_host = "http://assets.example.com" + + # Disable delivery errors, bad email addresses will be ignored + # config.action_mailer.raise_delivery_errors = false + + # Enable threaded mode + # config.threadsafe! + + # Enable locale fallbacks for I18n (makes lookups for any locale fall back to + # the I18n.default_locale when a translation can not be found) + config.i18n.fallbacks = true + + # Send deprecation notices to registered listeners + config.active_support.deprecation = :notify +end diff --git a/config/environments/test.rb b/config/environments/test.rb index 5a55648..f6adb25 100644 --- a/config/environments/test.rb +++ b/config/environments/test.rb @@ -1,25 +1,35 @@ -# require "ruby-debug" +LovdByLess::Application.configure do + # Settings specified here will take precedence over those in config/application.rb -# Settings specified here will take precedence over those in config/environment.rb + # The test environment is used exclusively to run your application's + # test suite. You never need to work with it otherwise. Remember that + # your test database is "scratch space" for the test suite and is wiped + # and recreated between test runs. Don't rely on the data there! + config.cache_classes = true -# The test environment is used exclusively to run your application's -# test suite. You never need to work with it otherwise. Remember that -# your test database is "scratch space" for the test suite and is wiped -# and recreated between test runs. Don't rely on the data there! -config.cache_classes = true + # Log error messages when you accidentally call methods on nil. + config.whiny_nils = true -# Log error messages when you accidentally call methods on nil. 
-config.whiny_nils = true + # Show full error reports and disable caching + config.consider_all_requests_local = true + config.action_controller.perform_caching = false -# Show full error reports and disable caching -config.action_controller.consider_all_requests_local = true -config.action_controller.perform_caching = false -config.action_controller.asset_host = "http://test.host" + # Raise exceptions instead of rendering exception templates + config.action_dispatch.show_exceptions = false -# needed for Avatar::Source::RailsAssetSource -config.action_controller.asset_host = "http://test.host" + # Disable request forgery protection in test environment + config.action_controller.allow_forgery_protection = false -# Tell ActionMailer not to deliver emails to the real world. -# The :test delivery method accumulates sent emails in the -# ActionMailer::Base.deliveries array. -config.action_mailer.delivery_method = :test \ No newline at end of file + # Tell Action Mailer not to deliver emails to the real world. + # The :test delivery method accumulates sent emails in the + # ActionMailer::Base.deliveries array. + config.action_mailer.delivery_method = :test + + # Use SQL instead of Active Record's schema dumper when creating the test database. + # This is necessary if your schema can't be completely dumped by the schema dumper, + # like if you have constraints or database-specific column types + # config.active_record.schema_format = :sql + + # Print deprecation notices to the stderr + config.active_support.deprecation = :stderr +end diff --git a/config/initializers/avatar_sources.rb b/config/initializers/avatar_sources.rb index 405661c..7bdae91 100644 --- a/config/initializers/avatar_sources.rb +++ b/config/initializers/avatar_sources.rb @@ -1,30 +1,30 @@ -require 'avatar' -require 'avatar/source/paperclip_source' -require 'avatar/source/source_chain' -require 'avatar/source/static_url_source' -require 'avatar/source/wrapper/rails_asset_source_wrapper' -require 'avatar/source/wrapper/string_substitution_source_wrapper' -require 'sized_gravatar_source' - -# order: -# 1. Paperclip(Profile#icon) -# 2. Gravatar(Profile#email), with default -# a RailsAssetSourceWrapper containing -# a StringSubstitutionSourceWrapper containing -# a StaticUrlSource ('/images/avatar_default_#{size}.png') -# -# Gravatar does not understand :small, :medium, and :big, -# so we must translate using SizedGravatarSource - -default = Avatar::Source::Wrapper::RailsAssetSourceWrapper.new( - Avatar::Source::Wrapper::StringSubstitutionSourceWrapper.new( - Avatar::Source::StaticUrlSource.new('/images/avatar_default_#{size}.png'), - {:size => :small} - ) -) - -chain = Avatar::Source::SourceChain.new -chain << Avatar::Source::PaperclipSource.new(:icon) -chain << SizedGravatarSource.new(default, :email) - -Avatar::source = chain +# require 'avatar' +# require 'avatar/source/paperclip_source' +# require 'avatar/source/source_chain' +# require 'avatar/source/static_url_source' +# require 'avatar/source/wrapper/rails_asset_source_wrapper' +# require 'avatar/source/wrapper/string_substitution_source_wrapper' +# require 'sized_gravatar_source' +# +# # order: +# # 1. Paperclip(Profile#icon) +# # 2. 
Gravatar(Profile#email), with default +# # a RailsAssetSourceWrapper containing +# # a StringSubstitutionSourceWrapper containing +# # a StaticUrlSource ('/images/avatar_default_#{size}.png') +# # +# # Gravatar does not understand :small, :medium, and :big, +# # so we must translate using SizedGravatarSource +# +# default = Avatar::Source::Wrapper::RailsAssetSourceWrapper.new( +# Avatar::Source::Wrapper::StringSubstitutionSourceWrapper.new( +# Avatar::Source::StaticUrlSource.new('/images/avatar_default_#{size}.png'), +# {:size => :small} +# ) +# ) +# +# chain = Avatar::Source::SourceChain.new +# chain << Avatar::Source::PaperclipSource.new(:icon) +# chain << SizedGravatarSource.new(default, :email) +# +# Avatar::source = chain diff --git a/config/initializers/backtrace_silencers.rb b/config/initializers/backtrace_silencers.rb new file mode 100644 index 0000000..59385cd --- /dev/null +++ b/config/initializers/backtrace_silencers.rb @@ -0,0 +1,7 @@ +# Be sure to restart your server when you modify this file. + +# You can add backtrace silencers for libraries that you're using but don't wish to see in your backtraces. +# Rails.backtrace_cleaner.add_silencer { |line| line =~ /my_noisy_library/ } + +# You can also remove all the silencers if you're trying to debug a problem that might stem from framework code. +# Rails.backtrace_cleaner.remove_silencers! diff --git a/config/initializers/lib_requires.rb b/config/initializers/lib_requires.rb deleted file mode 100644 index caa22cc..0000000 --- a/config/initializers/lib_requires.rb +++ /dev/null @@ -1,3 +0,0 @@ -require 'rubygems' -require 'ostruct' - diff --git a/config/initializers/secret_token.rb b/config/initializers/secret_token.rb new file mode 100644 index 0000000..81beded --- /dev/null +++ b/config/initializers/secret_token.rb @@ -0,0 +1,7 @@ +# Be sure to restart your server when you modify this file. + +# Your secret key for verifying the integrity of signed cookies. +# If you change this key, all old signed cookies will become invalid! +# Make sure the secret is at least 30 characters and all random, +# no regular words or you'll be exposed to dictionary attacks. +LovdByLess::Application.config.secret_token = '2d7624733a988b75213a202fa387ee5fa75f762bd178611e651337562bc94b67ec49a5872ffdccc4d00e73d50bf10062a98773e666d11f508d2dba4894014b4b' diff --git a/config/initializers/session_store.rb b/config/initializers/session_store.rb new file mode 100644 index 0000000..3f340de --- /dev/null +++ b/config/initializers/session_store.rb @@ -0,0 +1,8 @@ +# Be sure to restart your server when you modify this file. 
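The secret_token initializer above replaces the :secret entry from the old config.action_controller.session hash (visible, commented out, in config/environment.rb earlier in this diff). Because the token here is committed to the repository, a real deployment would generate its own with `rake secret`; one hedged way to wire that in, with SECRET_TOKEN as a purely illustrative environment variable name:

    # Sketch, not part of the patch: prefer an externally supplied token over
    # one checked into source control. SECRET_TOKEN is a hypothetical name.
    LovdByLess::Application.config.secret_token =
      ENV['SECRET_TOKEN'] || raise('Generate a token with `rake secret` and set SECRET_TOKEN')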
+ +LovdByLess::Application.config.session_store :cookie_store, :key => '_lovdbyless_app_session' + +# Use the database for sessions instead of the cookie-based default, +# which shouldn't be used to store highly confidential information +# (create the session table with "rails generate session_migration") +# LovdByLess::Application.config.session_store :active_record_store diff --git a/config/routes.rb b/config/routes.rb index b886ca7..b1bad2b 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -1,36 +1,44 @@ -ActionController::Routing::Routes.draw do |map| +LovdByLess::Application.routes.draw do - map.namespace :admin do |a| - a.resources :users, :collection => {:search => :post} + namespace :admin do + resources :users do + collection { post :search } + end end - map.resources :profiles, - :member=>{:delete_icon=>:post}, :collection=>{:search=>:get}, - :has_many=>[:friends, :blogs, :photos, :comments, :feed_items, :messages] + resources :profiles do + member { post :delete_icon } + collection { get :search } + # FIXME: :has_many=>[:friends, :blogs, :photos, :comments, :feed_items, :messages] + end - map.resources :messages, :collection => {:sent => :get} - map.resources :blogs do |blog| - blog.resources :comments + resources :messages do + collection { get :sent } + end + + resources :blogs do + resources :comments end - map.resources :forums, :collection => {:update_positions => :post} do |forum| - forum.resources :topics, :controller => :forum_topics do |topic| - topic.resources :posts, :controller => :forum_posts + resources :forums do + collection { post :update_positions } + resources :topics, :controller => :forum_topics do + resources :posts, :controller => :forum_posts end end - - map.with_options(:controller => 'accounts') do |accounts| - accounts.login "/login", :action => 'login' - accounts.logout "/logout", :action => 'logout' - accounts.signup "/signup", :action => 'signup' + + scope :controller => :accounts do + get :login + get :logout + get :signup end - map.with_options(:controller => 'home') do |home| - home.home '/', :action => 'index' - home.latest_comments '/latest_comments.rss', :action => 'latest_comments', :format=>'rss' - home.newest_members '/newest_members.rss', :action => 'newest_members', :format=>'rss' - home.tos '/tos', :action => 'terms' - home.contact '/contact', :action => 'contact' + scope :controller => :home do + get :index, :as => :home + get :latest_comments + get :newest_members + get :terms, :as => :tos + get :contact end end diff --git a/lib/sized_gravatar_source.rb b/lib/sized_gravatar_source.rb index 7387d06..40fe33b 100644 --- a/lib/sized_gravatar_source.rb +++ b/lib/sized_gravatar_source.rb @@ -1,20 +1,21 @@ -require 'avatar/source/gravatar_source' -class SizedGravatarSource < Avatar::Source::GravatarSource - - alias_method :parse_options_without_size, :parse_options - - def self.sizes - { :small => 50, :medium => 100, :large => 150, :big => 150 } - end - - def parse_options(profile, options) - #pass :gravatar_size through, but translate :size or :s to a number if possible - parsed_options = parse_options_without_size(profile, options) - [:size, :s].each do |k| - parsed_options[k] = self.class.sizes[options[k]] if self.class.sizes.has_key?(options[k]) - end - parsed_options - end - -end \ No newline at end of file +# require 'avatar/source/gravatar_source' +# +# class SizedGravatarSource < Avatar::Source::GravatarSource +# +# alias_method :parse_options_without_size, :parse_options +# +# def self.sizes +# { :small => 50, :medium => 100, 
:large => 150, :big => 150 } +# end +# +# def parse_options(profile, options) +# #pass :gravatar_size through, but translate :size or :s to a number if possible +# parsed_options = parse_options_without_size(profile, options) +# [:size, :s].each do |k| +# parsed_options[k] = self.class.sizes[options[k]] if self.class.sizes.has_key?(options[k]) +# end +# parsed_options +# end +# +# end \ No newline at end of file diff --git a/lib/tasks/getting_started.rake b/lib/tasks/getting_started.rake deleted file mode 100644 index 680de42..0000000 --- a/lib/tasks/getting_started.rake +++ /dev/null @@ -1,35 +0,0 @@ -namespace :gems do - namespace :dependent do - desc "Install gems required for lovd-by-less" - task :install do - windoz = /win32/ =~ RUBY_PLATFORM - gems = %w[ - rflickr - rmagick - RedCloth - ] - gems << 'win32console' if windoz - sudo = windoz ? '' : 'sudo ' - gems.each do |gem| - `#{sudo}gem install #{gem}` - end - `rake gems:build` - end - end -end - -namespace :lovdbyless do - task :check do - puts "TODO - check that all config ready" - end - - desc "Getting started with lovd-by-less" - task :getting_started => [ - "environment", - "lovdbyless:check", - "gems:dependent:install", - "db:create:all", "mig" - ] do - puts "Finished setting up enviornment and application!" - end -end diff --git a/lib/tasks/js_routes.rake b/lib/tasks/js_routes.rake deleted file mode 100644 index 7c54607..0000000 --- a/lib/tasks/js_routes.rake +++ /dev/null @@ -1,97 +0,0 @@ -debug = true - - -def build_params segs, others = '' - s = [] - segs.each do |seg| - if seg.is_a?(ActionController::Routing::DynamicSegment) - s << seg.key.to_s.gsub(':', '') - end - end - s <<( others) unless others.blank? - s.join(', ') -end - -def build_path segs - s = "" - segs.each do |seg| - if seg.is_a?(ActionController::Routing::DividerSegment) || seg.is_a?(ActionController::Routing::StaticSegment) - s << seg.instance_variable_get(:@value) - elsif seg.is_a?(ActionController::Routing::DynamicSegment) - s << "' + #{seg.key.to_s.gsub(':', '')} + '" - end - end - s -end - -def get_method route - route.instance_variable_get(:@conditions)[:method] == :get ? 'GET' : 'POST' -end -def get_params route, others = '' - x = '' - x += "'_method=#{route.instance_variable_get(:@conditions)[:method]}'" unless [:get, :post].include? route.instance_variable_get(:@conditions)[:method] - x += " + " unless x.blank? || others.blank? - x += "#{others}" unless others.blank? - x -end - -def get_js_helpers - <<-JS - function get_params(obj){ - for (prop in obj){ - console.log(prop + ": " + obj[prop]); - } - } -JS -end - - - - - -desc "Make a js file that will have functions that will return restful routes/urls." -task :js_routes => :environment do - s = get_js_helpers - ActionController::Routing::Routes.routes.each do |route| - name = ActionController::Routing::Routes.named_routes.routes.index(route).to_s - next if name.blank? 
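The config/routes.rb rewrite a few hunks up leaves a FIXME where the old map.resources :profiles, :has_many => [...] nesting used to be, and the old map.home '/' mapping has no direct equivalent in the new scope block. A sketch of how both could be expressed in the Rails 3 DSL — untested against this app's controllers, so treat it as illustrative rather than as the intended routing:

    # Illustrative only; restores the nesting the FIXME refers to.
    resources :profiles do
      member     { post :delete_icon }
      collection { get  :search }
      resources :friends
      resources :blogs
      resources :photos
      resources :comments
      resources :feed_items
      resources :messages
    end

    # Rails 3 spelling of the old home '/' route.
    root :to => 'home#index'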
- s << "/////\n//#{route}\n" if debug - s << <<-JS -function #{name}_path(#{build_params route.segments}){ return '#{build_path route.segments}';} -function #{name}_ajax(#{build_params route.segments, 'params'}){ - return jQuery.ajax({ - url: '#{build_path route.segments}', - type: '#{get_method route}', - params: #{get_params route, 'params'} - }); -} -JS - end - File.open(RAILS_ROOT + '/public/javascripts/less_routes.js', 'w') do |f| - f.write s - end -end -=begin -"admin_users", :action=>"index"}, -@defaults={:controller=>"admin_users", :action=>"index"}, -@optimise=true, -@segments=[#, -, -, -, -], -@conditions={:method=>:get}, -@to_s="GET /admin_users.:format/ {:controller=>\"admin_users\", :action=>\"index\"}", -@significant_keys=[:format, :controller, :action]> -=end -# # -# @segments=[ -# , -# , -# , -# , -# , -# , -# , -# , -# ] \ No newline at end of file diff --git a/lib/tasks/less.rake b/lib/tasks/less.rake deleted file mode 100644 index e07a89d..0000000 --- a/lib/tasks/less.rake +++ /dev/null @@ -1,66 +0,0 @@ -# desc "Rebuild development database" -# task :rebuild_database => [] # 'log:clear', 'db:migrate', :default] -# -# task :all => ['log:clear', :default] do -# end -# -# task :wipe_devel_database => :environment do -# ActiveRecord::Base.establish_connection(:development) -# conf = ActiveRecord::Base.configurations -# ActiveRecord::Base.connection.execute("drop database #{conf['development']['database']};") -# ActiveRecord::Base.connection.execute("create database #{conf['development']['database']};") -# ActiveRecord::Base.establish_connection(:development) -# end -# -# desc "Load fixtures data into the development database" -# task :load_fixtures_to_development_weg do -# ActiveRecord::Base.establish_connection(:development) -# require 'active_record/fixtures' -# -# fixtures_to_load = ActiveRecord::Base.configurations[:fixtures_load_order] -# if fixtures_to_load.nil? -# raise 'Define ActiveRecord::Base.configurations[:fixtures_load_order] = [:model_name] in your environment first' -# end -# Fixtures.create_fixtures("test/fixtures", fixtures_to_load) -# end -# -# desc "Alias for :update_development task" -# task :ud => [:update_development] - -desc "Commit changes to subversion and run tests" -task :ci => [:check_uncommitted_files, :default, :svn_commit] -# -# desc "Update the project (development) and run data migrations" -# task :update_development => [:svn_update, :rebuild_database, :default] - -desc "Run 'svn update' command" -task :svn_update do - puts `svn update` -end - -desc "Run 'svn commit' command" -task :svn_commit => [:check_uncommitted_files] do - raise "\n\n!!!!! You must specify a message for the commit (example: m='some message') !!!!!\n\n" if ENV['m'].nil? - puts `svn commit -m "#{ENV['m']}"` - # svn_commit_result =~ /Committed revision (\d+)\.$/ - # svn_version = $1.to_i - # puts svn_commit_result -end - -desc "Check uncommitted files" -task :check_uncommitted_files do - svn_status_result = `svn status` - if svn_status_result.index(/^\?/) - puts svn_status_result - raise "\n\n!!!!! 
You have local files not added to subversion (note the question marks above) !!!!!\n\n" - end -end -# -# desc "Trim trailing spaces and convert tab to spaces" -# task :trim_codes do -# include LineFormatter -# format_dir("#{File.dirname(__FILE__)}/../../", /.*\.((rhtml)|(rb)|(yml)|(css))$/) do |line| -# remove_trailing_whitespace(detab(line, 2)) -# end -# end - diff --git a/lib/tasks/migrate.rake b/lib/tasks/migrate.rake deleted file mode 100644 index b0bfc5d..0000000 --- a/lib/tasks/migrate.rake +++ /dev/null @@ -1,11 +0,0 @@ -task :mig do - puts 'rake db:migrate RAILS_ENV="development"' - system "rake db:migrate RAILS_ENV='development'" - puts "rake db:test:clone" - system "rake db:test:clone" - if !ENV['a'].nil? && ENV['a'].size > 0 - puts "NOT RUNNING: rake annotate_models" - else - system "rake annotate_models" - end -end diff --git a/locales/en.yml b/locales/en.yml new file mode 100644 index 0000000..a747bfa --- /dev/null +++ b/locales/en.yml @@ -0,0 +1,5 @@ +# Sample localization file for English. Add more files in this directory for other locales. +# See http://github.com/svenfuchs/rails-i18n/tree/master/rails%2Flocale for starting points. + +en: + hello: "Hello world" diff --git a/script/about b/script/about deleted file mode 100755 index 7b07d46..0000000 --- a/script/about +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/about' \ No newline at end of file diff --git a/script/breakpointer b/script/breakpointer deleted file mode 100755 index 64af76e..0000000 --- a/script/breakpointer +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/breakpointer' \ No newline at end of file diff --git a/script/console b/script/console deleted file mode 100755 index 42f28f7..0000000 --- a/script/console +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/console' \ No newline at end of file diff --git a/script/dbconsole b/script/dbconsole deleted file mode 100755 index caa60ce..0000000 --- a/script/dbconsole +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/dbconsole' diff --git a/script/destroy b/script/destroy deleted file mode 100755 index fa0e6fc..0000000 --- a/script/destroy +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/destroy' \ No newline at end of file diff --git a/script/generate b/script/generate deleted file mode 100755 index ef976e0..0000000 --- a/script/generate +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/generate' \ No newline at end of file diff --git a/script/performance/benchmarker b/script/performance/benchmarker deleted file mode 100755 index c842d35..0000000 --- a/script/performance/benchmarker +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/performance/benchmarker' diff --git a/script/performance/profiler b/script/performance/profiler deleted file mode 100755 index d855ac8..0000000 --- a/script/performance/profiler +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/performance/profiler' diff --git a/script/performance/request b/script/performance/request deleted file mode 100755 index 
ae3f38c..0000000 --- a/script/performance/request +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/performance/request' diff --git a/script/plugin b/script/plugin deleted file mode 100755 index 26ca64c..0000000 --- a/script/plugin +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/plugin' \ No newline at end of file diff --git a/script/process/inspector b/script/process/inspector deleted file mode 100755 index bf25ad8..0000000 --- a/script/process/inspector +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/process/inspector' diff --git a/script/process/reaper b/script/process/reaper deleted file mode 100755 index c77f045..0000000 --- a/script/process/reaper +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/process/reaper' diff --git a/script/process/spawner b/script/process/spawner deleted file mode 100755 index 7118f39..0000000 --- a/script/process/spawner +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/process/spawner' diff --git a/script/rails b/script/rails new file mode 100755 index 0000000..f8da2cf --- /dev/null +++ b/script/rails @@ -0,0 +1,6 @@ +#!/usr/bin/env ruby +# This command will automatically be run when you run "rails" with Rails 3 gems installed from the root of your application. + +APP_PATH = File.expand_path('../../config/application', __FILE__) +require File.expand_path('../../config/boot', __FILE__) +require 'rails/commands' diff --git a/script/runner b/script/runner deleted file mode 100755 index ccc30f9..0000000 --- a/script/runner +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/runner' \ No newline at end of file diff --git a/script/server b/script/server deleted file mode 100755 index dfabcb8..0000000 --- a/script/server +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/server' \ No newline at end of file diff --git a/test/functional/accounts_controller_test.rb b/test/functional/accounts_controller_test.rb index 75f24ad..063302d 100644 --- a/test/functional/accounts_controller_test.rb +++ b/test/functional/accounts_controller_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class AccountsControllerTest < ActionController::TestCase diff --git a/test/functional/admin/users_controller_test.rb b/test/functional/admin/users_controller_test.rb index 7574313..2492f2d 100644 --- a/test/functional/admin/users_controller_test.rb +++ b/test/functional/admin/users_controller_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../../test_helper' +require 'test_helper' class Admin::UsersControllerTest < ActionController::TestCase diff --git a/test/functional/blogs_controller_test.rb b/test/functional/blogs_controller_test.rb index 3261862..529e4bb 100644 --- a/test/functional/blogs_controller_test.rb +++ b/test/functional/blogs_controller_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class BlogsControllerTest < ActionController::TestCase diff --git a/test/functional/comments_controller_test.rb b/test/functional/comments_controller_test.rb index 
29ba6af..14bfbd0 100644 --- a/test/functional/comments_controller_test.rb +++ b/test/functional/comments_controller_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class CommentsControllerTest < ActionController::TestCase diff --git a/test/functional/feed_items_controller_test.rb b/test/functional/feed_items_controller_test.rb index d653dd0..a22c8d1 100644 --- a/test/functional/feed_items_controller_test.rb +++ b/test/functional/feed_items_controller_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class FeedItemsControllerTest < ActionController::TestCase diff --git a/test/functional/forum_posts_controller_test.rb b/test/functional/forum_posts_controller_test.rb index 0bad1b2..7837cf8 100644 --- a/test/functional/forum_posts_controller_test.rb +++ b/test/functional/forum_posts_controller_test.rb @@ -6,11 +6,11 @@ # -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class ForumPostsControllerTest < ActionController::TestCase - include ForumsTestHelper + # include ForumsTestHelper ## # :index diff --git a/test/functional/forum_topics_controller_test.rb b/test/functional/forum_topics_controller_test.rb index 3fada3f..c3a7d27 100644 --- a/test/functional/forum_topics_controller_test.rb +++ b/test/functional/forum_topics_controller_test.rb @@ -5,12 +5,12 @@ # Updated on: 5/16/08 # -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class ForumTopicsControllerTest < ActionController::TestCase - include ForumsTestHelper + # include ForumsTestHelper ## diff --git a/test/functional/forums_controller_test.rb b/test/functional/forums_controller_test.rb index 4da365b..9362725 100644 --- a/test/functional/forums_controller_test.rb +++ b/test/functional/forums_controller_test.rb @@ -3,11 +3,11 @@ # Author: Les Freeman (lesliefreeman3@gmail.com) # -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class ForumsControllerTest < ActionController::TestCase - include ForumsTestHelper + # include ForumsTestHelper ## # :index diff --git a/test/functional/friends_controller_test.rb b/test/functional/friends_controller_test.rb index db9128d..d596dfb 100644 --- a/test/functional/friends_controller_test.rb +++ b/test/functional/friends_controller_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class FriendsControllerTest < ActionController::TestCase diff --git a/test/functional/home_controller_test.rb b/test/functional/home_controller_test.rb index 891e3d3..4c54e0d 100644 --- a/test/functional/home_controller_test.rb +++ b/test/functional/home_controller_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class HomeControllerTest < ActionController::TestCase diff --git a/test/functional/messages_controller_test.rb b/test/functional/messages_controller_test.rb index 0a46555..f2dd770 100644 --- a/test/functional/messages_controller_test.rb +++ b/test/functional/messages_controller_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class MessagesControllerTest < ActionController::TestCase diff --git a/test/functional/photos_controller_test.rb b/test/functional/photos_controller_test.rb index 2473fcf..24cd22b 100644 --- a/test/functional/photos_controller_test.rb +++ b/test/functional/photos_controller_test.rb @@ -1,21 +1,22 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' 
class PhotosControllerTest < ActionController::TestCase - VALID_PHOTO = { - :image => ActionController::TestUploadedFile.new(File.join(RAILS_ROOT, 'public/images/avatar_default_big.png'), 'image/png') - } + + #VALID_PHOTO = { + # :image => ActionController::TestUploadedFile.new(File.join(RAILS_ROOT, 'public/images/avatar_default_big.png'), 'image/png') + #} context 'on GET to :index while not logged in' do setup do get :index, {:profile_id => profiles(:user).id} end - should_assign_to :profile - should_assign_to :photos - should_respond_with :success - should_render_template :index - should_not_set_the_flash + should assign_to :profile + should assign_to :photos + should respond_with :success + should render_template :index + should_not set_the_flash should "not render the upload form" do assert_no_tag :tag => 'form', :attributes => {:action => profile_photos_path(assigns(:profile))} end @@ -27,11 +28,11 @@ class PhotosControllerTest < ActionController::TestCase get :index, {:profile_id => profiles(:user).id}, {:user => users(:user).id} end - should_assign_to :profile - should_assign_to :photos - should_respond_with :success - should_render_template :index - should_not_set_the_flash + should assign_to :profile + should assign_to :photos + should respond_with :success + should render_template :index + should_not set_the_flash should "render the upload form" do assert_tag :tag => 'form', :attributes => {:action => profile_photos_path(assigns(:profile))} end @@ -42,11 +43,11 @@ class PhotosControllerTest < ActionController::TestCase get :index, {:profile_id => profiles(:user).id}, {:user => users(:user2).id} end - should_assign_to :profile - should_assign_to :photos - should_respond_with :success - should_render_template :index - should_not_set_the_flash + should assign_to :profile + should assign_to :photos + should respond_with :success + should render_template :index + should_not set_the_flash should "not render the upload form" do assert_no_tag :tag => 'form', :attributes => {:action => profile_photos_path(assigns(:profile))} end @@ -57,9 +58,9 @@ class PhotosControllerTest < ActionController::TestCase get :show, {:profile_id => profiles(:user).id, :id => photos(:first)} end - should_respond_with :redirect - should_redirect_to 'profile_photos_path(profiles(:user))' - should_not_set_the_flash + should respond_with :redirect + should redirect_to 'profile_photos_path(profiles(:user))' + should_not set_the_flash end @@ -70,9 +71,9 @@ class PhotosControllerTest < ActionController::TestCase end end - should_respond_with :redirect - should_redirect_to 'profile_photos_path(profiles(:user))' - should_set_the_flash_to 'Photo was deleted.' + should respond_with :redirect + should redirect_to 'profile_photos_path(profiles(:user))' + should set_the_flash.to 'Photo was deleted.' end context 'on DELETE to :destroy while logged in as :user' do @@ -82,9 +83,9 @@ class PhotosControllerTest < ActionController::TestCase end end - should_respond_with :redirect - should_redirect_to 'home_path' - should_set_the_flash_to 'It looks like you don\'t have permission to view that page.' + should respond_with :redirect + should redirect_to 'home_path' + should set_the_flash.to 'It looks like you don\'t have permission to view that page.' 
end context 'on DELETE to :destroy while logged not in' do @@ -94,9 +95,9 @@ class PhotosControllerTest < ActionController::TestCase end end - should_respond_with :redirect - should_redirect_to 'home_path' - should_set_the_flash_to 'It looks like you don\'t have permission to view that page.' + should respond_with :redirect + should redirect_to 'home_path' + should set_the_flash.to 'It looks like you don\'t have permission to view that page.' end @@ -108,9 +109,9 @@ class PhotosControllerTest < ActionController::TestCase end end - should_respond_with :redirect - should_redirect_to 'profile_photos_path(profiles(:user))' - should_set_the_flash_to 'Photo successfully uploaded.' + should respond_with :redirect + should redirect_to 'profile_photos_path(profiles(:user))' + should set_the_flash.to 'Photo successfully uploaded.' end context 'on POST to :create with bad data while logged in as :owner' do @@ -120,8 +121,8 @@ class PhotosControllerTest < ActionController::TestCase end end - should_respond_with :success - should_render_template 'index' + should respond_with :success + should render_template 'index' end context 'on POST to :create while logged in as :user' do @@ -131,8 +132,8 @@ class PhotosControllerTest < ActionController::TestCase end end - should_respond_with :redirect - should_redirect_to 'home_path' + should respond_with :redirect + should redirect_to 'home_path' end context 'on POST to :create while logged not in' do @@ -142,8 +143,8 @@ class PhotosControllerTest < ActionController::TestCase end end - should_respond_with :redirect - should_redirect_to 'home_path' + should respond_with :redirect + should redirect_to 'home_path' end diff --git a/test/functional/profiles_controller_test.rb b/test/functional/profiles_controller_test.rb index 2eb25c5..848c720 100644 --- a/test/functional/profiles_controller_test.rb +++ b/test/functional/profiles_controller_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class ProfilesControllerTest < ActionController::TestCase @@ -8,9 +8,9 @@ class ProfilesControllerTest < ActionController::TestCase post :search, {:q => 'user'} end - should_assign_to :results - should_respond_with :success - should_render_template :search + should assign_to :results + should respond_with :success + should render_template :search end context 'on GET to :index' do @@ -19,9 +19,9 @@ class ProfilesControllerTest < ActionController::TestCase get :index end - should_assign_to :results - should_respond_with :success - should_render_template :search + should assign_to :results + should respond_with :success + should render_template :search end context 'on GET to :show while not logged in' do @@ -30,11 +30,11 @@ class ProfilesControllerTest < ActionController::TestCase assert_match "Sign-up to Follow", @response.body end - should_assign_to :user - should_assign_to :profile - should_respond_with :success - should_render_template :show - should_not_set_the_flash + should assign_to :user + should assign_to :profile + should respond_with :success + should render_template :show + should_not set_the_flash end context 'on GET to :show.rss while not logged in' do @@ -43,11 +43,11 @@ class ProfilesControllerTest < ActionController::TestCase assert_match "\n \n #{SITE_NAME} Activity Feed", @response.body end - should_assign_to :user - should_assign_to :profile - should_respond_with :success - should_render_template :show - should_not_set_the_flash + should assign_to :user + should assign_to :profile + should respond_with :success + 
should render_template :show + should_not set_the_flash end context 'on GET to :edit while not logged in' do @@ -55,10 +55,10 @@ class ProfilesControllerTest < ActionController::TestCase get :edit, {:id => profiles(:user).id} end - should_not_assign_to :user - should_respond_with :redirect - should_redirect_to 'login_path' - should_not_set_the_flash + should_not assign_to :user + should respond_with :redirect + should redirect_to 'login_path' + should_not set_the_flash end @@ -67,11 +67,11 @@ class ProfilesControllerTest < ActionController::TestCase get :show, {:id => profiles(:user).id}, {:user => profiles(:user).id} end - should_assign_to :user - should_assign_to :profile - should_respond_with :success - should_render_template :show - should_not_set_the_flash + should assign_to :user + should assign_to :profile + should respond_with :success + should render_template :show + should_not set_the_flash end context 'on GET to :show while logged in as :user3' do @@ -81,11 +81,11 @@ class ProfilesControllerTest < ActionController::TestCase assert_match "Be Friends", @response.body end - should_assign_to :user - should_assign_to :profile - should_respond_with :success - should_render_template :show - should_not_set_the_flash + should assign_to :user + should assign_to :profile + should respond_with :success + should render_template :show + should_not set_the_flash end context 'on GET to :show while logged in as :user2' do @@ -94,11 +94,11 @@ class ProfilesControllerTest < ActionController::TestCase assert_match "Start Following", @response.body end - should_assign_to :user - should_assign_to :profile - should_respond_with :success - should_render_template :show - should_not_set_the_flash + should assign_to :user + should assign_to :profile + should respond_with :success + should render_template :show + should_not set_the_flash end @@ -107,12 +107,11 @@ class ProfilesControllerTest < ActionController::TestCase get :edit, {:id => profiles(:user).id}, {:user => profiles(:user).id} end - should_assign_to :user - should_assign_to :profile - should_respond_with :success - should_render_template :edit - should_render_a_form - should_not_set_the_flash + should assign_to :user + should assign_to :profile + should respond_with :success + should render_template :edit + should_not set_the_flash end context 'rendering an avatar' do @@ -212,7 +211,7 @@ class ProfilesControllerTest < ActionController::TestCase should "delete" do assert_difference 'User.count', -1 do assert users(:user) - delete :destroy, {:id=>users(:user).id}, {:user, users(:user).id} + delete :destroy, {:id => users(:user).id}, {:user => users(:user).id} assert_response 200 assert_nil User.find_by_id(users(:user).id) end diff --git a/test/test_helper.rb b/test/test_helper.rb index f5d969d..4584ed3 100644 --- a/test/test_helper.rb +++ b/test/test_helper.rb @@ -1,6 +1,7 @@ ENV["RAILS_ENV"] = "test" -require File.expand_path(File.dirname(__FILE__) + "/../config/environment") -require 'test_help' +require File.expand_path('../../config/environment', __FILE__) +require 'rails/test_help' + require 'ostruct' require 'mocha' @@ -8,7 +9,7 @@ # place any "already uploaded" files in a subdirectory within /test/ instead of overwriting production files. # FileColumn::ClassMethods::DEFAULT_OPTIONS[:root_path] = File.join(RAILS_ROOT, 'test', "public", 'system') -class Test::Unit::TestCase +class ActiveSupport::TestCase # Transactional fixtures accelerate your tests by wrapping each test method # in a transaction that's rolled back on completion. 
This ensures that the # test database remains unchanged so your fixtures don't have to be reloaded diff --git a/test/unit/blog_test.rb b/test/unit/blog_test.rb index 2f3cf4a..3f45321 100644 --- a/test/unit/blog_test.rb +++ b/test/unit/blog_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class BlogTest < ActiveSupport::TestCase diff --git a/test/unit/comment_test.rb b/test/unit/comment_test.rb index 1dfba20..2a682cb 100644 --- a/test/unit/comment_test.rb +++ b/test/unit/comment_test.rb @@ -1,10 +1,10 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class CommentTest < ActiveSupport::TestCase context 'A Comment instance' do - should_belong_to :commentable - should_belong_to :profile + should belong_to :commentable + should belong_to :profile end should "show me the wall between us" do diff --git a/test/unit/email_test.rb b/test/unit/email_test.rb index 9b42538..0700bdb 100644 --- a/test/unit/email_test.rb +++ b/test/unit/email_test.rb @@ -1,11 +1,9 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class EmailTest < ActiveSupport::TestCase FIXTURES_PATH = File.dirname(__FILE__) + '/../fixtures' CHARSET = "utf-8" - include ActionMailer::Quoting - def setup ActionMailer::Base.delivery_method = :test ActionMailer::Base.perform_deliveries = true diff --git a/test/unit/feed_item_test.rb b/test/unit/feed_item_test.rb index ae610aa..5087948 100644 --- a/test/unit/feed_item_test.rb +++ b/test/unit/feed_item_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class FeedItemTest < ActiveSupport::TestCase # Replace this with your real tests. diff --git a/test/unit/feed_test.rb b/test/unit/feed_test.rb index ea98c57..084b806 100644 --- a/test/unit/feed_test.rb +++ b/test/unit/feed_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class FeedTest < ActiveSupport::TestCase def test_truth diff --git a/test/unit/forum_post_test.rb b/test/unit/forum_post_test.rb index eca01d7..a6c5d32 100644 --- a/test/unit/forum_post_test.rb +++ b/test/unit/forum_post_test.rb @@ -1,17 +1,17 @@ -## # ForumPost test # Author: Les Freeman (lesliefreeman3@gmail.com) # Created on: 5/16/08 # Updated on: 5/16/08 # -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class ForumPostTest < ActiveSupport::TestCase - should_require_attributes :body, :owner_id + should validate_presence_of :body + should validate_presence_of :owner_id - should_belong_to :owner - should_belong_to :topic + should belong_to :owner + should belong_to :topic end diff --git a/test/unit/forum_test.rb b/test/unit/forum_test.rb index 2f0ed7c..0dae825 100644 --- a/test/unit/forum_test.rb +++ b/test/unit/forum_test.rb @@ -5,16 +5,17 @@ # Updated on: 5/16/08 # -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class ForumTest < ActiveSupport::TestCase - include ForumsTestHelper + #include ForumsTestHelper context "A Forum instance" do - should_require_attributes :name - should_have_many :topics, :posts + should validate_presence_of :name + should have_many :topics + should have_many :posts context ".build_topic" do should "return a ForumTopic" do diff --git a/test/unit/forum_topic_test.rb b/test/unit/forum_topic_test.rb index c0bf911..1f1f566 100644 --- a/test/unit/forum_topic_test.rb +++ b/test/unit/forum_topic_test.rb @@ -5,16 +5,19 @@ # Updated on: 5/16/08 # -require File.dirname(__FILE__) + '/../test_helper' +require 
'test_helper' class ForumTopicTest < ActiveSupport::TestCase - include ForumsTestHelper + # include ForumsTestHelper - should_require_attributes :title, :forum_id, :owner_id + should validate_presence_of :title + should validate_presence_of :forum_id + should validate_presence_of :owner_id - should_belong_to :forum, :owner - should_have_many :posts + should belong_to :forum + should belong_to :owner + should have_many :posts should "create a feed item" do assert_difference "FeedItem.count" do diff --git a/test/unit/friend_test.rb b/test/unit/friend_test.rb index adaf4c2..cc1519c 100644 --- a/test/unit/friend_test.rb +++ b/test/unit/friend_test.rb @@ -1,10 +1,10 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class FriendTest < ActiveSupport::TestCase # # context "A Friend instance" do - # should_belong_to :inviter - # should_belong_to :invited + # should belong_to :inviter + # should belong_to :invited # end # # diff --git a/test/unit/message_test.rb b/test/unit/message_test.rb index e7dbfbc..c71b43e 100644 --- a/test/unit/message_test.rb +++ b/test/unit/message_test.rb @@ -1,8 +1,8 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class MessageTest < ActiveSupport::TestCase -# should_require_attributes :subject, :body +# should validate_presence_of :subject, :body def test_associations diff --git a/test/unit/photo_test.rb b/test/unit/photo_test.rb index 0627dd7..d4c307d 100644 --- a/test/unit/photo_test.rb +++ b/test/unit/photo_test.rb @@ -1,10 +1,10 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class PhotoTest < ActiveSupport::TestCase context "A Photo instance" do - should_belong_to :profile - should_require_attributes :profile_id + should belong_to :profile + should validate_presence_of :profile_id end end \ No newline at end of file diff --git a/test/unit/profile_test.rb b/test/unit/profile_test.rb index 28bc29a..1a83b88 100644 --- a/test/unit/profile_test.rb +++ b/test/unit/profile_test.rb @@ -1,21 +1,33 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class ProfileTest < ActiveSupport::TestCase context 'A Profile instance' do - should_belong_to :user - should_have_many :friendships - should_have_many :friends, :through => :friendships - should_have_many :follower_friends - should_have_many :following_friends - should_have_many :followers, :through => :follower_friends - should_have_many :followings, :through => :following_friends - should_have_many :comments, :blogs - should_protect_attributes :is_active - - should_ensure_length_in_range :email, 3..100, :short_message => 'does not look like an email address.', :long_message => 'does not look like an email address.' - should_allow_values_for :email, 'a@x.com', 'de.veloper@example.com', :message => 'does not look like an email address.' - should_not_allow_values_for :email, 'example.com', '@example.com', 'developer@example', 'developer', :message => 'does not look like an email address.' + should belong_to :user + should have_many :friendships + should have_many :friends + should have_many :follower_friends + should have_many :following_friends + should have_many :followers + should have_many :followings + should have_many :comments + should have_many :blogs + should_not allow_mass_assignment_of :is_active + + should ensure_length_of(:email). + is_at_least(3). + is_at_most(100). + with_short_message('does not look like an email address.'). 
+ with_long_message('does not look like an email address.') + + should allow_value('a@x.com').for(:email).with_message('does not look like an email address.') + should allow_value('de.veloper@example.com').for(:email).with_message('does not look like an email address.') + + should_not allow_value('example.com').for(:email).with_message('does not look like an email address.') + should_not allow_value('@example.com').for(:email).with_message('does not look like an email address.') + should_not allow_value('developer@example').for(:email).with_message('does not look like an email address.') + should_not allow_value('developer').for(:email).with_message('does not look like an email address.') + end diff --git a/test/unit/system_mailer_test.rb b/test/unit/system_mailer_test.rb index 8ed5c09..9b88b3f 100644 --- a/test/unit/system_mailer_test.rb +++ b/test/unit/system_mailer_test.rb @@ -1,12 +1,10 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class AccountMailerTest < ActiveSupport::TestCase FIXTURES_PATH = File.dirname(__FILE__) + '/../fixtures' CHARSET = "utf-8" - include ActionMailer::Quoting - def setup ActionMailer::Base.delivery_method = :test ActionMailer::Base.perform_deliveries = true diff --git a/test/unit/user_comments_test.rb b/test/unit/user_comments_test.rb index 5776943..ad96095 100644 --- a/test/unit/user_comments_test.rb +++ b/test/unit/user_comments_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class UserCommentsTest < ActiveSupport::TestCase diff --git a/test/unit/user_test.rb b/test/unit/user_test.rb index 551b6fb..3deb034 100644 --- a/test/unit/user_test.rb +++ b/test/unit/user_test.rb @@ -1,4 +1,4 @@ -require File.dirname(__FILE__) + '/../test_helper' +require 'test_helper' class UserTest < ActiveSupport::TestCase @@ -7,12 +7,16 @@ class UserTest < ActiveSupport::TestCase context 'A User instance' do - should_require_attributes :login, :password, :password_confirmation - should_require_unique_attributes :login - - should_ensure_length_in_range :login, 3..40 - should_ensure_length_in_range :password, 4..40 - should_protect_attributes :is_admin, :can_send_messages + should validate_presence_of :login + should validate_presence_of :password + should validate_presence_of :password_confirmation + should validate_uniqueness_of :login + + should ensure_length_of(:login).is_at_least(3).is_at_most(40) + should ensure_length_of(:password).is_at_least(4).is_at_most(40) + + should_not allow_mass_assignment_of :is_admin + should_not allow_mass_assignment_of :can_send_messages should 'be able to change their password' do assert p = users(:user).crypted_password diff --git a/vendor/gems/colored-1.1/.specification b/vendor/gems/colored-1.1/.specification deleted file mode 100644 index 58f9895..0000000 --- a/vendor/gems/colored-1.1/.specification +++ /dev/null @@ -1,58 +0,0 @@ ---- !ruby/object:Gem::Specification -name: colored -version: !ruby/object:Gem::Version - version: "1.1" -platform: ruby -authors: -- Chris Wanstrath -autorequire: -bindir: bin -cert_chain: [] - -date: 2007-07-25 00:00:00 -04:00 -default_executable: -dependencies: [] - -description: Add some color to your life. 
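The test hunks above follow two mechanical patterns: require File.dirname(__FILE__) + '/../test_helper' becomes require 'test_helper', and shoulda's old underscored class macros become matchers passed to should / should_not. A condensed sketch of that mapping, assuming shoulda 3.0.0.beta2 as pinned in this upgrade; the test class and attribute names below are illustrative only, not taken from this application:

    require 'test_helper'

    class ExampleRecordTest < ActiveSupport::TestCase
      # should_belong_to :owner
      should belong_to(:owner)

      # should_require_attributes :body, :owner_id
      should validate_presence_of(:body)
      should validate_presence_of(:owner_id)

      # should_ensure_length_in_range :login, 3..40
      should ensure_length_of(:login).is_at_least(3).is_at_most(40)

      # should_protect_attributes :is_admin
      should_not allow_mass_assignment_of(:is_admin)
    end

Macros that took several names at once, such as should_have_many :topics, :posts, are split into one should have_many(...) line per association, matching the splits in the hunks above.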
-email: chris[at]ozmm[dot]org -executables: [] - -extensions: [] - -extra_rdoc_files: [] - -files: -- README -- lib/colored.rb -- test/colored_test.rb -has_rdoc: false -homepage: http://errtheblog.com/ -licenses: [] - -post_install_message: -rdoc_options: [] - -require_paths: -- bin -- lib -required_ruby_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -required_rubygems_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -requirements: [] - -rubyforge_project: -rubygems_version: 1.3.5 -signing_key: -specification_version: 2 -summary: Add some color to your life. -test_files: [] - diff --git a/vendor/gems/colored-1.1/README b/vendor/gems/colored-1.1/README deleted file mode 100644 index 213b1b5..0000000 --- a/vendor/gems/colored-1.1/README +++ /dev/null @@ -1,23 +0,0 @@ -cute. - - >> puts "this is red".red - - >> puts "this is red with a blue background (read: ugly)".red_on_blue - - >> puts "this is red with an underline".red.underline - - >> puts "this is really bold and really blue".bold.blue - - >> logger.debug "hey this is broken!".red_on_yellow # in rails - - >> puts Color.red "This is red" # but this part is mostly untested - -Windows users: - You will need the Win32 Console Ansi gem. Get it: - - $ sudo gem install win32console-1.0.0 --source require.errtheblog.com - - (We're trying to make it official. Hang in there.) - ->> chris[at]ozmm[dot]org -=> http://errtheblog.com/ diff --git a/vendor/gems/colored-1.1/lib/colored.rb b/vendor/gems/colored-1.1/lib/colored.rb deleted file mode 100644 index cefb8ba..0000000 --- a/vendor/gems/colored-1.1/lib/colored.rb +++ /dev/null @@ -1,91 +0,0 @@ -require 'Win32/Console/ANSI' if PLATFORM =~ /win32/ - -## -# cute. 
-# -# >> "this is red".red -# -# >> "this is red with a blue background (read: ugly)".red_on_blue -# -# >> "this is red with an underline".red.underline -# -# >> "this is really bold and really blue".bold.blue -# -# >> Colored.red "This is red" # but this part is mostly untested -module Colored - extend self - - COLORS = { - 'black' => 30, - 'red' => 31, - 'green' => 32, - 'yellow' => 33, - 'blue' => 34, - 'magenta' => 35, - 'cyan' => 36, - 'white' => 37 - } - - EXTRAS = { - 'clear' => 0, - 'bold' => 1, - 'underline' => 4, - 'reversed' => 7 - } - - COLORS.each do |color, value| - define_method(color) do - colorize(self, :foreground => color) - end - - define_method("on_#{color}") do - colorize(self, :background => color) - end - - COLORS.each do |highlight, value| - next if color == highlight - define_method("#{color}_on_#{highlight}") do - colorize(self, :foreground => color, :background => highlight) - end - end - end - - EXTRAS.each do |extra, value| - next if extra == 'clear' - define_method(extra) do - colorize(self, :extra => extra) - end - end - - define_method(:to_eol) do - tmp = sub(/^(\e\[[\[\e0-9;m]+m)/, "\\1\e[2K") - if tmp == self - return "\e[2K" << self - end - tmp - end - - def colorize(string, options = {}) - colored = [color(options[:foreground]), color("on_#{options[:background]}"), extra(options[:extra])].compact * '' - colored << string - colored << extra(:clear) - end - - def colors - @@colors ||= COLORS.keys.sort - end - - def extra(extra_name) - extra_name = extra_name.to_s - "\e[#{EXTRAS[extra_name]}m" if EXTRAS[extra_name] - end - - def color(color_name) - background = color_name.to_s =~ /on_/ - color_name = color_name.to_s.sub('on_', '') - return unless color_name && COLORS[color_name] - "\e[#{COLORS[color_name] + (background ? 10 : 0)}m" - end -end unless Object.const_defined? 
:Colored - -String.send(:include, Colored) diff --git a/vendor/gems/colored-1.1/test/colored_test.rb b/vendor/gems/colored-1.1/test/colored_test.rb deleted file mode 100644 index 3b77990..0000000 --- a/vendor/gems/colored-1.1/test/colored_test.rb +++ /dev/null @@ -1,44 +0,0 @@ -require 'test/unit' -require File.dirname(__FILE__) + '/../lib/colored' - -class TestColor < Test::Unit::TestCase - def test_one_color - assert_equal "\e[31mred\e[0m", "red".red - end - - def test_two_colors - assert_equal "\e[34m\e[31mblue\e[0m\e[0m", "blue".red.blue - end - - def test_background_color - assert_equal "\e[43mon yellow\e[0m", "on yellow".on_yellow - end - - def test_hot_color_on_color_action - assert_equal "\e[31m\e[44mred on blue\e[0m", "red on blue".red_on_blue - end - - def test_modifier - assert_equal "\e[1mway bold\e[0m", "way bold".bold - end - - def test_modifiers_stack - assert_equal "\e[4m\e[1munderlined bold\e[0m\e[0m", "underlined bold".bold.underline - end - - def test_modifiers_stack_with_colors - assert_equal "\e[36m\e[4m\e[1mcyan underlined bold\e[0m\e[0m\e[0m", "cyan underlined bold".bold.underline.cyan - end - - def test_eol - assert_equal "\e[2Knothing to see here really.", "nothing to see here really.".to_eol - end - - def test_eol_with_with_two_colors - assert_equal "\e[34m\e[31m\e[2Kblue\e[0m\e[0m", "blue".red.blue.to_eol - end - - def test_eol_with_modifiers_stack_with_colors - assert_equal "\e[36m\e[4m\e[1m\e[2Kcyan underlined bold\e[0m\e[0m\e[0m", "cyan underlined bold".bold.underline.cyan.to_eol - end -end diff --git a/vendor/gems/gcnovus-avatar-0.0.7/.specification b/vendor/gems/gcnovus-avatar-0.0.7/.specification deleted file mode 100644 index eec168c..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/.specification +++ /dev/null @@ -1,87 +0,0 @@ ---- !ruby/object:Gem::Specification -name: gcnovus-avatar -version: !ruby/object:Gem::Version - version: 0.0.7 -platform: ruby -authors: -- James Rosen -autorequire: -bindir: bin -cert_chain: [] - -date: 2008-07-17 00:00:00 -04:00 -default_executable: -dependencies: [] - -description: Adds support for rendering avatars from a variety of sources. 
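The deletions above remove the vendored copy of colored 1.1 from vendor/gems; the behaviour the gem provides is documented in the README and test file being deleted. A short usage sketch of that same API, assuming the colored gem is still installed by some other mechanism (for example Bundler) rather than from vendor/gems:

    require 'colored'

    puts "this is red".red                   # wraps the string in "\e[31m" ... "\e[0m"
    puts "red on blue".red_on_blue           # foreground and background combined
    puts "really bold and blue".bold.blue    # modifiers and colors stack
    puts "underlined".underline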
-email: james.a.rosen@gmail.com -executables: [] - -extensions: [] - -extra_rdoc_files: [] - -files: -- History.txt -- License.txt -- README.txt -- init.rb -- rails/init.rb -- lib/avatar -- lib/avatar/object_support.rb -- lib/avatar/source -- lib/avatar/source/abstract_source.rb -- lib/avatar/source/file_column_source.rb -- lib/avatar/source/gravatar_source.rb -- lib/avatar/source/nil_source.rb -- lib/avatar/source/paperclip_source.rb -- lib/avatar/source/source_chain.rb -- lib/avatar/source/static_url_source.rb -- lib/avatar/source/wrapper -- lib/avatar/source/wrapper/abstract_source_wrapper.rb -- lib/avatar/source/wrapper/rails_asset_source_wrapper.rb -- lib/avatar/source/wrapper/string_substitution_source_wrapper.rb -- lib/avatar/source/wrapper.rb -- lib/avatar/source.rb -- lib/avatar/version.rb -- lib/avatar/view -- lib/avatar/view/abstract_view_support.rb -- lib/avatar/view/action_view_support.rb -- lib/avatar/view.rb -- lib/avatar.rb -has_rdoc: true -homepage: http://github.com/gcnovus/avatar -licenses: [] - -post_install_message: -rdoc_options: -- --line-numbers -- --inline-source -- --title -- Grammar RDoc -- --charset -- utf-8 -require_paths: -- bin -- lib -required_ruby_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -required_rubygems_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -requirements: [] - -rubyforge_project: -rubygems_version: 1.3.5 -signing_key: -specification_version: 2 -summary: Multi-source avatar support -test_files: [] - diff --git a/vendor/gems/gcnovus-avatar-0.0.7/History.txt b/vendor/gems/gcnovus-avatar-0.0.7/History.txt deleted file mode 100644 index e74e319..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/History.txt +++ /dev/null @@ -1,63 +0,0 @@ -== 0.0.6 2008-06-09 -* 1 major fix: - * removed all task files (./task/*.rake) for Rails plugin compatability - -* 1 minor enhancement - * removed redundant Hoe dependencies, since the gem is only hosted on GitHub - -== 0.0.5 2008-04-01 -* 4 major enhancements: - * added PaperclipSource for use with the Paperclip plugin - * added AbstractSourceWrapper - * changed StringSubstitutionSource to a SourceWrapper - * changed RailsAssetSource to a SourceWrapper - -* 1 minor change: - * moved StringSubstitution out of separate module and into new StringSubstitutionSourceWrapper - -== 0.0.4 2008-03-28 -* 2 major fixes: - * GravatarSource downcases email to comply with Gravatar standards - * GravatarSource raises exception if passed a path instead of a URL for default - -* 1 major enhancement: - * added RailsAssetSource - -* 3 minor enhancements: - * added default_options to StringSubstitutionSource - * broke StringSubstitution out into a separate module - * added :any to GravatarSource::allowed_ratings - -== 0.0.3 2008-03-27 -* 2 minor fixes: - * GravatarSource now takes parameter :gravatar_field instead of :field for compatibility with other sources - * FileColumnSource now takes parameter :file_column_field instead of :field for compatibility with other sources - -* 2 minor enhancements: - * broke out options parsing in FileColumnSource to allow overriding - * added :gravatar_xxx versions to allowed options for GravatarSource - -== 0.0.2 2008-03-24 -* 1 major fix: - * SourceChain duplicates options before passing to elements - -* 1 minor fix: - * forced GravatarSource to coerce rating and size to proper values - -* 1 minor enhancement: - * broke out URL generation in 
GravatarSource to allow overriding - -== 0.0.1 2008-03-24 - -* 1 major enhancement: - * Initial release - * AbstractSource - * FileColumnSource - * GravatarSource - * NilSource - * SourceChain - * StaticUrlSource - * StringSubstitutionSource - * AbstractViewSupport - * ActionViewSupport - * tests diff --git a/vendor/gems/gcnovus-avatar-0.0.7/License.txt b/vendor/gems/gcnovus-avatar-0.0.7/License.txt deleted file mode 100644 index f172163..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/License.txt +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2008 Universal Presence, Inc. - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/README.txt b/vendor/gems/gcnovus-avatar-0.0.7/README.txt deleted file mode 100644 index ed75232..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/README.txt +++ /dev/null @@ -1,65 +0,0 @@ -= avatar - -* http://avatar.rubyforge.org - -== DESCRIPTION: -Avatar is a collection of Ruby utilities for displaying avatars. - -== FEATURES/PROBLEMS: - -Avatar currently supports the following sources: -* Gravatar (see http://www.gravatar.com) -* a constant URL (e.g. http://mysite.com/images/default_icon.png) -* parameterized URLs (e.g. http://mysite.com/images/famfamfam/user_#{color}.png) -* file_column (see http://www.kanthak.net/opensource/file_column/) -* chains of sources (e.g. file_column if exists; otherwise default constant URL) - -Avatar currently supports the following views: -* ActionView (Rails), through avatar_tag -* AbstractView (any framework), through avatar_url_for - -== SYNOPSIS: - -in RAILS_ROOT/app/helpers/people_helper.rb -require 'avatar/view/action_view_support' -class PeopleHelper - include Avatar::View::ActionViewSupport -end - -in RAILS_ROOT/app/views/people/show.html.erb: -<%= avatar_tag(@current_user, :size => 40) %> - -== REQUIREMENTS: - -* none for basic functionality -* will integrate with ActionView -* will integrate with the file_column Rails plugin - -== INSTALL: - -* sudo gem install avatar - -== LICENSE: - -(The MIT License) - -Copyright (c) 2008 Universal Presenece, Inc. 
- -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/init.rb b/vendor/gems/gcnovus-avatar-0.0.7/init.rb deleted file mode 100644 index 0ecedc6..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/init.rb +++ /dev/null @@ -1,5 +0,0 @@ -# Old Rails style init: -if Object.const_defined?(:RAILS_ENV) - # redirect to the new style - require File.expand_path(File.join(File.dirname(__FILE__), 'lib', 'rails', 'init')) -end diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar.rb deleted file mode 100644 index 07f4077..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar.rb +++ /dev/null @@ -1,37 +0,0 @@ -$:.unshift File.expand_path(File.dirname(__FILE__)) -require 'avatar/source/abstract_source' -require 'avatar/source/gravatar_source' - -# Helpers for displaying avatars. 
-# Usage in Rails: -# # in app/helpers/ProfileHelper.rb: -# include Avatar::ActionView::Support -# -# # in app/views/profiles/show.html.erb: -# <%= avatar_for @person => current_person %> -# -# By default, Avatar::source is a GravatarSource -module Avatar - - @@source = Avatar::Source::GravatarSource.new - @@default_avatar_options = {} - - def self.source - @@source.dup - end - - def self.source=(source) - raise ArgumentError.new("#{source} is not an Avatar::Source::AbstractSource") unless source.kind_of?(Avatar::Source::AbstractSource) - @@source = source - end - - def self.default_avatar_options - @@default_avatar_options.dup - end - - def self.default_avatar_options=(options) - raise ArgumentError.new("#{options} is not a Hash") unless options.kind_of?(Hash) - @@options = options - end - -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/object_support.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/object_support.rb deleted file mode 100644 index e3e542f..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/object_support.rb +++ /dev/null @@ -1,8 +0,0 @@ -unless Object.method_defined?(:returning) - Object.class_eval do - def returning(value) - yield(value) - value - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source.rb deleted file mode 100644 index 732314c..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source.rb +++ /dev/null @@ -1,4 +0,0 @@ -module Avatar # :nodoc: - module Source # :nodoc: - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/abstract_source.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/abstract_source.rb deleted file mode 100644 index d4156a1..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/abstract_source.rb +++ /dev/null @@ -1,15 +0,0 @@ -module Avatar # :nodoc: - module Source # :nodoc: - # To be included by classes that generate avatar URLs from profiles. - module AbstractSource - - # Return an avatar URL for the person, or nil if this source cannot generate one. - # Including classes must override this method. In general, implementations - # should return nil if +person+ is nil. - def avatar_url_for(person, options = {}) - raise NotImplementedError.new('including class must define avatar_url_for(person, options = {})') - end - - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/file_column_source.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/file_column_source.rb deleted file mode 100644 index 44deed6..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/file_column_source.rb +++ /dev/null @@ -1,39 +0,0 @@ -require 'avatar/source/abstract_source' -require 'file_column_helper' - -module Avatar # :nodoc: - module Source # :nodoc: - # For use with the FileColumn Rails plugin. - class FileColumnSource - include AbstractSource - include FileColumnHelper - - attr_accessor :default_field - - def initialize(default_field = :avatar) - raise ArgumentError.new('default_field cannot be nil') if default_field.nil? - @default_field = default_field - end - - # Returns nil if person is nil; otherwise, returns the (possibly nil) result of - # url_for_image_column, passing in all of +options+ except :file_column_field. 
- # Options: - # * :file_column_field - the image file column within +person+; by default, :avatar - # * :file_column_version - one of the versions of the file_column; no default - # If :file_column_version is not specified, all other options are passed to - # url_for_image_column as +options+ (see FileColumnHelper) - def avatar_url_for(person, options = {}) - return nil if person.nil? - options = parse_options(person, options) - field = options.delete(:file_column_field) || default_field - return nil if field.nil? || person.send(field).nil? - options = options[:file_column_version] || options - url_for_image_column(person, field, options) - end - - def parse_options(person, options) - options - end - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/gravatar_source.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/gravatar_source.rb deleted file mode 100644 index 706af66..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/gravatar_source.rb +++ /dev/null @@ -1,118 +0,0 @@ -require 'avatar/object_support' -require 'avatar/source/abstract_source' -require 'avatar/source/static_url_source' -require 'avatar/source/nil_source' -require 'digest/md5' - -module Avatar # :nodoc: - module Source # :nodoc: - # NOTE: since Gravatar always returns a URL (never a 404), instances of this - # class should only be placed at the end of a SourceChain. - # (see link:classes/Avatar/Source/SourceChain.html) - # Alternatively, use default_source = ... to generate a site-wide - # default to be passed to Gravatar. (In fact, since default_source - # is an instance of Avatar::Source::AbstractSource, it can generate a different - # default for each person.) - class GravatarSource - include AbstractSource - - attr_accessor :default_field - attr_reader :default_source - - # 'http://www.gravatar.com/avatar/' - def self.base_url - 'http://www.gravatar.com/avatar/' - end - - # ['G', 'PG', 'R', 'X', 'any'] - def self.allowed_ratings - ['G', 'PG', 'R', 'X', 'any'] - end - - # Arguments: - # * +default_source+: a Source to generate defaults to be passed to Gravatar; optional; default: nil (a NilSource). - # * +default_field+: the field within each +person+ passed to avatar_url_for in which to look for an email address - def initialize(default_source = nil, default_field = :email) - self.default_source = default_source #not @default_source = ... b/c want to use the setter function below - @default_field = default_field - raise "There's a bug in the code" if @default_source.nil? - end - - # Generates a Gravatar URL. Returns nil if person is nil. - # Options: - # * :gravatar_field (Symbol) - the field to call from person. By default, :email. - # * :gravatar_default_url (String) - override the default generated by default_source. - # * :gravatar_size or size or :s - the size in pixels of the avatar to render. - # * :gravatar_rating or rating or :r - the maximum rating; one of ['G', 'PG', 'R', 'X'] - def avatar_url_for(person, options = {}) - return nil if person.nil? - options = parse_options(person, options) - field = options.delete(:gravatar_field) - raise ArgumentError.new('No field specified; either specify a default field or pass in a value for :gravatar_field (probably :email)') unless field - - email = person.send(field) - return nil if email.nil? || email.to_s.blank? 
- email = email.to_s.downcase - - returning(self.class.base_url) do |url| - url << Digest::MD5::hexdigest(email).strip - # default must be last or the other options will be parameters to that URL, not the Gravatar one - [:size, :rating, :default].each do |k| - v = options[k] - next if v.nil? - url << (url.include?('?') ? '&' : '?') - url << "#{k}=#{v}" - end - end - end - - # Returns a Hash containing - # * :field - value of :gravatar_field; defaults to self.default_field - # * :default - value of :gravatar_default_url; defaults to self.default_avatar_url_for(+person+, +options+) - # * :size - value of :gravatar_size or :size or :s passed through only if a number - # * :rating - value of :gravatar_rating or :rating or :r passed through only if one of self.class.allowed_ratings - def parse_options(person, options) - returning({}) do |result| - result[:gravatar_field] = options[:gravatar_field] || default_field - - default = options[:gravatar_default_url] || default_avatar_url_for(person, options) - raise "default must be a fully-qualified URL with port and host" unless self.class.valid_default_url?(default) - result[:default] = default - - size = (options[:gravatar_size] || options[:size] || options[:s] || '').to_s.to_i - result[:size] = size if size > 0 - - rating = options[:gravatar_rating] || options[:rating] || options[:r] - result[:rating] = rating if rating and self.class.allowed_ratings.include?(rating.to_s) - end - end - - # Set the default source for all people. - # If +default+ is a String, it will be converted to an instance of Avatar::Source::StaticUrlSource. - # If +default+ is nil, sets the default to a NilSource. - def default_source=(default) - case default - when String - @default_source = StaticUrlSource.new(default) - when AbstractSource - @default_source = default - when NilClass - @default_source = NilSource.new - else - raise ArgumentError.new("#{default} must be either a String or an instance of #{AbstractSource}") - end - end - - def self.valid_default_url?(url) - url.nil? || url =~ /^http[s]?\:/ - end - - private - - def default_avatar_url_for(person, options) - @default_source.avatar_url_for(person, options) - end - - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/nil_source.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/nil_source.rb deleted file mode 100644 index 0ce4c23..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/nil_source.rb +++ /dev/null @@ -1,17 +0,0 @@ -require 'avatar/source/abstract_source' - -module Avatar # :nodoc: - module Source # :nodoc: - # A really dumb implementation that never returns a URL. - # Can be helpful for testing. Also used in GravatarSource::default_source. - class NilSource - include AbstractSource - - # Always returns nil. - def avatar_url_for(person, options = {}) - nil - end - - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/paperclip_source.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/paperclip_source.rb deleted file mode 100644 index 5bb0487..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/paperclip_source.rb +++ /dev/null @@ -1,52 +0,0 @@ -require 'avatar/object_support' -require 'avatar/source/abstract_source' - -module Avatar # :nodoc: - module Source # :nodoc: - # Source for a file attachment using Paperclip. 
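The GravatarSource deleted above ultimately builds a URL from the MD5 digest of a lower-cased email address plus optional size, rating and default query parameters. A condensed, standard-library-only sketch of that computation; gravatar_url is an illustrative helper name, not something defined by this codebase:

    require 'digest/md5'

    # Condensed version of what GravatarSource#avatar_url_for produced.
    def gravatar_url(email, options = {})
      url = 'http://www.gravatar.com/avatar/'
      url += Digest::MD5.hexdigest(email.to_s.downcase)
      [:size, :rating, :default].each do |key|
        value = options[key]
        next if value.nil?
        url += (url.include?('?') ? '&' : '?') + "#{key}=#{value}"
      end
      url
    end

    gravatar_url('de.veloper@example.com', :size => 40)
    # => "http://www.gravatar.com/avatar/<md5-of-address>?size=40"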
- # See http://giantrobots.thoughtbot.com/2008/3/18/for-attaching-files-use-paperclip - class PaperclipSource - include AbstractSource - - attr_accessor :default_field, :default_style - - # Create a new FileColumnSource with a +default_field+ (by default, :avatar), - # and a +default_style+ (by default, nil) - def initialize(default_field = :avatar, default_style = nil) - @default_field = default_field - @default_style = default_style - end - - # Returns a URL for a has_attached_file attribute, via - # person..url, passing in :paperclip_style if present. - # Returns nil under any of the following circumstances: - # * person is nil - # * person. is nil - # * person.? returns false - # * person..styles does not include :paperclip_style (if present) - # Options: - # * :paperclip_field - the has_attached_file column within +person+; by default, self.default_field - # * :paperclip_style - one of the styles of the has_attached_file; by default, self.default_style - def avatar_url_for(person, options = {}) - return nil if person.nil? - options = parse_options(person, options) - field = options[:paperclip_field] - return nil if field.nil? - return nil unless person.send("#{field}?".to_sym) - avatar = person.send(field) - style = options[:paperclip_style] - return nil if style && !avatar.styles.keys.include?(style) - avatar.url(style) - end - - # Copies :paperclip_field and :paperclip_style from +options+, adding defaults if necessary. - def parse_options(person, options) - returning({}) do |result| - result[:paperclip_field] = options[:paperclip_field] || default_field - result[:paperclip_style] = options[:paperclip_style] || default_style - end - end - - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/source_chain.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/source_chain.rb deleted file mode 100644 index 27ce6e1..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/source_chain.rb +++ /dev/null @@ -1,52 +0,0 @@ -require 'avatar/source/abstract_source' - -module Avatar # :nodoc: - module Source # :nodoc: - class SourceChain - include AbstractSource - - # :nodoc: - def initialize - clear! - end - - # Clear the chain - def clear! - @chain = [] - end - - # Retrieve the +n+th Source. - def [](n) - @chain[n] - end - - # Add a source to the chain. +source+ must be an instance of (a subclass of) Avatar::Source::AbstractSource. - def add_source(source) - raise ArgumentError.new("#{source} is not an Avatar::Source::AbstractSource") unless source.kind_of?(Avatar::Source::AbstractSource) - @chain << source - end - - # Alias for add_source - def <<(source) - add_source(source) - end - - # True unless a Source has been added. - def empty? - @chain.empty? - end - - # Iterate through the chain and return the first URL returned. - # Any error raised will propagate. Duplicates +options+ before - # passing so each Source receives the same arguments. - def avatar_url_for(person, options = {}) - @chain.each do |source| - result = source.avatar_url_for(person, options.dup) - return result unless result.nil? 
- end - return nil - end - - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/static_url_source.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/static_url_source.rb deleted file mode 100644 index 7d56b4c..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/static_url_source.rb +++ /dev/null @@ -1,25 +0,0 @@ -require 'avatar/source/abstract_source' - -module Avatar # :nodoc: - module Source # :nodoc: - # Source representing a constant URL. - # Good as a default or last-resort source. - class StaticUrlSource - include AbstractSource - - attr_accessor :url - - # Create a new source with static url +url+. - def initialize(url) - raise ArgumentError.new("URL cannot be nil") if url.nil? - @url = url.to_s - end - - # Returns nil if person is nil; the static url otherwise. - def avatar_url_for(person, options = {}) - person.nil? ? nil : url - end - - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper.rb deleted file mode 100644 index 3628992..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper.rb +++ /dev/null @@ -1,6 +0,0 @@ -module Avatar - module Source - module Wrapper - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper/abstract_source_wrapper.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper/abstract_source_wrapper.rb deleted file mode 100644 index 771e55c..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper/abstract_source_wrapper.rb +++ /dev/null @@ -1,33 +0,0 @@ -require 'avatar/source/abstract_source' - -module Avatar - module Source - module Wrapper - class AbstractSourceWrapper - include Avatar::Source::AbstractSource - - attr_reader :underlying_source - - # Create a new Wrapper - def initialize(underlying_source) - raise ArgumentError.new("underlying_source must be Source") unless underlying_source.kind_of?(Avatar::Source::AbstractSource) - @underlying_source = underlying_source - end - - # Return nil if the underlying_source does; otherwise, calls wrap, - # passing the returned URL and the person and options passed. - def avatar_url_for(person, options = {}) - url = @underlying_source.avatar_url_for(person, options) - url.nil? ? nil : wrap(url, person, options) - end - - # Apply appropriate wrapping of the +url+ returned by underlying_source. - # Will never be called with a nil +url+. - def wrap(url, person, options) - raise NotImplementedError('subclasses must override wrap(url, person, options)') - end - - end - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper/rails_asset_source_wrapper.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper/rails_asset_source_wrapper.rb deleted file mode 100644 index be43608..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper/rails_asset_source_wrapper.rb +++ /dev/null @@ -1,36 +0,0 @@ -require 'avatar/source/wrapper/abstract_source_wrapper' -require 'action_view/helpers/asset_tag_helper' - -module Avatar # :nodoc: - module Source # :nodoc: - module Wrapper - # Wraps a Source using Rails' AssetTagHelper#image_path, - # which can turn path URLs (e.g. '/images/my_avatar.png') - # into absolute URLs( e.g. 'http://assets.mysite.com/images/my_avatar.png'). 
- class RailsAssetSourceWrapper < AbstractSourceWrapper - - attr_reader :url_helper - - private :url_helper - - def initialize(source) - super - @url_helper = Object.new - @url_helper.extend(ActionView::Helpers::AssetTagHelper) - end - - # Passes +url+ to AssetTagHelper#image_path. Raises - # an error if it cannot generate a fully-qualified URI. Try - # setting ActionController::Base.asset_host to - # avoid this error. - def wrap(url, person, options = {}) - # url will never be nil b/c of guarantee in AbstractSourceWrapper - result = url_helper.image_path(url) - raise "could not generate protocol and host for #{url}. Have you set ActionController::Base.asset_host?" unless result =~ /^http[s]?\:\/\// - result - end - - end - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper/string_substitution_source_wrapper.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper/string_substitution_source_wrapper.rb deleted file mode 100644 index 7cd51ad..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/source/wrapper/string_substitution_source_wrapper.rb +++ /dev/null @@ -1,55 +0,0 @@ -require 'avatar/source/wrapper/abstract_source_wrapper' -require 'action_view/helpers/asset_tag_helper' - -module Avatar # :nodoc: - module Source # :nodoc: - module Wrapper - # Wraps a Source using Rails' AssetTagHelper#image_path, - # which can turn path URLs (e.g. '/images/my_avatar.png') - # into absolute URLs( e.g. 'http://assets.mysite.com/images/my_avatar.png'). - class StringSubstitutionSourceWrapper < AbstractSourceWrapper - - attr_accessor :default_options - - def initialize(source, default_options = {}) - super(source) - self.default_options = default_options || {} - end - - # Passes +url+ to AssetTagHelper#image_path. Raises - # an error if it cannot generate a fully-qualified URI. Try - # setting ActionController::Base.asset_host to - # avoid this error. - def wrap(url, person, options = {}) - # url will never be nil b/c of guarantee in AbstractSourceWrapper - result = apply_substitution(url, self.default_options.merge(options)) - substitution_needed?(result) ? nil : result - end - - def default_options=(opts) - @default_options = opts || {} - end - - # For each key in +options+ replaces '#{key}' in +string+ with the - # corresponding value in +options+. - # +string+ should - # be of the form '...#{variable_a}...#{variable_b}...'. Note the - # single quotes. Double quotes will cause the variables to be - # substituted before this method is run, which is almost - # certainly not what you want. 
- def apply_substitution(string, options) - returning(string.dup) do |result| - options.each do |k,v| - result.gsub!(Regexp.new('#\{' + "#{k}" + '\}'), "#{v}") - end - end - end - - def substitution_needed?(string) - string =~ /#\{.*\}/ - end - - end - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/version.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/version.rb deleted file mode 100644 index 9571f72..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/version.rb +++ /dev/null @@ -1,9 +0,0 @@ -module Avatar #:nodoc: - module VERSION #:nodoc: - MAJOR = 0 - MINOR = 0 - TINY = 7 - - STRING = [MAJOR, MINOR, TINY].join('.') - end -end diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/view.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/view.rb deleted file mode 100644 index 1cbe002..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/view.rb +++ /dev/null @@ -1,4 +0,0 @@ -module Avatar # :nodoc: - module View # :nodoc: - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/view/abstract_view_support.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/view/abstract_view_support.rb deleted file mode 100644 index c76dcec..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/view/abstract_view_support.rb +++ /dev/null @@ -1,15 +0,0 @@ -require 'avatar' - -module Avatar # :nodoc: - module View # :nodoc: - module AbstractViewSupport - - def avatar_url_for(person, options = {}) - default_options = Avatar::default_avatar_options || {} - options = default_options.merge(options) - Avatar::source.avatar_url_for(person, options) - end - - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/view/action_view_support.rb b/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/view/action_view_support.rb deleted file mode 100644 index 8008212..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/lib/avatar/view/action_view_support.rb +++ /dev/null @@ -1,23 +0,0 @@ -require 'avatar/view/abstract_view_support' - -module Avatar # :nodoc: - module View # :nodoc: - module ActionViewSupport - include Avatar::View::AbstractViewSupport - - # Generates an image_tag for the URL or path generated by - # avatar_url_for(+person+, +avatar_options+). Passes additional - # tag options from +html_options+. Returns an empty string - # if +person+ is nil. - # - # Because this method uses image_tag, Sources - # can generate paths instead of absolute URLs. - def avatar_tag(person, avatar_options={}, html_options={}) - return "" if person.nil? 
- url = avatar_url_for(person, avatar_options) - image_tag(url, html_options) - end - - end - end -end \ No newline at end of file diff --git a/vendor/gems/gcnovus-avatar-0.0.7/rails/init.rb b/vendor/gems/gcnovus-avatar-0.0.7/rails/init.rb deleted file mode 100644 index 995ccdd..0000000 --- a/vendor/gems/gcnovus-avatar-0.0.7/rails/init.rb +++ /dev/null @@ -1 +0,0 @@ -# File so gem can act like Rails plugin \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/.specification b/vendor/gems/mocha-0.9.3/.specification deleted file mode 100644 index 82aab0f..0000000 --- a/vendor/gems/mocha-0.9.3/.specification +++ /dev/null @@ -1,231 +0,0 @@ ---- !ruby/object:Gem::Specification -name: mocha -version: !ruby/object:Gem::Version - version: 0.9.3 -platform: ruby -authors: -- James Mead -autorequire: -bindir: bin -cert_chain: [] - -date: 2008-11-26 00:00:00 -05:00 -default_executable: -dependencies: -- !ruby/object:Gem::Dependency - name: rake - type: :runtime - version_requirement: - version_requirements: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -description: Mocking and stubbing library with JMock/SchMock syntax, which allows mocking and stubbing of methods on real (non-mock) classes. -email: mocha-developer@rubyforge.org -executables: [] - -extensions: [] - -extra_rdoc_files: -- README -- COPYING -files: -- lib/mocha/any_instance_method.rb -- lib/mocha/backtrace_filter.rb -- lib/mocha/cardinality.rb -- lib/mocha/central.rb -- lib/mocha/change_state_side_effect.rb -- lib/mocha/class_method.rb -- lib/mocha/configuration.rb -- lib/mocha/deprecation.rb -- lib/mocha/exception_raiser.rb -- lib/mocha/expectation.rb -- lib/mocha/expectation_error.rb -- lib/mocha/expectation_list.rb -- lib/mocha/in_state_ordering_constraint.rb -- lib/mocha/inspect.rb -- lib/mocha/instance_method.rb -- lib/mocha/is_a.rb -- lib/mocha/logger.rb -- lib/mocha/metaclass.rb -- lib/mocha/method_matcher.rb -- lib/mocha/mini_test_adapter.rb -- lib/mocha/mock.rb -- lib/mocha/mockery.rb -- lib/mocha/module_method.rb -- lib/mocha/multiple_yields.rb -- lib/mocha/names.rb -- lib/mocha/no_yields.rb -- lib/mocha/object.rb -- lib/mocha/parameter_matchers/all_of.rb -- lib/mocha/parameter_matchers/any_of.rb -- lib/mocha/parameter_matchers/any_parameters.rb -- lib/mocha/parameter_matchers/anything.rb -- lib/mocha/parameter_matchers/base.rb -- lib/mocha/parameter_matchers/equals.rb -- lib/mocha/parameter_matchers/has_entries.rb -- lib/mocha/parameter_matchers/has_entry.rb -- lib/mocha/parameter_matchers/has_key.rb -- lib/mocha/parameter_matchers/has_value.rb -- lib/mocha/parameter_matchers/includes.rb -- lib/mocha/parameter_matchers/instance_of.rb -- lib/mocha/parameter_matchers/is_a.rb -- lib/mocha/parameter_matchers/kind_of.rb -- lib/mocha/parameter_matchers/not.rb -- lib/mocha/parameter_matchers/object.rb -- lib/mocha/parameter_matchers/optionally.rb -- lib/mocha/parameter_matchers/regexp_matches.rb -- lib/mocha/parameter_matchers/responds_with.rb -- lib/mocha/parameter_matchers/yaml_equivalent.rb -- lib/mocha/parameter_matchers.rb -- lib/mocha/parameters_matcher.rb -- lib/mocha/pretty_parameters.rb -- lib/mocha/return_values.rb -- lib/mocha/sequence.rb -- lib/mocha/single_return_value.rb -- lib/mocha/single_yield.rb -- lib/mocha/standalone.rb -- lib/mocha/state_machine.rb -- lib/mocha/stubbing_error.rb -- lib/mocha/test_case_adapter.rb -- lib/mocha/unexpected_invocation.rb -- lib/mocha/yield_parameters.rb -- lib/mocha.rb -- lib/mocha_standalone.rb -- 
lib/stubba.rb -- test/acceptance/acceptance_test_helper.rb -- test/acceptance/bug_18914_test.rb -- test/acceptance/bug_21465_test.rb -- test/acceptance/bug_21563_test.rb -- test/acceptance/expected_invocation_count_test.rb -- test/acceptance/failure_messages_test.rb -- test/acceptance/minitest_test.rb -- test/acceptance/mocha_example_test.rb -- test/acceptance/mocha_test_result_test.rb -- test/acceptance/mock_test.rb -- test/acceptance/mock_with_initializer_block_test.rb -- test/acceptance/mocked_methods_dispatch_test.rb -- test/acceptance/optional_parameters_test.rb -- test/acceptance/parameter_matcher_test.rb -- test/acceptance/partial_mocks_test.rb -- test/acceptance/return_value_test.rb -- test/acceptance/sequence_test.rb -- test/acceptance/standalone_test.rb -- test/acceptance/states_test.rb -- test/acceptance/stub_any_instance_method_test.rb -- test/acceptance/stub_class_method_test.rb -- test/acceptance/stub_everything_test.rb -- test/acceptance/stub_instance_method_test.rb -- test/acceptance/stub_module_method_test.rb -- test/acceptance/stub_test.rb -- test/acceptance/stubba_example_test.rb -- test/acceptance/stubba_test.rb -- test/acceptance/stubba_test_result_test.rb -- test/acceptance/stubbing_error_backtrace_test.rb -- test/acceptance/stubbing_method_unnecessarily_test.rb -- test/acceptance/stubbing_non_existent_any_instance_method_test.rb -- test/acceptance/stubbing_non_existent_class_method_test.rb -- test/acceptance/stubbing_non_existent_instance_method_test.rb -- test/acceptance/stubbing_non_public_any_instance_method_test.rb -- test/acceptance/stubbing_non_public_class_method_test.rb -- test/acceptance/stubbing_non_public_instance_method_test.rb -- test/acceptance/stubbing_on_non_mock_object_test.rb -- test/active_record_test_case.rb -- test/deprecation_disabler.rb -- test/execution_point.rb -- test/method_definer.rb -- test/simple_counter.rb -- test/test_helper.rb -- test/test_runner.rb -- test/unit/any_instance_method_test.rb -- test/unit/array_inspect_test.rb -- test/unit/backtrace_filter_test.rb -- test/unit/cardinality_test.rb -- test/unit/central_test.rb -- test/unit/change_state_side_effect_test.rb -- test/unit/class_method_test.rb -- test/unit/date_time_inspect_test.rb -- test/unit/exception_raiser_test.rb -- test/unit/expectation_list_test.rb -- test/unit/expectation_test.rb -- test/unit/hash_inspect_test.rb -- test/unit/in_state_ordering_constraint_test.rb -- test/unit/metaclass_test.rb -- test/unit/method_matcher_test.rb -- test/unit/mock_test.rb -- test/unit/mockery_test.rb -- test/unit/multiple_yields_test.rb -- test/unit/no_yields_test.rb -- test/unit/object_inspect_test.rb -- test/unit/object_test.rb -- test/unit/parameter_matchers/all_of_test.rb -- test/unit/parameter_matchers/any_of_test.rb -- test/unit/parameter_matchers/anything_test.rb -- test/unit/parameter_matchers/equals_test.rb -- test/unit/parameter_matchers/has_entries_test.rb -- test/unit/parameter_matchers/has_entry_test.rb -- test/unit/parameter_matchers/has_key_test.rb -- test/unit/parameter_matchers/has_value_test.rb -- test/unit/parameter_matchers/includes_test.rb -- test/unit/parameter_matchers/instance_of_test.rb -- test/unit/parameter_matchers/is_a_test.rb -- test/unit/parameter_matchers/kind_of_test.rb -- test/unit/parameter_matchers/not_test.rb -- test/unit/parameter_matchers/regexp_matches_test.rb -- test/unit/parameter_matchers/responds_with_test.rb -- test/unit/parameter_matchers/stub_matcher.rb -- test/unit/parameter_matchers/yaml_equivalent_test.rb -- 
test/unit/parameters_matcher_test.rb -- test/unit/return_values_test.rb -- test/unit/sequence_test.rb -- test/unit/single_return_value_test.rb -- test/unit/single_yield_test.rb -- test/unit/state_machine_test.rb -- test/unit/string_inspect_test.rb -- test/unit/yield_parameters_test.rb -- examples/misc.rb -- examples/mocha.rb -- examples/stubba.rb -- COPYING -- MIT-LICENSE -- Rakefile -- README -- RELEASE -has_rdoc: true -homepage: http://mocha.rubyforge.org -licenses: [] - -post_install_message: -rdoc_options: -- --title -- Mocha -- --main -- README -- --line-numbers -require_paths: -- bin -- bin -- lib -required_ruby_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -required_rubygems_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -requirements: [] - -rubyforge_project: mocha -rubygems_version: 1.3.5 -signing_key: -specification_version: 2 -summary: Mocking and stubbing library -test_files: [] - diff --git a/vendor/gems/mocha-0.9.3/COPYING b/vendor/gems/mocha-0.9.3/COPYING deleted file mode 100644 index 8f74d71..0000000 --- a/vendor/gems/mocha-0.9.3/COPYING +++ /dev/null @@ -1,3 +0,0 @@ -Copyright Revieworld Ltd. 2006 - -You may use, copy and redistribute this library under the same terms as Ruby itself (see http://www.ruby-lang.org/en/LICENSE.txt) or under the MIT license (see MIT-LICENSE file). diff --git a/vendor/gems/mocha-0.9.3/MIT-LICENSE b/vendor/gems/mocha-0.9.3/MIT-LICENSE deleted file mode 100644 index fa4efe7..0000000 --- a/vendor/gems/mocha-0.9.3/MIT-LICENSE +++ /dev/null @@ -1,7 +0,0 @@ -Copyright (c) 2006 Revieworld Ltd. - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/README b/vendor/gems/mocha-0.9.3/README deleted file mode 100644 index 03171f3..0000000 --- a/vendor/gems/mocha-0.9.3/README +++ /dev/null @@ -1,35 +0,0 @@ -= Mocha - -Mocha is a library for mocking and stubbing using a syntax like that of JMock[http://www.jmock.org]. - -It can be used with many testing frameworks e.g. Test::Unit[http://www.ruby-doc.org/core/classes/Test/Unit.html], RSpec[http://rspec.info/], test/spec[http://chneukirchen.org/repos/testspec/README], expectations[http://expectations.rubyforge.org/], Dust[http://dust.rubyforge.org/], MiniTest[http://rubyforge.org/projects/bfts] and even JtestR[http://jtestr.codehaus.org/]. 
- -Mocha provides a unified, simple and readable syntax for both traditional mocking and partial mocking. - -Mocha was harvested from projects at Reevoo[http://www.reevoo.com] by me (James[http://blog.floehopper.org]) and my colleagues Ben[http://www.techbelly.com/], Chris[http://blog.seagul.co.uk] and Paul[http://po-ru.com]. - -== Download and Installation - -Install the gem with the following command... - - $ gem install mocha - -Or install the Rails[http://www.rubyonrails.org] plugin... - - $ script/plugin install svn://rubyforge.org/var/svn/mocha/trunk - -Or download Mocha from here - http://rubyforge.org/projects/mocha - -== Examples - -* Quick Start - {Usage Examples}[link:examples/misc.html] -* Traditional mocking - {Star Trek Example}[link:examples/mocha.html] -* Setting expectations on real classes - {Order Example}[link:examples/stubba.html] -* More examples on {Floehopper's Blog}[http://blog.floehopper.org] -* {Mailing List Archives}[http://groups.google.com/group/mocha-developer] - -== License - -Copyright Revieworld Ltd. 2006 - -You may use, copy and redistribute this library under the same terms as {Ruby itself}[http://www.ruby-lang.org/en/LICENSE.txt] or under the {MIT license}[http://mocha.rubyforge.org/files/MIT-LICENSE.html]. \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/RELEASE b/vendor/gems/mocha-0.9.3/RELEASE deleted file mode 100644 index bba8d8e..0000000 --- a/vendor/gems/mocha-0.9.3/RELEASE +++ /dev/null @@ -1,257 +0,0 @@ -= 0.9.3 (8219bb2d2881c8529c93fc21e97a11d01203c759) -* Added support for MiniTest thanks to sprsquish. -* Fixed a possible bug with some of the non-default Configuration options relating to the argument to Object#respond_to? -* As per Jay Fields recommendations [1] and with further impetus from a talk at Ruby Manor, any methods added to core classes are now added by including a module. This means that Mocha is a better citizen of the Ruby world and it's behaviour is more easily extended. [1] http://blog.jayfields.com/2008/07/ruby-underuse-of-modules.html & http://blog.jayfields.com/2008/07/ruby-redefine-method-behavior.html -* Removed deprecated gem autorequire. - -= 0.9.2 (r355) -* Improved documentation to address [#22530] 'Mock methods with multiple return values not possible?' -* respond_with parameter matcher was not available in tests. -* Patch [#22630] Fix for a bug in running Rails tests with Ruby 1.8.7. Array#flatten was being called which in turn was checking whether each element responded to #to_ary. This check was using the two parameter version of #respond_to?, but Mock was only defining a one parameter version. - -= 0.9.1 (r349) - -* Fixed bug #21465 - expects & stubs should support method names as strings (as well as symbols) or fail fast. Convert all expectation method names to a symbol in case they were supplied as a string. -* By removing Mock#unexpected_method_called we reduce the number of methods vulnerable to the problem that surfaced in bug #21563. -* Fix bug #21563 - stubbing 'verified?' method is unsafe. Instance method names on the Mock class should be more obscure. -* Performance improvement. StubbaExampleTest goes twice as fast on my local machine. -* Added primitive performance test to default rake task. -* Fix format of case statements which don't work in Ruby 1.9 and make others consistent. -* There is no point in running (potentially expensive) checks if configuration is set to allow such checks to fail. This is a relatively quick fix in response to Chris McGrath's performance problems. 
-* Fix for bug #21161 - 'uninitialized constant Deprecation in stubba.rb'. -* It's more readable to talk about 'once' and 'twice' rather than '1 time' and '2 times'. -* Fix bug #20883 - never should raise when called to prevent follow up errors. Fail fast when there are no matching invokable expectations and handle the stub_everything case sensibly. This might not be entirely backwards compatible, but I think the benefits outweigh the risks. The most likely change is that a test that was already failing will now fail faster, which doesn't seem so awful. - -= 0.9.0 (r316) - -* Configurable warnings or errors - * when a method on a non-public method is stubbed - * when a method on a non-existent method is stubbed - * when a method on a non-mock object is stubbed - * when a method is stubbed unnecessarily (i.e. the stubbed method is not called during the test) - -* Improved error messages - * User-friendly list of unsatisfied expectations, satisfied expectations and state machines. - * Improved readability of cardinality description. - * Display sensible failure message for any_instance expectations e.g. "#.bar - expected calls: 1, actual calls: 0" - -* Parameter matchers - * New to this release - * optionally (allows matching of optional parameters if available) - * yaml_equivalent (allows matching of YAML that represents the specified object) - * responds_with (tests the quack not the duck) - * Nesting of parameter matchers is now supported. - -* Optional block passed into mock initializer is evaluated in the context of the new mock instance and can be used as a shortcut to set up expectations. - -* Added JMock-style sequences for constraining the order of expected invocations. See Standalone#sequence and Expectation#in_sequence. - -* Added JMock-style states for constraining the order of expected invocations. See Standalone#states, Expectation#then, Expectation#when and StateMachine. - -* Compatibility with versions of Ruby - * Compatibility with Ruby v1.9. All test errors and warnings fixed. - * Nasty fix so that TestCaseAdaptor works consistently with earlier versions of Test::Unit as well as more recent versions. - * Added platform to gem specification to avoid bug in rubygems 0.9.5 - see http://www.dcmanges.com/blog/rubygems-0-9-5-platform-bug and http://rubygems.org/read/chapter/20#platform. - * Make ExpectationRaiser deal with subclasses of Interrupt which seem to need a message supplied in the raise statement in Ruby 1.8.6 (but not 1.8.4 or 1.9). Not sure this is really Mocha's responsibility. - -* Added deprecation warning in stubba.rb which is no longer needed and will be removed. - -* Supply positioning information to evals to improve any error messages. 
See http://ola-bini.blogspot.com/2008/01/ruby-antipattern-using-eval-without.html - -* Bug fixes - * 18914 in revision 296 - http://rubyforge.org/tracker/index.php?func=detail&aid=18914&group_id=1917&atid=7477 - * 18917 in revision 295 - http://rubyforge.org/tracker/index.php?func=detail&aid=18917&group_id=1917&atid=7477 - * 18336 in revision 287 - http://rubyforge.org/tracker/index.php?func=detail&aid=18336&group_id=1917&atid=7477 - * 17835 in revision 255 - http://rubyforge.org/tracker/index.php?func=detail&aid=17835&group_id=1917&atid=7477 - * 17412 in revision 242 - http://rubyforge.org/tracker/index.php?func=detail&aid=17412&group_id=1917&atid=7477 - * 15977 in revision 198 - http://rubyforge.org/tracker/index.php?func=detail&aid=15977&group_id=1917&atid=7477 - * 11885 in revision 156 - http://rubyforge.org/tracker/index.php?func=detail&aid=11885&group_id=1917&atid=7477 - -= 0.5.5 (r167) - -- Renamed Matches parameter matcher to RegexpMatches for clarity. -- Added noframes tag to rdoc index to assist Google. - -= 0.5.4 (r166) - -- Added matches parameter matcher for matching regular expressions. - -= 0.5.3 (r165) - -- Attempt to fix packaging problems by switching to newer version (1.15.1) of gnutar and setting COPY_EXTENDED_ATTRIBUTES_DISABLE environment variable. -- Removed unused ExpectationSequenceError exception. -- Added instance_of and kind_of parameter matchers. -- Added Google Webmaster meta tag to rdoc template header. -- Put Google Webmaster meta tag in the right header i.e. the one for the index page. - -= 0.5.2 (r159) - -- Fix bug 11885 - "never doesn't work with stub_everything" submitted by Alexander Lang. In fixing this bug, also fixed undiscoverd bug where expected & actual invocation counts were being incorrectly reported which seems to have been introduced when fixes were added for invocation dispatch (see MockedMethodDispatchAcceptanceTest). -- Previously when an expectation did not allow more invocations, it was treated as not matching. Now we prefer matching expectations which allow more invocations, but still match expectations which cannot allow more invocations. I think this may be overcomplicating things, but let's see how it goes. - -= 0.5.1 (r149) - -- Fixed bug #11583 "Mocha 0.5.0 throwing unexpected warnings". Also switched on ruby warning for all rake test tasks. Fixed majority of warnings, but some left to fix. - -= 0.5.0 (r147) - -- Parameter Matchers - I’ve added a few Hamcrest-style parameter matchers which are designed to be used inside Expectation#with. The following matchers are currently available: anything(), includes(), has_key(), has_value(), has_entry(), all_of() & any_of(). More to follow soon. The idea is eventually to get rid of the nasty parameter_block option on Expectation#with. - - object = mock() - object.expects(:method).with(has_key('key_1')) - object.method('key_1' => 1, 'key_2' => 2) - # no verification error raised - - object = mock() - object.expects(:method).with(has_key('key_1')) - object.method('key_2' => 2) - # verification error raised, because method was not called with Hash containing key: 'key_1' - -- Values Returned and Exceptions Raised on Consecutive Invocations - Allow multiple calls to Expectation#returns and Expectation#raises to build up a sequence of responses to invocations on the mock. Added syntactic sugar method Expectation#then to allow more readable expectations. 
- - object = mock() - object.stubs(:method).returns(1, 2).then.raises(Exception).then.returns(4) - object.method # => 1 - object.method # => 2 - object.method # => raises exception of class Exception - object.method # => 4 - -- Yields on Consecutive Invocations - Allow multiple calls to yields on single expectation to allow yield parameters to be specified for consecutive invocations. - - object = mock() - object.stubs(:method).yields(1, 2).then.yields(3) - object.method { |*values| p values } # => [1, 2] - object.method { |*values| p values } # => [3] - -- Multiple Yields on Single Invocation - Added Expectation#multiple_yields to allow a mocked or stubbed method to yield multiple times for a single invocation. - - object = mock() - object.stubs(:method).multiple_yields([1, 2], [3]) - object.method { |*values| p values } # => [1, 2] # => [3] - -- Invocation Dispatch - Expectations were already being matched in reverse order i.e. the most recently defined one was being found first. This is still the case, but we now stop matching an expectation when its maximum number of expected invocations is reached. c.f. JMock v1. A stub will never stop matching by default. Hopefully this means we can soon get rid of the need to pass a Proc to Expectation#returns. - - object = mock() - object.stubs(:method).returns(2) - object.expects(:method).once.returns(1) - object.method # => 1 - object.method # => 2 - object.method # => 2 - # no verification error raised - - # The following should still work... - - Time.stubs(:now).returns(Time.parse('Mon Jan 01 00:00:00 UTC 2007')) - Time.now # => Mon Jan 01 00:00:00 UTC 2007 - Time.stubs(:now).returns(Time.parse('Thu Feb 01 00:00:00 UTC 2007')) - Time.now # => Thu Feb 01 00:00:00 UTC 2007 - -- Deprecate passing an instance of Proc to Expectation#returns. -- Explicitly include all Rakefile dependencies in project. -- Fixed old Stubba example. -- Fix so that it is possible for a stubbed method to raise an Interrupt exception without a message in Ruby 1.8.6 -- Added responds_like and quacks_like. -- Capture standard object methods before Mocha adds any. -- Added Expectation#once method to make interface less surprising. -- Use Rake::TestTask to run tests. Created three separate tasks to run unit, integration & acceptance tests. Split inspect_test into one file per TestCase. Deleted superfluous all_tests file. -- Fiddled with mocha_inspect and tests to give more sensible results on x86 platform. -- Fixed bug #7834 "infinite_range.rb makes incorrect assumption about to_f" logged by James Moore. - -= 0.4.0 (r92) - -- Allow naming of mocks (patch from Chris Roos). -- Specify multiple return values for consecutive calls. -- Improved consistency of expectation error messages. -- Allow mocking of Object instance methods e.g. kind_of?, type. -- Provide aliased versions of #expects and #stubs to allow mocking of these methods. -- Added at_least, at_most, at_most_once methods to expectation. -- Allow expects and stubs to take a hash of method and return values. -- Eliminate warning: "instance variable @yield not initialized" (patch from Xavier Shay). -- Restore instance methods on partial mocks (patch from Chris Roos). -- Allow stubbing of a method with non-word characters in its name (patch from Paul Battley). -- Removed coupling to Test::Unit. -- Allow specified exception instance to be raised (patch from Chris Roos). -- Make mock object_id appear in hex like normal Ruby inspect (patch from Paul Battley). 
-- Fix path to object.rb in rdoc rake task (patch from Tomas Pospisek). -- Reverse order in which expectations are matched, so that last expectation is matched first. This allows e.g. a call to #stubs to be effectively overridden by a call to #expects (patch from Tobias Lutke). -- Stubba & SmartTestCase modules incorporated into Mocha module so only need to require 'mocha' - no longer need to require 'stubba'. -- AutoMocha removed. - -= 0.3.3 - -- Quick bug fix to restore instance methods on partial mocks (for Kevin Clark). - -= 0.3.2 - -- Examples added. - -= 0.3.1 - -- Dual licensing with MIT license added. - -= 0.3.0 - -* Rails plugin. -* Auto-verify for expectations on concrete classes. -* Include each expectation verification in the test result assertion count. -* Filter out noise from assertion backtraces. -* Point assertion backtrace to line where failing expectation was created. -* New yields method for expectations. -* Create stubs which stub all method calls. -* Mocks now respond_to? expected methods. - -= 0.2.1 - -* Rename MochaAcceptanceTest::Rover#move method to avoid conflict with Rake (in Ruby 1.8.4 only?) - -= 0.2.0 - -* Small change to SetupAndTeardown#teardown_stubs suggested by Luke Redpath (http://www.lukeredpath.co.uk) to allow use of Stubba with RSpec (http://rspec.rubyforge.org). -* Reorganized directory structure and extracted addition of setup and teardown methods into SmartTestCase mini-library. -* Addition of auto-verify for Mocha (but not Stubba). This means there is more significance in the choice of expects or stubs in that any expects on a mock will automatically get verified. - -So instead of... - - wotsit = Mocha.new - wotsit.expects(:thingummy).with(5).returns(10) - doobrey = Doobrey.new(wotsit) - doobrey.hoojamaflip - wotsit.verify - -you need to do... - - wotsit = mock() - wotsit.expects(:thingummy).with(5).returns(10) - doobrey = Doobrey.new(wotsit) - doobrey.hoojamaflip - # no need to verify - -There are also shortcuts as follows... - -instead of... - - wotsit = Mocha.new - wotsit.expects(:thingummy).returns(10) - wotsit.expects(:summat).returns(25) - -you can have... - - wotsit = mock(:thingummy => 5, :summat => 25) - -and instead of... - - wotsit = Mocha.new - wotsit.stubs(:thingummy).returns(10) - wotsit.stubs(:summat).returns(25) - -you can have... - - wotsit = stub(:thingummy => 5, :summat => 25) - -= 0.1.2 - -* Minor tweaks - -= 0.1.1 - -* Initial release. 
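The 0.9.0 release notes above introduce the JMock-style sequences and states without an inline example. A minimal illustrative sketch against the API documented in the library code removed below (Standalone#sequence, Standalone#states, Expectation#in_sequence, #then and #when); the 'db' and 'radio' mocks and the test names are invented for illustration:

    require 'test/unit'
    require 'rubygems'
    require 'mocha'

    class OrderingExampleTest < Test::Unit::TestCase
      # A sequence constrains expectations to be invoked in the order declared.
      def test_calls_must_happen_in_sequence
        saving = sequence('saving')
        db = mock('db')
        db.expects(:open).in_sequence(saving)
        db.expects(:write).in_sequence(saving)
        db.expects(:close).in_sequence(saving)
        db.open
        db.write
        db.close   # invoking these out of order would fail verification
      end

      # A state machine only lets an expectation match while it is in the named state.
      def test_calls_only_allowed_in_a_given_state
        power = states('power').starts_as('off')
        radio = mock('radio')
        radio.expects(:switch_on).then(power.is('on'))
        radio.expects(:tune).when(power.is('on'))
        radio.switch_on
        radio.tune
      end
    end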
diff --git a/vendor/gems/mocha-0.9.3/Rakefile b/vendor/gems/mocha-0.9.3/Rakefile deleted file mode 100644 index ddc38fc..0000000 --- a/vendor/gems/mocha-0.9.3/Rakefile +++ /dev/null @@ -1,187 +0,0 @@ -require 'rubygems' -require 'rake/rdoctask' -require 'rake/gempackagetask' -require 'rake/testtask' -require 'rake/contrib/sshpublisher' - -module Mocha - VERSION = "0.9.3" -end - -desc "Run all tests" -task 'default' => ['test:units', 'test:acceptance', 'test:performance'] - -namespace 'test' do - - unit_tests = FileList['test/unit/**/*_test.rb'] - acceptance_tests = FileList['test/acceptance/*_test.rb'] - - desc "Run unit tests" - Rake::TestTask.new('units') do |t| - t.libs << 'test' - t.test_files = unit_tests - t.verbose = true - t.warning = true - end - - desc "Run acceptance tests" - Rake::TestTask.new('acceptance') do |t| - t.libs << 'test' - t.test_files = acceptance_tests - t.verbose = true - t.warning = true - end - - # require 'rcov/rcovtask' - # Rcov::RcovTask.new('coverage') do |t| - # t.libs << 'test' - # t.test_files = unit_tests + acceptance_tests - # t.verbose = true - # t.warning = true - # t.rcov_opts << '--sort coverage' - # t.rcov_opts << '--xref' - # end - - desc "Run performance tests" - task 'performance' do - require 'test/acceptance/stubba_example_test' - require 'test/acceptance/mocha_example_test' - iterations = 1000 - puts "\nBenchmarking with #{iterations} iterations..." - [MochaExampleTest, StubbaExampleTest].each do |test_case| - puts "#{test_case}: #{benchmark_test_case(test_case, iterations)} seconds." - end - end - -end - -def benchmark_test_case(klass, iterations) - require 'test/unit/ui/console/testrunner' - require 'benchmark' - time = Benchmark.realtime { iterations.times { Test::Unit::UI::Console::TestRunner.run(klass, Test::Unit::UI::SILENT) } } -end - -desc 'Generate RDoc' -Rake::RDocTask.new('rdoc') do |task| - task.main = 'README' - task.title = "Mocha #{Mocha::VERSION}" - task.rdoc_dir = 'doc' - task.template = File.expand_path(File.join(File.dirname(__FILE__), "templates", "html_with_google_analytics")) - task.rdoc_files.include( - 'README', - 'RELEASE', - 'COPYING', - 'MIT-LICENSE', - 'agiledox.txt', - 'lib/mocha/standalone.rb', - 'lib/mocha/mock.rb', - 'lib/mocha/expectation.rb', - 'lib/mocha/object.rb', - 'lib/mocha/parameter_matchers.rb', - 'lib/mocha/parameter_matchers', - 'lib/mocha/state_machine.rb', - 'lib/mocha/configuration.rb', - 'lib/mocha/stubbing_error.rb' - ) -end -task 'rdoc' => 'examples' - -desc "Upload RDoc to RubyForge" -task 'publish_rdoc' => ['rdoc', 'examples'] do - Rake::SshDirPublisher.new("jamesmead@rubyforge.org", "/var/www/gforge-projects/mocha", "doc").upload -end - -desc "Generate agiledox-like documentation for tests" -file 'agiledox.txt' do - File.open('agiledox.txt', 'w') do |output| - tests = FileList['test/**/*_test.rb'] - tests.each do |file| - m = %r".*/([^/].*)_test.rb".match(file) - output << m[1]+" should:\n" - test_definitions = File::readlines(file).select {|line| line =~ /.*def test.*/} - test_definitions.sort.each do |definition| - m = %r"test_(should_)?(.*)".match(definition) - output << " - "+m[2].gsub(/_/," ") << "\n" - end - end - end -end - -desc "Convert example ruby files to syntax-highlighted html" -task 'examples' do - $:.unshift File.expand_path(File.join(File.dirname(__FILE__), "vendor", "coderay-0.7.4.215", "lib")) - require 'coderay' - mkdir_p 'doc/examples' - File.open('doc/examples/coderay.css', 'w') do |output| - output << CodeRay::Encoders[:html]::CSS.new.stylesheet - end - ['mocha', 
'stubba', 'misc'].each do |filename| - File.open("doc/examples/#{filename}.html", 'w') do |file| - file << "" - file << "" - file << %q() - file << "" - file << "" - file << CodeRay.scan_file("examples/#{filename}.rb").html.div - file << "" - file << "" - end - end -end - -Gem.manage_gems if Gem::RubyGemsVersion < '1.2.0' - -specification = Gem::Specification.new do |s| - s.name = "mocha" - s.summary = "Mocking and stubbing library" - s.version = Mocha::VERSION - s.platform = Gem::Platform::RUBY - s.author = 'James Mead' - s.description = <<-EOF - Mocking and stubbing library with JMock/SchMock syntax, which allows mocking and stubbing of methods on real (non-mock) classes. - EOF - s.email = 'mocha-developer@rubyforge.org' - s.homepage = 'http://mocha.rubyforge.org' - s.rubyforge_project = 'mocha' - - s.has_rdoc = true - s.extra_rdoc_files = ['README', 'COPYING'] - s.rdoc_options << '--title' << 'Mocha' << '--main' << 'README' << '--line-numbers' - - s.add_dependency('rake') - s.files = FileList['{lib,test,examples}/**/*.rb', '[A-Z]*'].exclude('TODO').to_a -end - -Rake::GemPackageTask.new(specification) do |package| - package.need_zip = true - package.need_tar = true -end - -task 'verify_user' do - raise "RUBYFORGE_USER environment variable not set!" unless ENV['RUBYFORGE_USER'] -end - -task 'verify_password' do - raise "RUBYFORGE_PASSWORD environment variable not set!" unless ENV['RUBYFORGE_PASSWORD'] -end - -desc "Publish package files on RubyForge." -task 'publish_packages' => ['verify_user', 'verify_password', 'package'] do - $:.unshift File.expand_path(File.join(File.dirname(__FILE__), "vendor", "meta_project-0.4.15", "lib")) - require 'meta_project' - require 'rake/contrib/xforge' - release_files = FileList[ - "pkg/mocha-#{Mocha::VERSION}.gem", - "pkg/mocha-#{Mocha::VERSION}.tgz", - "pkg/mocha-#{Mocha::VERSION}.zip" - ] - - Rake::XForge::Release.new(MetaProject::Project::XForge::RubyForge.new('mocha')) do |release| - release.user_name = ENV['RUBYFORGE_USER'] - release.password = ENV['RUBYFORGE_PASSWORD'] - release.files = release_files.to_a - release.release_name = "Mocha #{Mocha::VERSION}" - release.release_changes = '' - release.release_notes = '' - end -end diff --git a/vendor/gems/mocha-0.9.3/examples/misc.rb b/vendor/gems/mocha-0.9.3/examples/misc.rb deleted file mode 100644 index 1cb8b55..0000000 --- a/vendor/gems/mocha-0.9.3/examples/misc.rb +++ /dev/null @@ -1,44 +0,0 @@ -require 'test/unit' -require 'rubygems' -require 'mocha' - -class MiscExampleTest < Test::Unit::TestCase - - def test_mocking_a_class_method - product = Product.new - Product.expects(:find).with(1).returns(product) - assert_equal product, Product.find(1) - end - - def test_mocking_an_instance_method_on_a_real_object - product = Product.new - product.expects(:save).returns(true) - assert product.save - end - - def test_stubbing_instance_methods_on_real_objects - prices = [stub(:pence => 1000), stub(:pence => 2000)] - product = Product.new - product.stubs(:prices).returns(prices) - assert_equal [1000, 2000], product.prices.collect {|p| p.pence} - end - - def test_stubbing_an_instance_method_on_all_instances_of_a_class - Product.any_instance.stubs(:name).returns('stubbed_name') - product = Product.new - assert_equal 'stubbed_name', product.name - end - - def test_traditional_mocking - object = mock() - object.expects(:expected_method).with(:p1, :p2).returns(:result) - assert_equal :result, object.expected_method(:p1, :p2) - end - - def test_shortcuts - object = stub(:method1 => :result1, :method2 => :result2) - 
assert_equal :result1, object.method1 - assert_equal :result2, object.method2 - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/examples/mocha.rb b/vendor/gems/mocha-0.9.3/examples/mocha.rb deleted file mode 100644 index 863270d..0000000 --- a/vendor/gems/mocha-0.9.3/examples/mocha.rb +++ /dev/null @@ -1,26 +0,0 @@ -class Enterprise - - def initialize(dilithium) - @dilithium = dilithium - end - - def go(warp_factor) - warp_factor.times { @dilithium.nuke(:anti_matter) } - end - -end - -require 'test/unit' -require 'rubygems' -require 'mocha' - -class EnterpriseTest < Test::Unit::TestCase - - def test_should_boldly_go - dilithium = mock() - dilithium.expects(:nuke).with(:anti_matter).at_least_once # auto-verified at end of test - enterprise = Enterprise.new(dilithium) - enterprise.go(2) - end - -end diff --git a/vendor/gems/mocha-0.9.3/examples/stubba.rb b/vendor/gems/mocha-0.9.3/examples/stubba.rb deleted file mode 100644 index 2788d1b..0000000 --- a/vendor/gems/mocha-0.9.3/examples/stubba.rb +++ /dev/null @@ -1,65 +0,0 @@ -class Order - - attr_accessor :shipped_on - - def total_cost - line_items.inject(0) { |total, line_item| total + line_item.price } + shipping_cost - end - - def total_weight - line_items.inject(0) { |total, line_item| total + line_item.weight } - end - - def shipping_cost - total_weight * 5 + 10 - end - - class << self - - def find_all - # Database.connection.execute('select * from orders... - end - - def number_shipped_since(date) - find_all.select { |order| order.shipped_on > date }.length - end - - def unshipped_value - find_all.inject(0) { |total, order| order.shipped_on ? total : total + order.total_cost } - end - - end - -end - -require 'test/unit' -require 'rubygems' -require 'mocha' - -class OrderTest < Test::Unit::TestCase - - # illustrates stubbing instance method - def test_should_calculate_shipping_cost_based_on_total_weight - order = Order.new - order.stubs(:total_weight).returns(10) - assert_equal 60, order.shipping_cost - end - - # illustrates stubbing class method - def test_should_count_number_of_orders_shipped_after_specified_date - now = Time.now; week_in_secs = 7 * 24 * 60 * 60 - order_1 = Order.new; order_1.shipped_on = now - 1 * week_in_secs - order_2 = Order.new; order_2.shipped_on = now - 3 * week_in_secs - Order.stubs(:find_all).returns([order_1, order_2]) - assert_equal 1, Order.number_shipped_since(now - 2 * week_in_secs) - end - - # illustrates stubbing instance method for all instances of a class - def test_should_calculate_value_of_unshipped_orders - Order.stubs(:find_all).returns([Order.new, Order.new, Order.new]) - Order.any_instance.stubs(:shipped_on).returns(nil) - Order.any_instance.stubs(:total_cost).returns(10) - assert_equal 30, Order.unshipped_value - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha.rb b/vendor/gems/mocha-0.9.3/lib/mocha.rb deleted file mode 100644 index 2cc657e..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha.rb +++ /dev/null @@ -1,47 +0,0 @@ -require 'mocha_standalone' -require 'mocha/configuration' - -if RUBY_VERSION < '1.9' - begin - require 'rubygems' - begin - gem 'minitest', '>=1.3' - require 'minitest/unit' - rescue Gem::LoadError - # Compatible version of MiniTest gem not available - end - rescue LoadError - # RubyGems not available - end -else - begin - require 'minitest/unit' - rescue LoadError - # MiniTest not available - end -end - -if defined?(MiniTest) - require 'mocha/mini_test_adapter' - - module MiniTest - class Unit - class TestCase - include Mocha::Standalone 
- include Mocha::MiniTestCaseAdapter - end - end - end -end - -require 'mocha/test_case_adapter' -require 'test/unit/testcase' - -module Test - module Unit - class TestCase - include Mocha::Standalone - include Mocha::TestCaseAdapter - end - end -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/any_instance_method.rb b/vendor/gems/mocha-0.9.3/lib/mocha/any_instance_method.rb deleted file mode 100644 index 42d8901..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/any_instance_method.rb +++ /dev/null @@ -1,54 +0,0 @@ -require 'mocha/class_method' - -module Mocha - - class AnyInstanceMethod < ClassMethod - - def unstub - remove_new_method - restore_original_method - stubbee.any_instance.reset_mocha - end - - def mock - stubbee.any_instance.mocha - end - - def hide_original_method - if method_exists?(method) - begin - stubbee.class_eval("alias_method :#{hidden_method}, :#{method}", __FILE__, __LINE__) - rescue NameError - # deal with nasties like ActiveRecord::Associations::AssociationProxy - end - end - end - - def define_new_method - stubbee.class_eval("def #{method}(*args, &block); self.class.any_instance.mocha.method_missing(:#{method}, *args, &block); end", __FILE__, __LINE__) - end - - def remove_new_method - stubbee.class_eval("remove_method :#{method}", __FILE__, __LINE__) - end - - def restore_original_method - if method_exists?(hidden_method) - begin - stubbee.class_eval("alias_method :#{method}, :#{hidden_method}; remove_method :#{hidden_method}", __FILE__, __LINE__) - rescue NameError - # deal with nasties like ActiveRecord::Associations::AssociationProxy - end - end - end - - def method_exists?(method) - return true if stubbee.public_instance_methods(false).include?(method) - return true if stubbee.protected_instance_methods(false).include?(method) - return true if stubbee.private_instance_methods(false).include?(method) - return false - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/backtrace_filter.rb b/vendor/gems/mocha-0.9.3/lib/mocha/backtrace_filter.rb deleted file mode 100644 index 69215e7..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/backtrace_filter.rb +++ /dev/null @@ -1,17 +0,0 @@ -module Mocha - - class BacktraceFilter - - LIB_DIRECTORY = File.expand_path(File.join(File.dirname(__FILE__), "..")) + File::SEPARATOR - - def initialize(lib_directory = LIB_DIRECTORY) - @lib_directory = lib_directory - end - - def filtered(backtrace) - backtrace.reject { |location| Regexp.new(@lib_directory).match(File.expand_path(location)) } - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/cardinality.rb b/vendor/gems/mocha-0.9.3/lib/mocha/cardinality.rb deleted file mode 100644 index b0c0ddf..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/cardinality.rb +++ /dev/null @@ -1,95 +0,0 @@ -module Mocha - - class Cardinality - - INFINITY = 1 / 0.0 - - class << self - - def exactly(count) - new(count, count) - end - - def at_least(count) - new(count, INFINITY) - end - - def at_most(count) - new(0, count) - end - - def times(range_or_count) - case range_or_count - when Range then new(range_or_count.first, range_or_count.last) - else new(range_or_count, range_or_count) - end - end - - end - - def initialize(required, maximum) - @required, @maximum = required, maximum - end - - def invocations_allowed?(invocation_count) - invocation_count < maximum - end - - def satisfied?(invocations_so_far) - invocations_so_far >= required - end - - def needs_verifying? - !allowed_any_number_of_times? 
- end - - def verified?(invocation_count) - (invocation_count >= required) && (invocation_count <= maximum) - end - - def allowed_any_number_of_times? - required == 0 && infinite?(maximum) - end - - def used?(invocation_count) - (invocation_count > 0) || (maximum == 0) - end - - def mocha_inspect - if allowed_any_number_of_times? - "allowed any number of times" - else - if required == 0 && maximum == 0 - "expected never" - elsif required == maximum - "expected exactly #{times(required)}" - elsif infinite?(maximum) - "expected at least #{times(required)}" - elsif required == 0 - "expected at most #{times(maximum)}" - else - "expected between #{required} and #{times(maximum)}" - end - end - end - - protected - - attr_reader :required, :maximum - - def times(number) - case number - when 0 then "no times" - when 1 then "once" - when 2 then "twice" - else "#{number} times" - end - end - - def infinite?(number) - number.respond_to?(:infinite?) && number.infinite? - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/central.rb b/vendor/gems/mocha-0.9.3/lib/mocha/central.rb deleted file mode 100644 index 7bb287a..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/central.rb +++ /dev/null @@ -1,27 +0,0 @@ -module Mocha - - class Central - - attr_accessor :stubba_methods - - def initialize - self.stubba_methods = [] - end - - def stub(method) - unless stubba_methods.include?(method) - method.stub - stubba_methods.push(method) - end - end - - def unstub_all - while stubba_methods.length > 0 - method = stubba_methods.pop - method.unstub - end - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/change_state_side_effect.rb b/vendor/gems/mocha-0.9.3/lib/mocha/change_state_side_effect.rb deleted file mode 100644 index fe85d6e..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/change_state_side_effect.rb +++ /dev/null @@ -1,19 +0,0 @@ -module Mocha - - class ChangeStateSideEffect - - def initialize(state) - @state = state - end - - def perform - @state.activate - end - - def mocha_inspect - "then #{@state.mocha_inspect}" - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/class_method.rb b/vendor/gems/mocha-0.9.3/lib/mocha/class_method.rb deleted file mode 100644 index 19a1827..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/class_method.rb +++ /dev/null @@ -1,86 +0,0 @@ -require 'mocha/metaclass' - -module Mocha - - class ClassMethod - - attr_reader :stubbee, :method - - def initialize(stubbee, method) - @stubbee = stubbee - @method = RUBY_VERSION < '1.9' ? 
method.to_s : method.to_sym - end - - def stub - hide_original_method - define_new_method - end - - def unstub - remove_new_method - restore_original_method - stubbee.reset_mocha - end - - def mock - stubbee.mocha - end - - def hide_original_method - if method_exists?(method) - begin - stubbee.__metaclass__.class_eval("alias_method :#{hidden_method}, :#{method}", __FILE__, __LINE__) - rescue NameError - # deal with nasties like ActiveRecord::Associations::AssociationProxy - end - end - end - - def define_new_method - stubbee.__metaclass__.class_eval("def #{method}(*args, &block); mocha.method_missing(:#{method}, *args, &block); end", __FILE__, __LINE__) - end - - def remove_new_method - stubbee.__metaclass__.class_eval("remove_method :#{method}", __FILE__, __LINE__) - end - - def restore_original_method - if method_exists?(hidden_method) - begin - stubbee.__metaclass__.class_eval("alias_method :#{method}, :#{hidden_method}; remove_method :#{hidden_method}", __FILE__, __LINE__) - rescue NameError - # deal with nasties like ActiveRecord::Associations::AssociationProxy - end - end - end - - def hidden_method - if RUBY_VERSION < '1.9' - method_name = method.to_s.gsub(/\W/) { |s| "_substituted_character_#{s[0]}_" } - else - method_name = method.to_s.gsub(/\W/) { |s| "_substituted_character_#{s.ord}_" } - end - hidden_method = "__stubba__#{method_name}__stubba__" - RUBY_VERSION < '1.9' ? hidden_method.to_s : hidden_method.to_sym - end - - def eql?(other) - return false unless (other.class == self.class) - (stubbee.object_id == other.stubbee.object_id) and (method == other.method) - end - - alias_method :==, :eql? - - def to_s - "#{stubbee}.#{method}" - end - - def method_exists?(method) - symbol = method.to_sym - metaclass = stubbee.__metaclass__ - metaclass.public_method_defined?(symbol) || metaclass.protected_method_defined?(symbol) || metaclass.private_method_defined?(symbol) - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/configuration.rb b/vendor/gems/mocha-0.9.3/lib/mocha/configuration.rb deleted file mode 100644 index fb1ab6f..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/configuration.rb +++ /dev/null @@ -1,60 +0,0 @@ -module Mocha # :nodoc: - - # Configuration settings - class Configuration - - DEFAULTS = { :stubbing_method_unnecessarily => :allow, :stubbing_method_on_non_mock_object => :allow, :stubbing_non_existent_method => :allow, :stubbing_non_public_method => :allow } - - class << self - - # :call-seq: allow(action) - # - # Allow the specified action (as a symbol). - # The actions currently available are :stubbing_method_unnecessarily, :stubbing_method_on_non_mock_object, :stubbing_non_existent_method, :stubbing_non_public_method. - def allow(action) - configuration[action] = :allow - end - - def allow?(action) # :nodoc: - configuration[action] == :allow - end - - # :call-seq: warn_when(action) - # - # Warn if the specified action (as a symbol) is attempted. - # The actions currently available are :stubbing_method_unnecessarily, :stubbing_method_on_non_mock_object, :stubbing_non_existent_method, :stubbing_non_public_method. - def warn_when(action) - configuration[action] = :warn - end - - def warn_when?(action) # :nodoc: - configuration[action] == :warn - end - - # :call-seq: prevent(action) - # - # Raise a StubbingError if the specified action (as a symbol) is attempted. - # The actions currently available are :stubbing_method_unnecessarily, :stubbing_method_on_non_mock_object, :stubbing_non_existent_method, :stubbing_non_public_method. 
- def prevent(action) - configuration[action] = :prevent - end - - def prevent?(action) # :nodoc: - configuration[action] == :prevent - end - - def reset_configuration # :nodoc: - @configuration = nil - end - - private - - def configuration # :nodoc: - @configuration ||= DEFAULTS.dup - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/deprecation.rb b/vendor/gems/mocha-0.9.3/lib/mocha/deprecation.rb deleted file mode 100644 index 7448510..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/deprecation.rb +++ /dev/null @@ -1,22 +0,0 @@ -module Mocha - - class Deprecation - - class << self - - attr_accessor :mode, :messages - - def warning(message) - @messages << message - $stderr.puts "Mocha deprecation warning: #{message}" unless mode == :disabled - $stderr.puts caller.join("\n ") if mode == :debug - end - - end - - self.mode = :enabled - self.messages = [] - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/exception_raiser.rb b/vendor/gems/mocha-0.9.3/lib/mocha/exception_raiser.rb deleted file mode 100644 index 9e009cb..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/exception_raiser.rb +++ /dev/null @@ -1,17 +0,0 @@ -module Mocha # :nodoc: - - class ExceptionRaiser # :nodoc: - - def initialize(exception, message) - @exception, @message = exception, message - end - - def evaluate - raise @exception, @exception.to_s if @exception.is_a?(Module) && @exception.ancestors.include?(Interrupt) - raise @exception, @message if @message - raise @exception - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/expectation.rb b/vendor/gems/mocha-0.9.3/lib/mocha/expectation.rb deleted file mode 100644 index 64e2544..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/expectation.rb +++ /dev/null @@ -1,451 +0,0 @@ -require 'mocha/method_matcher' -require 'mocha/parameters_matcher' -require 'mocha/expectation_error' -require 'mocha/return_values' -require 'mocha/exception_raiser' -require 'mocha/yield_parameters' -require 'mocha/is_a' -require 'mocha/in_state_ordering_constraint' -require 'mocha/change_state_side_effect' -require 'mocha/cardinality' - -module Mocha # :nodoc: - - # Methods on expectations returned from Mock#expects, Mock#stubs, Object#expects and Object#stubs. - class Expectation - - # :call-seq: times(range) -> expectation - # - # Modifies expectation so that the number of calls to the expected method must be within a specific +range+. - # - # +range+ can be specified as an exact integer or as a range of integers - # object = mock() - # object.expects(:expected_method).times(3) - # 3.times { object.expected_method } - # # => verify succeeds - # - # object = mock() - # object.expects(:expected_method).times(3) - # 2.times { object.expected_method } - # # => verify fails - # - # object = mock() - # object.expects(:expected_method).times(2..4) - # 3.times { object.expected_method } - # # => verify succeeds - # - # object = mock() - # object.expects(:expected_method).times(2..4) - # object.expected_method - # # => verify fails - def times(range) - @cardinality = Cardinality.times(range) - self - end - - # :call-seq: once() -> expectation - # - # Modifies expectation so that the expected method must be called exactly once. - # Note that this is the default behaviour for an expectation, but you may wish to use it for clarity/emphasis. 
- # object = mock() - # object.expects(:expected_method).once - # object.expected_method - # # => verify succeeds - # - # object = mock() - # object.expects(:expected_method).once - # object.expected_method - # object.expected_method - # # => verify fails - # - # object = mock() - # object.expects(:expected_method).once - # # => verify fails - def once - @cardinality = Cardinality.exactly(1) - self - end - - # :call-seq: never() -> expectation - # - # Modifies expectation so that the expected method must never be called. - # object = mock() - # object.expects(:expected_method).never - # object.expected_method - # # => verify fails - # - # object = mock() - # object.expects(:expected_method).never - # object.expected_method - # # => verify succeeds - def never - @cardinality = Cardinality.exactly(0) - self - end - - # :call-seq: at_least(minimum_number_of_times) -> expectation - # - # Modifies expectation so that the expected method must be called at least a +minimum_number_of_times+. - # object = mock() - # object.expects(:expected_method).at_least(2) - # 3.times { object.expected_method } - # # => verify succeeds - # - # object = mock() - # object.expects(:expected_method).at_least(2) - # object.expected_method - # # => verify fails - def at_least(minimum_number_of_times) - @cardinality = Cardinality.at_least(minimum_number_of_times) - self - end - - # :call-seq: at_least_once() -> expectation - # - # Modifies expectation so that the expected method must be called at least once. - # object = mock() - # object.expects(:expected_method).at_least_once - # object.expected_method - # # => verify succeeds - # - # object = mock() - # object.expects(:expected_method).at_least_once - # # => verify fails - def at_least_once - at_least(1) - self - end - - # :call-seq: at_most(maximum_number_of_times) -> expectation - # - # Modifies expectation so that the expected method must be called at most a +maximum_number_of_times+. - # object = mock() - # object.expects(:expected_method).at_most(2) - # 2.times { object.expected_method } - # # => verify succeeds - # - # object = mock() - # object.expects(:expected_method).at_most(2) - # 3.times { object.expected_method } - # # => verify fails - def at_most(maximum_number_of_times) - @cardinality = Cardinality.at_most(maximum_number_of_times) - self - end - - # :call-seq: at_most_once() -> expectation - # - # Modifies expectation so that the expected method must be called at most once. - # object = mock() - # object.expects(:expected_method).at_most_once - # object.expected_method - # # => verify succeeds - # - # object = mock() - # object.expects(:expected_method).at_most_once - # 2.times { object.expected_method } - # # => verify fails - def at_most_once() - at_most(1) - self - end - - # :call-seq: with(*expected_parameters, &matching_block) -> expectation - # - # Modifies expectation so that the expected method must be called with +expected_parameters+. - # object = mock() - # object.expects(:expected_method).with(:param1, :param2) - # object.expected_method(:param1, :param2) - # # => verify succeeds - # - # object = mock() - # object.expects(:expected_method).with(:param1, :param2) - # object.expected_method(:param3) - # # => verify fails - # May be used with parameter matchers in Mocha::ParameterMatchers. - # - # If a +matching_block+ is given, the block is called with the parameters passed to the expected method. - # The expectation is matched if the block evaluates to +true+. 
- # object = mock() - # object.expects(:expected_method).with() { |value| value % 4 == 0 } - # object.expected_method(16) - # # => verify succeeds - # - # object = mock() - # object.expects(:expected_method).with() { |value| value % 4 == 0 } - # object.expected_method(17) - # # => verify fails - def with(*expected_parameters, &matching_block) - @parameters_matcher = ParametersMatcher.new(expected_parameters, &matching_block) - self - end - - # :call-seq: yields(*parameters) -> expectation - # - # Modifies expectation so that when the expected method is called, it yields with the specified +parameters+. - # object = mock() - # object.expects(:expected_method).yields('result') - # yielded_value = nil - # object.expected_method { |value| yielded_value = value } - # yielded_value # => 'result' - # May be called multiple times on the same expectation for consecutive invocations. Also see Expectation#then. - # object = mock() - # object.stubs(:expected_method).yields(1).then.yields(2) - # yielded_values_from_first_invocation = [] - # yielded_values_from_second_invocation = [] - # object.expected_method { |value| yielded_values_from_first_invocation << value } # first invocation - # object.expected_method { |value| yielded_values_from_second_invocation << value } # second invocation - # yielded_values_from_first_invocation # => [1] - # yielded_values_from_second_invocation # => [2] - def yields(*parameters) - @yield_parameters.add(*parameters) - self - end - - # :call-seq: multiple_yields(*parameter_groups) -> expectation - # - # Modifies expectation so that when the expected method is called, it yields multiple times per invocation with the specified +parameter_groups+. - # object = mock() - # object.expects(:expected_method).multiple_yields(['result_1', 'result_2'], ['result_3']) - # yielded_values = [] - # object.expected_method { |*values| yielded_values << values } - # yielded_values # => [['result_1', 'result_2'], ['result_3]] - # May be called multiple times on the same expectation for consecutive invocations. Also see Expectation#then. - # object = mock() - # object.stubs(:expected_method).multiple_yields([1, 2], [3]).then.multiple_yields([4], [5, 6]) - # yielded_values_from_first_invocation = [] - # yielded_values_from_second_invocation = [] - # object.expected_method { |*values| yielded_values_from_first_invocation << values } # first invocation - # object.expected_method { |*values| yielded_values_from_second_invocation << values } # second invocation - # yielded_values_from_first_invocation # => [[1, 2], [3]] - # yielded_values_from_second_invocation # => [[4], [5, 6]] - def multiple_yields(*parameter_groups) - @yield_parameters.multiple_add(*parameter_groups) - self - end - - # :call-seq: returns(value) -> expectation - # returns(*values) -> expectation - # - # Modifies expectation so that when the expected method is called, it returns the specified +value+. - # object = mock() - # object.stubs(:stubbed_method).returns('result') - # object.stubbed_method # => 'result' - # object.stubbed_method # => 'result' - # If multiple +values+ are given, these are returned in turn on consecutive calls to the method. - # object = mock() - # object.stubs(:stubbed_method).returns(1, 2) - # object.stubbed_method # => 1 - # object.stubbed_method # => 2 - # May be called multiple times on the same expectation. Also see Expectation#then. 
- # object = mock() - # object.stubs(:expected_method).returns(1, 2).then.returns(3) - # object.expected_method # => 1 - # object.expected_method # => 2 - # object.expected_method # => 3 - # May be called in conjunction with Expectation#raises on the same expectation. - # object = mock() - # object.stubs(:expected_method).returns(1, 2).then.raises(Exception) - # object.expected_method # => 1 - # object.expected_method # => 2 - # object.expected_method # => raises exception of class Exception1 - # Note that in Ruby a method returning multiple values is exactly equivalent to a method returning an Array of those values. - # object = mock() - # object.stubs(:expected_method).returns([1, 2]) - # x, y = object.expected_method - # x # => 1 - # y # => 2 - def returns(*values) - @return_values += ReturnValues.build(*values) - self - end - - # :call-seq: raises(exception = RuntimeError, message = nil) -> expectation - # - # Modifies expectation so that when the expected method is called, it raises the specified +exception+ with the specified +message+. - # object = mock() - # object.expects(:expected_method).raises(Exception, 'message') - # object.expected_method # => raises exception of class Exception and with message 'message' - # May be called multiple times on the same expectation. Also see Expectation#then. - # object = mock() - # object.stubs(:expected_method).raises(Exception1).then.raises(Exception2) - # object.expected_method # => raises exception of class Exception1 - # object.expected_method # => raises exception of class Exception2 - # May be called in conjunction with Expectation#returns on the same expectation. - # object = mock() - # object.stubs(:expected_method).raises(Exception).then.returns(2, 3) - # object.expected_method # => raises exception of class Exception1 - # object.expected_method # => 2 - # object.expected_method # => 3 - def raises(exception = RuntimeError, message = nil) - @return_values += ReturnValues.new(ExceptionRaiser.new(exception, message)) - self - end - - # :call-seq: then() -> expectation - # then(state_machine.is(state)) -> expectation - # - # then() is used as syntactic sugar to improve readability. It has no effect on state of the expectation. - # object = mock() - # object.stubs(:expected_method).returns(1, 2).then.raises(Exception).then.returns(4) - # object.expected_method # => 1 - # object.expected_method # => 2 - # object.expected_method # => raises exception of class Exception - # object.expected_method # => 4 - # - # then(state_machine.is(state)) is used to change the +state_machine+ to the specified +state+ when the invocation occurs. - # - # See also Standalone#states, StateMachine and Expectation#when. - # power = states('power').starts_as('off') - # - # radio = mock('radio') - # radio.expects(:switch_on).then(power.is('on')) - # radio.expects(:select_channel).with('BBC Radio 4').when(power.is('on')) - # radio.expects(:adjust_volume).with(+5).when(power.is('on')) - # radio.expects(:select_channel).with('BBC World Service').when(power.is('on')) - # radio.expects(:adjust_volume).with(-5).when(power.is('on')) - # radio.expects(:switch_off).then(power.is('off')) - def then(*parameters) - if parameters.length == 1 - state = parameters.first - add_side_effect(ChangeStateSideEffect.new(state)) - end - self - end - - # :call-seq: when(state_machine.is(state)) -> exception - # - # Constrains the expectation to occur only when the +state_machine+ is in the named +state+. - # - # See also Standalone#states, StateMachine#starts_as and Expectation#then. 
- # power = states('power').starts_as('off') - # - # radio = mock('radio') - # radio.expects(:switch_on).then(power.is('on')) - # radio.expects(:select_channel).with('BBC Radio 4').when(power.is('on')) - # radio.expects(:adjust_volume).with(+5).when(power.is('on')) - # radio.expects(:select_channel).with('BBC World Service').when(power.is('on')) - # radio.expects(:adjust_volume).with(-5).when(power.is('on')) - # radio.expects(:switch_off).then(power.is('off')) - def when(state_predicate) - add_ordering_constraint(InStateOrderingConstraint.new(state_predicate)) - self - end - - # :call-seq: in_sequence(*sequences) -> expectation - # - # Constrains this expectation so that it must be invoked at the current point in the sequence. - # - # To expect a sequence of invocations, write the expectations in order and add the in_sequence(sequence) clause to each one. - # - # Expectations in a sequence can have any invocation count. - # - # If an expectation in a sequence is stubbed, rather than expected, it can be skipped in the sequence. - # - # See also Standalone#sequence. - # breakfast = sequence('breakfast') - # - # egg = mock('egg') - # egg.expects(:crack).in_sequence(breakfast) - # egg.expects(:fry).in_sequence(breakfast) - # egg.expects(:eat).in_sequence(breakfast) - def in_sequence(*sequences) - sequences.each { |sequence| add_in_sequence_ordering_constraint(sequence) } - self - end - - # :stopdoc: - - attr_reader :backtrace - - def initialize(mock, expected_method_name, backtrace = nil) - @mock = mock - @method_matcher = MethodMatcher.new(expected_method_name.to_sym) - @parameters_matcher = ParametersMatcher.new - @ordering_constraints = [] - @side_effects = [] - @cardinality, @invocation_count = Cardinality.exactly(1), 0 - @return_values = ReturnValues.new - @yield_parameters = YieldParameters.new - @backtrace = backtrace || caller - end - - def add_ordering_constraint(ordering_constraint) - @ordering_constraints << ordering_constraint - end - - def add_in_sequence_ordering_constraint(sequence) - sequence.constrain_as_next_in_sequence(self) - end - - def add_side_effect(side_effect) - @side_effects << side_effect - end - - def perform_side_effects - @side_effects.each { |side_effect| side_effect.perform } - end - - def in_correct_order? - @ordering_constraints.all? { |ordering_constraint| ordering_constraint.allows_invocation_now? } - end - - def matches_method?(method_name) - @method_matcher.match?(method_name) - end - - def match?(actual_method_name, *actual_parameters) - @method_matcher.match?(actual_method_name) && @parameters_matcher.match?(actual_parameters) && in_correct_order? - end - - def invocations_allowed? - @cardinality.invocations_allowed?(@invocation_count) - end - - def satisfied? - @cardinality.satisfied?(@invocation_count) - end - - def invoke - @invocation_count += 1 - perform_side_effects() - if block_given? then - @yield_parameters.next_invocation.each do |yield_parameters| - yield(*yield_parameters) - end - end - @return_values.next - end - - def verified?(assertion_counter = nil) - assertion_counter.increment if assertion_counter && @cardinality.needs_verifying? - @cardinality.verified?(@invocation_count) - end - - def used? 
- @cardinality.used?(@invocation_count) - end - - def mocha_inspect - message = "#{@cardinality.mocha_inspect}, " - message << case @invocation_count - when 0 then "not yet invoked" - when 1 then "already invoked once" - when 2 then "already invoked twice" - else "already invoked #{@invocation_count} times" - end - message << ": " - message << method_signature - message << "; #{@ordering_constraints.map { |oc| oc.mocha_inspect }.join("; ")}" unless @ordering_constraints.empty? - message - end - - def method_signature - "#{@mock.mocha_inspect}.#{@method_matcher.mocha_inspect}#{@parameters_matcher.mocha_inspect}" - end - - # :startdoc: - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/expectation_error.rb b/vendor/gems/mocha-0.9.3/lib/mocha/expectation_error.rb deleted file mode 100644 index ffa9835..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/expectation_error.rb +++ /dev/null @@ -1,15 +0,0 @@ -require 'mocha/backtrace_filter' - -module Mocha - - class ExpectationError < StandardError - - def initialize(message = nil, backtrace = []) - super(message) - filter = BacktraceFilter.new - set_backtrace(filter.filtered(backtrace)) - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/expectation_list.rb b/vendor/gems/mocha-0.9.3/lib/mocha/expectation_list.rb deleted file mode 100644 index 788d07c..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/expectation_list.rb +++ /dev/null @@ -1,50 +0,0 @@ -module Mocha # :nodoc: - - class ExpectationList - - def initialize - @expectations = [] - end - - def add(expectation) - @expectations.unshift(expectation) - expectation - end - - def matches_method?(method_name) - @expectations.any? { |expectation| expectation.matches_method?(method_name) } - end - - def match(method_name, *arguments) - matching_expectations(method_name, *arguments).first - end - - def match_allowing_invocation(method_name, *arguments) - matching_expectations(method_name, *arguments).detect { |e| e.invocations_allowed? } - end - - def verified?(assertion_counter = nil) - @expectations.all? { |expectation| expectation.verified?(assertion_counter) } - end - - def to_a - @expectations - end - - def to_set - @expectations.to_set - end - - def length - @expectations.length - end - - private - - def matching_expectations(method_name, *arguments) - @expectations.select { |e| e.match?(method_name, *arguments) } - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/in_state_ordering_constraint.rb b/vendor/gems/mocha-0.9.3/lib/mocha/in_state_ordering_constraint.rb deleted file mode 100644 index 1ff0898..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/in_state_ordering_constraint.rb +++ /dev/null @@ -1,19 +0,0 @@ -module Mocha - - class InStateOrderingConstraint - - def initialize(state_predicate) - @state_predicate = state_predicate - end - - def allows_invocation_now? - @state_predicate.active? 
- end - - def mocha_inspect - "when #{@state_predicate.mocha_inspect}" - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/inspect.rb b/vendor/gems/mocha-0.9.3/lib/mocha/inspect.rb deleted file mode 100644 index bca613d..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/inspect.rb +++ /dev/null @@ -1,67 +0,0 @@ -require 'date' - -module Mocha - - module ObjectMethods - def mocha_inspect - address = self.__id__ * 2 - address += 0x100000000 if address < 0 - inspect =~ /#" : inspect - end - end - - module StringMethods - def mocha_inspect - inspect.gsub(/\"/, "'") - end - end - - module ArrayMethods - def mocha_inspect - "[#{collect { |member| member.mocha_inspect }.join(', ')}]" - end - end - - module HashMethods - def mocha_inspect - "{#{collect { |key, value| "#{key.mocha_inspect} => #{value.mocha_inspect}" }.join(', ')}}" - end - end - - module TimeMethods - def mocha_inspect - "#{inspect} (#{to_f} secs)" - end - end - - module DateMethods - def mocha_inspect - to_s - end - end - -end - -class Object - include Mocha::ObjectMethods -end - -class String - include Mocha::StringMethods -end - -class Array - include Mocha::ArrayMethods -end - -class Hash - include Mocha::HashMethods -end - -class Time - include Mocha::TimeMethods -end - -class Date - include Mocha::DateMethods -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/instance_method.rb b/vendor/gems/mocha-0.9.3/lib/mocha/instance_method.rb deleted file mode 100644 index 49669ae..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/instance_method.rb +++ /dev/null @@ -1,16 +0,0 @@ -require 'mocha/class_method' - -module Mocha - - class InstanceMethod < ClassMethod - - def method_exists?(method) - return true if stubbee.public_methods(false).include?(method) - return true if stubbee.protected_methods(false).include?(method) - return true if stubbee.private_methods(false).include?(method) - return false - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/is_a.rb b/vendor/gems/mocha-0.9.3/lib/mocha/is_a.rb deleted file mode 100644 index ee23c86..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/is_a.rb +++ /dev/null @@ -1,9 +0,0 @@ -class Object - - # :stopdoc: - - alias_method :__is_a__, :is_a? 
- - # :startdoc: - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/logger.rb b/vendor/gems/mocha-0.9.3/lib/mocha/logger.rb deleted file mode 100644 index 9f09300..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/logger.rb +++ /dev/null @@ -1,15 +0,0 @@ -module Mocha - - class Logger - - def initialize(io) - @io = io - end - - def warn(message) - @io.puts "WARNING: #{message}" - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/metaclass.rb b/vendor/gems/mocha-0.9.3/lib/mocha/metaclass.rb deleted file mode 100644 index 3c3680d..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/metaclass.rb +++ /dev/null @@ -1,13 +0,0 @@ -module Mocha - - module ObjectMethods - def __metaclass__ - class << self; self; end - end - end - -end - -class Object - include Mocha::ObjectMethods -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/method_matcher.rb b/vendor/gems/mocha-0.9.3/lib/mocha/method_matcher.rb deleted file mode 100644 index 6ce5f6d..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/method_matcher.rb +++ /dev/null @@ -1,21 +0,0 @@ -module Mocha - - class MethodMatcher - - attr_reader :expected_method_name - - def initialize(expected_method_name) - @expected_method_name = expected_method_name - end - - def match?(actual_method_name) - @expected_method_name == actual_method_name - end - - def mocha_inspect - "#{@expected_method_name}" - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/mini_test_adapter.rb b/vendor/gems/mocha-0.9.3/lib/mocha/mini_test_adapter.rb deleted file mode 100644 index 6b53110..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/mini_test_adapter.rb +++ /dev/null @@ -1,50 +0,0 @@ -module Mocha - - module MiniTestCaseAdapter - - class AssertionCounter - def initialize(test_case) - @test_case = test_case - end - - def increment - @test_case._assertions += 1 - end - end - - def self.included(base) - base.class_eval do - - alias_method :run_before_mocha_mini_test_adapter, :run - - def run runner - assertion_counter = AssertionCounter.new(self) - result = '.' - begin - begin - @passed = nil - self.setup - self.__send__ self.name - mocha_verify(assertion_counter) - @passed = true - rescue Exception => e - @passed = false - result = runner.puke(self.class, self.name, e) - ensure - begin - self.teardown - rescue Exception => e - result = runner.puke(self.class, self.name, e) - end - end - ensure - mocha_teardown - end - result - end - - end - end - - end -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/mock.rb b/vendor/gems/mocha-0.9.3/lib/mocha/mock.rb deleted file mode 100644 index f247ae2..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/mock.rb +++ /dev/null @@ -1,200 +0,0 @@ -require 'mocha/expectation' -require 'mocha/expectation_list' -require 'mocha/metaclass' -require 'mocha/names' -require 'mocha/mockery' -require 'mocha/method_matcher' -require 'mocha/parameters_matcher' -require 'mocha/unexpected_invocation' - -module Mocha # :nodoc: - - # Traditional mock object. - # - # Methods return an Expectation which can be further modified by methods on Expectation. - class Mock - - # :call-seq: expects(method_name) -> expectation - # expects(method_names) -> last expectation - # - # Adds an expectation that a method identified by +method_name+ symbol must be called exactly once with any parameters. - # Returns the new expectation which can be further modified by methods on Expectation. 
- # object = mock() - # object.expects(:method1) - # object.method1 - # # no error raised - # - # object = mock() - # object.expects(:method1) - # # error raised, because method1 not called exactly once - # If +method_names+ is a +Hash+, an expectation will be set up for each entry using the key as +method_name+ and value as +return_value+. - # object = mock() - # object.expects(:method1 => :result1, :method2 => :result2) - # - # # exactly equivalent to - # - # object = mock() - # object.expects(:method1).returns(:result1) - # object.expects(:method2).returns(:result2) - # - # Aliased by \_\_expects\_\_ - def expects(method_name_or_hash, backtrace = nil) - if method_name_or_hash.is_a?(Hash) then - method_name_or_hash.each do |method_name, return_value| - ensure_method_not_already_defined(method_name) - @expectations.add(Expectation.new(self, method_name, backtrace).returns(return_value)) - end - else - ensure_method_not_already_defined(method_name_or_hash) - @expectations.add(Expectation.new(self, method_name_or_hash, backtrace)) - end - end - - # :call-seq: stubs(method_name) -> expectation - # stubs(method_names) -> last expectation - # - # Adds an expectation that a method identified by +method_name+ symbol may be called any number of times with any parameters. - # Returns the new expectation which can be further modified by methods on Expectation. - # object = mock() - # object.stubs(:method1) - # object.method1 - # object.method1 - # # no error raised - # If +method_names+ is a +Hash+, an expectation will be set up for each entry using the key as +method_name+ and value as +return_value+. - # object = mock() - # object.stubs(:method1 => :result1, :method2 => :result2) - # - # # exactly equivalent to - # - # object = mock() - # object.stubs(:method1).returns(:result1) - # object.stubs(:method2).returns(:result2) - # - # Aliased by \_\_stubs\_\_ - def stubs(method_name_or_hash, backtrace = nil) - if method_name_or_hash.is_a?(Hash) then - method_name_or_hash.each do |method_name, return_value| - ensure_method_not_already_defined(method_name) - @expectations.add(Expectation.new(self, method_name, backtrace).at_least(0).returns(return_value)) - end - else - ensure_method_not_already_defined(method_name_or_hash) - @expectations.add(Expectation.new(self, method_name_or_hash, backtrace).at_least(0)) - end - end - - # :call-seq: responds_like(responder) -> mock - # - # Constrains the +mock+ so that it can only expect or stub methods to which +responder+ responds. The constraint is only applied at method invocation time. - # - # A +NoMethodError+ will be raised if the +responder+ does not respond_to? a method invocation (even if the method has been expected or stubbed). - # - # The +mock+ will delegate its respond_to? method to the +responder+. 
- # class Sheep - # def chew(grass); end - # def self.number_of_legs; end - # end - # - # sheep = mock('sheep') - # sheep.expects(:chew) - # sheep.expects(:foo) - # sheep.respond_to?(:chew) # => true - # sheep.respond_to?(:foo) # => true - # sheep.chew - # sheep.foo - # # no error raised - # - # sheep = mock('sheep') - # sheep.responds_like(Sheep.new) - # sheep.expects(:chew) - # sheep.expects(:foo) - # sheep.respond_to?(:chew) # => true - # sheep.respond_to?(:foo) # => false - # sheep.chew - # sheep.foo # => raises NoMethodError exception - # - # sheep_class = mock('sheep_class') - # sheep_class.responds_like(Sheep) - # sheep_class.stubs(:number_of_legs).returns(4) - # sheep_class.expects(:foo) - # sheep_class.respond_to?(:number_of_legs) # => true - # sheep_class.respond_to?(:foo) # => false - # assert_equal 4, sheep_class.number_of_legs - # sheep_class.foo # => raises NoMethodError exception - # - # Aliased by +quacks_like+ - def responds_like(object) - @responder = object - self - end - - # :stopdoc: - - def initialize(name = nil, &block) - @name = name || DefaultName.new(self) - @expectations = ExpectationList.new - @everything_stubbed = false - @responder = nil - instance_eval(&block) if block - end - - attr_reader :everything_stubbed, :expectations - - alias_method :__expects__, :expects - - alias_method :__stubs__, :stubs - - alias_method :quacks_like, :responds_like - - def stub_everything - @everything_stubbed = true - end - - def method_missing(symbol, *arguments, &block) - if @responder and not @responder.respond_to?(symbol) - raise NoMethodError, "undefined method `#{symbol}' for #{self.mocha_inspect} which responds like #{@responder.mocha_inspect}" - end - if matching_expectation_allowing_invocation = @expectations.match_allowing_invocation(symbol, *arguments) - matching_expectation_allowing_invocation.invoke(&block) - else - if (matching_expectation = @expectations.match(symbol, *arguments)) || (!matching_expectation && !@everything_stubbed) - message = UnexpectedInvocation.new(self, symbol, *arguments).to_s - message << Mockery.instance.mocha_inspect - raise ExpectationError.new(message, caller) - end - end - end - - def respond_to?(symbol, include_private = false) - if @responder then - if @responder.method(:respond_to?).arity > 1 - @responder.respond_to?(symbol, include_private) - else - @responder.respond_to?(symbol) - end - else - @expectations.matches_method?(symbol) - end - end - - def __verified__?(assertion_counter = nil) - @expectations.verified?(assertion_counter) - end - - def mocha_inspect - @name.mocha_inspect - end - - def inspect - mocha_inspect - end - - def ensure_method_not_already_defined(method_name) - self.__metaclass__.send(:undef_method, method_name) if self.__metaclass__.method_defined?(method_name) - end - - # :startdoc: - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/mockery.rb b/vendor/gems/mocha-0.9.3/lib/mocha/mockery.rb deleted file mode 100644 index 6fd5e5a..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/mockery.rb +++ /dev/null @@ -1,181 +0,0 @@ -require 'mocha/central' -require 'mocha/mock' -require 'mocha/names' -require 'mocha/state_machine' -require 'mocha/logger' -require 'mocha/configuration' -require 'mocha/stubbing_error' - -module Mocha - - class Mockery - - class << self - - def instance - @instance ||= new - end - - def reset_instance - @instance = nil - end - - end - - def named_mock(name, &block) - add_mock(Mock.new(Name.new(name), &block)) - end - - def unnamed_mock(&block) - add_mock(Mock.new(&block)) - end - - 
def mock_impersonating(object, &block) - add_mock(Mock.new(ImpersonatingName.new(object), &block)) - end - - def mock_impersonating_any_instance_of(klass, &block) - add_mock(Mock.new(ImpersonatingAnyInstanceName.new(klass), &block)) - end - - def new_state_machine(name) - add_state_machine(StateMachine.new(name)) - end - - def verify(assertion_counter = nil) - unless mocks.all? { |mock| mock.__verified__?(assertion_counter) } - message = "not all expectations were satisfied\n#{mocha_inspect}" - if unsatisfied_expectations.empty? - backtrace = caller - else - backtrace = unsatisfied_expectations[0].backtrace - end - raise ExpectationError.new(message, backtrace) - end - expectations.each do |e| - unless Mocha::Configuration.allow?(:stubbing_method_unnecessarily) - unless e.used? - on_stubbing_method_unnecessarily(e) - end - end - end - end - - def teardown - stubba.unstub_all - reset - end - - def stubba - @stubba ||= Central.new - end - - def mocks - @mocks ||= [] - end - - def state_machines - @state_machines ||= [] - end - - def mocha_inspect - message = "" - message << "unsatisfied expectations:\n- #{unsatisfied_expectations.map { |e| e.mocha_inspect }.join("\n- ")}\n" unless unsatisfied_expectations.empty? - message << "satisfied expectations:\n- #{satisfied_expectations.map { |e| e.mocha_inspect }.join("\n- ")}\n" unless satisfied_expectations.empty? - message << "states:\n- #{state_machines.map { |sm| sm.mocha_inspect }.join("\n- ")}" unless state_machines.empty? - message - end - - def on_stubbing(object, method) - method = RUBY_VERSION < '1.9' ? method.to_s : method.to_sym - unless Mocha::Configuration.allow?(:stubbing_non_existent_method) - unless object.method_exists?(method, include_public_methods = true) - on_stubbing_non_existent_method(object, method) - end - end - unless Mocha::Configuration.allow?(:stubbing_non_public_method) - if object.method_exists?(method, include_public_methods = false) - on_stubbing_non_public_method(object, method) - end - end - unless Mocha::Configuration.allow?(:stubbing_method_on_non_mock_object) - on_stubbing_method_on_non_mock_object(object, method) - end - end - - def on_stubbing_non_existent_method(object, method) - if Mocha::Configuration.prevent?(:stubbing_non_existent_method) - raise StubbingError.new("stubbing non-existent method: #{object.mocha_inspect}.#{method}", caller) - end - if Mocha::Configuration.warn_when?(:stubbing_non_existent_method) - logger.warn "stubbing non-existent method: #{object.mocha_inspect}.#{method}" - end - end - - def on_stubbing_non_public_method(object, method) - if Mocha::Configuration.prevent?(:stubbing_non_public_method) - raise StubbingError.new("stubbing non-public method: #{object.mocha_inspect}.#{method}", caller) - end - if Mocha::Configuration.warn_when?(:stubbing_non_public_method) - logger.warn "stubbing non-public method: #{object.mocha_inspect}.#{method}" - end - end - - def on_stubbing_method_on_non_mock_object(object, method) - if Mocha::Configuration.prevent?(:stubbing_method_on_non_mock_object) - raise StubbingError.new("stubbing method on non-mock object: #{object.mocha_inspect}.#{method}", caller) - end - if Mocha::Configuration.warn_when?(:stubbing_method_on_non_mock_object) - logger.warn "stubbing method on non-mock object: #{object.mocha_inspect}.#{method}" - end - end - - def on_stubbing_method_unnecessarily(expectation) - if Mocha::Configuration.prevent?(:stubbing_method_unnecessarily) - raise StubbingError.new("stubbing method unnecessarily: #{expectation.method_signature}", 
expectation.backtrace) - end - if Mocha::Configuration.warn_when?(:stubbing_method_unnecessarily) - logger.warn "stubbing method unnecessarily: #{expectation.method_signature}" - end - end - - attr_writer :logger - - def logger - @logger ||= Logger.new($stderr) - end - - - private - - def expectations - mocks.map { |mock| mock.expectations.to_a }.flatten - end - - def unsatisfied_expectations - expectations.reject { |e| e.verified? } - end - - def satisfied_expectations - expectations.select { |e| e.verified? } - end - - def add_mock(mock) - mocks << mock - mock - end - - def add_state_machine(state_machine) - state_machines << state_machine - state_machine - end - - def reset - @stubba = nil - @mocks = nil - @state_machines = nil - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/module_method.rb b/vendor/gems/mocha-0.9.3/lib/mocha/module_method.rb deleted file mode 100644 index 0c3b180..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/module_method.rb +++ /dev/null @@ -1,16 +0,0 @@ -require 'mocha/module_method' - -module Mocha - - class ModuleMethod < ClassMethod - - def method_exists?(method) - return true if stubbee.public_methods(false).include?(method) - return true if stubbee.protected_methods(false).include?(method) - return true if stubbee.private_methods(false).include?(method) - return false - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/multiple_yields.rb b/vendor/gems/mocha-0.9.3/lib/mocha/multiple_yields.rb deleted file mode 100644 index 8186c30..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/multiple_yields.rb +++ /dev/null @@ -1,20 +0,0 @@ -module Mocha # :nodoc: - - class MultipleYields # :nodoc: - - attr_reader :parameter_groups - - def initialize(*parameter_groups) - @parameter_groups = parameter_groups - end - - def each - @parameter_groups.each do |parameter_group| - yield(parameter_group) - end - end - - end - -end - diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/names.rb b/vendor/gems/mocha-0.9.3/lib/mocha/names.rb deleted file mode 100644 index f59ebed..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/names.rb +++ /dev/null @@ -1,53 +0,0 @@ -module Mocha - - class ImpersonatingName - - def initialize(object) - @object = object - end - - def mocha_inspect - @object.mocha_inspect - end - - end - - class ImpersonatingAnyInstanceName - - def initialize(klass) - @klass = klass - end - - def mocha_inspect - "#<AnyInstance:#{@klass.mocha_inspect}>" - end - - end - - class Name - - def initialize(name) - @name = name - end - - def mocha_inspect - "#<Mock:#{@name}>" - end - - end - - class DefaultName - - def initialize(mock) - @mock = mock - end - - def mocha_inspect - address = @mock.__id__ * 2 - address += 0x100000000 if address < 0 - "#<Mock:0x#{'%x' % address}>" - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/no_yields.rb b/vendor/gems/mocha-0.9.3/lib/mocha/no_yields.rb deleted file mode 100644 index b0fba41..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/no_yields.rb +++ /dev/null @@ -1,11 +0,0 @@ -module Mocha # :nodoc: - - class NoYields # :nodoc: - - def each - end - - end - -end - diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/object.rb b/vendor/gems/mocha-0.9.3/lib/mocha/object.rb deleted file mode 100644 index b0cc9d2..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/object.rb +++ /dev/null @@ -1,150 +0,0 @@ -require 'mocha/mockery' -require 'mocha/instance_method' -require 'mocha/class_method' -require 'mocha/module_method' -require 'mocha/any_instance_method' - -module Mocha - - # Methods added all objects to allow mocking
and stubbing on real objects. - # - # Methods return a Mocha::Expectation which can be further modified by methods on Mocha::Expectation. - module ObjectMethods - - def mocha # :nodoc: - @mocha ||= Mocha::Mockery.instance.mock_impersonating(self) - end - - def reset_mocha # :nodoc: - @mocha = nil - end - - def stubba_method # :nodoc: - Mocha::InstanceMethod - end - - def stubba_object # :nodoc: - self - end - - # :call-seq: expects(symbol) -> expectation - # - # Adds an expectation that a method identified by +symbol+ must be called exactly once with any parameters. - # Returns the new expectation which can be further modified by methods on Mocha::Expectation. - # product = Product.new - # product.expects(:save).returns(true) - # assert_equal false, product.save - # - # The original implementation of Product#save is replaced temporarily. - # - # The original implementation of Product#save is restored at the end of the test. - def expects(symbol) - mockery = Mocha::Mockery.instance - mockery.on_stubbing(self, symbol) - method = stubba_method.new(stubba_object, symbol) - mockery.stubba.stub(method) - mocha.expects(symbol, caller) - end - - # :call-seq: stubs(symbol) -> expectation - # - # Adds an expectation that a method identified by +symbol+ may be called any number of times with any parameters. - # Returns the new expectation which can be further modified by methods on Mocha::Expectation. - # product = Product.new - # product.stubs(:save).returns(true) - # assert_equal false, product.save - # - # The original implementation of Product#save is replaced temporarily. - # - # The original implementation of Product#save is restored at the end of the test. - def stubs(symbol) - mockery = Mocha::Mockery.instance - mockery.on_stubbing(self, symbol) - method = stubba_method.new(stubba_object, symbol) - mockery.stubba.stub(method) - mocha.stubs(symbol, caller) - end - - def method_exists?(method, include_public_methods = true) # :nodoc: - if include_public_methods - return true if public_methods(include_superclass_methods = true).include?(method) - return true if respond_to?(method.to_sym) - end - return true if protected_methods(include_superclass_methods = true).include?(method) - return true if private_methods(include_superclass_methods = true).include?(method) - return false - end - - end - - module ModuleMethods # :nodoc: - - def stubba_method - Mocha::ModuleMethod - end - - end - - # Methods added all classes to allow mocking and stubbing on real objects. - module ClassMethods - - def stubba_method # :nodoc: - Mocha::ClassMethod - end - - class AnyInstance # :nodoc: - - def initialize(klass) - @stubba_object = klass - end - - def mocha - @mocha ||= Mocha::Mockery.instance.mock_impersonating_any_instance_of(@stubba_object) - end - - def stubba_method - Mocha::AnyInstanceMethod - end - - def stubba_object - @stubba_object - end - - def method_exists?(method, include_public_methods = true) - if include_public_methods - return true if @stubba_object.public_instance_methods(include_superclass_methods = true).include?(method) - end - return true if @stubba_object.protected_instance_methods(include_superclass_methods = true).include?(method) - return true if @stubba_object.private_instance_methods(include_superclass_methods = true).include?(method) - return false - end - - end - - # :call-seq: any_instance -> mock object - # - # Returns a mock object which will detect calls to any instance of this class. 
- # Product.any_instance.stubs(:save).returns(false) - # product_1 = Product.new - # assert_equal false, product_1.save - # product_2 = Product.new - # assert_equal false, product_2.save - def any_instance - @any_instance ||= AnyInstance.new(self) - end - - end - -end - -class Object # :nodoc: - include Mocha::ObjectMethods -end - -class Module # :nodoc: - include Mocha::ModuleMethods -end - -class Class # :nodoc: - include Mocha::ClassMethods -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers.rb deleted file mode 100644 index 7ce445f..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers.rb +++ /dev/null @@ -1,27 +0,0 @@ -module Mocha - - # Used as parameters for Expectation#with to restrict the parameter values which will match the expectation. Can be nested. - module ParameterMatchers; end - -end - -require 'mocha/parameter_matchers/object' - -require 'mocha/parameter_matchers/all_of' -require 'mocha/parameter_matchers/any_of' -require 'mocha/parameter_matchers/any_parameters' -require 'mocha/parameter_matchers/anything' -require 'mocha/parameter_matchers/equals' -require 'mocha/parameter_matchers/has_entry' -require 'mocha/parameter_matchers/has_entries' -require 'mocha/parameter_matchers/has_key' -require 'mocha/parameter_matchers/has_value' -require 'mocha/parameter_matchers/includes' -require 'mocha/parameter_matchers/instance_of' -require 'mocha/parameter_matchers/is_a' -require 'mocha/parameter_matchers/kind_of' -require 'mocha/parameter_matchers/not' -require 'mocha/parameter_matchers/optionally' -require 'mocha/parameter_matchers/regexp_matches' -require 'mocha/parameter_matchers/responds_with' -require 'mocha/parameter_matchers/yaml_equivalent' diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/all_of.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/all_of.rb deleted file mode 100644 index 50bf05b..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/all_of.rb +++ /dev/null @@ -1,42 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: all_of(*parameter_matchers) -> parameter_matcher - # - # Matches if all +parameter_matchers+ match. - # object = mock() - # object.expects(:method_1).with(all_of(includes(1), includes(3))) - # object.method_1([1, 3]) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(all_of(includes(1), includes(3))) - # object.method_1([1, 2]) - # # error raised, because method_1 was not called with object including 1 and 3 - def all_of(*matchers) - AllOf.new(*matchers) - end - - class AllOf < Base # :nodoc: - - def initialize(*matchers) - @matchers = matchers - end - - def matches?(available_parameters) - parameter = available_parameters.shift - @matchers.all? 
{ |matcher| matcher.to_matcher.matches?([parameter]) } - end - - def mocha_inspect - "all_of(#{@matchers.map { |matcher| matcher.mocha_inspect }.join(", ") })" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/any_of.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/any_of.rb deleted file mode 100644 index b391ff3..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/any_of.rb +++ /dev/null @@ -1,47 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: any_of(*parameter_matchers) -> parameter_matcher - # - # Matches if any +parameter_matchers+ match. - # object = mock() - # object.expects(:method_1).with(any_of(1, 3)) - # object.method_1(1) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(any_of(1, 3)) - # object.method_1(3) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(any_of(1, 3)) - # object.method_1(2) - # # error raised, because method_1 was not called with 1 or 3 - def any_of(*matchers) - AnyOf.new(*matchers) - end - - class AnyOf < Base # :nodoc: - - def initialize(*matchers) - @matchers = matchers - end - - def matches?(available_parameters) - parameter = available_parameters.shift - @matchers.any? { |matcher| matcher.to_matcher.matches?([parameter]) } - end - - def mocha_inspect - "any_of(#{@matchers.map { |matcher| matcher.mocha_inspect }.join(", ") })" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/any_parameters.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/any_parameters.rb deleted file mode 100644 index 11dae83..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/any_parameters.rb +++ /dev/null @@ -1,40 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: any_parameters() -> parameter_matcher - # - # Matches any parameters. - # object = mock() - # object.expects(:method_1).with(any_parameters) - # object.method_1(1, 2, 3, 4) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(any_parameters) - # object.method_1(5, 6, 7, 8, 9, 0) - # # no error raised - def any_parameters - AnyParameters.new - end - - class AnyParameters < Base # :nodoc: - - def matches?(available_parameters) - while available_parameters.length > 0 do - available_parameters.shift - end - return true - end - - def mocha_inspect - "any_parameters" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/anything.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/anything.rb deleted file mode 100644 index 90510e2..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/anything.rb +++ /dev/null @@ -1,33 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: anything() -> parameter_matcher - # - # Matches any object. 
- # object = mock() - # object.expects(:method_1).with(anything) - # object.method_1('foo') - # # no error raised - def anything - Anything.new - end - - class Anything < Base # :nodoc: - - def matches?(available_parameters) - available_parameters.shift - return true - end - - def mocha_inspect - "anything" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/base.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/base.rb deleted file mode 100644 index 6aaec51..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/base.rb +++ /dev/null @@ -1,15 +0,0 @@ -module Mocha - - module ParameterMatchers - - class Base # :nodoc: - - def to_matcher - self - end - - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/equals.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/equals.rb deleted file mode 100644 index bdc61a0..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/equals.rb +++ /dev/null @@ -1,42 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: equals(value) -> parameter_matcher - # - # Matches +Object+ equalling +value+. - # object = mock() - # object.expects(:method_1).with(equals(2)) - # object.method_1(2) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(equals(2)) - # object.method_1(3) - # # error raised, because method_1 was not called with Object equalling 3 - def equals(value) - Equals.new(value) - end - - class Equals < Base # :nodoc: - - def initialize(value) - @value = value - end - - def matches?(available_parameters) - parameter = available_parameters.shift - parameter == @value - end - - def mocha_inspect - @value.mocha_inspect - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_entries.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_entries.rb deleted file mode 100644 index 03e968f..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_entries.rb +++ /dev/null @@ -1,45 +0,0 @@ -require 'mocha/parameter_matchers/base' -require 'mocha/parameter_matchers/all_of' -require 'mocha/parameter_matchers/has_entry' - -module Mocha - - module ParameterMatchers - - # :call-seq: has_entries(entries) -> parameter_matcher - # - # Matches +Hash+ containing all +entries+. 
- # object = mock() - # object.expects(:method_1).with(has_entries('key_1' => 1, 'key_2' => 2)) - # object.method_1('key_1' => 1, 'key_2' => 2, 'key_3' => 3) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(has_entries('key_1' => 1, 'key_2' => 2)) - # object.method_1('key_1' => 1, 'key_2' => 99) - # # error raised, because method_1 was not called with Hash containing entries: 'key_1' => 1, 'key_2' => 2 - def has_entries(entries) - HasEntries.new(entries) - end - - class HasEntries < Base # :nodoc: - - def initialize(entries) - @entries = entries - end - - def matches?(available_parameters) - parameter = available_parameters.shift - has_entry_matchers = @entries.map { |key, value| HasEntry.new(key, value) } - AllOf.new(*has_entry_matchers).matches?([parameter]) - end - - def mocha_inspect - "has_entries(#{@entries.mocha_inspect})" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_entry.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_entry.rb deleted file mode 100644 index 303f1e0..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_entry.rb +++ /dev/null @@ -1,56 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: has_entry(key, value) -> parameter_matcher - # has_entry(key => value) -> parameter_matcher - # - # Matches +Hash+ containing entry with +key+ and +value+. - # object = mock() - # object.expects(:method_1).with(has_entry('key_1', 1)) - # object.method_1('key_1' => 1, 'key_2' => 2) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(has_entry('key_1' => 1)) - # object.method_1('key_1' => 1, 'key_2' => 2) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(has_entry('key_1', 1)) - # object.method_1('key_1' => 2, 'key_2' => 1) - # # error raised, because method_1 was not called with Hash containing entry: 'key_1' => 1 - # - # object = mock() - # object.expects(:method_1).with(has_entry('key_1' => 1)) - # object.method_1('key_1' => 2, 'key_2' => 1) - # # error raised, because method_1 was not called with Hash containing entry: 'key_1' => 1 - def has_entry(*options) - key, value = options.shift, options.shift - key, value = key.to_a[0][0..1] if key.is_a?(Hash) - HasEntry.new(key, value) - end - - class HasEntry < Base # :nodoc: - - def initialize(key, value) - @key, @value = key, value - end - - def matches?(available_parameters) - parameter = available_parameters.shift - matching_keys = parameter.keys.select { |key| @key.to_matcher.matches?([key]) } - matching_keys.any? { |key| @value.to_matcher.matches?([parameter[key]]) } - end - - def mocha_inspect - "has_entry(#{@key.mocha_inspect} => #{@value.mocha_inspect})" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_key.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_key.rb deleted file mode 100644 index a4c2668..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_key.rb +++ /dev/null @@ -1,42 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: has_key(key) -> parameter_matcher - # - # Matches +Hash+ containing +key+. 
- # object = mock() - # object.expects(:method_1).with(has_key('key_1')) - # object.method_1('key_1' => 1, 'key_2' => 2) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(has_key('key_1')) - # object.method_1('key_2' => 2) - # # error raised, because method_1 was not called with Hash containing key: 'key_1' - def has_key(key) - HasKey.new(key) - end - - class HasKey < Base # :nodoc: - - def initialize(key) - @key = key - end - - def matches?(available_parameters) - parameter = available_parameters.shift - parameter.keys.any? { |key| @key.to_matcher.matches?([key]) } - end - - def mocha_inspect - "has_key(#{@key.mocha_inspect})" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_value.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_value.rb deleted file mode 100644 index 6671237..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/has_value.rb +++ /dev/null @@ -1,42 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: has_value(value) -> parameter_matcher - # - # Matches +Hash+ containing +value+. - # object = mock() - # object.expects(:method_1).with(has_value(1)) - # object.method_1('key_1' => 1, 'key_2' => 2) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(has_value(1)) - # object.method_1('key_2' => 2) - # # error raised, because method_1 was not called with Hash containing value: 1 - def has_value(value) - HasValue.new(value) - end - - class HasValue < Base # :nodoc: - - def initialize(value) - @value = value - end - - def matches?(available_parameters) - parameter = available_parameters.shift - parameter.values.any? { |value| @value.to_matcher.matches?([value]) } - end - - def mocha_inspect - "has_value(#{@value.mocha_inspect})" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/includes.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/includes.rb deleted file mode 100644 index 4539a5c..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/includes.rb +++ /dev/null @@ -1,40 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: includes(item) -> parameter_matcher - # - # Matches any object that responds true to include?(item) - # object = mock() - # object.expects(:method_1).with(includes('foo')) - # object.method_1(['foo', 'bar']) - # # no error raised - # - # object.method_1(['baz']) - # # error raised, because ['baz'] does not include 'foo'. 
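The ParameterMatchers module comment earlier in this patch says matchers can be nested, but none of the per-matcher examples demonstrate it. A small hedged sketch combining includes with has_entry and Not (the publisher mock and its keys are illustrative only):

  publisher = mock('publisher')
  publisher.expects(:publish).with(
    has_entry(:tags => includes('ruby')),  # first argument: a Hash whose :tags value includes 'ruby'
    Not(equals(nil))                       # second argument: anything except nil
  )
  publisher.publish({ :tags => ['ruby', 'rails'] }, 'draft-1')
  # no error raised
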
- def includes(item) - Includes.new(item) - end - - class Includes < Base # :nodoc: - - def initialize(item) - @item = item - end - - def matches?(available_parameters) - parameter = available_parameters.shift - return parameter.include?(@item) - end - - def mocha_inspect - "includes(#{@item.mocha_inspect})" - end - - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/instance_of.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/instance_of.rb deleted file mode 100644 index 49b4a47..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/instance_of.rb +++ /dev/null @@ -1,42 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: instance_of(klass) -> parameter_matcher - # - # Matches any object that is an instance of +klass+ - # object = mock() - # object.expects(:method_1).with(instance_of(String)) - # object.method_1('string') - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(instance_of(String)) - # object.method_1(99) - # # error raised, because method_1 was not called with an instance of String - def instance_of(klass) - InstanceOf.new(klass) - end - - class InstanceOf < Base # :nodoc: - - def initialize(klass) - @klass = klass - end - - def matches?(available_parameters) - parameter = available_parameters.shift - parameter.instance_of?(@klass) - end - - def mocha_inspect - "instance_of(#{@klass.mocha_inspect})" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/is_a.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/is_a.rb deleted file mode 100644 index a721db5..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/is_a.rb +++ /dev/null @@ -1,42 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: is_a(klass) -> parameter_matcher - # - # Matches any object that is a +klass+ - # object = mock() - # object.expects(:method_1).with(is_a(Integer)) - # object.method_1(99) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(is_a(Integer)) - # object.method_1('string') - # # error raised, because method_1 was not called with an Integer - def is_a(klass) - IsA.new(klass) - end - - class IsA < Base # :nodoc: - - def initialize(klass) - @klass = klass - end - - def matches?(available_parameters) - parameter = available_parameters.shift - parameter.is_a?(@klass) - end - - def mocha_inspect - "is_a(#{@klass.mocha_inspect})" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/kind_of.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/kind_of.rb deleted file mode 100644 index 710d709..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/kind_of.rb +++ /dev/null @@ -1,42 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: kind_of(klass) -> parameter_matcher - # - # Matches any object that is a kind of +klass+ - # object = mock() - # object.expects(:method_1).with(kind_of(Integer)) - # object.method_1(99) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(kind_of(Integer)) - # object.method_1('string') - # # error raised, because method_1 was not called with a kind of Integer - def kind_of(klass) - KindOf.new(klass) - end - - class KindOf < Base # :nodoc: - - def initialize(klass) - @klass = klass - end - - 
def matches?(available_parameters) - parameter = available_parameters.shift - parameter.kind_of?(@klass) - end - - def mocha_inspect - "kind_of(#{@klass.mocha_inspect})" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/not.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/not.rb deleted file mode 100644 index 7a9cf27..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/not.rb +++ /dev/null @@ -1,42 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: Not(parameter_matcher) -> parameter_matcher - # - # Matches if +parameter_matcher+ does not match. - # object = mock() - # object.expects(:method_1).with(Not(includes(1))) - # object.method_1([0, 2, 3]) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(Not(includes(1))) - # object.method_1([0, 1, 2, 3]) - # # error raised, because method_1 was not called with object not including 1 - def Not(matcher) - Not.new(matcher) - end - - class Not < Base # :nodoc: - - def initialize(matcher) - @matcher = matcher - end - - def matches?(available_parameters) - parameter = available_parameters.shift - !@matcher.matches?([parameter]) - end - - def mocha_inspect - "Not(#{@matcher.mocha_inspect})" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/object.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/object.rb deleted file mode 100644 index 56a1940..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/object.rb +++ /dev/null @@ -1,15 +0,0 @@ -require 'mocha/parameter_matchers/equals' - -module Mocha - - module ObjectMethods - def to_matcher # :nodoc: - Mocha::ParameterMatchers::Equals.new(self) - end - end - -end - -class Object - include Mocha::ObjectMethods -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/optionally.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/optionally.rb deleted file mode 100644 index fc2c3a9..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/optionally.rb +++ /dev/null @@ -1,55 +0,0 @@ -module Mocha - - module ParameterMatchers - - # :call-seq: optionally(*parameter_matchers) -> parameter_matcher - # - # Matches optional parameters if available. 
- # object = mock() - # object.expects(:method_1).with(1, 2, optionally(3, 4)) - # object.method_1(1, 2) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(1, 2, optionally(3, 4)) - # object.method_1(1, 2, 3) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(1, 2, optionally(3, 4)) - # object.method_1(1, 2, 3, 4) - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(1, 2, optionally(3, 4)) - # object.method_1(1, 2, 3, 5) - # # error raised, because optional parameters did not match - def optionally(*matchers) - Optionally.new(*matchers) - end - - class Optionally < Base # :nodoc: - - def initialize(*parameters) - @matchers = parameters.map { |parameter| parameter.to_matcher } - end - - def matches?(available_parameters) - index = 0 - while (available_parameters.length > 0) && (index < @matchers.length) do - matcher = @matchers[index] - return false unless matcher.matches?(available_parameters) - index += 1 - end - return true - end - - def mocha_inspect - "optionally(#{@matchers.map { |matcher| matcher.mocha_inspect }.join(", ") })" - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/regexp_matches.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/regexp_matches.rb deleted file mode 100644 index a807d8b..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/regexp_matches.rb +++ /dev/null @@ -1,43 +0,0 @@ -require 'mocha/parameter_matchers/base' - -module Mocha - - module ParameterMatchers - - # :call-seq: regexp_matches(regular_expression) -> parameter_matcher - # - # Matches any object that matches +regular_expression+. - # object = mock() - # object.expects(:method_1).with(regexp_matches(/e/)) - # object.method_1('hello') - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(regexp_matches(/a/)) - # object.method_1('hello') - # # error raised, because method_1 was not called with a parameter that matched the - # # regular expression - def regexp_matches(regexp) - RegexpMatches.new(regexp) - end - - class RegexpMatches < Base # :nodoc: - - def initialize(regexp) - @regexp = regexp - end - - def matches?(available_parameters) - parameter = available_parameters.shift - parameter =~ @regexp - end - - def mocha_inspect - "regexp_matches(#{@regexp.mocha_inspect})" - end - - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/responds_with.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/responds_with.rb deleted file mode 100644 index 4355796..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/responds_with.rb +++ /dev/null @@ -1,43 +0,0 @@ -require 'mocha/parameter_matchers/base' -require 'yaml' - -module Mocha - - module ParameterMatchers - - # :call-seq: responds_with(message, result) -> parameter_matcher - # - # Matches any object that responds to +message+ with +result+. To put it another way, it tests the quack, not the duck. 
- # object = mock() - # object.expects(:method_1).with(responds_with(:upcase, "FOO")) - # object.method_1("foo") - # # no error raised, because "foo".upcase == "FOO" - # - # object = mock() - # object.expects(:method_1).with(responds_with(:upcase, "BAR")) - # object.method_1("foo") - # # error raised, because "foo".upcase != "BAR" - def responds_with(message, result) - RespondsWith.new(message, result) - end - - class RespondsWith < Base # :nodoc: - - def initialize(message, result) - @message, @result = message, result - end - - def matches?(available_parameters) - parameter = available_parameters.shift - parameter.__send__(@message) == @result - end - - def mocha_inspect - "responds_with(#{@message.mocha_inspect}, #{@result.mocha_inspect})" - end - - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/yaml_equivalent.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/yaml_equivalent.rb deleted file mode 100644 index 6449717..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameter_matchers/yaml_equivalent.rb +++ /dev/null @@ -1,43 +0,0 @@ -require 'mocha/parameter_matchers/base' -require 'yaml' - -module Mocha - - module ParameterMatchers - - # :call-seq: yaml_equivalent(object) -> parameter_matcher - # - # Matches any YAML that represents the specified +object+ - # object = mock() - # object.expects(:method_1).with(yaml_equivalent(1, 2, 3)) - # object.method_1("--- \n- 1\n- 2\n- 3\n") - # # no error raised - # - # object = mock() - # object.expects(:method_1).with(yaml_equivalent(1, 2, 3)) - # object.method_1("--- \n- 1\n- 2\n") - # # error raised, because method_1 was not called with YAML representing the specified Array - def yaml_equivalent(object) - YamlEquivalent.new(object) - end - - class YamlEquivalent < Base # :nodoc: - - def initialize(object) - @object = object - end - - def matches?(available_parameters) - parameter = available_parameters.shift - @object == YAML.load(parameter) - end - - def mocha_inspect - "yaml_equivalent(#{@object.mocha_inspect})" - end - - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/parameters_matcher.rb b/vendor/gems/mocha-0.9.3/lib/mocha/parameters_matcher.rb deleted file mode 100644 index d43ae43..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/parameters_matcher.rb +++ /dev/null @@ -1,37 +0,0 @@ -require 'mocha/inspect' -require 'mocha/parameter_matchers' - -module Mocha - - class ParametersMatcher - - def initialize(expected_parameters = [ParameterMatchers::AnyParameters.new], &matching_block) - @expected_parameters, @matching_block = expected_parameters, matching_block - end - - def match?(actual_parameters = []) - if @matching_block - return @matching_block.call(*actual_parameters) - else - return parameters_match?(actual_parameters) - end - end - - def parameters_match?(actual_parameters) - matchers.all? 
{ |matcher| matcher.matches?(actual_parameters) } && (actual_parameters.length == 0) - end - - def mocha_inspect - signature = matchers.mocha_inspect - signature = signature.gsub(/^\[|\]$/, '') - signature = signature.gsub(/^\{|\}$/, '') if matchers.length == 1 - "(#{signature})" - end - - def matchers - @expected_parameters.map { |parameter| parameter.to_matcher } - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/pretty_parameters.rb b/vendor/gems/mocha-0.9.3/lib/mocha/pretty_parameters.rb deleted file mode 100644 index 59ed636..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/pretty_parameters.rb +++ /dev/null @@ -1,28 +0,0 @@ -require 'mocha/inspect' - -module Mocha - - class PrettyParameters - - def initialize(params) - @params = params - @params_string = params.mocha_inspect - end - - def pretty - remove_outer_array_braces! - remove_outer_hash_braces! - @params_string - end - - def remove_outer_array_braces! - @params_string = @params_string.gsub(/^\[|\]$/, '') - end - - def remove_outer_hash_braces! - @params_string = @params_string.gsub(/^\{|\}$/, '') if @params.length == 1 - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/return_values.rb b/vendor/gems/mocha-0.9.3/lib/mocha/return_values.rb deleted file mode 100644 index d93fb1a..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/return_values.rb +++ /dev/null @@ -1,31 +0,0 @@ -require 'mocha/single_return_value' - -module Mocha # :nodoc: - - class ReturnValues # :nodoc: - - def self.build(*values) - new(*values.map { |value| SingleReturnValue.new(value) }) - end - - attr_accessor :values - - def initialize(*values) - @values = values - end - - def next - case @values.length - when 0 then nil - when 1 then @values.first.evaluate - else @values.shift.evaluate - end - end - - def +(other) - self.class.new(*(@values + other.values)) - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/sequence.rb b/vendor/gems/mocha-0.9.3/lib/mocha/sequence.rb deleted file mode 100644 index ed9852e..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/sequence.rb +++ /dev/null @@ -1,42 +0,0 @@ -module Mocha # :nodoc: - - class Sequence - - class InSequenceOrderingConstraint - - def initialize(sequence, index) - @sequence, @index = sequence, index - end - - def allows_invocation_now? - @sequence.satisfied_to_index?(@index) - end - - def mocha_inspect - "in sequence #{@sequence.mocha_inspect}" - end - - end - - def initialize(name) - @name = name - @expectations = [] - end - - def constrain_as_next_in_sequence(expectation) - index = @expectations.length - @expectations << expectation - expectation.add_ordering_constraint(InSequenceOrderingConstraint.new(self, index)) - end - - def satisfied_to_index?(index) - @expectations[0...index].all? { |expectation| expectation.satisfied? 
} - end - - def mocha_inspect - "#{@name.mocha_inspect}" - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/single_return_value.rb b/vendor/gems/mocha-0.9.3/lib/mocha/single_return_value.rb deleted file mode 100644 index 98bc4be..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/single_return_value.rb +++ /dev/null @@ -1,17 +0,0 @@ -require 'mocha/is_a' - -module Mocha # :nodoc: - - class SingleReturnValue # :nodoc: - - def initialize(value) - @value = value - end - - def evaluate - @value - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/single_yield.rb b/vendor/gems/mocha-0.9.3/lib/mocha/single_yield.rb deleted file mode 100644 index 5af5716..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/single_yield.rb +++ /dev/null @@ -1,18 +0,0 @@ -module Mocha # :nodoc: - - class SingleYield # :nodoc: - - attr_reader :parameters - - def initialize(*parameters) - @parameters = parameters - end - - def each - yield(@parameters) - end - - end - -end - diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/standalone.rb b/vendor/gems/mocha-0.9.3/lib/mocha/standalone.rb deleted file mode 100644 index 2628296..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/standalone.rb +++ /dev/null @@ -1,166 +0,0 @@ -require 'mocha/parameter_matchers' -require 'mocha/mockery' -require 'mocha/sequence' - -module Mocha # :nodoc: - - # Methods added to Test::Unit::TestCase or equivalent. - module Standalone - - include ParameterMatchers - - # :call-seq: mock(name, &block) -> mock object - # mock(expected_methods = {}, &block) -> mock object - # mock(name, expected_methods = {}, &block) -> mock object - # - # Creates a mock object. - # - # +name+ is a +String+ identifier for the mock object. - # - # +expected_methods+ is a +Hash+ with expected method name symbols as keys and corresponding return values as values. - # - # Note that (contrary to expectations set up by #stub) these expectations must be fulfilled during the test. - # def test_product - # product = mock('ipod_product', :manufacturer => 'ipod', :price => 100) - # assert_equal 'ipod', product.manufacturer - # assert_equal 100, product.price - # # an error will be raised unless both Product#manufacturer and Product#price have been called - # end - # - # +block+ is an optional block to be evaluated against the mock object instance, giving an alernative way to set up expectations & stubs. - # def test_product - # product = mock('ipod_product') do - # expects(:manufacturer).returns('ipod') - # expects(:price).returns(100) - # end - # assert_equal 'ipod', product.manufacturer - # assert_equal 100, product.price - # # an error will be raised unless both Product#manufacturer and Product#price have been called - # end - def mock(*arguments, &block) - name = arguments.shift if arguments.first.is_a?(String) - expectations = arguments.shift || {} - mock = name ? Mockery.instance.named_mock(name, &block) : Mockery.instance.unnamed_mock(&block) - mock.expects(expectations) - mock - end - - # :call-seq: stub(name, &block) -> mock object - # stub(stubbed_methods = {}, &block) -> mock object - # stub(name, stubbed_methods = {}, &block) -> mock object - # - # Creates a mock object. - # - # +name+ is a +String+ identifier for the mock object. - # - # +stubbed_methods+ is a +Hash+ with stubbed method name symbols as keys and corresponding return values as values. - # Note that (contrary to expectations set up by #mock) these expectations need not be fulfilled during the test. 
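To make the note above concrete, a hedged side-by-side sketch of mock versus stub verification (the gateway names are illustrative): an unmet expects fails the test when its mocks are verified, while an unused stubs does not.

  strict  = mock('strict gateway')
  relaxed = stub('relaxed gateway')

  strict.expects(:charge).returns(:ok)   # must be invoked before the test finishes
  relaxed.stubs(:charge).returns(:ok)    # may be invoked zero or more times

  # If neither is called, only the unmet expectation on 'strict gateway'
  # is reported when the test case verifies its mocks.
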
- # def test_product - # product = stub('ipod_product', :manufacturer => 'ipod', :price => 100) - # assert_equal 'ipod', product.manufacturer - # assert_equal 100, product.price - # # an error will not be raised even if Product#manufacturer and Product#price have not been called - # end - # - # +block+ is an optional block to be evaluated against the mock object instance, giving an alernative way to set up expectations & stubs. - # def test_product - # product = stub('ipod_product') do - # stubs(:manufacturer).returns('ipod') - # stubs(:price).returns(100) - # end - # assert_equal 'ipod', product.manufacturer - # assert_equal 100, product.price - # # an error will not be raised even if Product#manufacturer and Product#price have not been called - # end - def stub(*arguments, &block) - name = arguments.shift if arguments.first.is_a?(String) - expectations = arguments.shift || {} - stub = name ? Mockery.instance.named_mock(name, &block) : Mockery.instance.unnamed_mock(&block) - stub.stubs(expectations) - stub - end - - # :call-seq: stub_everything(name, &block) -> mock object - # stub_everything(stubbed_methods = {}, &block) -> mock object - # stub_everything(name, stubbed_methods = {}, &block) -> mock object - # - # Creates a mock object that accepts calls to any method. - # - # By default it will return +nil+ for any method call. - # - # +block+ is a block to be evaluated against the mock object instance, giving an alernative way to set up expectations & stubs. - # - # +name+ and +stubbed_methods+ work in the same way as for #stub. - # def test_product - # product = stub_everything('ipod_product', :price => 100) - # assert_nil product.manufacturer - # assert_nil product.any_old_method - # assert_equal 100, product.price - # end - def stub_everything(*arguments, &block) - name = arguments.shift if arguments.first.is_a?(String) - expectations = arguments.shift || {} - stub = name ? Mockery.instance.named_mock(name, &block) : Mockery.instance.unnamed_mock(&block) - stub.stub_everything - stub.stubs(expectations) - stub - end - - # :call-seq: sequence(name) -> sequence - # - # Returns a new sequence that is used to constrain the order in which expectations can occur. - # - # Specify that an expected invocation must occur in within a named +sequence+ by using Expectation#in_sequence. - # - # See also Expectation#in_sequence. - # breakfast = sequence('breakfast') - # - # egg = mock('egg') - # egg.expects(:crack).in_sequence(breakfast) - # egg.expects(:fry).in_sequence(breakfast) - # egg.expects(:eat).in_sequence(breakfast) - def sequence(name) - Sequence.new(name) - end - - # :call-seq: states(name) -> state_machine - # - # Returns a new +state_machine+ that is used to constrain the order in which expectations can occur. - # - # Specify the initial +state+ of the +state_machine+ by using StateMachine#starts_as. - # - # Specify that an expected invocation should change the +state+ of the +state_machine+ by using Expectation#then. - # - # Specify that an expected invocation should be constrained to occur within a particular +state+ by using Expectation#when. - # - # A test can contain multiple +state_machines+. - # - # See also Expectation#then, Expectation#when and StateMachine. 
- # power = states('power').starts_as('off') - # - # radio = mock('radio') - # radio.expects(:switch_on).then(power.is('on')) - # radio.expects(:select_channel).with('BBC Radio 4').when(power.is('on')) - # radio.expects(:adjust_volume).with(+5).when(power.is('on')) - # radio.expects(:select_channel).with('BBC World Service').when(power.is('on')) - # radio.expects(:adjust_volume).with(-5).when(power.is('on')) - # radio.expects(:switch_off).then(power.is('off')) - def states(name) - Mockery.instance.new_state_machine(name) - end - - def mocha_setup # :nodoc: - end - - def mocha_verify(assertion_counter = nil) # :nodoc: - Mockery.instance.verify(assertion_counter) - end - - def mocha_teardown # :nodoc: - Mockery.instance.teardown - Mockery.reset_instance - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/state_machine.rb b/vendor/gems/mocha-0.9.3/lib/mocha/state_machine.rb deleted file mode 100644 index 1b9781a..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/state_machine.rb +++ /dev/null @@ -1,91 +0,0 @@ -module Mocha # :nodoc: - - # A state machine that is used to constrain the order of invocations. - # An invocation can be constrained to occur when a state is, or is_not, active. - class StateMachine - - class State # :nodoc: - - def initialize(state_machine, state) - @state_machine, @state = state_machine, state - end - - def activate - @state_machine.current_state = @state - end - - def active? - @state_machine.current_state == @state - end - - def mocha_inspect - "#{@state_machine.name} is #{@state.mocha_inspect}" - end - - end - - class StatePredicate # :nodoc: - - def initialize(state_machine, state) - @state_machine, @state = state_machine, state - end - - def active? - @state_machine.current_state != @state - end - - def mocha_inspect - "#{@state_machine.name} is not #{@state.mocha_inspect}" - end - - end - - attr_reader :name # :nodoc: - - attr_accessor :current_state # :nodoc: - - def initialize(name) # :nodoc: - @name = name - @current_state = nil - end - - # :call-seq: starts_as(initial_state) -> state_machine - # - # Put the +state_machine+ into the +initial_state+. - def starts_as(initial_state) - become(initial_state) - self - end - - # :call-seq: become(next_state) - # - # Put the +state_machine+ into the +next_state+. - def become(next_state) - @current_state = next_state - end - - # :call-seq: is(state) - # - # Determines whether the +state_machine+ is in the specified +state+. - def is(state) - State.new(self, state) - end - - # :call-seq: is_not(state) - # - # Determines whether the +state_machine+ is not in the specified +state+. - def is_not(state) - StatePredicate.new(self, state) - end - - def mocha_inspect # :nodoc: - if @current_state - "#{@name} is #{@current_state.mocha_inspect}" - else - "#{@name} has no current state" - end - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/stubbing_error.rb b/vendor/gems/mocha-0.9.3/lib/mocha/stubbing_error.rb deleted file mode 100644 index 34be289..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/stubbing_error.rb +++ /dev/null @@ -1,16 +0,0 @@ -require 'mocha/backtrace_filter' - -module Mocha # :nodoc: - - # Exception raised when an action prevented by Configuration#prevent is attempted. 
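StubbingError is raised by the Mockery#on_stubbing_* hooks shown earlier whenever a check is configured to prevent. A hedged sketch of how a suite might tighten those checks, assuming the Configuration.prevent and Configuration.warn_when class methods shipped with this version of the gem (typically placed in test_helper.rb):

  require 'mocha'

  # Fail fast with StubbingError instead of allowing these silently:
  Mocha::Configuration.prevent(:stubbing_non_existent_method)
  Mocha::Configuration.prevent(:stubbing_method_unnecessarily)

  # Downgrade this one to a warning written via Mockery#logger:
  Mocha::Configuration.warn_when(:stubbing_non_public_method)
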
- class StubbingError < StandardError - - def initialize(message = nil, backtrace = []) # :nodoc: - super(message) - filter = BacktraceFilter.new - set_backtrace(filter.filtered(backtrace)) - end - - end - -end diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/test_case_adapter.rb b/vendor/gems/mocha-0.9.3/lib/mocha/test_case_adapter.rb deleted file mode 100644 index 5b33c4a..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/test_case_adapter.rb +++ /dev/null @@ -1,103 +0,0 @@ -require 'mocha/expectation_error' - -module Mocha - - module TestCaseAdapter - - class AssertionCounter - - def initialize(test_result) - @test_result = test_result - end - - def increment - @test_result.add_assertion - end - - end - - def self.included(base) - if RUBY_VERSION < '1.8.6' - base.class_eval do - - alias_method :run_before_mocha_test_case_adapter, :run - - def run(result) - assertion_counter = AssertionCounter.new(result) - yield(Test::Unit::TestCase::STARTED, name) - @_result = result - begin - begin - setup - __send__(@method_name) - mocha_verify(assertion_counter) - rescue Mocha::ExpectationError => e - add_failure(e.message, e.backtrace) - rescue Test::Unit::AssertionFailedError => e - add_failure(e.message, e.backtrace) - rescue StandardError, ScriptError - add_error($!) - ensure - begin - teardown - rescue Test::Unit::AssertionFailedError => e - add_failure(e.message, e.backtrace) - rescue StandardError, ScriptError - add_error($!) - end - end - ensure - mocha_teardown - end - result.add_run - yield(Test::Unit::TestCase::FINISHED, name) - end - - end - else - base.class_eval do - - alias_method :run_before_mocha_test_case_adapter, :run - - def run(result) - assertion_counter = AssertionCounter.new(result) - yield(Test::Unit::TestCase::STARTED, name) - @_result = result - begin - begin - setup - __send__(@method_name) - mocha_verify(assertion_counter) - rescue Mocha::ExpectationError => e - add_failure(e.message, e.backtrace) - rescue Test::Unit::AssertionFailedError => e - add_failure(e.message, e.backtrace) - rescue Exception - raise if Test::Unit::TestCase::PASSTHROUGH_EXCEPTIONS.include? $!.class - add_error($!) - ensure - begin - teardown - rescue Test::Unit::AssertionFailedError => e - add_failure(e.message, e.backtrace) - rescue Exception - raise if Test::Unit::TestCase::PASSTHROUGH_EXCEPTIONS.include? $!.class - add_error($!) 
- end - end - ensure - mocha_teardown - end - result.add_run - yield(Test::Unit::TestCase::FINISHED, name) - end - - end - - end - - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/unexpected_invocation.rb b/vendor/gems/mocha-0.9.3/lib/mocha/unexpected_invocation.rb deleted file mode 100644 index 2eabb6a..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/unexpected_invocation.rb +++ /dev/null @@ -1,18 +0,0 @@ -module Mocha # :nodoc: - - class UnexpectedInvocation - - def initialize(mock, symbol, *arguments) - @mock = mock - @method_matcher = MethodMatcher.new(symbol) - @parameters_matcher = ParametersMatcher.new(arguments) - end - - def to_s - method_signature = "#{@mock.mocha_inspect}.#{@method_matcher.mocha_inspect}#{@parameters_matcher.mocha_inspect}" - "unexpected invocation: #{method_signature}\n" - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha/yield_parameters.rb b/vendor/gems/mocha-0.9.3/lib/mocha/yield_parameters.rb deleted file mode 100644 index 7d6ad12..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha/yield_parameters.rb +++ /dev/null @@ -1,31 +0,0 @@ -require 'mocha/no_yields' -require 'mocha/single_yield' -require 'mocha/multiple_yields' - -module Mocha # :nodoc: - - class YieldParameters # :nodoc: - - def initialize - @parameter_groups = [] - end - - def next_invocation - case @parameter_groups.length - when 0 then NoYields.new - when 1 then @parameter_groups.first - else @parameter_groups.shift - end - end - - def add(*parameters) - @parameter_groups << SingleYield.new(*parameters) - end - - def multiple_add(*parameter_groups) - @parameter_groups << MultipleYields.new(*parameter_groups) - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/lib/mocha_standalone.rb b/vendor/gems/mocha-0.9.3/lib/mocha_standalone.rb deleted file mode 100644 index ce60581..0000000 --- a/vendor/gems/mocha-0.9.3/lib/mocha_standalone.rb +++ /dev/null @@ -1,2 +0,0 @@ -require 'mocha/standalone' -require 'mocha/object' diff --git a/vendor/gems/mocha-0.9.3/lib/stubba.rb b/vendor/gems/mocha-0.9.3/lib/stubba.rb deleted file mode 100644 index ba4d93f..0000000 --- a/vendor/gems/mocha-0.9.3/lib/stubba.rb +++ /dev/null @@ -1,4 +0,0 @@ -# for backwards compatibility -require 'mocha' -require 'mocha/deprecation' -Mocha::Deprecation.warning "require 'stubba' is no longer needed and stubba.rb will soon be removed" diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/acceptance_test_helper.rb b/vendor/gems/mocha-0.9.3/test/acceptance/acceptance_test_helper.rb deleted file mode 100644 index 2341338..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/acceptance_test_helper.rb +++ /dev/null @@ -1,38 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'test_runner' -require 'mocha/configuration' - -module AcceptanceTest - - class FakeLogger - - attr_reader :warnings - - def initialize - @warnings = [] - end - - def warn(message) - @warnings << message - end - - end - - attr_reader :logger - - include TestRunner - - def setup_acceptance_test - Mocha::Configuration.reset_configuration - @logger = FakeLogger.new - mockery = Mocha::Mockery.instance - @original_logger = mockery.logger - mockery.logger = @logger - end - - def teardown_acceptance_test - Mocha::Configuration.reset_configuration - Mocha::Mockery.instance.logger = @original_logger - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/bug_18914_test.rb 
b/vendor/gems/mocha-0.9.3/test/acceptance/bug_18914_test.rb deleted file mode 100644 index 852a5f8..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/bug_18914_test.rb +++ /dev/null @@ -1,43 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class Bug18914Test < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - class AlwaysEql - - def my_method - true - end - - def ==(o) - true - end - - def eql?(o) - true - end - - end - - def test_should_not_allow_stubbing_of_non_mock_instance_disrupted_by_legitimate_overriding_of_eql_method - - always_eql_1 = AlwaysEql.new - always_eql_1.stubs(:my_method).returns(false) - - always_eql_2 = AlwaysEql.new - always_eql_2.stubs(:my_method).returns(false) - - assert_equal false, always_eql_2.my_method - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/bug_21465_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/bug_21465_test.rb deleted file mode 100644 index b7889e5..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/bug_21465_test.rb +++ /dev/null @@ -1,34 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class Bug21465Test < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_allow_expected_method_name_to_be_a_string - test_result = run_test do - mock = mock() - mock.expects('wibble') - mock.wibble - end - assert_passed(test_result) - end - - def test_should_allow_stubbed_method_name_to_be_a_string - test_result = run_test do - mock = mock() - mock.stubs('wibble') - mock.wibble - end - assert_passed(test_result) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/bug_21563_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/bug_21563_test.rb deleted file mode 100644 index 6c78368..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/bug_21563_test.rb +++ /dev/null @@ -1,25 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class Bug21563Test < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_allow_stubbing_of_verified_method - test_result = run_test do - object = Object.new - object.stubs(:verified?).returns(false) - assert !object.verified? 
- end - assert_passed(test_result) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/expected_invocation_count_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/expected_invocation_count_test.rb deleted file mode 100644 index de1282d..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/expected_invocation_count_test.rb +++ /dev/null @@ -1,196 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class ExpectedInvocationCountTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_pass_if_method_is_never_expected_and_is_never_called - test_result = run_test do - mock = mock('mock') - mock.expects(:method).never - 0.times { mock.method } - end - assert_passed(test_result) - end - - def test_should_fail_fast_if_method_is_never_expected_but_is_called_once - test_result = run_test do - mock = mock('mock') - mock.expects(:method).never - 1.times { mock.method } - end - assert_failed(test_result) - assert_equal ["unexpected invocation: #.method()\nsatisfied expectations:\n- expected never, not yet invoked: #.method(any_parameters)\n"], test_result.failure_messages - end - - def test_should_pass_if_method_is_expected_twice_and_is_called_twice - test_result = run_test do - mock = mock('mock') - mock.expects(:method).times(2) - 2.times { mock.method } - end - assert_passed(test_result) - end - - def test_should_fail_if_method_is_expected_twice_but_is_called_once - test_result = run_test do - mock = mock('mock') - mock.expects(:method).times(2) - 1.times { mock.method } - end - assert_failed(test_result) - assert_equal ["not all expectations were satisfied\nunsatisfied expectations:\n- expected exactly twice, already invoked once: #.method(any_parameters)\n"], test_result.failure_messages - end - - def test_should_fail_fast_if_method_is_expected_twice_but_is_called_three_times - test_result = run_test do - mock = mock('mock') - mock.expects(:method).times(2) - 3.times { mock.method } - end - assert_failed(test_result) - assert_equal ["unexpected invocation: #.method()\nsatisfied expectations:\n- expected exactly twice, already invoked twice: #.method(any_parameters)\n"], test_result.failure_messages - end - - def test_should_pass_if_method_is_expected_between_two_and_four_times_and_is_called_twice - test_result = run_test do - mock = mock('mock') - mock.expects(:method).times(2..4) - 2.times { mock.method } - end - assert_passed(test_result) - end - - def test_should_pass_if_method_is_expected_between_two_and_four_times_and_is_called_three_times - test_result = run_test do - mock = mock('mock') - mock.expects(:method).times(2..4) - 3.times { mock.method } - end - assert_passed(test_result) - end - - def test_should_pass_if_method_is_expected_between_two_and_four_times_and_is_called_four_times - test_result = run_test do - mock = mock('mock') - mock.expects(:method).times(2..4) - 4.times { mock.method } - end - assert_passed(test_result) - end - - def test_should_fail_if_method_is_expected_between_two_and_four_times_and_is_called_once - test_result = run_test do - mock = mock('mock') - mock.expects(:method).times(2..4) - 1.times { mock.method } - end - assert_failed(test_result) - assert_equal ["not all expectations were satisfied\nunsatisfied expectations:\n- expected between 2 and 4 times, already invoked once: #.method(any_parameters)\n"], test_result.failure_messages - end - - def 
test_should_fail_fast_if_method_is_expected_between_two_and_four_times_and_is_called_five_times - test_result = run_test do - mock = mock('mock') - mock.expects(:method).times(2..4) - 5.times { mock.method } - end - assert_failed(test_result) - assert_equal ["unexpected invocation: #.method()\nsatisfied expectations:\n- expected between 2 and 4 times, already invoked 4 times: #.method(any_parameters)\n"], test_result.failure_messages - end - - def test_should_pass_if_method_is_expected_at_least_once_and_is_called_once - test_result = run_test do - mock = mock('mock') - mock.expects(:method).at_least_once - 1.times { mock.method } - end - assert_passed(test_result) - end - - def test_should_pass_if_method_is_expected_at_least_once_and_is_called_twice - test_result = run_test do - mock = mock('mock') - mock.expects(:method).at_least_once - 2.times { mock.method } - end - assert_passed(test_result) - end - - def test_should_fail_if_method_is_expected_at_least_once_but_is_never_called - test_result = run_test do - mock = mock('mock') - mock.expects(:method).at_least_once - 0.times { mock.method } - end - assert_failed(test_result) - assert_equal ["not all expectations were satisfied\nunsatisfied expectations:\n- expected at least once, not yet invoked: #.method(any_parameters)\n"], test_result.failure_messages - end - - def test_should_pass_if_method_is_expected_at_most_once_and_is_never_called - test_result = run_test do - mock = mock('mock') - mock.expects(:method).at_most_once - 0.times { mock.method } - end - assert_passed(test_result) - end - - def test_should_pass_if_method_is_expected_at_most_once_and_called_once - test_result = run_test do - mock = mock('mock') - mock.expects(:method).at_most_once - 1.times { mock.method } - end - assert_passed(test_result) - end - - def test_should_fail_fast_if_method_is_expected_at_most_once_but_is_called_twice - test_result = run_test do - mock = mock('mock') - mock.expects(:method).at_most_once - 2.times { mock.method } - end - assert_failed(test_result) - assert_equal ["unexpected invocation: #.method()\nsatisfied expectations:\n- expected at most once, already invoked once: #.method(any_parameters)\n"], test_result.failure_messages - end - - def test_should_pass_if_method_is_never_expected_and_is_never_called_even_if_everything_is_stubbed - test_result = run_test do - stub = stub_everything('stub') - stub.expects(:method).never - 0.times { stub.method } - end - assert_passed(test_result) - end - - def test_should_fail_fast_if_method_is_never_expected_but_is_called_once_even_if_everything_is_stubbed - test_result = run_test do - stub = stub_everything('stub') - stub.expects(:method).never - 1.times { stub.method } - end - assert_failed(test_result) - assert_equal ["unexpected invocation: #.method()\nsatisfied expectations:\n- expected never, not yet invoked: #.method(any_parameters)\n"], test_result.failure_messages - end - - def test_should_fail_fast_if_there_is_no_matching_expectation - test_result = run_test do - mock = mock('mock') - mock.expects(:method).with(1) - 1.times { mock.method } - end - assert_failed(test_result) - assert_equal ["unexpected invocation: #.method()\nunsatisfied expectations:\n- expected exactly once, not yet invoked: #.method(1)\n"], test_result.failure_messages - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/failure_messages_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/failure_messages_test.rb deleted file mode 100644 index 678c04a..0000000 --- 
a/vendor/gems/mocha-0.9.3/test/acceptance/failure_messages_test.rb +++ /dev/null @@ -1,64 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class FailureMessagesTest < Test::Unit::TestCase - - OBJECT_ADDRESS_PATTERN = '0x[0-9A-Fa-f]{1,12}' - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - class Foo; end - - def test_should_display_class_name_when_expectation_was_on_class - test_result = run_test do - Foo.expects(:bar) - end - assert_match Regexp.new('FailureMessagesTest::Foo'), test_result.failures[0].message - end - - def test_should_display_class_name_and_address_when_expectation_was_on_instance - test_result = run_test do - Foo.new.expects(:bar) - end - assert_match Regexp.new("#"), test_result.failures[0].message - end - - def test_should_display_class_name_and_any_instance_prefix_when_expectation_was_on_any_instance - test_result = run_test do - Foo.any_instance.expects(:bar) - end - assert_match Regexp.new('#'), test_result.failures[0].message - end - - def test_should_display_mock_name_when_expectation_was_on_named_mock - test_result = run_test do - foo = mock('foo') - foo.expects(:bar) - end - assert_match Regexp.new('#'), test_result.failures[0].message - end - - def test_should_display_mock_address_when_expectation_was_on_unnamed_mock - test_result = run_test do - foo = mock() - foo.expects(:bar) - end - assert_match Regexp.new("#"), test_result.failures[0].message - end - - def test_should_display_string_when_expectation_was_on_string - test_result = run_test do - 'Foo'.expects(:bar) - end - assert_match Regexp.new("'Foo'"), test_result.failures[0].message - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/minitest_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/minitest_test.rb deleted file mode 100644 index f653945..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/minitest_test.rb +++ /dev/null @@ -1,130 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -if defined?(MiniTest) - - class MiniTestSampleTest < MiniTest::Unit::TestCase - - def test_mocha_with_fulfilled_expectation - mockee = mock() - mockee.expects(:blah) - mockee.blah - end - - def test_mocha_with_unfulfilled_expectation - mockee = mock() - mockee.expects(:blah) - end - - def test_mocha_with_unexpected_invocation - mockee = mock() - mockee.blah - end - - def test_stubba_with_fulfilled_expectation - stubbee = Class.new { define_method(:blah) {} }.new - stubbee.expects(:blah) - stubbee.blah - end - - def test_stubba_with_unfulfilled_expectation - stubbee = Class.new { define_method(:blah) {} }.new - stubbee.expects(:blah) - end - - def test_mocha_with_matching_parameter - mockee = mock() - mockee.expects(:blah).with(has_key(:wibble)) - mockee.blah(:wibble => 1) - end - - def test_mocha_with_non_matching_parameter - mockee = mock() - mockee.expects(:blah).with(has_key(:wibble)) - mockee.blah(:wobble => 2) - end - - end - - class MiniTestAdapterTest < Test::Unit::TestCase - - def setup - @output = StringIO.new - MiniTest::Unit.output = @output - @runner = MiniTest::Unit.new - end - - attr_reader :runner - - def test_should_pass_mocha_test - runner.run(%w(-n test_mocha_with_fulfilled_expectation)) - - assert_equal 0, runner.errors - assert_equal 1, runner.assertion_count - end - - def test_should_fail_mocha_test_due_to_unfulfilled_expectation - runner.run(%w(-n test_mocha_with_unfulfilled_expectation)) - 
- assert_equal 1, runner.errors - assert_equal 1, runner.assertion_count - assert_not_all_expectation_were_satisfied - end - - def test_should_fail_mocha_test_due_to_unexpected_invocation - runner.run(%w(-n test_mocha_with_unexpected_invocation)) - - assert_equal 1, runner.errors - assert_equal 0, runner.assertion_count - assert_unexpected_invocation - end - - def test_should_pass_stubba_test - runner.run(%w(-n test_stubba_with_fulfilled_expectation)) - - assert_equal 0, runner.errors - assert_equal 1, runner.assertion_count - end - - def test_should_fail_stubba_test_due_to_unfulfilled_expectation - runner.run(%w(-n test_stubba_with_unfulfilled_expectation)) - - assert_equal 1, runner.errors - assert_equal 1, runner.assertion_count - assert_match Regexp.new('not all expectations were satisfied'), output - end - - def test_should_pass_mocha_test_with_matching_parameter - runner.run(%w(-n test_mocha_with_matching_parameter)) - - assert_equal 0, runner.errors - assert_equal 1, runner.assertion_count - end - - def test_should_fail_mocha_test_with_non_matching_parameter - runner.run(%w(-n test_mocha_with_non_matching_parameter)) - - assert_equal 1, runner.errors - assert_unexpected_invocation - end - - private - - def output - @output.rewind - @output.read - end - - def assert_unexpected_invocation - assert_match Regexp.new('unexpected invocation'), output, "MiniTest output:\n#{output}" - end - - def assert_not_all_expectation_were_satisfied - assert_match Regexp.new('not all expectations were satisfied'), output, "MiniTest output:\n#{output}" - end - - end - -else - warn "MiniTest is not available, so MiniTestAdapterTest has not been run." -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/mocha_example_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/mocha_example_test.rb deleted file mode 100644 index 34009c4..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/mocha_example_test.rb +++ /dev/null @@ -1,98 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha' - -class MochaExampleTest < Test::Unit::TestCase - - class Rover - - def initialize(left_track, right_track, steps_per_metre, steps_per_degree) - @left_track, @right_track, @steps_per_metre, @steps_per_degree = left_track, right_track, steps_per_metre, steps_per_degree - end - - def forward(metres) - @left_track.step(metres * @steps_per_metre) - @right_track.step(metres * @steps_per_metre) - wait - end - - def backward(metres) - forward(-metres) - end - - def left(degrees) - @left_track.step(-degrees * @steps_per_degree) - @right_track.step(+degrees * @steps_per_degree) - wait - end - - def right(degrees) - left(-degrees) - end - - def wait - while (@left_track.moving? 
or @right_track.moving?); end - end - - end - - def test_should_step_both_tracks_forward_ten_steps - left_track = mock('left_track') - right_track = mock('right_track') - steps_per_metre = 5 - rover = Rover.new(left_track, right_track, steps_per_metre, nil) - - left_track.expects(:step).with(10) - right_track.expects(:step).with(10) - - left_track.stubs(:moving?).returns(false) - right_track.stubs(:moving?).returns(false) - - rover.forward(2) - end - - def test_should_step_both_tracks_backward_ten_steps - left_track = mock('left_track') - right_track = mock('right_track') - steps_per_metre = 5 - rover = Rover.new(left_track, right_track, steps_per_metre, nil) - - left_track.expects(:step).with(-10) - right_track.expects(:step).with(-10) - - left_track.stubs(:moving?).returns(false) - right_track.stubs(:moving?).returns(false) - - rover.backward(2) - end - - def test_should_step_left_track_forwards_five_steps_and_right_track_backwards_five_steps - left_track = mock('left_track') - right_track = mock('right_track') - steps_per_degree = 5.0 / 90.0 - rover = Rover.new(left_track, right_track, nil, steps_per_degree) - - left_track.expects(:step).with(+5) - right_track.expects(:step).with(-5) - - left_track.stubs(:moving?).returns(false) - right_track.stubs(:moving?).returns(false) - - rover.right(90) - end - - def test_should_step_left_track_backwards_five_steps_and_right_track_forwards_five_steps - left_track = mock('left_track') - right_track = mock('right_track') - steps_per_degree = 5.0 / 90.0 - rover = Rover.new(left_track, right_track, nil, steps_per_degree) - - left_track.expects(:step).with(-5) - right_track.expects(:step).with(+5) - - left_track.stubs(:moving?).returns(false) - right_track.stubs(:moving?).returns(false) - - rover.left(90) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/mocha_test_result_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/mocha_test_result_test.rb deleted file mode 100644 index 70aa275..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/mocha_test_result_test.rb +++ /dev/null @@ -1,84 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' -require 'execution_point' - -class MochaTestResultTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_include_expectation_verification_in_assertion_count - test_result = run_test do - object = mock() - object.expects(:message) - object.message - end - assert_equal 1, test_result.assertion_count - end - - def test_should_include_assertions_in_assertion_count - test_result = run_test do - assert true - end - assert_equal 1, test_result.assertion_count - end - - def test_should_not_include_stubbing_expectation_verification_in_assertion_count - test_result = run_test do - object = mock() - object.stubs(:message) - object.message - end - assert_equal 0, test_result.assertion_count - end - - def test_should_include_expectation_verification_failure_in_failure_count - test_result = run_test do - object = mock() - object.expects(:message) - end - assert_equal 1, test_result.failure_count - end - - def test_should_include_unexpected_verification_failure_in_failure_count - test_result = run_test do - object = mock() - object.message - end - assert_equal 1, test_result.failure_count - end - - def test_should_include_assertion_failure_in_failure_count - test_result = run_test do - flunk - end - assert_equal 1, 
test_result.failure_count - end - - def test_should_display_backtrace_indicating_line_number_where_unexpected_method_was_called - execution_point = nil - test_result = run_test do - object = mock() - execution_point = ExecutionPoint.current; object.message - end - assert_equal 1, test_result.failure_count - assert_equal execution_point, ExecutionPoint.new(test_result.failures[0].location) - end - - def test_should_display_backtrace_indicating_line_number_where_failing_assertion_was_called - execution_point = nil - test_result = run_test do - execution_point = ExecutionPoint.current; flunk - end - assert_equal 1, test_result.failure_count - assert_equal execution_point, ExecutionPoint.new(test_result.failures[0].location) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/mock_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/mock_test.rb deleted file mode 100644 index e3bbc22..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/mock_test.rb +++ /dev/null @@ -1,100 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class MockTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_build_mock_and_explicitly_add_an_expectation_which_is_satisfied - test_result = run_test do - foo = mock() - foo.expects(:bar) - foo.bar - end - assert_passed(test_result) - end - - def test_should_build_mock_and_explicitly_add_an_expectation_which_is_not_satisfied - test_result = run_test do - foo = mock() - foo.expects(:bar) - end - assert_failed(test_result) - end - - def test_should_build_named_mock_and_explicitly_add_an_expectation_which_is_satisfied - test_result = run_test do - foo = mock('foo') - foo.expects(:bar) - foo.bar - end - assert_passed(test_result) - end - - def test_should_build_named_mock_and_explicitly_add_an_expectation_which_is_not_satisfied - test_result = run_test do - foo = mock('foo') - foo.expects(:bar) - end - assert_failed(test_result) - end - - def test_should_build_mock_incorporating_two_expectations_which_are_satisifed - test_result = run_test do - foo = mock(:bar => 'bar', :baz => 'baz') - foo.bar - foo.baz - end - assert_passed(test_result) - end - - def test_should_build_mock_incorporating_two_expectations_the_first_of_which_is_not_satisifed - test_result = run_test do - foo = mock(:bar => 'bar', :baz => 'baz') - foo.baz - end - assert_failed(test_result) - end - - def test_should_build_mock_incorporating_two_expectations_the_second_of_which_is_not_satisifed - test_result = run_test do - foo = mock(:bar => 'bar', :baz => 'baz') - foo.bar - end - assert_failed(test_result) - end - - def test_should_build_named_mock_incorporating_two_expectations_which_are_satisifed - test_result = run_test do - foo = mock('foo', :bar => 'bar', :baz => 'baz') - foo.bar - foo.baz - end - assert_passed(test_result) - end - - def test_should_build_named_mock_incorporating_two_expectations_the_first_of_which_is_not_satisifed - test_result = run_test do - foo = mock('foo', :bar => 'bar', :baz => 'baz') - foo.baz - end - assert_failed(test_result) - end - - def test_should_build_named_mock_incorporating_two_expectations_the_second_of_which_is_not_satisifed - test_result = run_test do - foo = mock('foo', :bar => 'bar', :baz => 'baz') - foo.bar - end - assert_failed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/mock_with_initializer_block_test.rb 
b/vendor/gems/mocha-0.9.3/test/acceptance/mock_with_initializer_block_test.rb deleted file mode 100644 index 4ca2152..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/mock_with_initializer_block_test.rb +++ /dev/null @@ -1,51 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class MockWithInitializerBlockTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_expect_two_method_invocations_and_receive_both_of_them - test_result = run_test do - mock = mock() do - expects(:method_1) - expects(:method_2) - end - mock.method_1 - mock.method_2 - end - assert_passed(test_result) - end - - def test_should_expect_two_method_invocations_but_receive_only_one_of_them - test_result = run_test do - mock = mock() do - expects(:method_1) - expects(:method_2) - end - mock.method_1 - end - assert_failed(test_result) - end - - def test_should_stub_methods - test_result = run_test do - mock = mock() do - stubs(:method_1).returns(1) - stubs(:method_2).returns(2) - end - assert_equal 1, mock.method_1 - assert_equal 2, mock.method_2 - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/mocked_methods_dispatch_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/mocked_methods_dispatch_test.rb deleted file mode 100644 index b19b569..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/mocked_methods_dispatch_test.rb +++ /dev/null @@ -1,78 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class MockedMethodDispatchTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_find_latest_matching_expectation - test_result = run_test do - mock = mock() - mock.stubs(:method).returns(1) - mock.stubs(:method).returns(2) - assert_equal 2, mock.method - assert_equal 2, mock.method - assert_equal 2, mock.method - end - assert_passed(test_result) - end - - def test_should_find_latest_expectation_which_has_not_stopped_matching - test_result = run_test do - mock = mock() - mock.stubs(:method).returns(1) - mock.stubs(:method).once.returns(2) - assert_equal 2, mock.method - assert_equal 1, mock.method - assert_equal 1, mock.method - end - assert_passed(test_result) - end - - def test_should_keep_finding_later_stub_and_so_never_satisfy_earlier_expectation - test_result = run_test do - mock = mock() - mock.expects(:method).returns(1) - mock.stubs(:method).returns(2) - assert_equal 2, mock.method - assert_equal 2, mock.method - assert_equal 2, mock.method - end - assert_failed(test_result) - end - - def test_should_find_later_expectation_until_it_stops_matching_then_find_earlier_stub - test_result = run_test do - mock = mock() - mock.stubs(:method).returns(1) - mock.expects(:method).returns(2) - assert_equal 2, mock.method - assert_equal 1, mock.method - assert_equal 1, mock.method - end - assert_passed(test_result) - end - - def test_should_find_latest_expectation_with_range_of_expected_invocation_count_which_has_not_stopped_matching - test_result = run_test do - mock = mock() - mock.stubs(:method).returns(1) - mock.stubs(:method).times(2..3).returns(2) - assert_equal 2, mock.method - assert_equal 2, mock.method - assert_equal 2, mock.method - assert_equal 1, mock.method - assert_equal 1, mock.method - end - assert_passed(test_result) - end - 
-end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/optional_parameters_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/optional_parameters_test.rb deleted file mode 100644 index e7e0528..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/optional_parameters_test.rb +++ /dev/null @@ -1,70 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class OptionalParameterMatcherTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_pass_if_all_required_parameters_match_and_no_optional_parameters_are_supplied - test_result = run_test do - mock = mock() - mock.expects(:method).with(1, 2, optionally(3, 4)) - mock.method(1, 2) - end - assert_passed(test_result) - end - - def test_should_pass_if_all_required_and_optional_parameters_match_and_some_optional_parameters_are_supplied - test_result = run_test do - mock = mock() - mock.expects(:method).with(1, 2, optionally(3, 4)) - mock.method(1, 2, 3) - end - assert_passed(test_result) - end - - def test_should_pass_if_all_required_and_optional_parameters_match_and_all_optional_parameters_are_supplied - test_result = run_test do - mock = mock() - mock.expects(:method).with(1, 2, optionally(3, 4)) - mock.method(1, 2, 3, 4) - end - assert_passed(test_result) - end - - def test_should_fail_if_all_required_and_optional_parameters_match_but_too_many_optional_parameters_are_supplied - test_result = run_test do - mock = mock() - mock.expects(:method).with(1, 2, optionally(3, 4)) - mock.method(1, 2, 3, 4, 5) - end - assert_failed(test_result) - end - - def test_should_fail_if_all_required_parameters_match_but_some_optional_parameters_do_not_match - test_result = run_test do - mock = mock() - mock.expects(:method).with(1, 2, optionally(3, 4)) - mock.method(1, 2, 4) - end - assert_failed(test_result) - end - - def test_should_fail_if_all_required_parameters_match_but_no_optional_parameters_match - test_result = run_test do - mock = mock() - mock.expects(:method).with(1, 2, optionally(3, 4)) - mock.method(1, 2, 4, 5) - end - assert_failed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/parameter_matcher_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/parameter_matcher_test.rb deleted file mode 100644 index 57b4ad6..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/parameter_matcher_test.rb +++ /dev/null @@ -1,209 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class ParameterMatcherTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_match_hash_parameter_with_specified_key - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_key(:key_1)) - mock.method(:key_1 => 'value_1', :key_2 => 'value_2') - end - assert_passed(test_result) - end - - def test_should_not_match_hash_parameter_with_specified_key - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_key(:key_1)) - mock.method(:key_2 => 'value_2') - end - assert_failed(test_result) - end - - def test_should_match_hash_parameter_with_specified_value - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_value('value_1')) - mock.method(:key_1 => 'value_1', :key_2 => 'value_2') - end - assert_passed(test_result) - end - - def 
test_should_not_match_hash_parameter_with_specified_value - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_value('value_1')) - mock.method(:key_2 => 'value_2') - end - assert_failed(test_result) - end - - def test_should_match_hash_parameter_with_specified_key_value_pair - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_entry(:key_1, 'value_1')) - mock.method(:key_1 => 'value_1', :key_2 => 'value_2') - end - assert_passed(test_result) - end - - def test_should_not_match_hash_parameter_with_specified_key_value_pair - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_entry(:key_1, 'value_2')) - mock.method(:key_1 => 'value_1', :key_2 => 'value_2') - end - assert_failed(test_result) - end - - def test_should_match_hash_parameter_with_specified_hash_entry - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_entry(:key_1 => 'value_1')) - mock.method(:key_1 => 'value_1', :key_2 => 'value_2') - end - assert_passed(test_result) - end - - def test_should_not_match_hash_parameter_with_specified_hash_entry - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_entry(:key_1 => 'value_2')) - mock.method(:key_1 => 'value_1', :key_2 => 'value_2') - end - assert_failed(test_result) - end - - def test_should_match_hash_parameter_with_specified_entries - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_entries(:key_1 => 'value_1', :key_2 => 'value_2')) - mock.method(:key_1 => 'value_1', :key_2 => 'value_2', :key_3 => 'value_3') - end - assert_passed(test_result) - end - - def test_should_not_match_hash_parameter_with_specified_entries - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_entries(:key_1 => 'value_1', :key_2 => 'value_2')) - mock.method(:key_1 => 'value_1', :key_2 => 'value_3') - end - assert_failed(test_result) - end - - def test_should_match_parameter_that_matches_regular_expression - test_result = run_test do - mock = mock() - mock.expects(:method).with(regexp_matches(/meter/)) - mock.method('this parameter should match') - end - assert_passed(test_result) - end - - def test_should_not_match_parameter_that_does_not_match_regular_expression - test_result = run_test do - mock = mock() - mock.expects(:method).with(regexp_matches(/something different/)) - mock.method('this parameter should not match') - end - assert_failed(test_result) - end - - def test_should_match_hash_parameter_with_specified_entries_using_nested_matchers - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_entries(:key_1 => regexp_matches(/value_1/), kind_of(Symbol) => 'value_2')) - mock.method(:key_1 => 'value_1', :key_2 => 'value_2', :key_3 => 'value_3') - end - assert_passed(test_result) - end - - def test_should_not_match_hash_parameter_with_specified_entries_using_nested_matchers - test_result = run_test do - mock = mock() - mock.expects(:method).with(has_entries(:key_1 => regexp_matches(/value_1/), kind_of(String) => 'value_2')) - mock.method(:key_1 => 'value_2', :key_2 => 'value_3') - end - assert_failed(test_result) - end - - def test_should_match_parameter_that_matches_any_value - test_result = run_test do - mock = mock() - mock.expects(:method).with(any_of('value_1', 'value_2')).times(2) - mock.method('value_1') - mock.method('value_2') - end - assert_passed(test_result) - end - - def test_should_not_match_parameter_that_does_not_match_any_value - test_result = run_test do - mock = mock() - 
mock.expects(:method).with(any_of('value_1', 'value_2')) - mock.method('value_3') - end - assert_failed(test_result) - end - - def test_should_match_parameter_that_matches_all_values - test_result = run_test do - mock = mock() - mock.expects(:method).with(all_of('value_1', 'value_1')) - mock.method('value_1') - end - assert_passed(test_result) - end - - def test_should_not_match_parameter_that_does_not_match_all_values - test_result = run_test do - mock = mock() - mock.expects(:method).with(all_of('value_1', 'value_2')) - mock.method('value_1') - end - assert_failed(test_result) - end - - def test_should_match_parameter_that_responds_with_specified_value - klass = Class.new do - def quack - 'quack' - end - end - duck = klass.new - test_result = run_test do - mock = mock() - mock.expects(:method).with(responds_with(:quack, 'quack')) - mock.method(duck) - end - assert_passed(test_result) - end - - def test_should_not_match_parameter_that_does_not_respond_with_specified_value - klass = Class.new do - def quack - 'woof' - end - end - duck = klass.new - test_result = run_test do - mock = mock() - mock.expects(:method).with(responds_with(:quack, 'quack')) - mock.method(duck) - end - assert_failed(test_result) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/partial_mocks_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/partial_mocks_test.rb deleted file mode 100644 index b44f8f5..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/partial_mocks_test.rb +++ /dev/null @@ -1,47 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class PartialMockTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_pass_if_all_expectations_are_satisfied - test_result = run_test do - partial_mock_one = "partial_mock_one" - partial_mock_two = "partial_mock_two" - - partial_mock_one.expects(:first) - partial_mock_one.expects(:second) - partial_mock_two.expects(:third) - - partial_mock_one.first - partial_mock_one.second - partial_mock_two.third - end - assert_passed(test_result) - end - - def test_should_fail_if_all_expectations_are_not_satisfied - test_result = run_test do - partial_mock_one = "partial_mock_one" - partial_mock_two = "partial_mock_two" - - partial_mock_one.expects(:first) - partial_mock_one.expects(:second) - partial_mock_two.expects(:third) - - partial_mock_one.first - partial_mock_two.third - end - assert_failed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/return_value_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/return_value_test.rb deleted file mode 100644 index 22dcbf3..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/return_value_test.rb +++ /dev/null @@ -1,52 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class ReturnValueTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_build_mock_and_explicitly_add_an_expectation_with_a_return_value - test_result = run_test do - foo = mock('foo') - foo.expects(:bar).returns('bar') - assert_equal 'bar', foo.bar - end - assert_passed(test_result) - end - - def test_should_build_mock_incorporating_two_expectations_with_return_values - test_result = run_test do - foo = mock('foo', :bar => 'bar', :baz => 'baz') - assert_equal 'bar', foo.bar - 
assert_equal 'baz', foo.baz - end - assert_passed(test_result) - end - - def test_should_build_stub_and_explicitly_add_an_expectation_with_a_return_value - test_result = run_test do - foo = stub('foo') - foo.stubs(:bar).returns('bar') - assert_equal 'bar', foo.bar - end - assert_passed(test_result) - end - - def test_should_build_stub_incorporating_two_expectations_with_return_values - test_result = run_test do - foo = stub('foo', :bar => 'bar', :baz => 'baz') - assert_equal 'bar', foo.bar - assert_equal 'baz', foo.baz - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/sequence_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/sequence_test.rb deleted file mode 100644 index 421e313..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/sequence_test.rb +++ /dev/null @@ -1,186 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class SequenceTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_constrain_invocations_to_occur_in_expected_order - test_result = run_test do - mock = mock() - sequence = sequence('one') - - mock.expects(:first).in_sequence(sequence) - mock.expects(:second).in_sequence(sequence) - - mock.second - end - assert_failed(test_result) - end - - def test_should_allow_invocations_in_sequence - test_result = run_test do - mock = mock() - sequence = sequence('one') - - mock.expects(:first).in_sequence(sequence) - mock.expects(:second).in_sequence(sequence) - - mock.first - mock.second - end - assert_passed(test_result) - end - - def test_should_constrain_invocations_to_occur_in_expected_order_even_if_expected_on_different_mocks - test_result = run_test do - mock_one = mock('1') - mock_two = mock('2') - sequence = sequence('one') - - mock_one.expects(:first).in_sequence(sequence) - mock_two.expects(:second).in_sequence(sequence) - - mock_two.second - end - assert_failed(test_result) - end - - def test_should_allow_invocations_in_sequence_even_if_expected_on_different_mocks - test_result = run_test do - mock_one = mock('1') - mock_two = mock('2') - sequence = sequence('one') - - mock_one.expects(:first).in_sequence(sequence) - mock_two.expects(:second).in_sequence(sequence) - - mock_one.first - mock_two.second - end - assert_passed(test_result) - end - - def test_should_constrain_invocations_to_occur_in_expected_order_even_if_expected_on_partial_mocks - test_result = run_test do - partial_mock_one = "1" - partial_mock_two = "2" - sequence = sequence('one') - - partial_mock_one.expects(:first).in_sequence(sequence) - partial_mock_two.expects(:second).in_sequence(sequence) - - partial_mock_two.second - end - assert_failed(test_result) - end - - def test_should_allow_invocations_in_sequence_even_if_expected_on_partial_mocks - test_result = run_test do - partial_mock_one = "1" - partial_mock_two = "2" - sequence = sequence('one') - - partial_mock_one.expects(:first).in_sequence(sequence) - partial_mock_two.expects(:second).in_sequence(sequence) - - partial_mock_one.first - partial_mock_two.second - end - assert_passed(test_result) - end - - def test_should_allow_stub_expectations_to_be_skipped_in_sequence - test_result = run_test do - mock = mock() - sequence = sequence('one') - - mock.expects(:first).in_sequence(sequence) - mock.stubs(:second).in_sequence(sequence) - mock.expects(:third).in_sequence(sequence) - - mock.first - mock.third - end - 
assert_passed(test_result) - end - - def test_should_regard_sequences_as_independent_of_each_other - test_result = run_test do - mock = mock() - sequence_one = sequence('one') - sequence_two = sequence('two') - - mock.expects(:first).in_sequence(sequence_one) - mock.expects(:second).in_sequence(sequence_one) - - mock.expects(:third).in_sequence(sequence_two) - mock.expects(:fourth).in_sequence(sequence_two) - - mock.first - mock.third - mock.second - mock.fourth - end - assert_passed(test_result) - end - - def test_should_include_sequence_in_failure_message - test_result = run_test do - mock = mock() - sequence = sequence('one') - - mock.expects(:first).in_sequence(sequence) - mock.expects(:second).in_sequence(sequence) - - mock.second - end - assert_failed(test_result) - assert_match Regexp.new("in sequence 'one'"), test_result.failures.first.message - end - - def test_should_allow_expectations_to_be_in_more_than_one_sequence - test_result = run_test do - mock = mock() - sequence_one = sequence('one') - sequence_two = sequence('two') - - mock.expects(:first).in_sequence(sequence_one) - mock.expects(:second).in_sequence(sequence_two) - mock.expects(:three).in_sequence(sequence_one).in_sequence(sequence_two) - - mock.first - mock.three - end - assert_failed(test_result) - assert_match Regexp.new("in sequence 'one'"), test_result.failures.first.message - assert_match Regexp.new("in sequence 'two'"), test_result.failures.first.message - end - - def test_should_have_shortcut_for_expectations_to_be_in_more_than_one_sequence - test_result = run_test do - mock = mock() - sequence_one = sequence('one') - sequence_two = sequence('two') - - mock.expects(:first).in_sequence(sequence_one) - mock.expects(:second).in_sequence(sequence_two) - mock.expects(:three).in_sequence(sequence_one, sequence_two) - - mock.first - mock.three - end - assert_failed(test_result) - assert_match Regexp.new("in sequence 'one'"), test_result.failures.first.message - assert_match Regexp.new("in sequence 'two'"), test_result.failures.first.message - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/standalone_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/standalone_test.rb deleted file mode 100644 index 9ad0461..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/standalone_test.rb +++ /dev/null @@ -1,139 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha_standalone' -require 'simple_counter' - -class NotATestUnitAssertionFailedError < StandardError -end - -class NotATestUnitTestCase - - include Mocha::Standalone - - attr_reader :assertion_counter - - def initialize - @assertion_counter = SimpleCounter.new - end - - def run(test_method) - mocha_setup - begin - prepare - begin - send(test_method) - mocha_verify(@assertion_counter) - rescue Mocha::ExpectationError => e - new_error = NotATestUnitAssertionFailedError.new(e.message) - new_error.set_backtrace(e.backtrace) - raise new_error - ensure - cleanup - end - ensure - mocha_teardown - end - end - - def prepare - end - - def cleanup - end - -end - -class SampleTest < NotATestUnitTestCase - - def mocha_with_fulfilled_expectation - mockee = mock() - mockee.expects(:blah) - mockee.blah - end - - def mocha_with_unfulfilled_expectation - mockee = mock() - mockee.expects(:blah) - end - - def mocha_with_unexpected_invocation - mockee = mock() - mockee.blah - end - - def stubba_with_fulfilled_expectation - stubbee = Class.new { define_method(:blah) {} }.new - stubbee.expects(:blah) - 
stubbee.blah - end - - def stubba_with_unfulfilled_expectation - stubbee = Class.new { define_method(:blah) {} }.new - stubbee.expects(:blah) - end - - def mocha_with_matching_parameter - mockee = mock() - mockee.expects(:blah).with(has_key(:wibble)) - mockee.blah(:wibble => 1) - end - - def mocha_with_non_matching_parameter - mockee = mock() - mockee.expects(:blah).with(has_key(:wibble)) - mockee.blah(:wobble => 2) - end - -end - -require 'test/unit' - -class StandaloneTest < Test::Unit::TestCase - - attr_reader :sample_test - - include AcceptanceTest - - def setup - @sample_test = SampleTest.new - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_pass_mocha_test - assert_nothing_raised { sample_test.run(:mocha_with_fulfilled_expectation) } - assert_equal 1, sample_test.assertion_counter.count - end - - def test_should_fail_mocha_test_due_to_unfulfilled_exception - assert_raises(NotATestUnitAssertionFailedError) { sample_test.run(:mocha_with_unfulfilled_expectation) } - assert_equal 1, sample_test.assertion_counter.count - end - - def test_should_fail_mocha_test_due_to_unexpected_invocation - assert_raises(NotATestUnitAssertionFailedError) { sample_test.run(:mocha_with_unexpected_invocation) } - assert_equal 0, sample_test.assertion_counter.count - end - - def test_should_pass_stubba_test - assert_nothing_raised { sample_test.run(:stubba_with_fulfilled_expectation) } - assert_equal 1, sample_test.assertion_counter.count - end - - def test_should_fail_stubba_test - assert_raises(NotATestUnitAssertionFailedError) { sample_test.run(:stubba_with_unfulfilled_expectation) } - assert_equal 1, sample_test.assertion_counter.count - end - - def test_should_pass_mocha_test_with_matching_parameter - assert_nothing_raised { sample_test.run(:mocha_with_matching_parameter) } - assert_equal 1, sample_test.assertion_counter.count - end - - def test_should_fail_mocha_test_with_non_matching_parameter - assert_raises(NotATestUnitAssertionFailedError) { sample_test.run(:mocha_with_non_matching_parameter) } - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/states_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/states_test.rb deleted file mode 100644 index 8b4df25..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/states_test.rb +++ /dev/null @@ -1,70 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StatesTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_constrain_expectations_to_occur_within_a_given_state - test_result = run_test do - mock = mock() - readiness = states('readiness') - - mock.stubs(:first).when(readiness.is('ready')) - mock.stubs(:second).then(readiness.is('ready')) - - mock.first - end - assert_failed(test_result) - end - - def test_should_allow_expectations_to_occur_in_correct_state - test_result = run_test do - mock = mock() - readiness = states('readiness') - - mock.stubs(:first).when(readiness.is('ready')) - mock.stubs(:second).then(readiness.is('ready')) - - mock.second - mock.first - end - assert_passed(test_result) - end - - def test_should_be_able_to_start_in_a_specific_state - test_result = run_test do - mock = mock() - readiness = states('readiness') - - mock.stubs(:first).when(readiness.is('ready')) - - readiness.starts_as('ready') - mock.first - end - assert_passed(test_result) - end - - def 
test_should_switch_state_when_method_raises_an_exception - test_result = run_test do - mock = mock() - readiness = states('readiness') - - mock.expects(:first).raises().then(readiness.is('ready')) - mock.expects(:second).when(readiness.is('ready')) - - mock.first rescue nil - mock.second - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stub_any_instance_method_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stub_any_instance_method_test.rb deleted file mode 100644 index b7f3064..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stub_any_instance_method_test.rb +++ /dev/null @@ -1,195 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubAnyInstanceMethodTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_stub_method_within_test - klass = Class.new do - def my_instance_method - :original_return_value - end - end - instance = klass.new - test_result = run_test do - klass.any_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, instance.my_instance_method - end - assert_passed(test_result) - end - - def test_should_leave_stubbed_public_method_unchanged_after_test - klass = Class.new do - def my_instance_method - :original_return_value - end - end - instance = klass.new - run_test do - klass.any_instance.stubs(:my_instance_method).returns(:new_return_value) - end - assert instance.public_methods(false).any? { |m| m.to_s == 'my_instance_method' } - assert_equal :original_return_value, instance.my_instance_method - end - - def test_should_leave_stubbed_protected_method_unchanged_after_test - klass = Class.new do - def my_instance_method - :original_return_value - end - protected :my_instance_method - end - instance = klass.new - run_test do - klass.any_instance.stubs(:my_instance_method).returns(:new_return_value) - end - assert instance.protected_methods(false).any? { |m| m.to_s == 'my_instance_method' } - assert_equal :original_return_value, instance.send(:my_instance_method) - end - - def test_should_leave_stubbed_private_method_unchanged_after_test - klass = Class.new do - def my_instance_method - :original_return_value - end - private :my_instance_method - end - instance = klass.new - run_test do - klass.any_instance.stubs(:my_instance_method).returns(:new_return_value) - end - assert instance.private_methods(false).any? { |m| m.to_s == 'my_instance_method' } - assert_equal :original_return_value, instance.send(:my_instance_method) - end - - def test_should_reset_expectations_after_test - klass = Class.new do - def my_instance_method - :original_return_value - end - end - instance = klass.new - run_test do - klass.any_instance.stubs(:my_instance_method).returns(:new_return_value) - end - assert_equal 0, klass.any_instance.mocha.expectations.length - end - - def test_should_be_able_to_stub_a_superclass_method - superklass = Class.new do - def my_superclass_method - :original_return_value - end - end - klass = Class.new(superklass) - instance = klass.new - test_result = run_test do - klass.any_instance.stubs(:my_superclass_method).returns(:new_return_value) - assert_equal :new_return_value, instance.my_superclass_method - end - assert_passed(test_result) - assert instance.public_methods(true).any? { |m| m.to_s == 'my_superclass_method' } - assert !klass.public_methods(false).any? 
{ |m| m.to_s == 'my_superclass_method' } - assert_equal :original_return_value, instance.my_superclass_method - end - - def test_should_be_able_to_stub_method_if_ruby18_public_instance_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby18_klass = Class.new do - class << self - def public_instance_methods(include_superclass = true) - ['my_instance_method'] - end - end - end - test_result = run_test do - ruby18_klass.any_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_klass.new.my_instance_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby19_public_instance_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby19_klass = Class.new do - class << self - def public_instance_methods(include_superclass = true) - [:my_instance_method] - end - end - end - test_result = run_test do - ruby19_klass.any_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_klass.new.my_instance_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby18_protected_instance_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby18_klass = Class.new do - class << self - def protected_instance_methods(include_superclass = true) - ['my_instance_method'] - end - end - end - test_result = run_test do - ruby18_klass.any_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_klass.new.my_instance_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby19_protected_instance_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby19_klass = Class.new do - class << self - def protected_instance_methods(include_superclass = true) - [:my_instance_method] - end - end - end - test_result = run_test do - ruby19_klass.any_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_klass.new.my_instance_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby18_private_instance_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby18_klass = Class.new do - class << self - def private_instance_methods(include_superclass = true) - ['my_instance_method'] - end - end - end - test_result = run_test do - ruby18_klass.any_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_klass.new.my_instance_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby19_private_instance_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby19_klass = Class.new do - class << self - def private_instance_methods(include_superclass = true) - [:my_instance_method] - end - end - end - test_result = run_test do - ruby19_klass.any_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_klass.new.my_instance_method - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stub_class_method_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stub_class_method_test.rb deleted file mode 100644 index 2a21ba6..0000000 --- 
a/vendor/gems/mocha-0.9.3/test/acceptance/stub_class_method_test.rb +++ /dev/null @@ -1,203 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubClassMethodTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_stub_method_within_test - klass = Class.new do - class << self - def my_class_method - :original_return_value - end - end - end - test_result = run_test do - klass.stubs(:my_class_method).returns(:new_return_value) - assert_equal :new_return_value, klass.my_class_method - end - assert_passed(test_result) - end - - def test_should_leave_stubbed_public_method_unchanged_after_test - klass = Class.new do - class << self - def my_class_method - :original_return_value - end - end - end - run_test do - klass.stubs(:my_class_method).returns(:new_return_value) - end - assert klass.public_methods(false).any? { |m| m.to_s == 'my_class_method' } - assert_equal :original_return_value, klass.my_class_method - end - - def test_should_leave_stubbed_protected_method_unchanged_after_test - klass = Class.new do - class << self - def my_class_method - :original_return_value - end - protected :my_class_method - end - end - run_test do - klass.stubs(:my_class_method).returns(:new_return_value) - end - assert klass.protected_methods(false).any? { |m| m.to_s == 'my_class_method' } - assert_equal :original_return_value, klass.send(:my_class_method) - end - - def test_should_leave_stubbed_private_method_unchanged_after_test - klass = Class.new do - class << self - def my_class_method - :original_return_value - end - private :my_class_method - end - end - run_test do - klass.stubs(:my_class_method).returns(:new_return_value) - end - assert klass.private_methods(false).any? { |m| m.to_s == 'my_class_method' } - assert_equal :original_return_value, klass.send(:my_class_method) - end - - def test_should_reset_class_expectations_after_test - klass = Class.new do - class << self - def my_class_method - :original_return_value - end - end - end - run_test do - klass.stubs(:my_class_method) - end - assert_equal 0, klass.mocha.expectations.length - end - - def test_should_be_able_to_stub_a_superclass_method - superklass = Class.new do - class << self - def my_superclass_method - :original_return_value - end - end - end - klass = Class.new(superklass) - test_result = run_test do - klass.stubs(:my_superclass_method).returns(:new_return_value) - assert_equal :new_return_value, klass.my_superclass_method - end - assert_passed(test_result) - superklass_public_methods = superklass.public_methods - superklass.superclass.public_methods - assert superklass_public_methods.any? { |m| m.to_s == 'my_superclass_method' } - klass_public_methods = klass.public_methods - klass.superclass.public_methods - assert !klass_public_methods.any? 
{ |m| m.to_s == 'my_superclass_method' } - assert_equal :original_return_value, superklass.my_superclass_method - end - - def test_should_be_able_to_stub_method_if_ruby18_public_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby18_klass = Class.new do - class << self - def public_methods(include_superclass = true) - ['my_class_method'] - end - end - end - test_result = run_test do - ruby18_klass.stubs(:my_class_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_klass.my_class_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby19_public_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby19_klass = Class.new do - class << self - def public_methods(include_superclass = true) - [:my_class_method] - end - end - end - test_result = run_test do - ruby19_klass.stubs(:my_class_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_klass.my_class_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby18_protected_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby18_klass = Class.new do - class << self - def protected_methods(include_superclass = true) - ['my_class_method'] - end - end - end - test_result = run_test do - ruby18_klass.stubs(:my_class_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_klass.my_class_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby19_protected_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby19_klass = Class.new do - class << self - def protected_methods(include_superclass = true) - [:my_class_method] - end - end - end - test_result = run_test do - ruby19_klass.stubs(:my_class_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_klass.my_class_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby18_private_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby18_klass = Class.new do - class << self - def private_methods(include_superclass = true) - ['my_class_method'] - end - end - end - test_result = run_test do - ruby18_klass.stubs(:my_class_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_klass.my_class_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby19_private_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby19_klass = Class.new do - class << self - def private_methods(include_superclass = true) - [:my_class_method] - end - end - end - test_result = run_test do - ruby19_klass.stubs(:my_class_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_klass.my_class_method - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stub_everything_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stub_everything_test.rb deleted file mode 100644 index b792eda..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stub_everything_test.rb +++ /dev/null @@ -1,56 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubEverythingTest < Test::Unit::TestCase - - include AcceptanceTest - - def 
setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_build_stub_and_explicitly_add_an_expectation - test_result = run_test do - foo = stub_everything() - foo.stubs(:bar) - foo.bar - foo.unexpected_invocation - end - assert_passed(test_result) - end - - def test_should_build_named_stub_and_explicitly_add_an_expectation - test_result = run_test do - foo = stub_everything('foo') - foo.stubs(:bar) - foo.bar - foo.unexpected_invocation - end - assert_passed(test_result) - end - - def test_should_build_stub_incorporating_two_expectations - test_result = run_test do - foo = stub_everything(:bar => 'bar', :baz => 'baz') - foo.bar - foo.baz - foo.unexpected_invocation - end - assert_passed(test_result) - end - - def test_should_build_named_stub_incorporating_two_expectations - test_result = run_test do - foo = stub_everything('foo', :bar => 'bar', :baz => 'baz') - foo.bar - foo.baz - foo.unexpected_invocation - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stub_instance_method_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stub_instance_method_test.rb deleted file mode 100644 index 74b93ec..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stub_instance_method_test.rb +++ /dev/null @@ -1,165 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubInstanceMethodTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_leave_stubbed_public_method_unchanged_after_test - instance = Class.new do - def my_instance_method - :original_return_value - end - end.new - run_test do - instance.stubs(:my_instance_method).returns(:new_return_value) - end - assert instance.public_methods(false).any? { |m| m.to_s == 'my_instance_method' } - assert_equal :original_return_value, instance.my_instance_method - end - - def test_should_leave_stubbed_protected_method_unchanged_after_test - instance = Class.new do - def my_instance_method - :original_return_value - end - protected :my_instance_method - end.new - run_test do - instance.stubs(:my_instance_method).returns(:new_return_value) - end - assert instance.protected_methods(false).any? { |m| m.to_s == 'my_instance_method' } - assert_equal :original_return_value, instance.send(:my_instance_method) - end - - def test_should_leave_stubbed_private_method_unchanged_after_test - instance = Class.new do - def my_instance_method - :original_return_value - end - private :my_instance_method - end.new - run_test do - instance.stubs(:my_instance_method).returns(:new_return_value) - end - assert instance.private_methods(false).any? 
{ |m| m.to_s == 'my_instance_method' } - assert_equal :original_return_value, instance.send(:my_instance_method) - end - - def test_should_reset_expectations_after_test - instance = Class.new do - def my_instance_method - :original_return_value - end - end.new - run_test do - instance.stubs(:my_instance_method).returns(:new_return_value) - end - assert_equal 0, instance.mocha.expectations.length - end - - def test_should_be_able_to_stub_a_superclass_method - superklass = Class.new do - def my_superclass_method - :original_return_value - end - end - klass = Class.new(superklass) - instance = klass.new - test_result = run_test do - instance.stubs(:my_superclass_method).returns(:new_return_value) - assert_equal :new_return_value, instance.my_superclass_method - end - assert_passed(test_result) - assert instance.public_methods(true).any? { |m| m.to_s == 'my_superclass_method' } - assert !instance.public_methods(false).any? { |m| m.to_s == 'my_superclass_method' } - assert_equal :original_return_value, instance.my_superclass_method - end - - def test_should_be_able_to_stub_method_if_ruby18_public_methods_include_method_but_method_does_not_exist_like_active_record_association_proxy - ruby18_instance = Class.new do - def public_methods(include_superclass = true) - ['my_instance_method'] - end - end.new - test_result = run_test do - ruby18_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_instance.my_instance_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby19_public_methods_include_method_but_method_does_not_exist_like_active_record_association_proxy - ruby19_instance = Class.new do - def public_methods(include_superclass = true) - [:my_instance_method] - end - end.new - test_result = run_test do - ruby19_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_instance.my_instance_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby18_protected_methods_include_method_but_method_does_not_exist_like_active_record_association_proxy - ruby18_instance = Class.new do - def protected_methods(include_superclass = true) - ['my_instance_method'] - end - end.new - test_result = run_test do - ruby18_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_instance.my_instance_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby19_protected_methods_include_method_but_method_does_not_exist_like_active_record_association_proxy - ruby19_instance = Class.new do - def protected_methods(include_superclass = true) - [:my_instance_method] - end - end.new - test_result = run_test do - ruby19_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_instance.my_instance_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby18_private_methods_include_method_but_method_does_not_exist_like_active_record_association_proxy - ruby18_instance = Class.new do - def private_methods(include_superclass = true) - ['my_instance_method'] - end - end.new - test_result = run_test do - ruby18_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_instance.my_instance_method - end - assert_passed(test_result) - end - - def 
test_should_be_able_to_stub_method_if_ruby19_private_methods_include_method_but_method_does_not_exist_like_active_record_association_proxy - ruby19_instance = Class.new do - def private_methods(include_superclass = true) - [:my_instance_method] - end - end.new - test_result = run_test do - ruby19_instance.stubs(:my_instance_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_instance.my_instance_method - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stub_module_method_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stub_module_method_test.rb deleted file mode 100644 index 99da7db..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stub_module_method_test.rb +++ /dev/null @@ -1,163 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubModuleMethodTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_stub_method_within_test - mod = Module.new { def self.my_module_method; :original_return_value; end } - test_result = run_test do - mod.stubs(:my_module_method).returns(:new_return_value) - assert_equal :new_return_value, mod.my_module_method - end - assert_passed(test_result) - end - - def test_should_leave_stubbed_public_method_unchanged_after_test - mod = Module.new { class << self; def my_module_method; :original_return_value; end; public :my_module_method; end } - run_test do - mod.stubs(:my_module_method).returns(:new_return_value) - end - assert mod.public_methods(false).any? { |m| m.to_s == 'my_module_method' } - assert_equal :original_return_value, mod.my_module_method - end - - def test_should_leave_stubbed_protected_method_unchanged_after_test - mod = Module.new { class << self; def my_module_method; :original_return_value; end; protected :my_module_method; end } - run_test do - mod.stubs(:my_module_method).returns(:new_return_value) - end - assert mod.protected_methods(false).any? { |m| m.to_s == 'my_module_method' } - assert_equal :original_return_value, mod.send(:my_module_method) - end - - def test_should_leave_stubbed_private_method_unchanged_after_test - mod = Module.new { class << self; def my_module_method; :original_return_value; end; private :my_module_method; end } - run_test do - mod.stubs(:my_module_method).returns(:new_return_value) - end - assert mod.private_methods(false).any? { |m| m.to_s == 'my_module_method' } - assert_equal :original_return_value, mod.send(:my_module_method) - end - - def test_should_reset_expectations_after_test - mod = Module.new { def self.my_module_method; :original_return_value; end } - run_test do - mod.stubs(:my_module_method) - end - assert_equal 0, mod.mocha.expectations.length - end - - def test_should_be_able_to_stub_a_superclass_method - supermod = Module.new { def self.my_superclass_method; :original_return_value; end } - mod = Module.new { include supermod } - test_result = run_test do - mod.stubs(:my_superclass_method).returns(:new_return_value) - assert_equal :new_return_value, mod.my_superclass_method - end - assert_passed(test_result) - assert supermod.public_methods.any? { |m| m.to_s == 'my_superclass_method' } - assert !mod.public_methods(false).any? 
{ |m| m.to_s == 'my_superclass_method' } - assert_equal :original_return_value, supermod.my_superclass_method - end - - def test_should_be_able_to_stub_method_if_ruby18_public_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby18_mod = Module.new do - class << self - def public_methods(include_superclass = true) - ['my_module_method'] - end - end - end - test_result = run_test do - ruby18_mod.stubs(:my_module_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_mod.my_module_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby19_public_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby19_mod = Module.new do - class << self - def public_methods(include_superclass = true) - [:my_module_method] - end - end - end - test_result = run_test do - ruby19_mod.stubs(:my_module_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_mod.my_module_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby_18_protected_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby18_mod = Module.new do - class << self - def protected_methods(include_superclass = true) - ['my_module_method'] - end - end - end - test_result = run_test do - ruby18_mod.stubs(:my_module_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_mod.my_module_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby19_protected_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby19_mod = Module.new do - class << self - def protected_methods(include_superclass = true) - [:my_module_method] - end - end - end - test_result = run_test do - ruby19_mod.stubs(:my_module_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_mod.my_module_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby18_private_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby18_mod = Module.new do - class << self - def private_methods(include_superclass = true) - ['my_module_method'] - end - end - end - test_result = run_test do - ruby18_mod.stubs(:my_module_method).returns(:new_return_value) - assert_equal :new_return_value, ruby18_mod.my_module_method - end - assert_passed(test_result) - end - - def test_should_be_able_to_stub_method_if_ruby19_private_methods_include_method_but_method_does_not_actually_exist_like_active_record_association_proxy - ruby19_mod = Module.new do - class << self - def private_methods(include_superclass = true) - [:my_module_method] - end - end - end - test_result = run_test do - ruby19_mod.stubs(:my_module_method).returns(:new_return_value) - assert_equal :new_return_value, ruby19_mod.my_module_method - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stub_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stub_test.rb deleted file mode 100644 index 5b12042..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stub_test.rb +++ /dev/null @@ -1,52 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - 
teardown_acceptance_test - end - - def test_should_build_stub_and_explicitly_add_an_expectation - test_result = run_test do - foo = stub() - foo.stubs(:bar) - foo.bar - end - assert_passed(test_result) - end - - def test_should_build_named_stub_and_explicitly_add_an_expectation - test_result = run_test do - foo = stub('foo') - foo.stubs(:bar) - foo.bar - end - assert_passed(test_result) - end - - def test_should_build_stub_incorporating_two_expectations - test_result = run_test do - foo = stub(:bar => 'bar', :baz => 'baz') - foo.bar - foo.baz - end - assert_passed(test_result) - end - - def test_should_build_named_stub_incorporating_two_expectations - test_result = run_test do - foo = stub('foo', :bar => 'bar', :baz => 'baz') - foo.bar - foo.baz - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubba_example_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubba_example_test.rb deleted file mode 100644 index bd5a84d..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubba_example_test.rb +++ /dev/null @@ -1,102 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha' - -class Widget - - def model - 'original_model' - end - - class << self - - def find(options) - [] - end - - def create(attributes) - Widget.new - end - - end - -end - -module Thingy - - def self.wotsit - :hoojamaflip - end - -end - -class StubbaExampleTest < Test::Unit::TestCase - - def test_should_stub_instance_method - widget = Widget.new - widget.expects(:model).returns('different_model') - assert_equal 'different_model', widget.model - end - - def test_should_stub_module_method - should_stub_module_method - end - - def test_should_stub_module_method_again - should_stub_module_method - end - - def test_should_stub_class_method - should_stub_class_method - end - - def test_should_stub_class_method_again - should_stub_class_method - end - - def test_should_stub_instance_method_on_any_instance_of_a_class - should_stub_instance_method_on_any_instance_of_a_class - end - - def test_should_stub_instance_method_on_any_instance_of_a_class_again - should_stub_instance_method_on_any_instance_of_a_class - end - - def test_should_stub_two_different_class_methods - should_stub_two_different_class_methods - end - - def test_should_stub_two_different_class_methods_again - should_stub_two_different_class_methods - end - - private - - def should_stub_module_method - Thingy.expects(:wotsit).returns(:dooda) - assert_equal :dooda, Thingy.wotsit - end - - def should_stub_class_method - widgets = [Widget.new] - Widget.expects(:find).with(:all).returns(widgets) - assert_equal widgets, Widget.find(:all) - end - - def should_stub_two_different_class_methods - found_widgets = [Widget.new] - created_widget = Widget.new - Widget.expects(:find).with(:all).returns(found_widgets) - Widget.expects(:create).with(:model => 'wombat').returns(created_widget) - assert_equal found_widgets, Widget.find(:all) - assert_equal created_widget, Widget.create(:model => 'wombat') - end - - def should_stub_instance_method_on_any_instance_of_a_class - Widget.any_instance.expects(:model).at_least_once.returns('another_model') - widget_1 = Widget.new - widget_2 = Widget.new - assert_equal 'another_model', widget_1.model - assert_equal 'another_model', widget_2.model - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubba_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubba_test.rb deleted file mode 100644 
index 120c86e..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubba_test.rb +++ /dev/null @@ -1,15 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'deprecation_disabler' - -class StubbaTest < Test::Unit::TestCase - - include DeprecationDisabler - - def test_should_report_deprecation_of_stubba_which_will_be_removed_in_a_future_release - disable_deprecations do - load 'stubba.rb' - end - assert_equal ["require 'stubba' is no longer needed and stubba.rb will soon be removed"], Mocha::Deprecation.messages - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubba_test_result_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubba_test_result_test.rb deleted file mode 100644 index 3f64078..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubba_test_result_test.rb +++ /dev/null @@ -1,66 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' -require 'execution_point' - -class StubbaTestResultTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_include_expectation_verification_in_assertion_count - test_result = run_test do - object = Class.new { def message; end }.new - object.expects(:message) - object.message - end - assert_equal 1, test_result.assertion_count - end - - def test_should_include_assertions_in_assertion_count - test_result = run_test do - assert true - end - assert_equal 1, test_result.assertion_count - end - - def test_should_not_include_stubbing_expectation_verification_in_assertion_count - test_result = run_test do - object = Class.new { def message; end }.new - object.stubs(:message) - object.message - end - assert_equal 0, test_result.assertion_count - end - - def test_should_include_expectation_verification_failure_in_failure_count - test_result = run_test do - object = Class.new { def message; end }.new - object.expects(:message) - end - assert_equal 1, test_result.failure_count - end - - def test_should_include_assertion_failure_in_failure_count - test_result = run_test do - flunk - end - assert_equal 1, test_result.failure_count - end - - def test_should_display_backtrace_indicating_line_number_where_failing_assertion_was_called - execution_point = nil - test_result = run_test do - execution_point = ExecutionPoint.current; flunk - end - assert_equal 1, test_result.failure_count - assert_equal execution_point, ExecutionPoint.new(test_result.failures[0].location) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_error_backtrace_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_error_backtrace_test.rb deleted file mode 100644 index 526761d..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_error_backtrace_test.rb +++ /dev/null @@ -1,64 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' -require 'execution_point' - -class StubbingErrorBacktraceTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_display_backtrace_indicating_line_number_where_attempt_to_stub_non_existent_method_was_made - execution_point = nil - object = Object.new - Mocha::Configuration.prevent(:stubbing_non_existent_method) - test_result = run_test do - execution_point = ExecutionPoint.current; object.stubs(:non_existent_method) - end - assert_equal 1, 
test_result.error_count - assert_equal execution_point, ExecutionPoint.new(test_result.errors[0].exception.backtrace) - end - - def test_should_display_backtrace_indicating_line_number_where_attempt_to_stub_non_public_method_was_made - execution_point = nil - object = Class.new do - def non_public_method; end - private :non_public_method - end.new - Mocha::Configuration.prevent(:stubbing_non_public_method) - test_result = run_test do - execution_point = ExecutionPoint.current; object.stubs(:non_public_method) - end - assert_equal 1, test_result.error_count - assert_equal execution_point, ExecutionPoint.new(test_result.errors[0].exception.backtrace) - end - - def test_should_display_backtrace_indicating_line_number_where_attempt_to_stub_method_on_non_mock_object_was_made - execution_point = nil - object = Object.new - Mocha::Configuration.prevent(:stubbing_method_on_non_mock_object) - test_result = run_test do - execution_point = ExecutionPoint.current; object.stubs(:any_method) - end - assert_equal 1, test_result.error_count - assert_equal execution_point, ExecutionPoint.new(test_result.errors[0].exception.backtrace) - end - - def test_should_display_backtrace_indicating_line_number_where_method_was_unnecessarily_stubbed - execution_point = nil - object = Object.new - Mocha::Configuration.prevent(:stubbing_method_unnecessarily) - test_result = run_test do - execution_point = ExecutionPoint.current; object.stubs(:unused_method) - end - assert_equal 1, test_result.error_count - assert_equal execution_point, ExecutionPoint.new(test_result.errors[0].exception.backtrace) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_method_unnecessarily_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_method_unnecessarily_test.rb deleted file mode 100644 index 1d2885a..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_method_unnecessarily_test.rb +++ /dev/null @@ -1,65 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubbingMethodUnnecessarilyTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_allow_stubbing_method_unnecessarily - Mocha::Configuration.allow(:stubbing_method_unnecessarily) - test_result = run_test do - mock = mock('mock') - mock.stubs(:public_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?('stubbing method unnecessarily: #.public_method(any_parameters)') - end - - def test_should_warn_when_stubbing_method_unnecessarily - Mocha::Configuration.warn_when(:stubbing_method_unnecessarily) - test_result = run_test do - mock = mock('mock') - mock.stubs(:public_method) - end - assert_passed(test_result) - assert @logger.warnings.include?('stubbing method unnecessarily: #.public_method(any_parameters)') - end - - def test_should_prevent_stubbing_method_unnecessarily - Mocha::Configuration.prevent(:stubbing_method_unnecessarily) - test_result = run_test do - mock = mock('mock') - mock.stubs(:public_method) - end - assert_failed(test_result) - assert test_result.error_messages.include?('Mocha::StubbingError: stubbing method unnecessarily: #.public_method(any_parameters)') - end - - def test_should_default_to_allow_stubbing_method_unnecessarily - test_result = run_test do - mock = mock('mock') - mock.stubs(:public_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?('stubbing method unnecessarily: #.public_method(any_parameters)') - 
end - - def test_should_allow_stubbing_method_when_stubbed_method_is_invoked - Mocha::Configuration.prevent(:stubbing_method_unnecessarily) - test_result = run_test do - mock = mock('mock') - mock.stubs(:public_method) - mock.public_method - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_existent_any_instance_method_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_existent_any_instance_method_test.rb deleted file mode 100644 index eb7d408..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_existent_any_instance_method_test.rb +++ /dev/null @@ -1,130 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubbingNonExistentAnyInstanceMethodTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_allow_stubbing_non_existent_any_instance_method - Mocha::Configuration.allow(:stubbing_non_existent_method) - klass = Class.new - test_result = run_test do - klass.any_instance.stubs(:non_existent_method) - end - assert !@logger.warnings.include?("stubbing non-existent method: #{klass.any_instance}.non_existent_method") - assert_passed(test_result) - end - - def test_should_warn_when_stubbing_non_existent_any_instance_method - Mocha::Configuration.warn_when(:stubbing_non_existent_method) - klass = Class.new - test_result = run_test do - klass.any_instance.stubs(:non_existent_method) - end - assert_passed(test_result) - assert @logger.warnings.include?("stubbing non-existent method: #{klass.any_instance}.non_existent_method") - end - - def test_should_prevent_stubbing_non_existent_any_instance_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new - test_result = run_test do - klass.any_instance.stubs(:non_existent_method) - end - assert_failed(test_result) - assert test_result.error_messages.include?("Mocha::StubbingError: stubbing non-existent method: #{klass.any_instance}.non_existent_method") - end - - def test_should_default_to_allow_stubbing_non_existent_any_instance_method - klass = Class.new - test_result = run_test do - klass.any_instance.stubs(:non_existent_method) - end - assert !@logger.warnings.include?("stubbing non-existent method: #{klass.any_instance}.non_existent_method") - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_public_any_instance_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new do - def existing_public_method; end - public :existing_public_method - end - test_result = run_test do - klass.any_instance.stubs(:existing_public_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_protected_any_instance_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new do - def existing_protected_method; end - protected :existing_protected_method - end - test_result = run_test do - klass.any_instance.stubs(:existing_protected_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_private_any_instance_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new do - def existing_private_method; end - private :existing_private_method - end - test_result = run_test do - klass.any_instance.stubs(:existing_private_method) - end - assert_passed(test_result) - end - - def 
test_should_allow_stubbing_existing_public_any_instance_superclass_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - superklass = Class.new do - def existing_public_method; end - public :existing_public_method - end - klass = Class.new(superklass) - test_result = run_test do - klass.any_instance.stubs(:existing_public_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_protected_any_instance_superclass_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - superklass = Class.new do - def existing_protected_method; end - protected :existing_protected_method - end - klass = Class.new(superklass) - test_result = run_test do - klass.any_instance.stubs(:existing_protected_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_private_any_instance_superclass_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - superklass = Class.new do - def existing_private_method; end - private :existing_private_method - end - klass = Class.new(superklass) - test_result = run_test do - klass.any_instance.stubs(:existing_private_method) - end - assert_passed(test_result) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_existent_class_method_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_existent_class_method_test.rb deleted file mode 100644 index 9acb6d1..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_existent_class_method_test.rb +++ /dev/null @@ -1,157 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubbingNonExistentClassMethodTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_allow_stubbing_non_existent_class_method - Mocha::Configuration.allow(:stubbing_non_existent_method) - klass = Class.new - test_result = run_test do - klass.stubs(:non_existent_method) - end - assert !@logger.warnings.include?("stubbing non-existent method: #{klass}.non_existent_method") - assert_passed(test_result) - end - - def test_should_warn_when_stubbing_non_existent_class_method - Mocha::Configuration.warn_when(:stubbing_non_existent_method) - klass = Class.new - test_result = run_test do - klass.stubs(:non_existent_method) - end - assert_passed(test_result) - assert @logger.warnings.include?("stubbing non-existent method: #{klass}.non_existent_method") - end - - def test_should_prevent_stubbing_non_existent_class_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new - test_result = run_test do - klass.stubs(:non_existent_method) - end - assert_failed(test_result) - assert test_result.error_messages.include?("Mocha::StubbingError: stubbing non-existent method: #{klass}.non_existent_method") - end - - def test_should_default_to_allow_stubbing_non_existent_class_method - klass = Class.new - test_result = run_test do - klass.stubs(:non_existent_method) - end - assert !@logger.warnings.include?("stubbing non-existent method: #{klass}.non_existent_method") - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_public_class_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new do - class << self - def existing_public_method; end - public :existing_public_method - end - end - test_result = run_test do - klass.stubs(:existing_public_method) - end - assert_passed(test_result) - 
end - - def test_should_allow_stubbing_method_to_which_class_responds - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new do - class << self - def respond_to?(method, include_private = false) - (method == :method_to_which_class_responds) - end - end - end - test_result = run_test do - klass.stubs(:method_to_which_class_responds) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_protected_class_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new do - class << self - def existing_protected_method; end - protected :existing_protected_method - end - end - test_result = run_test do - klass.stubs(:existing_protected_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_private_class_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new do - class << self - def existing_private_method; end - private :existing_private_method - end - end - test_result = run_test do - klass.stubs(:existing_private_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_public_superclass_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - superklass = Class.new do - class << self - def existing_public_method; end - public :existing_public_method - end - end - klass = Class.new(superklass) - test_result = run_test do - klass.stubs(:existing_public_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_protected_superclass_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - superklass = Class.new do - class << self - def existing_protected_method; end - protected :existing_protected_method - end - end - klass = Class.new(superklass) - test_result = run_test do - klass.stubs(:existing_protected_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_private_superclass_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - superklass = Class.new do - class << self - def existing_private_method; end - protected :existing_private_method - end - end - klass = Class.new(superklass) - test_result = run_test do - klass.stubs(:existing_private_method) - end - assert_passed(test_result) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_existent_instance_method_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_existent_instance_method_test.rb deleted file mode 100644 index 0adc80e..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_existent_instance_method_test.rb +++ /dev/null @@ -1,147 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubbingNonExistentInstanceMethodTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_allow_stubbing_non_existent_instance_method - Mocha::Configuration.allow(:stubbing_non_existent_method) - instance = Class.new.new - test_result = run_test do - instance.stubs(:non_existent_method) - end - assert !@logger.warnings.include?("stubbing non-existent method: #{instance}.non_existent_method") - assert_passed(test_result) - end - - def test_should_warn_when_stubbing_non_existent_instance_method - Mocha::Configuration.warn_when(:stubbing_non_existent_method) - instance = Class.new.new - test_result = run_test do - 
instance.stubs(:non_existent_method) - end - assert_passed(test_result) - assert @logger.warnings.include?("stubbing non-existent method: #{instance}.non_existent_method") - end - - def test_should_prevent_stubbing_non_existent_instance_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - instance = Class.new.new - test_result = run_test do - instance.stubs(:non_existent_method) - end - assert_failed(test_result) - assert test_result.error_messages.include?("Mocha::StubbingError: stubbing non-existent method: #{instance}.non_existent_method") - end - - def test_should_default_to_allow_stubbing_non_existent_instance_method - instance = Class.new.new - test_result = run_test do - instance.stubs(:non_existent_method) - end - assert !@logger.warnings.include?("stubbing non-existent method: #{instance}.non_existent_method") - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_public_instance_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new do - def existing_public_method; end - public :existing_public_method - end - instance = klass.new - test_result = run_test do - instance.stubs(:existing_public_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_method_to_which_instance_responds - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new do - def respond_to?(method, include_private = false) - (method == :method_to_which_instance_responds) - end - end - instance = klass.new - test_result = run_test do - instance.stubs(:method_to_which_instance_responds) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_protected_instance_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new do - def existing_protected_method; end - protected :existing_protected_method - end - instance = klass.new - test_result = run_test do - instance.stubs(:existing_protected_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_private_instance_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - klass = Class.new do - def existing_private_method; end - private :existing_private_method - end - instance = klass.new - test_result = run_test do - instance.stubs(:existing_private_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_public_instance_superclass_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - superklass = Class.new do - def existing_public_method; end - public :existing_public_method - end - instance = Class.new(superklass).new - test_result = run_test do - instance.stubs(:existing_public_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_protected_instance_superclass_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - superklass = Class.new do - def existing_protected_method; end - protected :existing_protected_method - end - instance = Class.new(superklass).new - test_result = run_test do - instance.stubs(:existing_protected_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_existing_private_instance_superclass_method - Mocha::Configuration.prevent(:stubbing_non_existent_method) - superklass = Class.new do - def existing_private_method; end - private :existing_private_method - end - instance = Class.new(superklass).new - test_result = run_test do - instance.stubs(:existing_private_method) - end - 
assert_passed(test_result) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_public_any_instance_method_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_public_any_instance_method_test.rb deleted file mode 100644 index 30ceeeb..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_public_any_instance_method_test.rb +++ /dev/null @@ -1,130 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubbingNonPublicAnyInstanceMethodTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_allow_stubbing_private_any_instance_method - Mocha::Configuration.allow(:stubbing_non_public_method) - klass = Class.new do - def private_method; end - private :private_method - end - test_result = run_test do - klass.any_instance.stubs(:private_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{klass.any_instance}.private_method") - end - - def test_should_allow_stubbing_protected_any_instance_method - Mocha::Configuration.allow(:stubbing_non_public_method) - klass = Class.new do - def protected_method; end - protected :protected_method - end - test_result = run_test do - klass.any_instance.stubs(:protected_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{klass.any_instance}.protected_method") - end - - def test_should_warn_when_stubbing_private_any_instance_method - Mocha::Configuration.warn_when(:stubbing_non_public_method) - klass = Class.new do - def private_method; end - private :private_method - end - test_result = run_test do - klass.any_instance.stubs(:private_method) - end - assert_passed(test_result) - assert @logger.warnings.include?("stubbing non-public method: #{klass.any_instance}.private_method") - end - - def test_should_warn_when_stubbing_protected_any_instance_method - Mocha::Configuration.warn_when(:stubbing_non_public_method) - klass = Class.new do - def protected_method; end - protected :protected_method - end - test_result = run_test do - klass.any_instance.stubs(:protected_method) - end - assert_passed(test_result) - assert @logger.warnings.include?("stubbing non-public method: #{klass.any_instance}.protected_method") - end - - def test_should_prevent_stubbing_private_any_instance_method - Mocha::Configuration.prevent(:stubbing_non_public_method) - klass = Class.new do - def private_method; end - private :private_method - end - test_result = run_test do - klass.any_instance.stubs(:private_method) - end - assert_failed(test_result) - assert test_result.error_messages.include?("Mocha::StubbingError: stubbing non-public method: #{klass.any_instance}.private_method") - end - - def test_should_prevent_stubbing_protected_any_instance_method - Mocha::Configuration.prevent(:stubbing_non_public_method) - klass = Class.new do - def protected_method; end - protected :protected_method - end - test_result = run_test do - klass.any_instance.stubs(:protected_method) - end - assert_failed(test_result) - assert test_result.error_messages.include?("Mocha::StubbingError: stubbing non-public method: #{klass.any_instance}.protected_method") - end - - def test_should_default_to_allow_stubbing_private_any_instance_method - klass = Class.new do - def private_method; end - private :private_method - end - test_result = run_test do - klass.any_instance.stubs(:private_method) - end - 
assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{klass.any_instance}.private_method") - end - - def test_should_default_to_allow_stubbing_protected_any_instance_method - klass = Class.new do - def protected_method; end - protected :protected_method - end - test_result = run_test do - klass.any_instance.stubs(:protected_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{klass.any_instance}.protected_method") - end - - def test_should_allow_stubbing_public_any_instance_method - Mocha::Configuration.prevent(:stubbing_non_public_method) - klass = Class.new do - def public_method; end - public :public_method - end - test_result = run_test do - klass.any_instance.stubs(:public_method) - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_public_class_method_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_public_class_method_test.rb deleted file mode 100644 index b6a89f2..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_public_class_method_test.rb +++ /dev/null @@ -1,163 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubbingNonPublicClassMethodTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_allow_stubbing_private_class_method - Mocha::Configuration.allow(:stubbing_non_public_method) - klass = Class.new do - class << self - def private_method; end - private :private_method - end - end - test_result = run_test do - klass.stubs(:private_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{klass}.private_method") - end - - def test_should_allow_stubbing_protected_class_method - Mocha::Configuration.allow(:stubbing_non_public_method) - klass = Class.new do - class << self - def protected_method; end - protected :protected_method - end - end - test_result = run_test do - klass.stubs(:protected_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{klass}.protected_method") - end - - def test_should_warn_when_stubbing_private_class_method - Mocha::Configuration.warn_when(:stubbing_non_public_method) - klass = Class.new do - class << self - def private_method; end - private :private_method - end - end - test_result = run_test do - klass.stubs(:private_method) - end - assert_passed(test_result) - assert @logger.warnings.include?("stubbing non-public method: #{klass}.private_method") - end - - def test_should_warn_when_stubbing_protected_class_method - Mocha::Configuration.warn_when(:stubbing_non_public_method) - klass = Class.new do - class << self - def protected_method; end - protected :protected_method - end - end - test_result = run_test do - klass.stubs(:protected_method) - end - assert_passed(test_result) - assert @logger.warnings.include?("stubbing non-public method: #{klass}.protected_method") - end - - def test_should_prevent_stubbing_private_class_method - Mocha::Configuration.prevent(:stubbing_non_public_method) - klass = Class.new do - class << self - def private_method; end - private :private_method - end - end - test_result = run_test do - klass.stubs(:private_method) - end - assert_failed(test_result) - assert test_result.error_messages.include?("Mocha::StubbingError: stubbing 
non-public method: #{klass}.private_method") - end - - def test_should_prevent_stubbing_protected_class_method - Mocha::Configuration.prevent(:stubbing_non_public_method) - klass = Class.new do - class << self - def protected_method; end - protected :protected_method - end - end - test_result = run_test do - klass.stubs(:protected_method) - end - assert_failed(test_result) - assert test_result.error_messages.include?("Mocha::StubbingError: stubbing non-public method: #{klass}.protected_method") - end - - def test_should_default_to_allow_stubbing_private_class_method - klass = Class.new do - class << self - def private_method; end - private :private_method - end - end - test_result = run_test do - klass.stubs(:private_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{klass}.private_method") - end - - def test_should_default_to_allow_stubbing_protected_class_method - klass = Class.new do - class << self - def protected_method; end - protected :protected_method - end - end - test_result = run_test do - klass.stubs(:protected_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{klass}.protected_method") - end - - def test_should_allow_stubbing_public_class_method - Mocha::Configuration.prevent(:stubbing_non_public_method) - klass = Class.new do - class << self - def public_method; end - public :public_method - end - end - test_result = run_test do - klass.stubs(:public_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_method_to_which_class_responds - Mocha::Configuration.prevent(:stubbing_non_public_method) - klass = Class.new do - class << self - def respond_to?(method, include_private_methods = false) - (method == :method_to_which_class_responds) - end - end - end - test_result = run_test do - klass.stubs(:method_to_which_class_responds) - end - assert_passed(test_result) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_public_instance_method_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_public_instance_method_test.rb deleted file mode 100644 index 71c67d0..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_non_public_instance_method_test.rb +++ /dev/null @@ -1,143 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubbingNonPublicInstanceMethodTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - end - - def teardown - teardown_acceptance_test - end - - def test_should_allow_stubbing_private_instance_method - Mocha::Configuration.allow(:stubbing_non_public_method) - instance = Class.new do - def private_method; end - private :private_method - end.new - test_result = run_test do - instance.stubs(:private_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{instance}.private_method") - end - - def test_should_allow_stubbing_protected_instance_method - Mocha::Configuration.allow(:stubbing_non_public_method) - instance = Class.new do - def protected_method; end - protected :protected_method - end.new - test_result = run_test do - instance.stubs(:protected_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{instance}.protected_method") - end - - def test_should_warn_when_stubbing_private_instance_method - Mocha::Configuration.warn_when(:stubbing_non_public_method) - instance = 
Class.new do - def private_method; end - private :private_method - end.new - test_result = run_test do - instance.stubs(:private_method) - end - assert_passed(test_result) - assert @logger.warnings.include?("stubbing non-public method: #{instance}.private_method") - end - - def test_should_warn_when_stubbing_protected_instance_method - Mocha::Configuration.warn_when(:stubbing_non_public_method) - instance = Class.new do - def protected_method; end - protected :protected_method - end.new - test_result = run_test do - instance.stubs(:protected_method) - end - assert_passed(test_result) - assert @logger.warnings.include?("stubbing non-public method: #{instance}.protected_method") - end - - def test_should_prevent_stubbing_private_instance_method - Mocha::Configuration.prevent(:stubbing_non_public_method) - instance = Class.new do - def private_method; end - private :private_method - end.new - test_result = run_test do - instance.stubs(:private_method) - end - assert_failed(test_result) - assert test_result.error_messages.include?("Mocha::StubbingError: stubbing non-public method: #{instance}.private_method") - end - - def test_should_prevent_stubbing_protected_instance_method - Mocha::Configuration.prevent(:stubbing_non_public_method) - instance = Class.new do - def protected_method; end - protected :protected_method - end.new - test_result = run_test do - instance.stubs(:protected_method) - end - assert_failed(test_result) - assert test_result.error_messages.include?("Mocha::StubbingError: stubbing non-public method: #{instance}.protected_method") - end - - def test_should_default_to_allow_stubbing_private_instance_method - instance = Class.new do - def private_method; end - private :private_method - end.new - test_result = run_test do - instance.stubs(:private_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{instance}.private_method") - end - - def test_should_default_to_allow_stubbing_protected_instance_method - instance = Class.new do - def protected_method; end - protected :protected_method - end.new - test_result = run_test do - instance.stubs(:protected_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing non-public method: #{instance}.protected_method") - end - - def test_should_allow_stubbing_public_instance_method - Mocha::Configuration.prevent(:stubbing_non_public_method) - instance = Class.new do - def public_method; end - public :public_method - end.new - test_result = run_test do - instance.stubs(:public_method) - end - assert_passed(test_result) - end - - def test_should_allow_stubbing_method_to_which_instance_responds - Mocha::Configuration.prevent(:stubbing_non_public_method) - instance = Class.new do - def respond_to?(method, include_private_methods = false) - (method == :method_to_which_instance_responds) - end - end.new - test_result = run_test do - instance.stubs(:method_to_which_instance_responds) - end - assert_passed(test_result) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_on_non_mock_object_test.rb b/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_on_non_mock_object_test.rb deleted file mode 100644 index 93bdc0d..0000000 --- a/vendor/gems/mocha-0.9.3/test/acceptance/stubbing_on_non_mock_object_test.rb +++ /dev/null @@ -1,64 +0,0 @@ -require File.join(File.dirname(__FILE__), "acceptance_test_helper") -require 'mocha' - -class StubbingOnNonMockObjectTest < Test::Unit::TestCase - - include AcceptanceTest - - def setup - setup_acceptance_test - 
end - - def teardown - teardown_acceptance_test - end - - def test_should_allow_stubbing_method_on_non_mock_object - Mocha::Configuration.allow(:stubbing_method_on_non_mock_object) - non_mock_object = Class.new { def existing_method; end } - test_result = run_test do - non_mock_object.stubs(:existing_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing method on non-mock object: #{non_mock_object}.existing_method") - end - - def test_should_warn_on_stubbing_method_on_non_mock_object - Mocha::Configuration.warn_when(:stubbing_method_on_non_mock_object) - non_mock_object = Class.new { def existing_method; end } - test_result = run_test do - non_mock_object.stubs(:existing_method) - end - assert_passed(test_result) - assert @logger.warnings.include?("stubbing method on non-mock object: #{non_mock_object}.existing_method") - end - - def test_should_prevent_stubbing_method_on_non_mock_object - Mocha::Configuration.prevent(:stubbing_method_on_non_mock_object) - non_mock_object = Class.new { def existing_method; end } - test_result = run_test do - non_mock_object.stubs(:existing_method) - end - assert_failed(test_result) - assert test_result.error_messages.include?("Mocha::StubbingError: stubbing method on non-mock object: #{non_mock_object}.existing_method") - end - - def test_should_default_to_allow_stubbing_method_on_non_mock_object - non_mock_object = Class.new { def existing_method; end } - test_result = run_test do - non_mock_object.stubs(:existing_method) - end - assert_passed(test_result) - assert !@logger.warnings.include?("stubbing method on non-mock object: #{non_mock_object}.existing_method") - end - - def test_should_allow_stubbing_method_on_mock_object - Mocha::Configuration.prevent(:stubbing_method_on_non_mock_object) - test_result = run_test do - mock = mock('mock') - mock.stubs(:any_method) - end - assert_passed(test_result) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/active_record_test_case.rb b/vendor/gems/mocha-0.9.3/test/active_record_test_case.rb deleted file mode 100644 index ae65073..0000000 --- a/vendor/gems/mocha-0.9.3/test/active_record_test_case.rb +++ /dev/null @@ -1,36 +0,0 @@ -module ActiveRecordTestCase - - def setup_with_fixtures - methods_called << :setup_with_fixtures - end - - alias_method :setup, :setup_with_fixtures - - def teardown_with_fixtures - methods_called << :teardown_with_fixtures - end - - alias_method :teardown, :teardown_with_fixtures - - def self.method_added(method) - case method.to_s - when 'setup' - unless method_defined?(:setup_without_fixtures) - alias_method :setup_without_fixtures, :setup - define_method(:setup) do - setup_with_fixtures - setup_without_fixtures - end - end - when 'teardown' - unless method_defined?(:teardown_without_fixtures) - alias_method :teardown_without_fixtures, :teardown - define_method(:teardown) do - teardown_without_fixtures - teardown_with_fixtures - end - end - end - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/deprecation_disabler.rb b/vendor/gems/mocha-0.9.3/test/deprecation_disabler.rb deleted file mode 100644 index c57fb3c..0000000 --- a/vendor/gems/mocha-0.9.3/test/deprecation_disabler.rb +++ /dev/null @@ -1,15 +0,0 @@ -require 'mocha/deprecation' - -module DeprecationDisabler - - def disable_deprecations - original_mode = Mocha::Deprecation.mode - Mocha::Deprecation.mode = :disabled - begin - yield - ensure - Mocha::Deprecation.mode = original_mode - end - end - -end \ No newline at end of file 
diff --git a/vendor/gems/mocha-0.9.3/test/execution_point.rb b/vendor/gems/mocha-0.9.3/test/execution_point.rb deleted file mode 100644 index 5824d2a..0000000 --- a/vendor/gems/mocha-0.9.3/test/execution_point.rb +++ /dev/null @@ -1,36 +0,0 @@ -class ExecutionPoint - - attr_reader :backtrace - - def self.current - new(caller) - end - - def initialize(backtrace) - @backtrace = backtrace - end - - def file_name - return "unknown" unless @backtrace && @backtrace.first - /\A(.*?):\d+/.match(@backtrace.first)[1] - end - - def line_number - return "unknown" unless @backtrace && @backtrace.first - Integer(/\A.*?:(\d+)/.match(@backtrace.first)[1]) - end - - def ==(other) - return false unless other.is_a?(ExecutionPoint) - (file_name == other.file_name) and (line_number == other.line_number) - end - - def to_s - "file: #{file_name}; line: #{line_number}" - end - - def inspect - to_s - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/method_definer.rb b/vendor/gems/mocha-0.9.3/test/method_definer.rb deleted file mode 100644 index 816aa49..0000000 --- a/vendor/gems/mocha-0.9.3/test/method_definer.rb +++ /dev/null @@ -1,24 +0,0 @@ -require 'mocha/metaclass' - -module Mocha - - module ObjectMethods - def define_instance_method(method_symbol, &block) - __metaclass__.send(:define_method, method_symbol, block) - end - - def replace_instance_method(method_symbol, &block) - raise "Cannot replace #{method_symbol} as #{self} does not respond to it." unless self.respond_to?(method_symbol) - define_instance_method(method_symbol, &block) - end - - def define_instance_accessor(*symbols) - symbols.each { |symbol| __metaclass__.send(:attr_accessor, symbol) } - end - end - -end - -class Object - include Mocha::ObjectMethods -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/simple_counter.rb b/vendor/gems/mocha-0.9.3/test/simple_counter.rb deleted file mode 100644 index a7b5b37..0000000 --- a/vendor/gems/mocha-0.9.3/test/simple_counter.rb +++ /dev/null @@ -1,13 +0,0 @@ -class SimpleCounter - - attr_reader :count - - def initialize - @count = 0 - end - - def increment - @count += 1 - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/test_helper.rb b/vendor/gems/mocha-0.9.3/test/test_helper.rb deleted file mode 100644 index 54d3476..0000000 --- a/vendor/gems/mocha-0.9.3/test/test_helper.rb +++ /dev/null @@ -1,11 +0,0 @@ -unless defined?(STANDARD_OBJECT_PUBLIC_INSTANCE_METHODS) - STANDARD_OBJECT_PUBLIC_INSTANCE_METHODS = Object.public_instance_methods -end - -$:.unshift File.expand_path(File.join(File.dirname(__FILE__), "..", "lib")) -$:.unshift File.expand_path(File.join(File.dirname(__FILE__))) -$:.unshift File.expand_path(File.join(File.dirname(__FILE__), 'unit')) -$:.unshift File.expand_path(File.join(File.dirname(__FILE__), 'unit', 'parameter_matchers')) -$:.unshift File.expand_path(File.join(File.dirname(__FILE__), 'acceptance')) - -require 'test/unit' \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/test_runner.rb b/vendor/gems/mocha-0.9.3/test/test_runner.rb deleted file mode 100644 index 6537991..0000000 --- a/vendor/gems/mocha-0.9.3/test/test_runner.rb +++ /dev/null @@ -1,33 +0,0 @@ -require 'test/unit/testresult' -require 'test/unit/testcase' - -module TestRunner - - def run_test(test_result = Test::Unit::TestResult.new, &block) - test_class = Class.new(Test::Unit::TestCase) do - define_method(:test_me, &block) - end - test = test_class.new(:test_me) - test.run(test_result) {} - class << test_result - attr_reader :failures, 
:errors - def failure_messages - failures.map { |failure| failure.message } - end - def error_messages - errors.map { |error| error.message } - end - end - test_result - end - - def assert_passed(test_result) - flunk "Test failed unexpectedly with message: #{test_result.failures}" if test_result.failure_count > 0 - flunk "Test failed unexpectedly with message: #{test_result.errors}" if test_result.error_count > 0 - end - - def assert_failed(test_result) - flunk "Test passed unexpectedly" if test_result.passed? - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/any_instance_method_test.rb b/vendor/gems/mocha-0.9.3/test/unit/any_instance_method_test.rb deleted file mode 100644 index 1bf4d2a..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/any_instance_method_test.rb +++ /dev/null @@ -1,126 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'method_definer' -require 'mocha/mock' -require 'mocha/any_instance_method' - -class AnyInstanceMethodTest < Test::Unit::TestCase - - include Mocha - - def test_should_hide_original_method - klass = Class.new { def method_x; end } - method = AnyInstanceMethod.new(klass, :method_x) - hidden_method_x = method.hidden_method.to_sym - - method.hide_original_method - - assert klass.method_defined?(hidden_method_x) - end - - def test_should_not_hide_original_method_if_it_is_not_defined - klass = Class.new - method = AnyInstanceMethod.new(klass, :method_x) - hidden_method_x = method.hidden_method.to_sym - - method.hide_original_method - - assert_equal false, klass.method_defined?(hidden_method_x) - end - - def test_should_define_a_new_method - klass = Class.new { def method_x; end } - method = AnyInstanceMethod.new(klass, :method_x) - mocha = Mock.new - mocha.expects(:method_x).with(:param1, :param2).returns(:result) - any_instance = Object.new - any_instance.define_instance_method(:mocha) { mocha } - klass.define_instance_method(:any_instance) { any_instance } - - method.hide_original_method - method.define_new_method - - instance = klass.new - result = instance.method_x(:param1, :param2) - - assert_equal :result, result - assert mocha.__verified__? 
- end - - def test_should_restore_original_method - klass = Class.new { def method_x; end } - method = AnyInstanceMethod.new(klass, :method_x) - hidden_method_x = method.hidden_method.to_sym - klass.send(:define_method, hidden_method_x, Proc.new { :original_result }) - - method.remove_new_method - method.restore_original_method - - instance = klass.new - assert_equal :original_result, instance.method_x - assert !klass.method_defined?(hidden_method_x) - end - - def test_should_not_restore_original_method_if_hidden_method_not_defined - klass = Class.new { def method_x; :new_result; end } - method = AnyInstanceMethod.new(klass, :method_x) - - method.restore_original_method - - instance = klass.new - assert_equal :new_result, instance.method_x - end - - def test_should_call_remove_new_method - klass = Class.new { def method_x; end } - any_instance = Mock.new - any_instance.stubs(:reset_mocha) - klass.define_instance_method(:any_instance) { any_instance } - method = AnyInstanceMethod.new(klass, :method_x) - method.replace_instance_method(:restore_original_method) { } - method.define_instance_accessor(:remove_called) - method.replace_instance_method(:remove_new_method) { self.remove_called = true } - - method.unstub - - assert method.remove_called - end - - def test_should_call_restore_original_method - klass = Class.new { def method_x; end } - any_instance = Mock.new - any_instance.stubs(:reset_mocha) - klass.define_instance_method(:any_instance) { any_instance } - method = AnyInstanceMethod.new(klass, :method_x) - method.replace_instance_method(:remove_new_method) { } - method.define_instance_accessor(:restore_called) - method.replace_instance_method(:restore_original_method) { self.restore_called = true } - - method.unstub - - assert method.restore_called - end - - def test_should_call_reset_mocha - klass = Class.new { def method_x; end } - any_instance = Class.new { attr_accessor :mocha_was_reset; def reset_mocha; self.mocha_was_reset = true; end }.new - klass.define_instance_method(:any_instance) { any_instance } - method = AnyInstanceMethod.new(klass, :method_x) - method.replace_instance_method(:remove_new_method) { } - method.replace_instance_method(:restore_original_method) { } - - method.unstub - - assert any_instance.mocha_was_reset - end - - def test_should_return_any_instance_mocha_for_stubbee - mocha = Object.new - any_instance = Object.new - any_instance.define_instance_method(:mocha) { mocha } - stubbee = Class.new - stubbee.define_instance_method(:any_instance) { any_instance } - method = AnyInstanceMethod.new(stubbee, :method_name) - assert_equal stubbee.any_instance.mocha, method.mock - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/array_inspect_test.rb b/vendor/gems/mocha-0.9.3/test/unit/array_inspect_test.rb deleted file mode 100644 index 8e555cd..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/array_inspect_test.rb +++ /dev/null @@ -1,16 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/inspect' - -class ArrayInspectTest < Test::Unit::TestCase - - def test_should_use_inspect - array = [1, 2] - assert_equal array.inspect, array.mocha_inspect - end - - def test_should_use_mocha_inspect_on_each_item - array = [1, 2, "chris"] - assert_equal "[1, 2, 'chris']", array.mocha_inspect - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/backtrace_filter_test.rb b/vendor/gems/mocha-0.9.3/test/unit/backtrace_filter_test.rb deleted file mode 100644 index 6d9379f..0000000 --- 
a/vendor/gems/mocha-0.9.3/test/unit/backtrace_filter_test.rb +++ /dev/null @@ -1,19 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/backtrace_filter' - -class BacktraceFilterTest < Test::Unit::TestCase - - include Mocha - - def test_should_exclude_mocha_locations_from_backtrace - mocha_lib = "/username/workspace/mocha_wibble/lib/" - backtrace = [ mocha_lib + 'exclude/me/1', mocha_lib + 'exclude/me/2', '/keep/me', mocha_lib + 'exclude/me/3'] - filter = BacktraceFilter.new(mocha_lib) - assert_equal ['/keep/me'], filter.filtered(backtrace) - end - - def test_should_determine_path_for_mocha_lib_directory - assert_match Regexp.new("/lib/$"), BacktraceFilter::LIB_DIRECTORY - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/cardinality_test.rb b/vendor/gems/mocha-0.9.3/test/unit/cardinality_test.rb deleted file mode 100644 index 2a5ef9b..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/cardinality_test.rb +++ /dev/null @@ -1,56 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/cardinality' - -class CardinalityTest < Test::Unit::TestCase - - include Mocha - - def test_should_allow_invocations_if_invocation_count_has_not_yet_reached_maximum - cardinality = Cardinality.new(2, 3) - assert cardinality.invocations_allowed?(0) - assert cardinality.invocations_allowed?(1) - assert cardinality.invocations_allowed?(2) - assert !cardinality.invocations_allowed?(3) - end - - def test_should_be_satisfied_if_invocations_so_far_have_reached_required_threshold - cardinality = Cardinality.new(2, 3) - assert !cardinality.satisfied?(0) - assert !cardinality.satisfied?(1) - assert cardinality.satisfied?(2) - assert cardinality.satisfied?(3) - end - - def test_should_describe_cardinality - assert_equal 'allowed any number of times', Cardinality.at_least(0).mocha_inspect - - assert_equal 'expected at most once', Cardinality.at_most(1).mocha_inspect - assert_equal 'expected at most twice', Cardinality.at_most(2).mocha_inspect - assert_equal 'expected at most 3 times', Cardinality.at_most(3).mocha_inspect - - assert_equal 'expected at least once', Cardinality.at_least(1).mocha_inspect - assert_equal 'expected at least twice', Cardinality.at_least(2).mocha_inspect - assert_equal 'expected at least 3 times', Cardinality.at_least(3).mocha_inspect - - assert_equal 'expected never', Cardinality.exactly(0).mocha_inspect - assert_equal 'expected exactly once', Cardinality.exactly(1).mocha_inspect - assert_equal 'expected exactly twice', Cardinality.exactly(2).mocha_inspect - assert_equal 'expected exactly 3 times', Cardinality.times(3).mocha_inspect - - assert_equal 'expected between 2 and 4 times', Cardinality.times(2..4).mocha_inspect - assert_equal 'expected between 1 and 3 times', Cardinality.times(1..3).mocha_inspect - end - - def test_should_need_verifying - assert Cardinality.exactly(2).needs_verifying? - assert Cardinality.at_least(3).needs_verifying? - assert Cardinality.at_most(2).needs_verifying? - assert Cardinality.times(4).needs_verifying? - assert Cardinality.times(2..4).needs_verifying? - end - - def test_should_not_need_verifying - assert_equal false, Cardinality.at_least(0).needs_verifying? 
- end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/central_test.rb b/vendor/gems/mocha-0.9.3/test/unit/central_test.rb deleted file mode 100644 index 03bff91..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/central_test.rb +++ /dev/null @@ -1,65 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") - -require 'mocha/central' -require 'mocha/mock' -require 'method_definer' - -class CentralTest < Test::Unit::TestCase - - include Mocha - - def test_should_start_with_empty_stubba_methods - stubba = Central.new - - assert_equal [], stubba.stubba_methods - end - - def test_should_stub_method_if_not_already_stubbed - method = Mock.new - method.expects(:stub) - stubba = Central.new - - stubba.stub(method) - - assert method.__verified__? - end - - def test_should_not_stub_method_if_already_stubbed - method = Mock.new - method.expects(:stub).times(0) - stubba = Central.new - stubba_methods = Mock.new - stubba_methods.stubs(:include?).with(method).returns(true) - stubba.stubba_methods = stubba_methods - - stubba.stub(method) - - assert method.__verified__? - end - - def test_should_record_method - method = Mock.new - method.expects(:stub) - stubba = Central.new - - stubba.stub(method) - - assert_equal [method], stubba.stubba_methods - end - - def test_should_unstub_all_methods - stubba = Central.new - method_1 = Mock.new - method_1.expects(:unstub) - method_2 = Mock.new - method_2.expects(:unstub) - stubba.stubba_methods = [method_1, method_2] - - stubba.unstub_all - - assert_equal [], stubba.stubba_methods - assert method_1.__verified__? - assert method_2.__verified__? - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/change_state_side_effect_test.rb b/vendor/gems/mocha-0.9.3/test/unit/change_state_side_effect_test.rb deleted file mode 100644 index b48beaa..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/change_state_side_effect_test.rb +++ /dev/null @@ -1,41 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") - -require 'mocha/change_state_side_effect' - -class ChangeStateSideEffectTest < Test::Unit::TestCase - - include Mocha - - class FakeState - - attr_reader :active - attr_writer :description - - def activate - @active = true - end - - def mocha_inspect - @description - end - - end - - def test_should_activate_the_given_state - state = FakeState.new - side_effect = ChangeStateSideEffect.new(state) - - side_effect.perform - - assert state.active - end - - def test_should_describe_itself_in_terms_of_the_activated_state - state = FakeState.new - state.description = 'the-new-state' - side_effect = ChangeStateSideEffect.new(state) - - assert_equal 'then the-new-state', side_effect.mocha_inspect - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/class_method_test.rb b/vendor/gems/mocha-0.9.3/test/unit/class_method_test.rb deleted file mode 100644 index 47c5999..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/class_method_test.rb +++ /dev/null @@ -1,237 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'method_definer' -require 'mocha/mock' - -require 'mocha/class_method' - -class ClassMethodTest < Test::Unit::TestCase - - include Mocha - - def test_should_provide_hidden_version_of_method_name_starting_with_prefix - method = ClassMethod.new(nil, :original_method_name) - assert_match(/^__stubba__/, method.hidden_method.to_s) - end - - def test_should_provide_hidden_version_of_method_name_ending_with_suffix - method = ClassMethod.new(nil, :original_method_name) - 
assert_match(/__stubba__$/, method.hidden_method.to_s) - end - - def test_should_provide_hidden_version_of_method_name_including_original_method_name - method = ClassMethod.new(nil, :original_method_name) - assert_match(/original_method_name/, method.hidden_method.to_s) - end - - def test_should_provide_hidden_version_of_method_name_substituting_question_mark - method = ClassMethod.new(nil, :question_mark?) - assert_no_match(/\?/, method.hidden_method.to_s) - assert_match(/question_mark_substituted_character_63/, method.hidden_method.to_s) - end - - def test_should_provide_hidden_version_of_method_name_substituting_exclamation_mark - method = ClassMethod.new(nil, :exclamation_mark!) - assert_no_match(/!/, method.hidden_method.to_s) - assert_match(/exclamation_mark_substituted_character_33/, method.hidden_method.to_s) - end - - def test_should_provide_hidden_version_of_method_name_substituting_equals_sign - method = ClassMethod.new(nil, :equals_sign=) - assert_no_match(/\=/, method.hidden_method.to_s) - assert_match(/equals_sign_substituted_character_61/, method.hidden_method.to_s) - end - - def test_should_provide_hidden_version_of_method_name_substituting_brackets - method = ClassMethod.new(nil, :[]) - assert_no_match(/\[\]/, method.hidden_method.to_s) - assert_match(/substituted_character_91__substituted_character_93/, method.hidden_method.to_s) - end - - def test_should_provide_hidden_version_of_method_name_substituting_plus_sign - method = ClassMethod.new(nil, :+) - assert_no_match(/\+/, method.hidden_method.to_s) - assert_match(/substituted_character_43/, method.hidden_method.to_s) - end - - def test_should_hide_original_method - klass = Class.new { def self.method_x; end } - method = ClassMethod.new(klass, :method_x) - hidden_method_x = method.hidden_method - - method.hide_original_method - - assert klass.respond_to?(hidden_method_x) - end - - def test_should_respond_to_original_method_name_after_original_method_has_been_hidden - klass = Class.new { def self.original_method_name; end } - method = ClassMethod.new(klass, :original_method_name) - hidden_method_x = method.hidden_method - - method.hide_original_method - - assert klass.respond_to?(:original_method_name) - end - - def test_should_not_hide_original_method_if_method_not_defined - klass = Class.new - method = ClassMethod.new(klass, :method_x) - hidden_method_x = method.hidden_method - - method.hide_original_method - - assert_equal false, klass.respond_to?(hidden_method_x) - end - - def test_should_define_a_new_method_which_should_call_mocha_method_missing - klass = Class.new { def self.method_x; end } - mocha = Mocha::Mock.new - klass.define_instance_method(:mocha) { mocha } - mocha.expects(:method_x).with(:param1, :param2).returns(:result) - method = ClassMethod.new(klass, :method_x) - - method.hide_original_method - method.define_new_method - result = klass.method_x(:param1, :param2) - - assert_equal :result, result - assert mocha.__verified__? 
- end - - def test_should_remove_new_method - klass = Class.new { def self.method_x; end } - method = ClassMethod.new(klass, :method_x) - - method.remove_new_method - - assert_equal false, klass.respond_to?(:method_x) - end - - def test_should_restore_original_method - klass = Class.new { def self.method_x; end } - method = ClassMethod.new(klass, :method_x) - hidden_method_x = method.hidden_method.to_sym - klass.define_instance_method(hidden_method_x) { :original_result } - - method.remove_new_method - method.restore_original_method - - assert_equal :original_result, klass.method_x - assert_equal false, klass.respond_to?(hidden_method_x) - end - - def test_should_not_restore_original_method_if_hidden_method_is_not_defined - klass = Class.new { def self.method_x; :new_result; end } - method = ClassMethod.new(klass, :method_x) - - method.restore_original_method - - assert_equal :new_result, klass.method_x - end - - def test_should_call_hide_original_method - klass = Class.new { def self.method_x; end } - method = ClassMethod.new(klass, :method_x) - method.hide_original_method - method.define_instance_accessor(:hide_called) - method.replace_instance_method(:hide_original_method) { self.hide_called = true } - - method.stub - - assert method.hide_called - end - - def test_should_call_define_new_method - klass = Class.new { def self.method_x; end } - method = ClassMethod.new(klass, :method_x) - method.define_instance_accessor(:define_called) - method.replace_instance_method(:define_new_method) { self.define_called = true } - - method.stub - - assert method.define_called - end - - def test_should_call_remove_new_method - klass = Class.new { def self.method_x; end } - klass.define_instance_method(:reset_mocha) { } - method = ClassMethod.new(klass, :method_x) - method.define_instance_accessor(:remove_called) - method.replace_instance_method(:remove_new_method) { self.remove_called = true } - - method.unstub - - assert method.remove_called - end - - def test_should_call_restore_original_method - klass = Class.new { def self.method_x; end } - klass.define_instance_method(:reset_mocha) { } - method = ClassMethod.new(klass, :method_x) - method.define_instance_accessor(:restore_called) - method.replace_instance_method(:restore_original_method) { self.restore_called = true } - - method.unstub - - assert method.restore_called - end - - def test_should_call_reset_mocha - klass = Class.new { def self.method_x; end } - klass.define_instance_accessor(:reset_called) - klass.define_instance_method(:reset_mocha) { self.reset_called = true } - method = ClassMethod.new(klass, :method_x) - method.replace_instance_method(:restore_original_method) { } - - method.unstub - - assert klass.reset_called - end - - def test_should_return_mock_for_stubbee - mocha = Object.new - stubbee = Object.new - stubbee.define_instance_accessor(:mocha) { mocha } - stubbee.mocha = nil - method = ClassMethod.new(stubbee, :method_name) - assert_equal stubbee.mocha, method.mock - end - - def test_should_not_be_equal_if_other_object_has_a_different_class - class_method = ClassMethod.new(Object.new, :method) - other_object = Object.new - assert class_method != other_object - end - - def test_should_not_be_equal_if_other_class_method_has_different_stubbee - stubbee_1 = Object.new - stubbee_2 = Object.new - class_method_1 = ClassMethod.new(stubbee_1, :method) - class_method_2 = ClassMethod.new(stubbee_2, :method) - assert class_method_1 != class_method_2 - end - - def test_should_not_be_equal_if_other_class_method_has_different_method - stubbee 
= Object.new - class_method_1 = ClassMethod.new(stubbee, :method_1) - class_method_2 = ClassMethod.new(stubbee, :method_2) - assert class_method_1 != class_method_2 - end - - def test_should_be_equal_if_other_class_method_has_same_stubbee_and_same_method_so_no_attempt_is_made_to_stub_a_method_twice - stubbee = Object.new - class_method_1 = ClassMethod.new(stubbee, :method) - class_method_2 = ClassMethod.new(stubbee, :method) - assert class_method_1 == class_method_2 - end - - def test_should_be_equal_if_other_class_method_has_same_stubbee_and_same_method_but_stubbee_equal_method_lies_like_active_record_association_proxy - stubbee = Class.new do - def equal?(other); false; end - end.new - class_method_1 = ClassMethod.new(stubbee, :method) - class_method_2 = ClassMethod.new(stubbee, :method) - assert class_method_1 == class_method_2 - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/date_time_inspect_test.rb b/vendor/gems/mocha-0.9.3/test/unit/date_time_inspect_test.rb deleted file mode 100644 index 8557365..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/date_time_inspect_test.rb +++ /dev/null @@ -1,21 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/inspect' - -class DateTimeInspectTest < Test::Unit::TestCase - - def test_should_use_include_date_in_seconds - time = Time.now - assert_equal "#{time.inspect} (#{time.to_f} secs)", time.mocha_inspect - end - - def test_should_use_to_s_for_date - date = Date.new(2006, 1, 1) - assert_equal date.to_s, date.mocha_inspect - end - - def test_should_use_to_s_for_datetime - datetime = DateTime.new(2006, 1, 1) - assert_equal datetime.to_s, datetime.mocha_inspect - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/exception_raiser_test.rb b/vendor/gems/mocha-0.9.3/test/unit/exception_raiser_test.rb deleted file mode 100644 index 942300a..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/exception_raiser_test.rb +++ /dev/null @@ -1,42 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") - -require 'mocha/exception_raiser' -require 'timeout' - -class ExceptionRaiserTest < Test::Unit::TestCase - - include Mocha - - def test_should_raise_exception_with_specified_class_and_default_message - exception_class = Class.new(StandardError) - raiser = ExceptionRaiser.new(exception_class, nil) - exception = assert_raises(exception_class) { raiser.evaluate } - assert_equal exception_class.to_s, exception.message - end - - def test_should_raise_exception_with_specified_class_and_message - exception_class = Class.new(StandardError) - raiser = ExceptionRaiser.new(exception_class, 'message') - exception = assert_raises(exception_class) { raiser.evaluate } - assert_equal 'message', exception.message - end - - def test_should_raise_exception_instance - exception_class = Class.new(StandardError) - raiser = ExceptionRaiser.new(exception_class.new('message'), nil) - exception = assert_raises(exception_class) { raiser.evaluate } - assert_equal 'message', exception.message - end - - def test_should_raise_interrupt_exception_with_default_message_so_it_works_in_ruby_1_8_6 - raiser = ExceptionRaiser.new(Interrupt, nil) - assert_raises(Interrupt) { raiser.evaluate } - end - - def test_should_raise_subclass_of_interrupt_exception_with_default_message_so_it_works_in_ruby_1_8_6 - exception_class = Class.new(Interrupt) - raiser = ExceptionRaiser.new(exception_class, nil) - assert_raises(exception_class) { raiser.evaluate } - end - -end \ No newline at end of file diff --git 
a/vendor/gems/mocha-0.9.3/test/unit/expectation_list_test.rb b/vendor/gems/mocha-0.9.3/test/unit/expectation_list_test.rb deleted file mode 100644 index bc641c5..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/expectation_list_test.rb +++ /dev/null @@ -1,57 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/expectation_list' -require 'mocha/expectation' -require 'set' -require 'method_definer' - -class ExpectationListTest < Test::Unit::TestCase - - include Mocha - - def test_should_return_added_expectation - expectation_list = ExpectationList.new - expectation = Expectation.new(nil, :my_method) - assert_same expectation, expectation_list.add(expectation) - end - - def test_should_find_matching_expectation - expectation_list = ExpectationList.new - expectation1 = Expectation.new(nil, :my_method).with(:argument1, :argument2) - expectation2 = Expectation.new(nil, :my_method).with(:argument3, :argument4) - expectation_list.add(expectation1) - expectation_list.add(expectation2) - assert_same expectation1, expectation_list.match(:my_method, :argument1, :argument2) - end - - def test_should_find_most_recent_matching_expectation - expectation_list = ExpectationList.new - expectation1 = Expectation.new(nil, :my_method).with(:argument1, :argument2) - expectation2 = Expectation.new(nil, :my_method).with(:argument1, :argument2) - expectation_list.add(expectation1) - expectation_list.add(expectation2) - assert_same expectation2, expectation_list.match(:my_method, :argument1, :argument2) - end - - def test_should_find_matching_expectation_allowing_invocation - expectation_list = ExpectationList.new - expectation1 = Expectation.new(nil, :my_method).with(:argument1, :argument2) - expectation2 = Expectation.new(nil, :my_method).with(:argument3, :argument4) - expectation1.define_instance_method(:invocations_allowed?) { true } - expectation2.define_instance_method(:invocations_allowed?) { true } - expectation_list.add(expectation1) - expectation_list.add(expectation2) - assert_same expectation1, expectation_list.match_allowing_invocation(:my_method, :argument1, :argument2) - end - - def test_should_find_most_recent_matching_expectation_allowing_invocation - expectation_list = ExpectationList.new - expectation1 = Expectation.new(nil, :my_method) - expectation2 = Expectation.new(nil, :my_method) - expectation1.define_instance_method(:invocations_allowed?) { true } - expectation2.define_instance_method(:invocations_allowed?) 
{ false } - expectation_list.add(expectation1) - expectation_list.add(expectation2) - assert_same expectation1, expectation_list.match_allowing_invocation(:my_method) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/expectation_test.rb b/vendor/gems/mocha-0.9.3/test/unit/expectation_test.rb deleted file mode 100644 index b96385e..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/expectation_test.rb +++ /dev/null @@ -1,459 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'method_definer' -require 'mocha/expectation' -require 'mocha/sequence' -require 'execution_point' -require 'simple_counter' - -class ExpectationTest < Test::Unit::TestCase - - include Mocha - - def new_expectation - Expectation.new(nil, :expected_method) - end - - def test_should_match_calls_to_same_method_with_any_parameters - assert new_expectation.match?(:expected_method, 1, 2, 3) - end - - def test_should_match_calls_to_same_method_with_exactly_zero_parameters - expectation = new_expectation.with() - assert expectation.match?(:expected_method) - end - - def test_should_not_match_calls_to_same_method_with_more_than_zero_parameters - expectation = new_expectation.with() - assert !expectation.match?(:expected_method, 1, 2, 3) - end - - def test_should_match_calls_to_same_method_with_expected_parameter_values - expectation = new_expectation.with(1, 2, 3) - assert expectation.match?(:expected_method, 1, 2, 3) - end - - def test_should_match_calls_to_same_method_with_parameters_constrained_as_expected - expectation = new_expectation.with() {|x, y, z| x + y == z} - assert expectation.match?(:expected_method, 1, 2, 3) - end - - def test_should_not_match_calls_to_different_method_with_parameters_constrained_as_expected - expectation = new_expectation.with() {|x, y, z| x + y == z} - assert !expectation.match?(:different_method, 1, 2, 3) - end - - def test_should_not_match_calls_to_different_methods_with_no_parameters - assert !new_expectation.match?(:unexpected_method) - end - - def test_should_not_match_calls_to_same_method_with_too_few_parameters - expectation = new_expectation.with(1, 2, 3) - assert !expectation.match?(:unexpected_method, 1, 2) - end - - def test_should_not_match_calls_to_same_method_with_too_many_parameters - expectation = new_expectation.with(1, 2) - assert !expectation.match?(:unexpected_method, 1, 2, 3) - end - - def test_should_not_match_calls_to_same_method_with_unexpected_parameter_values - expectation = new_expectation.with(1, 2, 3) - assert !expectation.match?(:unexpected_method, 1, 0, 3) - end - - def test_should_not_match_calls_to_same_method_with_parameters_not_constrained_as_expected - expectation = new_expectation.with() {|x, y, z| x + y == z} - assert !expectation.match?(:expected_method, 1, 0, 3) - end - - def test_should_allow_invocations_until_expected_invocation_count_is_one_and_actual_invocation_count_would_be_two - expectation = new_expectation.times(1) - assert expectation.invocations_allowed? - expectation.invoke - assert !expectation.invocations_allowed? - end - - def test_should_allow_invocations_until_expected_invocation_count_is_two_and_actual_invocation_count_would_be_three - expectation = new_expectation.times(2) - assert expectation.invocations_allowed? - expectation.invoke - assert expectation.invocations_allowed? - expectation.invoke - assert !expectation.invocations_allowed? 
- end - - def test_should_allow_invocations_until_expected_invocation_count_is_a_range_from_two_to_three_and_actual_invocation_count_would_be_four - expectation = new_expectation.times(2..3) - assert expectation.invocations_allowed? - expectation.invoke - assert expectation.invocations_allowed? - expectation.invoke - assert expectation.invocations_allowed? - expectation.invoke - assert !expectation.invocations_allowed? - end - - def test_should_store_provided_backtrace - backtrace = Object.new - expectation = Expectation.new(nil, :expected_method, backtrace) - assert_equal backtrace, expectation.backtrace - end - - def test_should_default_backtrace_to_caller - execution_point = ExecutionPoint.current; expectation = Expectation.new(nil, :expected_method) - assert_equal execution_point, ExecutionPoint.new(expectation.backtrace) - end - - def test_should_not_yield - yielded = false - new_expectation.invoke() { yielded = true } - assert_equal false, yielded - end - - def test_should_yield_no_parameters - expectation = new_expectation().yields() - yielded_parameters = nil - expectation.invoke() { |*parameters| yielded_parameters = parameters } - assert_equal Array.new, yielded_parameters - end - - def test_should_yield_with_specified_parameters - expectation = new_expectation().yields(1, 2, 3) - yielded_parameters = nil - expectation.invoke() { |*parameters| yielded_parameters = parameters } - assert_equal [1, 2, 3], yielded_parameters - end - - def test_should_yield_different_parameters_on_consecutive_invocations - expectation = new_expectation().yields(1, 2, 3).yields(4, 5) - yielded_parameters = [] - expectation.invoke() { |*parameters| yielded_parameters << parameters } - expectation.invoke() { |*parameters| yielded_parameters << parameters } - assert_equal [[1, 2, 3], [4, 5]], yielded_parameters - end - - def test_should_yield_multiple_times_for_single_invocation - expectation = new_expectation().multiple_yields([1, 2, 3], [4, 5]) - yielded_parameters = [] - expectation.invoke() { |*parameters| yielded_parameters << parameters } - assert_equal [[1, 2, 3], [4, 5]], yielded_parameters - end - - def test_should_yield_multiple_times_for_first_invocation_and_once_for_second_invocation - expectation = new_expectation().multiple_yields([1, 2, 3], [4, 5]).then.yields(6, 7) - yielded_parameters = [] - expectation.invoke() { |*parameters| yielded_parameters << parameters } - expectation.invoke() { |*parameters| yielded_parameters << parameters } - assert_equal [[1, 2, 3], [4, 5], [6, 7]], yielded_parameters - end - - def test_should_return_specified_value - expectation = new_expectation.returns(99) - assert_equal 99, expectation.invoke - end - - def test_should_return_same_specified_value_multiple_times - expectation = new_expectation.returns(99) - assert_equal 99, expectation.invoke - assert_equal 99, expectation.invoke - end - - def test_should_return_specified_values_on_consecutive_calls - expectation = new_expectation.returns(99, 100, 101) - assert_equal 99, expectation.invoke - assert_equal 100, expectation.invoke - assert_equal 101, expectation.invoke - end - - def test_should_return_specified_values_on_consecutive_calls_even_if_values_are_modified - values = [99, 100, 101] - expectation = new_expectation.returns(*values) - values.shift - assert_equal 99, expectation.invoke - assert_equal 100, expectation.invoke - assert_equal 101, expectation.invoke - end - - def test_should_return_nil_by_default - assert_nil new_expectation.invoke - end - - def test_should_return_nil_if_no_value_specified - 
expectation = new_expectation.returns() - assert_nil expectation.invoke - end - - def test_should_raise_runtime_exception - expectation = new_expectation.raises - assert_raise(RuntimeError) { expectation.invoke } - end - - def test_should_raise_custom_exception - exception = Class.new(Exception) - expectation = new_expectation.raises(exception) - assert_raise(exception) { expectation.invoke } - end - - def test_should_raise_same_instance_of_custom_exception - exception_klass = Class.new(StandardError) - expected_exception = exception_klass.new - expectation = new_expectation.raises(expected_exception) - actual_exception = assert_raise(exception_klass) { expectation.invoke } - assert_same expected_exception, actual_exception - end - - def test_should_use_the_default_exception_message - expectation = new_expectation.raises(Exception) - exception = assert_raise(Exception) { expectation.invoke } - assert_equal Exception.new.message, exception.message - end - - def test_should_raise_custom_exception_with_message - exception_msg = "exception message" - expectation = new_expectation.raises(Exception, exception_msg) - exception = assert_raise(Exception) { expectation.invoke } - assert_equal exception_msg, exception.message - end - - def test_should_return_values_then_raise_exception - expectation = new_expectation.returns(1, 2).then.raises() - assert_equal 1, expectation.invoke - assert_equal 2, expectation.invoke - assert_raise(RuntimeError) { expectation.invoke } - end - - def test_should_raise_exception_then_return_values - expectation = new_expectation.raises().then.returns(1, 2) - assert_raise(RuntimeError) { expectation.invoke } - assert_equal 1, expectation.invoke - assert_equal 2, expectation.invoke - end - - def test_should_verify_successfully_if_expected_call_was_made - expectation = new_expectation - expectation.invoke - assert expectation.verified? - end - - def test_should_not_verify_successfully_if_call_expected_once_but_invoked_twice - expectation = new_expectation.once - expectation.invoke - expectation.invoke - assert !expectation.verified? - end - - def test_should_not_verify_successfully_if_call_expected_once_but_not_invoked - expectation = new_expectation.once - assert !expectation.verified? - end - - def test_should_verify_successfully_if_call_expected_once_and_invoked_once - expectation = new_expectation.once - expectation.invoke - assert expectation.verified? - end - - def test_should_verify_successfully_if_expected_call_was_made_at_least_once - expectation = new_expectation.at_least_once - 3.times {expectation.invoke} - assert expectation.verified? - end - - def test_should_not_verify_successfully_if_expected_call_was_not_made_at_least_once - expectation = new_expectation.with(1, 2, 3).at_least_once - assert !expectation.verified? - assert_match(/expected at least once, not yet invoked/i, expectation.mocha_inspect) - end - - def test_should_verify_successfully_if_expected_call_was_made_expected_number_of_times - expectation = new_expectation.times(2) - 2.times {expectation.invoke} - assert expectation.verified? - end - - def test_should_not_verify_successfully_if_expected_call_was_made_too_few_times - expectation = new_expectation.times(2) - 1.times {expectation.invoke} - assert !expectation.verified? 
- assert_match(/expected exactly twice, already invoked once/i, expectation.mocha_inspect) - end - - def test_should_not_verify_successfully_if_expected_call_was_made_too_many_times - expectation = new_expectation.times(2) - 3.times {expectation.invoke} - assert !expectation.verified? - end - - def test_should_increment_assertion_counter_for_expectation_because_it_does_need_verifyng - expectation = new_expectation - expectation.invoke - assertion_counter = SimpleCounter.new - expectation.verified?(assertion_counter) - assert_equal 1, assertion_counter.count - end - - def test_should_not_increment_assertion_counter_for_stub_because_it_does_not_need_verifying - stub = Expectation.new(nil, :expected_method).at_least(0) - assertion_counter = SimpleCounter.new - stub.verified?(assertion_counter) - assert_equal 0, assertion_counter.count - end - - def test_should_store_backtrace_from_point_where_expectation_was_created - execution_point = ExecutionPoint.current; expectation = Expectation.new(nil, :expected_method) - assert_equal execution_point, ExecutionPoint.new(expectation.backtrace) - end - - class FakeMock - - def initialize(name) - @name = name - end - - def mocha_inspect - @name - end - - end - - def test_should_raise_error_with_message_indicating_which_method_was_expected_to_be_called_on_which_mock_object_with_which_parameters_and_in_what_sequences - mock = FakeMock.new('mock') - sequence_one = Sequence.new('one') - sequence_two = Sequence.new('two') - expectation = Expectation.new(mock, :expected_method).with(1, 2, {'a' => true}, {:b => false}, [1, 2, 3]).in_sequence(sequence_one, sequence_two) - assert !expectation.verified? - assert_match "mock.expected_method(1, 2, {'a' => true}, {:b => false}, [1, 2, 3]); in sequence 'one'; in sequence 'two'", expectation.mocha_inspect - end - - class FakeConstraint - - def initialize(allows_invocation_now) - @allows_invocation_now = allows_invocation_now - end - - def allows_invocation_now? - @allows_invocation_now - end - - end - - def test_should_be_in_correct_order_if_all_ordering_constraints_allow_invocation_now - constraint_one = FakeConstraint.new(allows_invocation_now = true) - constraint_two = FakeConstraint.new(allows_invocation_now = true) - expectation = Expectation.new(nil, :method_one) - expectation.add_ordering_constraint(constraint_one) - expectation.add_ordering_constraint(constraint_two) - assert expectation.in_correct_order? - end - - def test_should_not_be_in_correct_order_if_one_ordering_constraint_does_not_allow_invocation_now - constraint_one = FakeConstraint.new(allows_invocation_now = true) - constraint_two = FakeConstraint.new(allows_invocation_now = false) - expectation = Expectation.new(nil, :method_one) - expectation.add_ordering_constraint(constraint_one) - expectation.add_ordering_constraint(constraint_two) - assert !expectation.in_correct_order? 
- end - - def test_should_match_if_all_ordering_constraints_allow_invocation_now - constraint_one = FakeConstraint.new(allows_invocation_now = true) - constraint_two = FakeConstraint.new(allows_invocation_now = true) - expectation = Expectation.new(nil, :method_one) - expectation.add_ordering_constraint(constraint_one) - expectation.add_ordering_constraint(constraint_two) - assert expectation.match?(:method_one) - end - - def test_should_not_match_if_one_ordering_constraints_does_not_allow_invocation_now - constraint_one = FakeConstraint.new(allows_invocation_now = true) - constraint_two = FakeConstraint.new(allows_invocation_now = false) - expectation = Expectation.new(nil, :method_one) - expectation.add_ordering_constraint(constraint_one) - expectation.add_ordering_constraint(constraint_two) - assert !expectation.match?(:method_one) - end - - def test_should_not_be_satisfied_when_required_invocation_has_not_been_made - expectation = Expectation.new(nil, :method_one).times(1) - assert !expectation.satisfied? - end - - def test_should_be_satisfied_when_required_invocation_has_been_made - expectation = Expectation.new(nil, :method_one).times(1) - expectation.invoke - assert expectation.satisfied? - end - - def test_should_not_be_satisfied_when_minimum_number_of_invocations_has_not_been_made - expectation = Expectation.new(nil, :method_one).at_least(2) - expectation.invoke - assert !expectation.satisfied? - end - - def test_should_be_satisfied_when_minimum_number_of_invocations_has_been_made - expectation = Expectation.new(nil, :method_one).at_least(2) - 2.times { expectation.invoke } - assert expectation.satisfied? - end - - class FakeSequence - - attr_reader :expectations - - def initialize - @expectations = [] - end - - def constrain_as_next_in_sequence(expectation) - @expectations << expectation - end - - end - - def test_should_tell_sequences_to_constrain_expectation_as_next_in_sequence - sequence_one = FakeSequence.new - sequence_two = FakeSequence.new - expectation = Expectation.new(nil, :method_one) - assert_equal expectation, expectation.in_sequence(sequence_one, sequence_two) - assert_equal [expectation], sequence_one.expectations - assert_equal [expectation], sequence_two.expectations - end - - class FakeState - - def initialize - @active = false - end - - def activate - @active = true - end - - def active? - @active - end - - end - - def test_should_change_state_when_expectation_is_invoked - state = FakeState.new - expectation = Expectation.new(nil, :method_one) - - expectation.then(state) - - expectation.invoke - assert state.active? 
- end - - def test_should_match_when_state_is_active - state = FakeState.new - expectation = Expectation.new(nil, :method_one) - - expectation.when(state) - assert !expectation.match?(:method_one) - - state.activate - assert expectation.match?(:method_one) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/hash_inspect_test.rb b/vendor/gems/mocha-0.9.3/test/unit/hash_inspect_test.rb deleted file mode 100644 index 15ad415..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/hash_inspect_test.rb +++ /dev/null @@ -1,16 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/inspect' - -class HashInspectTest < Test::Unit::TestCase - - def test_should_keep_spacing_between_key_value - hash = {:a => true} - assert_equal '{:a => true}', hash.mocha_inspect - end - - def test_should_use_mocha_inspect_on_each_item - hash = {:a => 'mocha'} - assert_equal "{:a => 'mocha'}", hash.mocha_inspect - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/in_state_ordering_constraint_test.rb b/vendor/gems/mocha-0.9.3/test/unit/in_state_ordering_constraint_test.rb deleted file mode 100644 index 96e20c5..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/in_state_ordering_constraint_test.rb +++ /dev/null @@ -1,43 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") - -require 'mocha/in_state_ordering_constraint' - -class InStateOrderingConstraintTest < Test::Unit::TestCase - - include Mocha - - class FakeStatePredicate - - attr_writer :active, :description - - def active? - @active - end - - def mocha_inspect - @description - end - - end - - def test_should_allow_invocation_when_state_is_active - state_predicate = FakeStatePredicate.new - ordering_constraint = InStateOrderingConstraint.new(state_predicate) - - state_predicate.active = true - assert ordering_constraint.allows_invocation_now? - - state_predicate.active = false - assert !ordering_constraint.allows_invocation_now? - end - - def test_should_describe_itself_in_terms_of_the_state_predicates_description - state_predicate = FakeStatePredicate.new - ordering_constraint = InStateOrderingConstraint.new(state_predicate) - - state_predicate.description = 'the-state-predicate' - - assert_equal 'when the-state-predicate', ordering_constraint.mocha_inspect - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/metaclass_test.rb b/vendor/gems/mocha-0.9.3/test/unit/metaclass_test.rb deleted file mode 100644 index 956bcb4..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/metaclass_test.rb +++ /dev/null @@ -1,22 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/metaclass' - -class MetaclassTest < Test::Unit::TestCase - - def test_should_return_objects_singleton_class - object = Object.new - assert_raises(NoMethodError) { object.success? } - - object = Object.new - assert object.__metaclass__.ancestors.include?(Object) - assert object.__metaclass__.ancestors.include?(Kernel) - assert object.__metaclass__.is_a?(Class) - - object.__metaclass__.class_eval { def success?; true; end } - assert object.success? - - object = Object.new - assert_raises(NoMethodError) { object.success? 
} - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/method_matcher_test.rb b/vendor/gems/mocha-0.9.3/test/unit/method_matcher_test.rb deleted file mode 100644 index 0167433..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/method_matcher_test.rb +++ /dev/null @@ -1,23 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/method_matcher' - -class MethodMatcherTest < Test::Unit::TestCase - - include Mocha - - def test_should_match_if_actual_method_name_is_same_as_expected_method_name - method_matcher = MethodMatcher.new(:method_name) - assert method_matcher.match?(:method_name) - end - - def test_should_not_match_if_actual_method_name_is_not_same_as_expected_method_name - method_matcher = MethodMatcher.new(:method_name) - assert !method_matcher.match?(:different_method_name) - end - - def test_should_describe_what_method_is_expected - method_matcher = MethodMatcher.new(:method_name) - assert_equal "method_name", method_matcher.mocha_inspect - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/mock_test.rb b/vendor/gems/mocha-0.9.3/test/unit/mock_test.rb deleted file mode 100644 index 753105a..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/mock_test.rb +++ /dev/null @@ -1,295 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/mock' -require 'mocha/expectation_error' -require 'set' -require 'simple_counter' - -class MockTest < Test::Unit::TestCase - - include Mocha - - def test_should_set_single_expectation - mock = Mock.new - mock.expects(:method1).returns(1) - assert_nothing_raised(ExpectationError) do - assert_equal 1, mock.method1 - end - end - - def test_should_build_and_store_expectations - mock = Mock.new - expectation = mock.expects(:method1) - assert_not_nil expectation - assert_equal [expectation], mock.expectations.to_a - end - - def test_should_not_stub_everything_by_default - mock = Mock.new - assert_equal false, mock.everything_stubbed - end - - def test_should_stub_everything - mock = Mock.new - mock.stub_everything - assert_equal true, mock.everything_stubbed - end - - def test_should_be_able_to_extend_mock_object_with_module - mock = Mock.new - assert_nothing_raised(ExpectationError) { mock.extend(Module.new) } - end - - def test_should_be_equal - mock = Mock.new - assert_equal true, mock.eql?(mock) - end - - if RUBY_VERSION < '1.9' - OBJECT_METHODS = STANDARD_OBJECT_PUBLIC_INSTANCE_METHODS.reject { |m| m =~ /^__.*__$/ } - else - OBJECT_METHODS = STANDARD_OBJECT_PUBLIC_INSTANCE_METHODS.reject { |m| m =~ /^__.*__$/ || m == :object_id } - end - - def test_should_be_able_to_mock_standard_object_methods - mock = Mock.new - OBJECT_METHODS.each { |method| mock.__expects__(method.to_sym).returns(method) } - OBJECT_METHODS.each { |method| assert_equal method, mock.__send__(method.to_sym) } - assert mock.__verified__? 
- end - - def test_should_be_able_to_stub_standard_object_methods - mock = Mock.new - OBJECT_METHODS.each { |method| mock.__stubs__(method.to_sym).returns(method) } - OBJECT_METHODS.each { |method| assert_equal method, mock.__send__(method.to_sym) } - end - - def test_should_create_and_add_expectations - mock = Mock.new - expectation1 = mock.expects(:method1) - expectation2 = mock.expects(:method2) - assert_equal [expectation1, expectation2].to_set, mock.expectations.to_set - end - - def test_should_pass_backtrace_into_expectation - mock = Mock.new - backtrace = Object.new - expectation = mock.expects(:method1, backtrace) - assert_equal backtrace, expectation.backtrace - end - - def test_should_pass_backtrace_into_stub - mock = Mock.new - backtrace = Object.new - stub = mock.stubs(:method1, backtrace) - assert_equal backtrace, stub.backtrace - end - - def test_should_create_and_add_stubs - mock = Mock.new - stub1 = mock.stubs(:method1) - stub2 = mock.stubs(:method2) - assert_equal [stub1, stub2].to_set, mock.expectations.to_set - end - - def test_should_invoke_expectation_and_return_result - mock = Mock.new - mock.expects(:my_method).returns(:result) - result = mock.my_method - assert_equal :result, result - end - - def test_should_not_raise_error_if_stubbing_everything - mock = Mock.new - mock.stub_everything - result = nil - assert_nothing_raised(ExpectationError) do - result = mock.unexpected_method - end - assert_nil result - end - - def test_should_raise_assertion_error_for_unexpected_method_call - mock = Mock.new - error = assert_raise(ExpectationError) do - mock.unexpected_method_called(:my_method, :argument1, :argument2) - end - assert_match(/unexpected invocation/, error.message) - assert_match(/my_method/, error.message) - assert_match(/argument1/, error.message) - assert_match(/argument2/, error.message) - end - - def test_should_not_verify_successfully_because_not_all_expectations_have_been_satisfied - mock = Mock.new - mock.expects(:method1) - mock.expects(:method2) - mock.method1 - assert !mock.__verified__? 
- end - - def test_should_increment_assertion_counter_for_every_verified_expectation - mock = Mock.new - - mock.expects(:method1) - mock.method1 - - mock.expects(:method2) - mock.method2 - - assertion_counter = SimpleCounter.new - - mock.__verified__?(assertion_counter) - - assert_equal 2, assertion_counter.count - end - - def test_should_yield_supplied_parameters_to_block - mock = Mock.new - parameters_for_yield = [1, 2, 3] - mock.expects(:method1).yields(*parameters_for_yield) - yielded_parameters = nil - mock.method1() { |*parameters| yielded_parameters = parameters } - assert_equal parameters_for_yield, yielded_parameters - end - - def test_should_set_up_multiple_expectations_with_return_values - mock = Mock.new - mock.expects(:method1 => :result1, :method2 => :result2) - assert_equal :result1, mock.method1 - assert_equal :result2, mock.method2 - end - - def test_should_set_up_multiple_stubs_with_return_values - mock = Mock.new - mock.stubs(:method1 => :result1, :method2 => :result2) - assert_equal :result1, mock.method1 - assert_equal :result2, mock.method2 - end - - def test_should_keep_returning_specified_value_for_stubs - mock = Mock.new - mock.stubs(:method1).returns(1) - assert_equal 1, mock.method1 - assert_equal 1, mock.method1 - end - - def test_should_keep_returning_specified_value_for_expects - mock = Mock.new - mock.expects(:method1).times(2).returns(1) - assert_equal 1, mock.method1 - assert_equal 1, mock.method1 - end - - def test_should_match_most_recent_call_to_expects - mock = Mock.new - mock.expects(:method1).returns(0) - mock.expects(:method1).returns(1) - assert_equal 1, mock.method1 - end - - def test_should_match_most_recent_call_to_stubs - mock = Mock.new - mock.stubs(:method1).returns(0) - mock.stubs(:method1).returns(1) - assert_equal 1, mock.method1 - end - - def test_should_match_most_recent_call_to_stubs_or_expects - mock = Mock.new - mock.stubs(:method1).returns(0) - mock.expects(:method1).returns(1) - assert_equal 1, mock.method1 - end - - def test_should_match_most_recent_call_to_expects_or_stubs - mock = Mock.new - mock.expects(:method1).returns(0) - mock.stubs(:method1).returns(1) - assert_equal 1, mock.method1 - end - - def test_should_respond_to_expected_method - mock = Mock.new - mock.expects(:method1) - assert_equal true, mock.respond_to?(:method1) - end - - def test_should_not_respond_to_unexpected_method - mock = Mock.new - assert_equal false, mock.respond_to?(:method1) - end - - def test_should_respond_to_methods_which_the_responder_does_responds_to - instance = Class.new do - define_method(:respond_to?) { |symbol| true } - end.new - mock = Mock.new - mock.responds_like(instance) - assert_equal true, mock.respond_to?(:invoked_method) - end - - def test_should_not_respond_to_methods_which_the_responder_does_not_responds_to - instance = Class.new do - define_method(:respond_to?) { |symbol| false } - end.new - mock = Mock.new - mock.responds_like(instance) - assert_equal false, mock.respond_to?(:invoked_method) - end - - def test_should_return_itself_to_allow_method_chaining - mock = Mock.new - assert_same mock.responds_like(Object.new), mock - end - - def test_should_not_raise_no_method_error_if_mock_is_not_restricted_to_respond_like_a_responder - instance = Class.new do - define_method(:respond_to?) 
{ true } - end.new - mock = Mock.new - mock.stubs(:invoked_method) - assert_nothing_raised(NoMethodError) { mock.invoked_method } - end - - def test_should_not_raise_no_method_error_if_responder_does_respond_to_invoked_method - instance = Class.new do - define_method(:respond_to?) { |symbol| true } - end.new - mock = Mock.new - mock.responds_like(instance) - mock.stubs(:invoked_method) - assert_nothing_raised(NoMethodError) { mock.invoked_method } - end - - def test_should_raise_no_method_error_if_responder_does_not_respond_to_invoked_method - instance = Class.new do - define_method(:respond_to?) { |symbol| false } - define_method(:mocha_inspect) { 'mocha_inspect' } - end.new - mock = Mock.new - mock.responds_like(instance) - mock.stubs(:invoked_method) - assert_raises(NoMethodError) { mock.invoked_method } - end - - def test_should_raise_no_method_error_with_message_indicating_that_mock_is_constrained_to_respond_like_responder - instance = Class.new do - define_method(:respond_to?) { |symbol| false } - define_method(:mocha_inspect) { 'mocha_inspect' } - end.new - mock = Mock.new - mock.responds_like(instance) - mock.stubs(:invoked_method) - begin - mock.invoked_method - rescue NoMethodError => e - assert_match(/which responds like mocha_inspect/, e.message) - end - end - - def test_should_handle_respond_to_with_private_methods_param_without_error - mock = Mock.new - assert_nothing_raised{ mock.respond_to?(:object_id, false) } - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/mockery_test.rb b/vendor/gems/mocha-0.9.3/test/unit/mockery_test.rb deleted file mode 100644 index db05ec2..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/mockery_test.rb +++ /dev/null @@ -1,149 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/mockery' -require 'mocha/state_machine' - -class MockeryTest < Test::Unit::TestCase - - include Mocha - - def test_should_build_instance_of_mockery - mockery = Mockery.instance - assert_not_nil mockery - assert_kind_of Mockery, mockery - end - - def test_should_cache_instance_of_mockery - mockery_1 = Mockery.instance - mockery_2 = Mockery.instance - assert_same mockery_1, mockery_2 - end - - def test_should_expire_mockery_instance_cache - mockery_1 = Mockery.instance - Mockery.reset_instance - mockery_2 = Mockery.instance - assert_not_same mockery_1, mockery_2 - end - - def test_should_raise_expectation_error_because_not_all_expectations_are_satisfied - mockery = Mockery.new - mock_1 = mockery.named_mock('mock-1') { expects(:method_1) } - mock_2 = mockery.named_mock('mock-2') { expects(:method_2) } - 1.times { mock_1.method_1 } - 0.times { mock_2.method_2 } - assert_raises(ExpectationError) { mockery.verify } - end - - def test_should_reset_list_of_mocks_on_teardown - mockery = Mockery.new - mock = mockery.unnamed_mock { expects(:my_method) } - mockery.teardown - assert_nothing_raised(ExpectationError) { mockery.verify } - end - - def test_should_build_instance_of_stubba_on_instantiation - mockery = Mockery.new - assert_not_nil mockery.stubba - assert_kind_of Central, mockery.stubba - end - - def test_should_build_new_instance_of_stubba_on_teardown - mockery = Mockery.new - stubba_1 = mockery.stubba - mockery.teardown - stubba_2 = mockery.stubba - assert_not_same stubba_1, stubba_2 - end - - def test_should_build_and_store_new_state_machine - mockery = Mockery.new - mockery.new_state_machine('state-machine-name') - assert_equal 1, mockery.state_machines.length - assert_kind_of StateMachine, 
mockery.state_machines[0] - end - - def test_should_reset_list_of_state_machines_on_teardown - mockery = Mockery.new - mockery.new_state_machine('state-machine-name') - mockery.teardown - assert_equal 0, mockery.state_machines.length - end - - class FakeMethod - def stub; end - def unstub; end - end - - def test_should_unstub_all_methods_on_teardown - mockery = Mockery.new - stubba = mockery.stubba - stubba.stub(FakeMethod.new) - mockery.teardown - assert stubba.stubba_methods.empty? - end - - def test_should_display_object_id_for_mocha_inspect_if_mock_has_no_name - mockery = Mockery.new - mock = mockery.unnamed_mock - assert_match Regexp.new("^#<Mock:0x[0-9A-Fa-f]{1,8}>$"), mock.mocha_inspect - end - - def test_should_display_object_id_for_inspect_if_mock_has_no_name - mockery = Mockery.new - mock = mockery.unnamed_mock - assert_match Regexp.new("^#<Mock:0x[0-9A-Fa-f]{1,8}>$"), mock.inspect - end - - def test_should_display_name_for_mocha_inspect_if_mock_has_string_name - mockery = Mockery.new - mock = mockery.named_mock('named_mock') - assert_equal "#<Mock:named_mock>", mock.mocha_inspect - end - - def test_should_display_name_for_mocha_inspect_if_mock_has_symbol_name - mockery = Mockery.new - mock = mockery.named_mock(:named_mock) - assert_equal "#<Mock:named_mock>", mock.mocha_inspect - end - - def test_should_display_name_for_inspect_if_mock_has_string_name - mockery = Mockery.new - mock = mockery.named_mock('named_mock') - assert_equal "#<Mock:named_mock>", mock.inspect - end - - def test_should_display_name_for_inspect_if_mock_has_symbol_name - mockery = Mockery.new - mock = mockery.named_mock(:named_mock) - assert_equal "#<Mock:named_mock>", mock.inspect - end - - def test_should_display_impersonated_object_for_mocha_inspect - mockery = Mockery.new - instance = Object.new - mock = mockery.mock_impersonating(instance) - assert_equal "#{instance.mocha_inspect}", mock.mocha_inspect - end - - def test_should_display_impersonated_object_for_inspect - mockery = Mockery.new - instance = Object.new - mock = mockery.mock_impersonating(instance) - assert_equal "#{instance.mocha_inspect}", mock.inspect - end - - class FakeClass; end - - def test_should_display_any_instance_prefix_followed_by_class_whose_instances_are_being_impersonated_for_mocha_inspect - mockery = Mockery.new - mock = mockery.mock_impersonating_any_instance_of(FakeClass) - assert_equal "#<AnyInstance:#{FakeClass.mocha_inspect}>", mock.mocha_inspect - end - - def test_should_display_any_instance_prefix_followed_by_class_whose_instances_are_being_impersonated_for_inspect - mockery = Mockery.new - mock = mockery.mock_impersonating_any_instance_of(FakeClass) - assert_equal "#<AnyInstance:#{FakeClass.mocha_inspect}>", mock.inspect - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/multiple_yields_test.rb b/vendor/gems/mocha-0.9.3/test/unit/multiple_yields_test.rb deleted file mode 100644 index 65724a8..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/multiple_yields_test.rb +++ /dev/null @@ -1,18 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") - -require 'mocha/multiple_yields' - -class MultipleYieldsTest < Test::Unit::TestCase - - include Mocha - - def test_should_provide_parameters_for_multiple_yields_in_single_invocation - parameter_group = MultipleYields.new([1, 2, 3], [4, 5]) - parameter_groups = [] - parameter_group.each do |parameters| - parameter_groups << parameters - end - assert_equal [[1, 2, 3], [4, 5]], parameter_groups - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/no_yields_test.rb b/vendor/gems/mocha-0.9.3/test/unit/no_yields_test.rb deleted file mode 100644 index 544d1ef..0000000 ---
a/vendor/gems/mocha-0.9.3/test/unit/no_yields_test.rb +++ /dev/null @@ -1,18 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") - -require 'mocha/no_yields' - -class NoYieldsTest < Test::Unit::TestCase - - include Mocha - - def test_should_provide_parameters_for_no_yields_in_single_invocation - parameter_group = NoYields.new - parameter_groups = [] - parameter_group.each do |parameters| - parameter_groups << parameters - end - assert_equal [], parameter_groups - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/object_inspect_test.rb b/vendor/gems/mocha-0.9.3/test/unit/object_inspect_test.rb deleted file mode 100644 index 56d84a9..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/object_inspect_test.rb +++ /dev/null @@ -1,37 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/inspect' -require 'method_definer' - -class ObjectInspectTest < Test::Unit::TestCase - - def test_should_return_default_string_representation_of_object_not_including_instance_variables - object = Object.new - class << object - attr_accessor :attribute - end - object.attribute = 'instance_variable' - assert_match Regexp.new("^#$"), object.mocha_inspect - assert_no_match(/instance_variable/, object.mocha_inspect) - end - - def test_should_return_customized_string_representation_of_object - object = Object.new - class << object - define_method(:inspect) { 'custom_inspect' } - end - assert_equal 'custom_inspect', object.mocha_inspect - end - - def test_should_use_underscored_id_instead_of_object_id_or_id_so_that_they_can_be_stubbed - object = Object.new - object.define_instance_accessor(:called) - object.called = false - object.replace_instance_method(:object_id) { self.called = true; 1 } - if RUBY_VERSION < '1.9' - object.replace_instance_method(:id) { self.called = true; 1 } - end - object.mocha_inspect - assert_equal false, object.called - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/object_test.rb b/vendor/gems/mocha-0.9.3/test/unit/object_test.rb deleted file mode 100644 index 57262e4..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/object_test.rb +++ /dev/null @@ -1,82 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/object' -require 'mocha/mockery' -require 'mocha/mock' -require 'method_definer' - -class ObjectTest < Test::Unit::TestCase - - include Mocha - - def test_should_build_mocha_referring_to_self - instance = Object.new - mocha = instance.mocha - assert_not_nil mocha - assert mocha.is_a?(Mock) - assert_equal instance.mocha_inspect, mocha.mocha_inspect - end - - def test_should_reuse_existing_mocha - instance = Object.new - mocha_1 = instance.mocha - mocha_2 = instance.mocha - assert_equal mocha_1, mocha_2 - end - - def test_should_reset_mocha - instance = Object.new - assert_nil instance.reset_mocha - end - - def test_should_build_any_instance_object - klass = Class.new - any_instance = klass.any_instance - assert_not_nil any_instance - assert any_instance.is_a?(Class::AnyInstance) - end - - def test_should_return_same_any_instance_object - klass = Class.new - any_instance_1 = klass.any_instance - any_instance_2 = klass.any_instance - assert_equal any_instance_1, any_instance_2 - end - - def test_should_use_stubba_instance_method_for_object - assert_equal Mocha::InstanceMethod, Object.new.stubba_method - end - - def test_should_use_stubba_module_method_for_module - assert_equal Mocha::ModuleMethod, Module.new.stubba_method - end - - def test_should_use_stubba_class_method_for_class - 
assert_equal Mocha::ClassMethod, Class.new.stubba_method - end - - def test_should_use_stubba_class_method_for_any_instance - assert_equal Mocha::AnyInstanceMethod, Class::AnyInstance.new(nil).stubba_method - end - - def test_should_stub_self_for_object - object = Object.new - assert_equal object, object.stubba_object - end - - def test_should_stub_self_for_module - mod = Module.new - assert_equal mod, mod.stubba_object - end - - def test_should_stub_self_for_class - klass = Class.new - assert_equal klass, klass.stubba_object - end - - def test_should_stub_relevant_class_for_any_instance - klass = Class.new - any_instance = Class::AnyInstance.new(klass) - assert_equal klass, any_instance.stubba_object - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/all_of_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/all_of_test.rb deleted file mode 100644 index 14028f5..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/all_of_test.rb +++ /dev/null @@ -1,26 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/all_of' -require 'mocha/inspect' -require 'stub_matcher' - -class AllOfTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_if_all_matchers_match - matcher = all_of(Stub::Matcher.new(true), Stub::Matcher.new(true), Stub::Matcher.new(true)) - assert matcher.matches?(['any_old_value']) - end - - def test_should_not_match_if_any_matcher_does_not_match - matcher = all_of(Stub::Matcher.new(true), Stub::Matcher.new(false), Stub::Matcher.new(true)) - assert !matcher.matches?(['any_old_value']) - end - - def test_should_describe_matcher - matcher = all_of(Stub::Matcher.new(true), Stub::Matcher.new(false), Stub::Matcher.new(true)) - assert_equal 'all_of(matcher(true), matcher(false), matcher(true))', matcher.mocha_inspect - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/any_of_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/any_of_test.rb deleted file mode 100644 index 503d6dc..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/any_of_test.rb +++ /dev/null @@ -1,26 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/any_of' -require 'mocha/inspect' -require 'stub_matcher' - -class AnyOfTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_if_any_matchers_match - matcher = any_of(Stub::Matcher.new(false), Stub::Matcher.new(true), Stub::Matcher.new(false)) - assert matcher.matches?(['any_old_value']) - end - - def test_should_not_match_if_no_matchers_match - matcher = any_of(Stub::Matcher.new(false), Stub::Matcher.new(false), Stub::Matcher.new(false)) - assert !matcher.matches?(['any_old_value']) - end - - def test_should_describe_matcher - matcher = any_of(Stub::Matcher.new(false), Stub::Matcher.new(true), Stub::Matcher.new(false)) - assert_equal 'any_of(matcher(false), matcher(true), matcher(false))', matcher.mocha_inspect - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/anything_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/anything_test.rb deleted file mode 100644 index 42a88a1..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/anything_test.rb +++ /dev/null @@ -1,21 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") 
- -require 'mocha/parameter_matchers/anything' -require 'mocha/inspect' - -class AnythingTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_anything - matcher = anything - assert matcher.matches?([:something]) - assert matcher.matches?([{'x' => 'y'}]) - end - - def test_should_describe_matcher - matcher = anything - assert_equal "anything", matcher.mocha_inspect - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/equals_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/equals_test.rb deleted file mode 100644 index df1eb6e..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/equals_test.rb +++ /dev/null @@ -1,25 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/equals' -require 'mocha/inspect' - -class EqualsTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_object_that_equals_value - matcher = equals('x') - assert matcher.matches?(['x']) - end - - def test_should_not_match_object_that_does_not_equal_value - matcher = equals('x') - assert !matcher.matches?(['y']) - end - - def test_should_describe_matcher - matcher = equals('x') - assert_equal "'x'", matcher.mocha_inspect - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_entries_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_entries_test.rb deleted file mode 100644 index 181c9b2..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_entries_test.rb +++ /dev/null @@ -1,51 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/has_entries' -require 'mocha/parameter_matchers/object' -require 'mocha/inspect' - -class HasEntriesTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_hash_including_specified_entries - matcher = has_entries(:key_1 => 'value_1', :key_2 => 'value_2') - assert matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2', :key_3 => 'value_3' }]) - end - - def test_should_not_match_hash_not_including_specified_entries - matcher = has_entries(:key_1 => 'value_2', :key_2 => 'value_2', :key_3 => 'value_3') - assert !matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - - def test_should_describe_matcher - matcher = has_entries(:key_1 => 'value_1', :key_2 => 'value_2') - description = matcher.mocha_inspect - matches = /has_entries\((.*)\)/.match(description) - assert_not_nil matches[0] - entries = eval(matches[1], binding, __FILE__, __LINE__) - assert_equal 'value_1', entries[:key_1] - assert_equal 'value_2', entries[:key_2] - end - - def test_should_match_hash_including_specified_entries_with_nested_key_matchers - matcher = has_entries(equals(:key_1) => 'value_1', equals(:key_2) => 'value_2') - assert matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2', :key_3 => 'value_3' }]) - end - - def test_should_not_match_hash_not_including_specified_entries_with_nested_key_matchers - matcher = has_entries(equals(:key_1) => 'value_2', equals(:key_2) => 'value_2', equals(:key_3) => 'value_3') - assert !matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - - def test_should_match_hash_including_specified_entries_with_nested_value_matchers - matcher = has_entries(:key_1 => equals('value_1'), :key_2 => equals('value_2')) - assert matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2', :key_3 => 
'value_3' }]) - end - - def test_should_not_match_hash_not_including_specified_entries_with_nested_value_matchers - matcher = has_entries(:key_1 => equals('value_2'), :key_2 => equals('value_2'), :key_3 => equals('value_3')) - assert !matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_entry_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_entry_test.rb deleted file mode 100644 index 5211c04..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_entry_test.rb +++ /dev/null @@ -1,62 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/has_entry' -require 'mocha/parameter_matchers/object' -require 'mocha/parameter_matchers/equals' -require 'mocha/inspect' - -class HasEntryTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_hash_including_specified_key_value_pair - matcher = has_entry(:key_1, 'value_1') - assert matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - - def test_should_not_match_hash_not_including_specified_key_value_pair - matcher = has_entry(:key_1, 'value_2') - assert !matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - - def test_should_match_hash_including_specified_entry - matcher = has_entry(:key_1 => 'value_1') - assert matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - - def test_should_not_match_hash_not_including_specified_entry - matcher = has_entry(:key_1 => 'value_2') - assert !matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - - def test_should_describe_matcher_with_key_value_pair - matcher = has_entry(:key_1, 'value_1') - assert_equal "has_entry(:key_1 => 'value_1')", matcher.mocha_inspect - end - - def test_should_describe_matcher_with_entry - matcher = has_entry(:key_1 => 'value_1') - assert_equal "has_entry(:key_1 => 'value_1')", matcher.mocha_inspect - end - - def test_should_match_hash_including_specified_entry_with_nested_key_matcher - matcher = has_entry(equals(:key_1) => 'value_1') - assert matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - - def test_should_match_hash_including_specified_entry_with_nested_value_matcher - matcher = has_entry(:key_1 => equals('value_1')) - assert matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - - def test_should_not_match_hash_not_including_specified_entry_with_nested_key_matcher - matcher = has_entry(equals(:key_1) => 'value_2') - assert !matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - - def test_should_not_match_hash_not_including_specified_entry_with_nested_value_matcher - matcher = has_entry(:key_1 => equals('value_2')) - assert !matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_key_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_key_test.rb deleted file mode 100644 index ad8362e..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_key_test.rb +++ /dev/null @@ -1,36 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/has_key' -require 'mocha/parameter_matchers/object' -require 'mocha/inspect' - -class HasKeyTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def 
test_should_match_hash_including_specified_key - matcher = has_key(:key_1) - assert matcher.matches?([{ :key_1 => 1, :key_2 => 2 }]) - end - - def test_should_not_match_hash_not_including_specified_key - matcher = has_key(:key_1) - assert !matcher.matches?([{ :key_2 => 2 }]) - end - - def test_should_describe_matcher - matcher = has_key(:key) - assert_equal 'has_key(:key)', matcher.mocha_inspect - end - - def test_should_match_hash_including_specified_key_with_nested_key_matcher - matcher = has_key(equals(:key_1)) - assert matcher.matches?([{ :key_1 => 1, :key_2 => 2 }]) - end - - def test_should_not_match_hash_not_including_specified_key_with_nested_key_matcher - matcher = has_key(equals(:key_1)) - assert !matcher.matches?([{ :key_2 => 2 }]) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_value_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_value_test.rb deleted file mode 100644 index 0219f6a..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/has_value_test.rb +++ /dev/null @@ -1,37 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/has_value' -require 'mocha/parameter_matchers/object' -require 'mocha/parameter_matchers/equals' -require 'mocha/inspect' - -class HasValueTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_hash_including_specified_value - matcher = has_value('value_1') - assert matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - - def test_should_not_match_hash_not_including_specified_value - matcher = has_value('value_1') - assert !matcher.matches?([{ :key_2 => 'value_2' }]) - end - - def test_should_describe_matcher - matcher = has_value('value_1') - assert_equal "has_value('value_1')", matcher.mocha_inspect - end - - def test_should_match_hash_including_specified_value_with_nested_value_matcher - matcher = has_value(equals('value_1')) - assert matcher.matches?([{ :key_1 => 'value_1', :key_2 => 'value_2' }]) - end - - def test_should_not_match_hash_not_including_specified_value_with_nested_value_matcher - matcher = has_value(equals('value_1')) - assert !matcher.matches?([{ :key_2 => 'value_2' }]) - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/includes_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/includes_test.rb deleted file mode 100644 index 70fb649..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/includes_test.rb +++ /dev/null @@ -1,25 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/includes' -require 'mocha/inspect' - -class IncludesTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_object_including_value - matcher = includes(:x) - assert matcher.matches?([[:x, :y, :z]]) - end - - def test_should_not_match_object_that_does_not_include_value - matcher = includes(:not_included) - assert !matcher.matches?([[:x, :y, :z]]) - end - - def test_should_describe_matcher - matcher = includes(:x) - assert_equal "includes(:x)", matcher.mocha_inspect - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/instance_of_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/instance_of_test.rb deleted file mode 100644 index 415b79a..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/instance_of_test.rb +++ /dev/null @@ 
-1,25 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/instance_of' -require 'mocha/inspect' - -class InstanceOfTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_object_that_is_an_instance_of_specified_class - matcher = instance_of(String) - assert matcher.matches?(['string']) - end - - def test_should_not_match_object_that_is_not_an_instance_of_specified_class - matcher = instance_of(String) - assert !matcher.matches?([99]) - end - - def test_should_describe_matcher - matcher = instance_of(String) - assert_equal "instance_of(String)", matcher.mocha_inspect - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/is_a_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/is_a_test.rb deleted file mode 100644 index c9ef919..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/is_a_test.rb +++ /dev/null @@ -1,25 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/is_a' -require 'mocha/inspect' - -class IsATest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_object_that_is_a_specified_class - matcher = is_a(Integer) - assert matcher.matches?([99]) - end - - def test_should_not_match_object_that_is_not_a_specified_class - matcher = is_a(Integer) - assert !matcher.matches?(['string']) - end - - def test_should_describe_matcher - matcher = is_a(Integer) - assert_equal "is_a(Integer)", matcher.mocha_inspect - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/kind_of_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/kind_of_test.rb deleted file mode 100644 index 1167e5c..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/kind_of_test.rb +++ /dev/null @@ -1,25 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/kind_of' -require 'mocha/inspect' - -class KindOfTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_object_that_is_a_kind_of_specified_class - matcher = kind_of(Integer) - assert matcher.matches?([99]) - end - - def test_should_not_match_object_that_is_not_a_kind_of_specified_class - matcher = kind_of(Integer) - assert !matcher.matches?(['string']) - end - - def test_should_describe_matcher - matcher = kind_of(Integer) - assert_equal "kind_of(Integer)", matcher.mocha_inspect - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/not_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/not_test.rb deleted file mode 100644 index 4cb6790..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/not_test.rb +++ /dev/null @@ -1,26 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/not' -require 'mocha/inspect' -require 'stub_matcher' - -class NotTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_if_matcher_does_not_match - matcher = Not(Stub::Matcher.new(false)) - assert matcher.matches?(['any_old_value']) - end - - def test_should_not_match_if_matcher_does_match - matcher = Not(Stub::Matcher.new(true)) - assert !matcher.matches?(['any_old_value']) - end - - def test_should_describe_matcher - matcher = Not(Stub::Matcher.new(true)) - assert_equal 'Not(matcher(true))', matcher.mocha_inspect - end - -end \ No newline at end of file diff --git 
a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/regexp_matches_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/regexp_matches_test.rb deleted file mode 100644 index 5aec002..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/regexp_matches_test.rb +++ /dev/null @@ -1,25 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/regexp_matches' -require 'mocha/inspect' - -class RegexpMatchesTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_parameter_matching_regular_expression - matcher = regexp_matches(/oo/) - assert matcher.matches?(['foo']) - end - - def test_should_not_match_parameter_not_matching_regular_expression - matcher = regexp_matches(/oo/) - assert !matcher.matches?(['bar']) - end - - def test_should_describe_matcher - matcher = regexp_matches(/oo/) - assert_equal "regexp_matches(/oo/)", matcher.mocha_inspect - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/responds_with_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/responds_with_test.rb deleted file mode 100644 index f32bf8b..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/responds_with_test.rb +++ /dev/null @@ -1,25 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/responds_with' -require 'mocha/inspect' - -class RespondsWithTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_parameter_responding_with_expected_value - matcher = responds_with(:upcase, 'FOO') - assert matcher.matches?(['foo']) - end - - def test_should_not_match_parameter_responding_with_unexpected_value - matcher = responds_with(:upcase, 'FOO') - assert !matcher.matches?(['bar']) - end - - def test_should_describe_matcher - matcher = responds_with(:foo, :bar) - assert_equal 'responds_with(:foo, :bar)', matcher.mocha_inspect - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/stub_matcher.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/stub_matcher.rb deleted file mode 100644 index 8bb8172..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/stub_matcher.rb +++ /dev/null @@ -1,27 +0,0 @@ -module Stub - - class Matcher - - attr_accessor :value - - def initialize(matches) - @matches = matches - end - - def matches?(available_parameters) - value = available_parameters.shift - @value = value - @matches - end - - def mocha_inspect - "matcher(#{@matches})" - end - - def to_matcher - self - end - - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/yaml_equivalent_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/yaml_equivalent_test.rb deleted file mode 100644 index b163f30..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameter_matchers/yaml_equivalent_test.rb +++ /dev/null @@ -1,25 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "..", "test_helper") - -require 'mocha/parameter_matchers/yaml_equivalent' -require 'mocha/inspect' - -class YamlEquivalentTest < Test::Unit::TestCase - - include Mocha::ParameterMatchers - - def test_should_match_parameter_matching_yaml_representation_of_object - matcher = yaml_equivalent([1, 2, 3]) - assert matcher.matches?(["--- \n- 1\n- 2\n- 3\n"]) - end - - def test_should_not_match_parameter_matching_yaml_representation_of_object - matcher = 
yaml_equivalent([1, 2, 3]) - assert !matcher.matches?(["--- \n- 4\n- 5\n"]) - end - - def test_should_describe_matcher - matcher = yaml_equivalent([1, 2, 3]) - assert_equal "yaml_equivalent([1, 2, 3])", matcher.mocha_inspect - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/parameters_matcher_test.rb b/vendor/gems/mocha-0.9.3/test/unit/parameters_matcher_test.rb deleted file mode 100644 index 612805e..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/parameters_matcher_test.rb +++ /dev/null @@ -1,121 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/parameters_matcher' - -class ParametersMatcherTest < Test::Unit::TestCase - - include Mocha - - def test_should_match_any_actual_parameters_if_no_expected_parameters_specified - parameters_matcher = ParametersMatcher.new - assert parameters_matcher.match?(actual_parameters = [1, 2, 3]) - end - - def test_should_match_if_actual_parameters_are_same_as_expected_parameters - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5, 6]) - assert parameters_matcher.match?(actual_parameters = [4, 5, 6]) - end - - def test_should_not_match_if_actual_parameters_are_different_from_expected_parameters - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5, 6]) - assert !parameters_matcher.match?(actual_parameters = [1, 2, 3]) - end - - def test_should_not_match_if_there_are_less_actual_parameters_than_expected_parameters - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5, 6]) - assert !parameters_matcher.match?(actual_parameters = [4, 5]) - end - - def test_should_not_match_if_there_are_more_actual_parameters_than_expected_parameters - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5]) - assert !parameters_matcher.match?(actual_parameters = [4, 5, 6]) - end - - def test_should_not_match_if_not_all_required_parameters_are_supplied - optionals = ParameterMatchers::Optionally.new(6, 7) - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5, optionals]) - assert !parameters_matcher.match?(actual_parameters = [4]) - end - - def test_should_match_if_all_required_parameters_match_and_no_optional_parameters_are_supplied - optionals = ParameterMatchers::Optionally.new(6, 7) - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5, optionals]) - assert parameters_matcher.match?(actual_parameters = [4, 5]) - end - - def test_should_match_if_all_required_and_optional_parameters_match_and_some_optional_parameters_are_supplied - optionals = ParameterMatchers::Optionally.new(6, 7) - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5, optionals]) - assert parameters_matcher.match?(actual_parameters = [4, 5, 6]) - end - - def test_should_match_if_all_required_and_optional_parameters_match_and_all_optional_parameters_are_supplied - optionals = ParameterMatchers::Optionally.new(6, 7) - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5, optionals]) - assert parameters_matcher.match?(actual_parameters = [4, 5, 6, 7]) - end - - def test_should_not_match_if_all_required_and_optional_parameters_match_but_too_many_optional_parameters_are_supplied - optionals = ParameterMatchers::Optionally.new(6, 7) - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5, optionals]) - assert !parameters_matcher.match?(actual_parameters = [4, 5, 6, 7, 8]) - end - - def 
test_should_not_match_if_all_required_parameters_match_but_some_optional_parameters_do_not_match - optionals = ParameterMatchers::Optionally.new(6, 7) - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5, optionals]) - assert !parameters_matcher.match?(actual_parameters = [4, 5, 6, 0]) - end - - def test_should_not_match_if_some_required_parameters_do_not_match_although_all_optional_parameters_do_match - optionals = ParameterMatchers::Optionally.new(6, 7) - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5, optionals]) - assert !parameters_matcher.match?(actual_parameters = [4, 0, 6]) - end - - def test_should_not_match_if_all_required_parameters_match_but_no_optional_parameters_match - optionals = ParameterMatchers::Optionally.new(6, 7) - parameters_matcher = ParametersMatcher.new(expected_parameters = [4, 5, optionals]) - assert !parameters_matcher.match?(actual_parameters = [4, 5, 0, 0]) - end - - def test_should_match_if_actual_parameters_satisfy_matching_block - parameters_matcher = ParametersMatcher.new { |x, y| x + y == 3 } - assert parameters_matcher.match?(actual_parameters = [1, 2]) - end - - def test_should_not_match_if_actual_parameters_do_not_satisfy_matching_block - parameters_matcher = ParametersMatcher.new { |x, y| x + y == 3 } - assert !parameters_matcher.match?(actual_parameters = [2, 3]) - end - - def test_should_remove_outer_array_braces - params = [1, 2, [3, 4]] - parameters_matcher = ParametersMatcher.new(params) - assert_equal '(1, 2, [3, 4])', parameters_matcher.mocha_inspect - end - - def test_should_display_numeric_arguments_as_is - params = [1, 2, 3] - parameters_matcher = ParametersMatcher.new(params) - assert_equal '(1, 2, 3)', parameters_matcher.mocha_inspect - end - - def test_should_remove_curly_braces_if_hash_is_only_argument - params = [{:a => 1, :z => 2}] - parameters_matcher = ParametersMatcher.new(params) - assert_nil parameters_matcher.mocha_inspect.index('{') - assert_nil parameters_matcher.mocha_inspect.index('}') - end - - def test_should_not_remove_curly_braces_if_hash_is_not_the_only_argument - params = [1, {:a => 1}] - parameters_matcher = ParametersMatcher.new(params) - assert_equal '(1, {:a => 1})', parameters_matcher.mocha_inspect - end - - def test_should_indicate_that_matcher_will_match_any_actual_parameters - parameters_matcher = ParametersMatcher.new - assert_equal '(any_parameters)', parameters_matcher.mocha_inspect - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/return_values_test.rb b/vendor/gems/mocha-0.9.3/test/unit/return_values_test.rb deleted file mode 100644 index 01ddfbc..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/return_values_test.rb +++ /dev/null @@ -1,63 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") - -require 'mocha/return_values' - -class ReturnValuesTest < Test::Unit::TestCase - - include Mocha - - def test_should_return_nil - values = ReturnValues.new - assert_nil values.next - end - - def test_should_keep_returning_nil - values = ReturnValues.new - values.next - assert_nil values.next - assert_nil values.next - end - - def test_should_return_evaluated_single_return_value - values = ReturnValues.new(SingleReturnValue.new('value')) - assert_equal 'value', values.next - end - - def test_should_keep_returning_evaluated_single_return_value - values = ReturnValues.new(SingleReturnValue.new('value')) - values.next - assert_equal 'value', values.next - assert_equal 'value', values.next - end - - def 
test_should_return_consecutive_evaluated_single_return_values - values = ReturnValues.new(SingleReturnValue.new('value_1'), SingleReturnValue.new('value_2')) - assert_equal 'value_1', values.next - assert_equal 'value_2', values.next - end - - def test_should_keep_returning_last_of_consecutive_evaluated_single_return_values - values = ReturnValues.new(SingleReturnValue.new('value_1'), SingleReturnValue.new('value_2')) - values.next - values.next - assert_equal 'value_2', values.next - assert_equal 'value_2', values.next - end - - def test_should_build_single_return_values_for_each_values - values = ReturnValues.build('value_1', 'value_2', 'value_3').values - assert_equal 'value_1', values[0].evaluate - assert_equal 'value_2', values[1].evaluate - assert_equal 'value_3', values[2].evaluate - end - - def test_should_combine_two_sets_of_return_values - values_1 = ReturnValues.build('value_1') - values_2 = ReturnValues.build('value_2a', 'value_2b') - values = (values_1 + values_2).values - assert_equal 'value_1', values[0].evaluate - assert_equal 'value_2a', values[1].evaluate - assert_equal 'value_2b', values[2].evaluate - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/sequence_test.rb b/vendor/gems/mocha-0.9.3/test/unit/sequence_test.rb deleted file mode 100644 index 544b3fe..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/sequence_test.rb +++ /dev/null @@ -1,104 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/sequence' -require 'mocha/expectation' - -class SequenceTest < Test::Unit::TestCase - - include Mocha - - class FakeExpectation - - attr_reader :ordering_constraints - - def initialize(satisfied = false) - @satisfied = satisfied - @ordering_constraints = [] - end - - def add_ordering_constraint(ordering_constraint) - @ordering_constraints << ordering_constraint - end - - def satisfied? 
- @satisfied - end - - end - - def test_should_be_satisfied_if_no_expectations_added - sequence = Sequence.new('name') - assert sequence.satisfied_to_index?(0) - end - - def test_should_be_satisfied_if_one_unsatisfied_expectations_added_but_it_is_not_included_by_index - sequence = Sequence.new('name') - expectation = FakeExpectation.new(satisfied = false) - sequence.constrain_as_next_in_sequence(expectation) - assert sequence.satisfied_to_index?(0) - end - - def test_should_not_be_satisfied_if_one_unsatisfied_expectations_added_and_it_is_included_by_index - sequence = Sequence.new('name') - expectation = FakeExpectation.new(satisfied = false) - sequence.constrain_as_next_in_sequence(expectation) - assert !sequence.satisfied_to_index?(1) - end - - def test_should_be_satisfied_if_one_satisfied_expectations_added_and_it_is_included_by_index - sequence = Sequence.new('name') - expectation = FakeExpectation.new(satisfied = true) - sequence.constrain_as_next_in_sequence(expectation) - assert sequence.satisfied_to_index?(1) - end - - def test_should_not_be_satisfied_if_one_satisfied_and_one_unsatisfied_expectation_added_and_both_are_included_by_index - sequence = Sequence.new('name') - expectation_one = FakeExpectation.new(satisfied = true) - expectation_two = FakeExpectation.new(satisfied = false) - sequence.constrain_as_next_in_sequence(expectation_one) - sequence.constrain_as_next_in_sequence(expectation_two) - assert !sequence.satisfied_to_index?(2) - end - - def test_should_be_satisfied_if_two_satisfied_expectations_added_and_both_are_included_by_index - sequence = Sequence.new('name') - expectation_one = FakeExpectation.new(satisfied = true) - expectation_two = FakeExpectation.new(satisfied = true) - sequence.constrain_as_next_in_sequence(expectation_one) - sequence.constrain_as_next_in_sequence(expectation_two) - assert sequence.satisfied_to_index?(2) - end - - def test_should_add_ordering_constraint_to_expectation - sequence = Sequence.new('name') - expectation = FakeExpectation.new - sequence.constrain_as_next_in_sequence(expectation) - assert_equal 1, expectation.ordering_constraints.length - end - - def test_should_not_allow_invocation_of_second_method_when_first_n_sequence_has_not_been_invoked - sequence = Sequence.new('name') - expectation_one = FakeExpectation.new(satisfied = false) - expectation_two = FakeExpectation.new(satisfied = false) - sequence.constrain_as_next_in_sequence(expectation_one) - sequence.constrain_as_next_in_sequence(expectation_two) - assert !expectation_two.ordering_constraints[0].allows_invocation_now? - end - - def test_should_allow_invocation_of_second_method_when_first_in_sequence_has_been_invoked - sequence = Sequence.new('name') - expectation_one = FakeExpectation.new(satisfied = true) - expectation_two = FakeExpectation.new(satisfied = false) - sequence.constrain_as_next_in_sequence(expectation_one) - sequence.constrain_as_next_in_sequence(expectation_two) - assert expectation_two.ordering_constraints[0].allows_invocation_now? 
- end - - def test_should_describe_ordering_constraint_as_being_part_of_named_sequence - sequence = Sequence.new('wibble') - expectation = FakeExpectation.new - sequence.constrain_as_next_in_sequence(expectation) - assert_equal "in sequence 'wibble'", expectation.ordering_constraints[0].mocha_inspect - end - -end \ No newline at end of file diff --git a/vendor/gems/mocha-0.9.3/test/unit/single_return_value_test.rb b/vendor/gems/mocha-0.9.3/test/unit/single_return_value_test.rb deleted file mode 100644 index 9a94e09..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/single_return_value_test.rb +++ /dev/null @@ -1,14 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") - -require 'mocha/single_return_value' - -class SingleReturnValueTest < Test::Unit::TestCase - - include Mocha - - def test_should_return_value - value = SingleReturnValue.new('value') - assert_equal 'value', value.evaluate - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/single_yield_test.rb b/vendor/gems/mocha-0.9.3/test/unit/single_yield_test.rb deleted file mode 100644 index 12bd0a2..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/single_yield_test.rb +++ /dev/null @@ -1,18 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") - -require 'mocha/single_yield' - -class SingleYieldTest < Test::Unit::TestCase - - include Mocha - - def test_should_provide_parameters_for_single_yield_in_single_invocation - parameter_group = SingleYield.new(1, 2, 3) - parameter_groups = [] - parameter_group.each do |parameters| - parameter_groups << parameters - end - assert_equal [[1, 2, 3]], parameter_groups - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/state_machine_test.rb b/vendor/gems/mocha-0.9.3/test/unit/state_machine_test.rb deleted file mode 100644 index 4ccb229..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/state_machine_test.rb +++ /dev/null @@ -1,98 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") - -require 'mocha/state_machine' - -class StateMachineTest < Test::Unit::TestCase - - include Mocha - - def test_should_initially_be_in_no_state - state_machine = StateMachine.new('name') - any_state.each do |state| - assert !state_machine.is(state).active? - assert state_machine.is_not(state).active? - end - end - - def test_should_be_able_to_enter_a_state - state_machine = StateMachine.new('name') - state = 'A' - other_states = any_state.reject { |s| s == state } - - state_machine.is(state).activate - - assert state_machine.is(state).active? - assert !state_machine.is_not(state).active? - other_states.each do |s| - assert !state_machine.is(s).active? - assert state_machine.is_not(s).active? - end - end - - def test_should_be_able_to_change_state - state_machine = StateMachine.new('name') - state = 'B' - other_states = any_state.reject { |s| s == state } - - state_machine.is('A').activate - state_machine.is(state).activate - - assert state_machine.is(state).active? - assert !state_machine.is_not(state).active? - other_states.each do |s| - assert !state_machine.is(s).active? - assert state_machine.is_not(s).active? - end - end - - def test_should_be_put_into_an_initial_state - state_machine = StateMachine.new('name') - initial_state = 'A' - other_states = any_state.reject { |s| s == initial_state } - - state_machine.starts_as(initial_state) - - assert state_machine.is(initial_state).active? - assert !state_machine.is_not(initial_state).active? - other_states.each do |state| - assert !state_machine.is(state).active? 
- assert state_machine.is_not(state).active? - end - end - - def test_should_be_put_into_a_new_state - next_state = 'B' - - other_states = any_state.reject { |s| s == next_state } - state_machine = StateMachine.new('name').starts_as('A') - - state_machine.become(next_state) - - assert state_machine.is(next_state).active? - assert !state_machine.is_not(next_state).active? - other_states.each do |state| - assert !state_machine.is(state).active? - assert state_machine.is_not(state).active? - end - end - - def test_should_describe_itself_as_name_and_current_state - state_machine = StateMachine.new('state_machine_name') - assert_equal 'state_machine_name has no current state', state_machine.mocha_inspect - inspectable_state = Class.new { define_method(:mocha_inspect) { "'inspectable_state'" } }.new - state_machine.is(inspectable_state).activate - assert_equal "state_machine_name is 'inspectable_state'", state_machine.mocha_inspect - end - - def test_should_have_self_describing_states - state_machine = StateMachine.new('state_machine_name') - inspectable_state = Class.new { define_method(:mocha_inspect) { "'inspectable_state'" } }.new - assert_equal "state_machine_name is 'inspectable_state'", state_machine.is(inspectable_state).mocha_inspect - assert_equal "state_machine_name is not 'inspectable_state'", state_machine.is_not(inspectable_state).mocha_inspect - end - - def any_state - %w(A B C D) - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/string_inspect_test.rb b/vendor/gems/mocha-0.9.3/test/unit/string_inspect_test.rb deleted file mode 100644 index 43b9c4e..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/string_inspect_test.rb +++ /dev/null @@ -1,11 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") -require 'mocha/inspect' - -class StringInspectTest < Test::Unit::TestCase - - def test_should_replace_escaped_quotes_with_single_quote - string = "my_string" - assert_equal "'my_string'", string.mocha_inspect - end - -end diff --git a/vendor/gems/mocha-0.9.3/test/unit/yield_parameters_test.rb b/vendor/gems/mocha-0.9.3/test/unit/yield_parameters_test.rb deleted file mode 100644 index 4e93f13..0000000 --- a/vendor/gems/mocha-0.9.3/test/unit/yield_parameters_test.rb +++ /dev/null @@ -1,93 +0,0 @@ -require File.join(File.dirname(__FILE__), "..", "test_helper") - -require 'mocha/yield_parameters' -require 'mocha/no_yields' -require 'mocha/single_yield' -require 'mocha/multiple_yields' - -class YieldParametersTest < Test::Unit::TestCase - - include Mocha - - def test_should_return_null_yield_parameter_group_by_default - yield_parameters = YieldParameters.new - assert yield_parameters.next_invocation.is_a?(NoYields) - end - - def test_should_return_single_yield_parameter_group - yield_parameters = YieldParameters.new - yield_parameters.add(1, 2, 3) - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(SingleYield) - assert_equal [1, 2, 3], parameter_group.parameters - end - - def test_should_keep_returning_single_yield_parameter_group - yield_parameters = YieldParameters.new - yield_parameters.add(1, 2, 3) - yield_parameters.next_invocation - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(SingleYield) - assert_equal [1, 2, 3], parameter_group.parameters - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(SingleYield) - assert_equal [1, 2, 3], parameter_group.parameters - end - - def test_should_return_consecutive_single_yield_parameter_groups - yield_parameters = 
YieldParameters.new - yield_parameters.add(1, 2, 3) - yield_parameters.add(4, 5) - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(SingleYield) - assert_equal [1, 2, 3], parameter_group.parameters - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(SingleYield) - assert_equal [4, 5], parameter_group.parameters - end - - def test_should_return_multiple_yield_parameter_group - yield_parameters = YieldParameters.new - yield_parameters.multiple_add([1, 2, 3], [4, 5]) - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(MultipleYields) - assert_equal [[1, 2, 3], [4, 5]], parameter_group.parameter_groups - end - - def test_should_keep_returning_multiple_yield_parameter_group - yield_parameters = YieldParameters.new - yield_parameters.multiple_add([1, 2, 3], [4, 5]) - yield_parameters.next_invocation - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(MultipleYields) - assert_equal [[1, 2, 3], [4, 5]], parameter_group.parameter_groups - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(MultipleYields) - assert_equal [[1, 2, 3], [4, 5]], parameter_group.parameter_groups - end - - def test_should_return_consecutive_multiple_yield_parameter_groups - yield_parameters = YieldParameters.new - yield_parameters.multiple_add([1, 2, 3], [4, 5]) - yield_parameters.multiple_add([6, 7], [8, 9, 0]) - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(MultipleYields) - assert_equal [[1, 2, 3], [4, 5]], parameter_group.parameter_groups - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(MultipleYields) - assert_equal [[6, 7], [8, 9, 0]], parameter_group.parameter_groups - end - - def test_should_return_consecutive_single_and_multiple_yield_parameter_groups - yield_parameters = YieldParameters.new - yield_parameters.add(1, 2, 3) - yield_parameters.multiple_add([4, 5, 6], [7, 8]) - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(SingleYield) - assert_equal [1, 2, 3], parameter_group.parameters - parameter_group = yield_parameters.next_invocation - assert parameter_group.is_a?(MultipleYields) - assert_equal [[4, 5, 6], [7, 8]], parameter_group.parameter_groups - end - -end \ No newline at end of file diff --git a/vendor/gems/paperclip-2.1.2/.specification b/vendor/gems/paperclip-2.1.2/.specification deleted file mode 100644 index 14db791..0000000 --- a/vendor/gems/paperclip-2.1.2/.specification +++ /dev/null @@ -1,96 +0,0 @@ ---- !ruby/object:Gem::Specification -name: paperclip -version: !ruby/object:Gem::Version - version: 2.1.2 -platform: ruby -authors: -- Jon Yurek -autorequire: -bindir: bin -cert_chain: [] - -date: 2008-05-13 00:00:00 -04:00 -default_executable: -dependencies: [] - -description: -email: jyurek@thoughtbot.com -executables: [] - -extensions: [] - -extra_rdoc_files: -- README -files: -- README -- LICENSE -- Rakefile -- init.rb -- generators/paperclip -- generators/paperclip/paperclip_generator.rb -- generators/paperclip/templates -- generators/paperclip/templates/paperclip_migration.rb -- generators/paperclip/USAGE -- lib/paperclip -- lib/paperclip/attachment.rb -- lib/paperclip/geometry.rb -- lib/paperclip/iostream.rb -- lib/paperclip/storage.rb -- lib/paperclip/thumbnail.rb -- lib/paperclip/upfile.rb -- lib/paperclip.rb -- tasks/paperclip_tasks.rake -- test/database.yml -- test/debug.log -- test/fixtures -- test/fixtures/12k.png -- 
test/fixtures/50x50.png -- test/fixtures/5k.png -- test/fixtures/bad.png -- test/fixtures/text.txt -- test/helper.rb -- test/s3.yml -- test/test_attachment.rb -- test/test_geometry.rb -- test/test_integration.rb -- test/test_iostream.rb -- test/test_paperclip.rb -- test/test_storage.rb -- test/test_thumbnail.rb -has_rdoc: true -homepage: http://www.thoughtbot.com/ -licenses: [] - -post_install_message: -rdoc_options: -- --line-numbers -- --inline-source -require_paths: -- lib -required_ruby_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -required_rubygems_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -requirements: -- ImageMagick -rubyforge_project: paperclip -rubygems_version: 1.3.5 -signing_key: -specification_version: 2 -summary: File attachments as attributes for ActiveRecord -test_files: -- test/test_attachment.rb -- test/test_geometry.rb -- test/test_integration.rb -- test/test_iostream.rb -- test/test_paperclip.rb -- test/test_storage.rb -- test/test_thumbnail.rb diff --git a/vendor/gems/paperclip-2.1.2/LICENSE b/vendor/gems/paperclip-2.1.2/LICENSE deleted file mode 100644 index 299b9ed..0000000 --- a/vendor/gems/paperclip-2.1.2/LICENSE +++ /dev/null @@ -1,26 +0,0 @@ - -LICENSE - -The MIT License - -Copyright (c) 2008 Jon Yurek and thoughtbot, inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - - diff --git a/vendor/gems/paperclip-2.1.2/README b/vendor/gems/paperclip-2.1.2/README deleted file mode 100644 index 53383a9..0000000 --- a/vendor/gems/paperclip-2.1.2/README +++ /dev/null @@ -1,48 +0,0 @@ -=Paperclip - -Paperclip is intended as an easy file attachment library for ActiveRecord. The intent behind it was to keep setup as easy as possible and to treat files as much like other attributes as possible. This means they aren't saved to their final locations on disk, nor are they deleted if set to nil, until ActiveRecord::Base#save is called. It manages validations based on size and presence, if required. It can transform its assigned image into thumbnails if needed, and the prerequisites are as simple as installing ImageMagick (which, for most modern Unix-based systems, is as easy as installing the right packages). Attached files are saved to the filesystem and referenced in the browser by an easily understandable specification, which has sensible and useful defaults. - -See the documentation for the +has_attached_file+ method for options. 
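The size and presence validations mentioned above are declared in the model alongside +has_attached_file+. A minimal sketch, assuming the +validates_attachment_presence+ and +validates_attachment_size+ helpers (and the +:less_than+ option) are available in this Paperclip line; the +User+ model and +:avatar+ attachment are illustrative only:

  class User < ActiveRecord::Base
    has_attached_file :avatar

    # Refuse to save the record when no file has been assigned,
    # or when the assigned file is larger than two megabytes.
    validates_attachment_presence :avatar
    validates_attachment_size :avatar, :less_than => 2.megabytes
  end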
- -==Usage - -In your model: - - class User < ActiveRecord::Base - has_attached_file :avatar, :styles => { :medium => "300x300>", :thumb => "100x100>" } - end - -In your migrations: - - class AddAvatarColumsToUser < ActiveRecord::Migration - def self.up - add_column :users, :avatar_file_name, :string - add_column :users, :avatar_content_type, :string - add_column :users, :avatar_file_size, :integer - end - - def self.down - remove_column :users, :avatar_file_name - remove_column :users, :avatar_content_type - remove_column :users, :avatar_file_size - end - end - -In your edit and new views: - - <% form_for :user, @user, :url => user_path, :html => { :multipart => true } do |form| %> - <%= form.file_field :avatar %> - <% end %> - -In your controller: - - def create - @user = User.create( params[:user] ) - end - -In your show view: - - <%= image_tag @user.avatar.url %> - <%= image_tag @user.avatar.url(:medium) %> - <%= image_tag @user.avatar.url(:thumb) %> - diff --git a/vendor/gems/paperclip-2.1.2/Rakefile b/vendor/gems/paperclip-2.1.2/Rakefile deleted file mode 100644 index 60d9ebe..0000000 --- a/vendor/gems/paperclip-2.1.2/Rakefile +++ /dev/null @@ -1,84 +0,0 @@ -require 'rake' -require 'rake/testtask' -require 'rake/rdoctask' -require 'rake/gempackagetask' - -$LOAD_PATH << File.join(File.dirname(__FILE__), 'lib') -require 'paperclip' - -desc 'Default: run unit tests.' -task :default => [:clean, :test] - -desc 'Test the paperclip plugin.' -Rake::TestTask.new(:test) do |t| - t.libs << 'lib' << 'profile' - t.pattern = 'test/**/test_*.rb' - t.verbose = true -end - -desc 'Start an IRB session with all necessary files required.' -task :shell do |t| - chdir File.dirname(__FILE__) - exec 'irb -I lib/ -I lib/paperclip -r rubygems -r active_record -r tempfile -r init' -end - -desc 'Generate documentation for the paperclip plugin.' -Rake::RDocTask.new(:rdoc) do |rdoc| - rdoc.rdoc_dir = 'doc' - rdoc.title = 'Paperclip' - rdoc.options << '--line-numbers' << '--inline-source' - rdoc.rdoc_files.include('README') - rdoc.rdoc_files.include('lib/**/*.rb') -end - -desc 'Update documentation on website' -task :sync_docs => 'rdoc' do - `rsync -ave ssh doc/ dev@dev.thoughtbot.com:/home/dev/www/dev.thoughtbot.com/paperclip` -end - -desc 'Clean up files.' 
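Note that the edit/new view snippet in the README above uses the Rails 2 block-helper form, `<% form_for ... %>`. Under Rails 3, which this commit moves the app to, block helpers that emit markup are written with `<%=`. A minimal adaptation of the same hypothetical form would be:

    <%= form_for :user, @user, :url => user_path, :html => { :multipart => true } do |form| %>
      <%= form.file_field :avatar %>
    <% end %>
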
-task :clean do |t| - FileUtils.rm_rf "doc" - FileUtils.rm_rf "tmp" - FileUtils.rm_rf "pkg" - FileUtils.rm "test/debug.log" rescue nil - FileUtils.rm "test/paperclip.db" rescue nil -end - -spec = Gem::Specification.new do |s| - s.name = "paperclip" - s.version = Paperclip::VERSION - s.author = "Jon Yurek" - s.email = "jyurek@thoughtbot.com" - s.homepage = "http://www.thoughtbot.com/" - s.platform = Gem::Platform::RUBY - s.summary = "File attachments as attributes for ActiveRecord" - s.files = FileList["README", - "LICENSE", - "Rakefile", - "init.rb", - "{generators,lib,tasks,test}/**/*"].to_a - s.require_path = "lib" - s.test_files = FileList["test/**/test_*.rb"].to_a - s.rubyforge_project = "paperclip" - s.has_rdoc = true - s.extra_rdoc_files = ["README"] - s.rdoc_options << '--line-numbers' << '--inline-source' - s.requirements << "ImageMagick" -end - -Rake::GemPackageTask.new(spec) do |pkg| - pkg.need_tar = true -end - -desc "Release new version" -task :release => [:test, :sync_docs, :gem] do - require 'rubygems' - require 'rubyforge' - r = RubyForge.new - r.login - r.add_release spec.rubyforge_project, - spec.name, - spec.version, - File.join("pkg", "#{spec.name}-#{spec.version}.gem") -end diff --git a/vendor/gems/paperclip-2.1.2/generators/paperclip/USAGE b/vendor/gems/paperclip-2.1.2/generators/paperclip/USAGE deleted file mode 100644 index 27656ff..0000000 --- a/vendor/gems/paperclip-2.1.2/generators/paperclip/USAGE +++ /dev/null @@ -1,5 +0,0 @@ -Usage: - - script/generate attachment Class attachment1 attachment2 - -This will create a migration that will add the proper columns to your class's table. \ No newline at end of file diff --git a/vendor/gems/paperclip-2.1.2/generators/paperclip/paperclip_generator.rb b/vendor/gems/paperclip-2.1.2/generators/paperclip/paperclip_generator.rb deleted file mode 100644 index 3fe6481..0000000 --- a/vendor/gems/paperclip-2.1.2/generators/paperclip/paperclip_generator.rb +++ /dev/null @@ -1,27 +0,0 @@ -class PaperclipGenerator < Rails::Generator::NamedBase - attr_accessor :attachments, :migration_name - - def initialize(args, options = {}) - super - @class_name, @attachments = args[0], args[1..-1] - end - - def manifest - file_name = generate_file_name - @migration_name = file_name.camelize - record do |m| - m.migration_template "paperclip_migration.rb", - File.join('db', 'migrate'), - :migration_file_name => file_name - end - end - - private - - def generate_file_name - names = attachments.map{|a| a.underscore } - names = names[0..-2] + ["and", names[-1]] if names.length > 1 - "add_attachments_#{names.join("_")}_to_#{@class_name.underscore}" - end - -end \ No newline at end of file diff --git a/vendor/gems/paperclip-2.1.2/generators/paperclip/templates/paperclip_migration.rb b/vendor/gems/paperclip-2.1.2/generators/paperclip/templates/paperclip_migration.rb deleted file mode 100644 index 5afb11f..0000000 --- a/vendor/gems/paperclip-2.1.2/generators/paperclip/templates/paperclip_migration.rb +++ /dev/null @@ -1,17 +0,0 @@ -class <%= migration_name %> < ActiveRecord::Migration - def self.up -<% attachments.each do |attachment| -%> - add_column :<%= class_name.underscore.camelize.tableize %>, :<%= attachment %>_file_name, :string - add_column :<%= class_name.underscore.camelize.tableize %>, :<%= attachment %>_content_type, :string - add_column :<%= class_name.underscore.camelize.tableize %>, :<%= attachment %>_file_size, :integer -<% end -%> - end - - def self.down -<% attachments.each do |attachment| -%> - remove_column :<%= 
class_name.underscore.camelize.tableize %>, :<%= attachment %>_file_name - remove_column :<%= class_name.underscore.camelize.tableize %>, :<%= attachment %>_content_type - remove_column :<%= class_name.underscore.camelize.tableize %>, :<%= attachment %>_file_size -<% end -%> - end -end diff --git a/vendor/gems/paperclip-2.1.2/init.rb b/vendor/gems/paperclip-2.1.2/init.rb deleted file mode 100644 index 5a07dda..0000000 --- a/vendor/gems/paperclip-2.1.2/init.rb +++ /dev/null @@ -1 +0,0 @@ -require File.join(File.dirname(__FILE__), "lib", "paperclip") diff --git a/vendor/gems/paperclip-2.1.2/lib/paperclip.rb b/vendor/gems/paperclip-2.1.2/lib/paperclip.rb deleted file mode 100644 index 6140048..0000000 --- a/vendor/gems/paperclip-2.1.2/lib/paperclip.rb +++ /dev/null @@ -1,244 +0,0 @@ -# Paperclip allows file attachments that are stored in the filesystem. All graphical -# transformations are done using the Graphics/ImageMagick command line utilities and -# are stored in Tempfiles until the record is saved. Paperclip does not require a -# separate model for storing the attachment's information, instead adding a few simple -# columns to your table. -# -# Author:: Jon Yurek -# Copyright:: Copyright (c) 2008 thoughtbot, inc. -# License:: MIT License (http://www.opensource.org/licenses/mit-license.php) -# -# Paperclip defines an attachment as any file, though it makes special considerations -# for image files. You can declare that a model has an attached file with the -# +has_attached_file+ method: -# -# class User < ActiveRecord::Base -# has_attached_file :avatar, :styles => { :thumb => "100x100" } -# end -# -# user = User.new -# user.avatar = params[:user][:avatar] -# user.avatar.url -# # => "/users/avatars/4/original_me.jpg" -# user.avatar.url(:thumb) -# # => "/users/avatars/4/thumb_me.jpg" -# -# See the +has_attached_file+ documentation for more details. - -require 'tempfile' -require 'paperclip/upfile' -require 'paperclip/iostream' -require 'paperclip/geometry' -require 'paperclip/thumbnail' -require 'paperclip/storage' -require 'paperclip/attachment' - -# The base module that gets included in ActiveRecord::Base. -module Paperclip - - VERSION = "2.1.2" - - class << self - # Provides configurability to Paperclip. There are a number of options available, such as: - # * whiny_thumbnails: Will raise an error if Paperclip cannot process thumbnails of - # an uploaded image. Defaults to true. - # * image_magick_path: Defines the path at which to find the +convert+ and +identify+ - # programs if they are not visible to Rails the system's search path. Defaults to - # nil, which uses the first executable found in the search path. - def options - @options ||= { - :whiny_thumbnails => true, - :image_magick_path => nil - } - end - - def path_for_command command #:nodoc: - path = [options[:image_magick_path], command].compact - File.join(*path) - end - - def included base #:nodoc: - base.extend ClassMethods - end - end - - class PaperclipError < StandardError #:nodoc: - end - - class NotIdentifiedByImageMagickError < PaperclipError #:nodoc: - end - - module ClassMethods - # +has_attached_file+ gives the class it is called on an attribute that maps to a file. This - # is typically a file stored somewhere on the filesystem and has been uploaded by a user. - # The attribute returns a Paperclip::Attachment object which handles the management of - # that file. The intent is to make the attachment as much like a normal attribute. 
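The options hash and path_for_command above are the global switches this version of the gem exposes, and they can be adjusted from an initializer. A minimal sketch (the initializer path and the ImageMagick directory are assumptions, not part of this repository):

    # config/initializers/paperclip.rb  (hypothetical location)
    Paperclip.options[:image_magick_path] = "/usr/local/bin"  # directory holding convert/identify
    Paperclip.options[:whiny_thumbnails]  = false             # per the docs above: don't raise on failed thumbnails

    Paperclip.path_for_command("convert")  # => "/usr/local/bin/convert"
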
The - # thumbnails will be created when the new file is assigned, but they will *not* be saved - # until +save+ is called on the record. Likewise, if the attribute is set to +nil+ is - # called on it, the attachment will *not* be deleted until +save+ is called. See the - # Paperclip::Attachment documentation for more specifics. There are a number of options - # you can set to change the behavior of a Paperclip attachment: - # * +url+: The full URL of where the attachment is publically accessible. This can just - # as easily point to a directory served directly through Apache as it can to an action - # that can control permissions. You can specify the full domain and path, but usually - # just an absolute path is sufficient. The leading slash must be included manually for - # absolute paths. The default value is "/:class/:attachment/:id/:style_:filename". See - # Paperclip::Attachment#interpolate for more information on variable interpolaton. - # :url => "/:attachment/:id/:style_:basename:extension" - # :url => "http://some.other.host/stuff/:class/:id_:extension" - # * +default_url+: The URL that will be returned if there is no attachment assigned. - # This field is interpolated just as the url is. The default value is - # "/:class/:attachment/missing_:style.png" - # has_attached_file :avatar, :default_url => "/images/default_:style_avatar.png" - # User.new.avatar_url(:small) # => "/images/default_small_avatar.png" - # * +styles+: A hash of thumbnail styles and their geometries. You can find more about - # geometry strings at the ImageMagick website - # (http://www.imagemagick.org/script/command-line-options.php#resize). Paperclip - # also adds the "#" option (e.g. "50x50#"), which will resize the image to fit maximally - # inside the dimensions and then crop the rest off (weighted at the center). The - # default value is to generate no thumbnails. - # * +default_style+: The thumbnail style that will be used by default URLs. - # Defaults to +original+. - # has_attached_file :avatar, :styles => { :normal => "100x100#" }, - # :default_style => :normal - # user.avatar.url # => "/avatars/23/normal_me.png" - # * +path+: The location of the repository of attachments on disk. This can be coordinated - # with the value of the +url+ option to allow files to be saved into a place where Apache - # can serve them without hitting your app. Defaults to - # ":rails_root/public/:class/:attachment/:id/:style_:filename". - # By default this places the files in the app's public directory which can be served - # directly. If you are using capistrano for deployment, a good idea would be to - # make a symlink to the capistrano-created system directory from inside your app's - # public directory. - # See Paperclip::Attachment#interpolate for more information on variable interpolaton. - # :path => "/var/app/attachments/:class/:id/:style/:filename" - # * +whiny_thumbnails+: Will raise an error if Paperclip cannot process thumbnails of an - # uploaded image. This will ovrride the global setting for this attachment. - # Defaults to true. - def has_attached_file name, options = {} - include InstanceMethods - - write_inheritable_attribute(:attachment_definitions, {}) if attachment_definitions.nil? - attachment_definitions[name] = {:validations => []}.merge(options) - - after_save :save_attached_files - before_destroy :destroy_attached_files - - define_method name do |*args| - a = attachment_for(name) - (args.length > 0) ? 
a.to_s(args.first) : a - end - - define_method "#{name}=" do |file| - attachment_for(name).assign(file) - end - - define_method "#{name}?" do - ! attachment_for(name).original_filename.blank? - end - - validates_each(name) do |record, attr, value| - value.send(:flush_errors) - end - end - - # Places ActiveRecord-style validations on the size of the file assigned. The - # possible options are: - # * +in+: a Range of bytes (i.e. +1..1.megabyte+), - # * +less_than+: equivalent to :in => 0..options[:less_than] - # * +greater_than+: equivalent to :in => options[:greater_than]..Infinity - # * +message+: error message to display, use :min and :max as replacements - def validates_attachment_size name, options = {} - attachment_definitions[name][:validations] << lambda do |attachment, instance| - unless options[:greater_than].nil? - options[:in] = (options[:greater_than]..(1/0)) # 1/0 => Infinity - end - unless options[:less_than].nil? - options[:in] = (0..options[:less_than]) - end - unless attachment.original_filename.blank? || options[:in].include?(instance[:"#{name}_file_size"].to_i) - min = options[:in].first - max = options[:in].last - - if options[:message] - options[:message].gsub(/:min/, min.to_s).gsub(/:max/, max.to_s) - else - "file size is not between #{min} and #{max} bytes." - end - end - end - end - - # Adds errors if thumbnail creation fails. The same as specifying :whiny_thumbnails => true. - def validates_attachment_thumbnails name, options = {} - attachment_definitions[name][:whiny_thumbnails] = true - end - - # Places ActiveRecord-style validations on the presence of a file. - def validates_attachment_presence name, options = {} - attachment_definitions[name][:validations] << lambda do |attachment, instance| - if attachment.original_filename.blank? - options[:message] || "must be set." - end - end - end - - # Places ActiveRecord-style validations on the content type of the file assigned. The - # possible options are: - # * +content_type+: Allowed content types. Can be a single content type or an array. Allows all by default. - # * +message+: The message to display when the uploaded file has an invalid content type. - def validates_attachment_content_type name, options = {} - attachment_definitions[name][:validations] << lambda do |attachment, instance| - valid_types = [options[:content_type]].flatten - - unless attachment.original_filename.nil? - unless options[:content_type].blank? - content_type = instance[:"#{name}_content_type"] - unless valid_types.any?{|t| t === content_type } - options[:message] || ActiveRecord::Errors.default_error_messages[:inclusion] - end - end - end - end - end - - # Returns the attachment definitions defined by each call to has_attached_file. - def attachment_definitions - read_inheritable_attribute(:attachment_definitions) - end - - end - - module InstanceMethods #:nodoc: - def attachment_for name - @attachments ||= {} - @attachments[name] ||= Attachment.new(name, self, self.class.attachment_definitions[name]) - end - - def each_attachment - self.class.attachment_definitions.each do |name, definition| - yield(name, attachment_for(name)) - end - end - - def save_attached_files - each_attachment do |name, attachment| - attachment.send(:save) - end - end - - def destroy_attached_files - each_attachment do |name, attachment| - attachment.send(:queue_existing_for_delete) - attachment.send(:flush_deletes) - end - end - end - -end - -# Set it all up. 
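Taken together, the macro and the three validation helpers above are used like this (a sketch only; the model, attachment name, styles and limits are illustrative and not taken from this app):

    class User < ActiveRecord::Base
      has_attached_file :avatar,
        :styles        => { :thumb => "100x100#", :medium => "300x300>" },
        :default_style => :thumb,
        :default_url   => "/images/default_:style_avatar.png",
        :path          => ":rails_root/public/system/:attachment/:id/:style/:basename.:extension",
        :url           => "/system/:attachment/:id/:style/:basename.:extension"

      validates_attachment_presence     :avatar
      validates_attachment_size         :avatar, :less_than => 1.megabyte
      validates_attachment_content_type :avatar, :content_type => /^image\//,
                                                 :message => "must be an image"
    end
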
-if Object.const_defined?("ActiveRecord") - ActiveRecord::Base.send(:include, Paperclip) - File.send(:include, Paperclip::Upfile) -end diff --git a/vendor/gems/paperclip-2.1.2/lib/paperclip/attachment.rb b/vendor/gems/paperclip-2.1.2/lib/paperclip/attachment.rb deleted file mode 100644 index 855ae25..0000000 --- a/vendor/gems/paperclip-2.1.2/lib/paperclip/attachment.rb +++ /dev/null @@ -1,243 +0,0 @@ -module Paperclip - # The Attachment class manages the files for a given attachment. It saves when the model saves, - # deletes when the model is destroyed, and processes the file upon assignment. - class Attachment - - def self.default_options - @default_options ||= { - :url => "/:attachment/:id/:style/:basename.:extension", - :path => ":rails_root/public/:attachment/:id/:style/:basename.:extension", - :styles => {}, - :default_url => "/:attachment/:style/missing.png", - :default_style => :original, - :validations => [], - :storage => :filesystem - } - end - - attr_reader :name, :instance, :styles, :default_style - - # Creates an Attachment object. +name+ is the name of the attachment, +instance+ is the - # ActiveRecord object instance it's attached to, and +options+ is the same as the hash - # passed to +has_attached_file+. - def initialize name, instance, options = {} - @name = name - @instance = instance - - options = self.class.default_options.merge(options) - - @url = options[:url] - @path = options[:path] - @styles = options[:styles] - @default_url = options[:default_url] - @validations = options[:validations] - @default_style = options[:default_style] - @storage = options[:storage] - @whiny_thumbnails = options[:whiny_thumbnails] - @options = options - @queued_for_delete = [] - @queued_for_write = {} - @errors = [] - @validation_errors = nil - @dirty = false - - normalize_style_definition - initialize_storage - end - - # What gets called when you call instance.attachment = File. It clears errors, - # assigns attributes, processes the file, and runs validations. It also queues up - # the previous file for deletion, to be flushed away on #save of its host. - def assign uploaded_file - return nil unless valid_assignment?(uploaded_file) - - queue_existing_for_delete - @errors = [] - @validation_errors = nil - - return nil if uploaded_file.nil? - - @queued_for_write[:original] = uploaded_file.to_tempfile - @instance[:"#{@name}_file_name"] = uploaded_file.original_filename - @instance[:"#{@name}_content_type"] = uploaded_file.content_type - @instance[:"#{@name}_file_size"] = uploaded_file.size - - @dirty = true - - post_process - ensure - validate - end - - # Returns the public URL of the attachment, with a given style. Note that this - # does not necessarily need to point to a file that your web server can access - # and can point to an action in your app, if you need fine grained security. - # This is not recommended if you don't need the security, however, for - # performance reasons. - def url style = default_style - original_filename.nil? ? interpolate(@default_url, style) : interpolate(@url, style) - end - - # Returns the path of the attachment as defined by the :path optionn. If the - # file is stored in the filesystem the path refers to the path of the file on - # disk. If the file is stored in S3, the path is the "key" part of th URL, - # and the :bucket option refers to the S3 bucket. - def path style = nil #:nodoc: - interpolate(@path, style) - end - - # Alias to +url+ - def to_s style = nil - url(style) - end - - # Returns true if there are any errors on this attachment. 
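As the Attachment comments above describe, assignment only queues work; nothing touches the filesystem until the record is saved, and setting the attribute to nil only queues the old files for deletion. Roughly (model and fixture path are illustrative, and a :thumb style as in the sketch above is assumed):

    user = User.new
    user.avatar = File.new("test/fixtures/5k.png")  # assign: queue the upload, run validations
    user.avatar?                                    # => true, an original_filename is now present
    user.avatar.url(:thumb)                         # built from the :url pattern via interpolate
    user.save                                       # flush_writes / flush_deletes run here

    user.avatar = nil                               # queue the existing files for deletion...
    user.save                                       # ...which only happens on this save
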
- def valid? - errors.length == 0 - end - - # Returns an array containing the errors on this attachment. - def errors - @errors.compact.uniq - end - - # Returns true if there are changes that need to be saved. - def dirty? - @dirty - end - - # Saves the file, if there are no errors. If there are, it flushes them to - # the instance's errors and returns false, cancelling the save. - def save - if valid? - flush_deletes - flush_writes - @dirty = false - true - else - flush_errors - false - end - end - - # Returns the name of the file as originally assigned, and as lives in the - # _file_name attribute of the model. - def original_filename - instance[:"#{name}_file_name"] - end - - # A hash of procs that are run during the interpolation of a path or url. - # A variable of the format :name will be replaced with the return value of - # the proc named ":name". Each lambda takes the attachment and the current - # style as arguments. This hash can be added to with your own proc if - # necessary. - def self.interpolations - @interpolations ||= { - :rails_root => lambda{|attachment,style| RAILS_ROOT }, - :class => lambda do |attachment,style| - attachment.instance.class.name.underscore.pluralize - end, - :basename => lambda do |attachment,style| - attachment.original_filename.gsub(File.extname(attachment.original_filename), "") - end, - :extension => lambda do |attachment,style| - ((style = attachment.styles[style]) && style.last) || - File.extname(attachment.original_filename).gsub(/^\.+/, "") - end, - :id => lambda{|attachment,style| attachment.instance.id }, - :id_partition => lambda do |attachment, style| - ("%09d" % attachment.instance.id).scan(/\d{3}/).join("/") - end, - :attachment => lambda{|attachment,style| attachment.name.to_s.downcase.pluralize }, - :style => lambda{|attachment,style| style || attachment.default_style }, - } - end - - # This method really shouldn't be called that often. It's expected use is in the - # paperclip:refresh rake task and that's it. It will regenerate all thumbnails - # forcefully, by reobtaining the original file and going through the post-process - # again. - def reprocess! - new_original = Tempfile.new("paperclip-reprocess") - old_original = to_file(:original) - new_original.write( old_original.read ) - new_original.rewind - - @queued_for_write = { :original => new_original } - post_process - - old_original.close if old_original.respond_to?(:close) - end - - private - - def valid_assignment? file #:nodoc: - file.nil? || (file.respond_to?(:original_filename) && file.respond_to?(:content_type)) - end - - def validate #:nodoc: - unless @validation_errors - @validation_errors = @validations.collect do |v| - v.call(self, instance) - end.flatten.compact.uniq - @errors += @validation_errors - end - end - - def normalize_style_definition - @styles.each do |name, args| - dimensions, format = [args, nil].flatten[0..1] - format = nil if format == "" - @styles[name] = [dimensions, format] - end - end - - def initialize_storage - @storage_module = Paperclip::Storage.const_get(@storage.to_s.capitalize) - self.extend(@storage_module) - end - - def post_process #:nodoc: - return if @queued_for_write[:original].nil? 
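The interpolations table above is explicitly meant to be extended with your own procs; registering an extra token is one line. A sketch (the :rails_env token is hypothetical, not one of the built-ins listed above):

    Paperclip::Attachment.interpolations[:rails_env] = lambda do |attachment, style|
      RAILS_ENV   # every proc receives the attachment and the current style
    end

    # The new token can then appear in :url / :path patterns, e.g.
    #   :path => ":rails_root/public/system/:rails_env/:attachment/:id/:style/:basename.:extension"
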
- @styles.each do |name, args| - begin - dimensions, format = args - @queued_for_write[name] = Thumbnail.make(@queued_for_write[:original], - dimensions, - format, - @whiny_thumnails) - rescue PaperclipError => e - @errors << e.message if @whiny_thumbnails - end - end - end - - def interpolate pattern, style = default_style #:nodoc: - interpolations = self.class.interpolations.sort{|a,b| a.first.to_s <=> b.first.to_s } - interpolations.reverse.inject( pattern.dup ) do |result, interpolation| - tag, blk = interpolation - result.gsub(/:#{tag}/) do |match| - blk.call( self, style ) - end - end - end - - def queue_existing_for_delete #:nodoc: - return if original_filename.blank? - @queued_for_delete += [:original, *@styles.keys].uniq.map do |style| - path(style) if exists?(style) - end.compact - @instance[:"#{@name}_file_name"] = nil - @instance[:"#{@name}_content_type"] = nil - @instance[:"#{@name}_file_size"] = nil - end - - def flush_errors #:nodoc: - @errors.each do |error| - instance.errors.add(name, error) - end - end - - end -end - diff --git a/vendor/gems/paperclip-2.1.2/lib/paperclip/geometry.rb b/vendor/gems/paperclip-2.1.2/lib/paperclip/geometry.rb deleted file mode 100644 index 05b66f1..0000000 --- a/vendor/gems/paperclip-2.1.2/lib/paperclip/geometry.rb +++ /dev/null @@ -1,109 +0,0 @@ -module Paperclip - - # Defines the geometry of an image. - class Geometry - attr_accessor :height, :width, :modifier - - # Gives a Geometry representing the given height and width - def initialize width = nil, height = nil, modifier = nil - height = nil if height == "" - width = nil if width == "" - @height = (height || width).to_f - @width = (width || height).to_f - @modifier = modifier - end - - # Uses ImageMagick to determing the dimensions of a file, passed in as either a - # File or path. - def self.from_file file - file = file.path if file.respond_to? "path" - parse(`#{Paperclip.path_for_command('identify')} "#{file}"`) || - raise(NotIdentifiedByImageMagickError.new("#{file} is not recognized by the 'identify' command.")) - end - - # Parses a "WxH" formatted string, where W is the width and H is the height. - def self.parse string - if match = (string && string.match(/\b(\d*)x(\d*)\b([\>\<\#\@\%^!])?/)) - Geometry.new(*match[1,3]) - end - end - - # True if the dimensions represent a square - def square? - height == width - end - - # True if the dimensions represent a horizontal rectangle - def horizontal? - height < width - end - - # True if the dimensions represent a vertical rectangle - def vertical? - height > width - end - - # The aspect ratio of the dimensions. - def aspect - width / height - end - - # Returns the larger of the two dimensions - def larger - [height, width].max - end - - # Returns the smaller of the two dimensions - def smaller - [height, width].min - end - - # Returns the width and height in a format suitable to be passed to Geometry.parse - def to_s - "%dx%d%s" % [width, height, modifier] - end - - # Same as to_s - def inspect - to_s - end - - # Returns the scaling and cropping geometries (in string-based ImageMagick format) - # neccessary to transform this Geometry into the Geometry given. If crop is true, - # then it is assumed the destination Geometry will be the exact final resolution. - # In this case, the source Geometry is scaled so that an image containing the - # destination Geometry would be completely filled by the source image, and any - # overhanging image would be cropped. Useful for square thumbnail images. 
The cropping - # is weighted at the center of the Geometry. - def transformation_to dst, crop = false - ratio = Geometry.new( dst.width / self.width, dst.height / self.height ) - - if crop - scale_geometry, scale = scaling(dst, ratio) - crop_geometry = cropping(dst, ratio, scale) - else - scale_geometry = dst.to_s - end - - [ scale_geometry, crop_geometry ] - end - - private - - def scaling dst, ratio - if ratio.horizontal? || ratio.square? - [ "%dx" % dst.width, ratio.width ] - else - [ "x%d" % dst.height, ratio.height ] - end - end - - def cropping dst, ratio, scale - if ratio.horizontal? || ratio.square? - "%dx%d+%d+%d" % [ dst.width, dst.height, 0, (self.height * scale - dst.height) / 2 ] - else - "%dx%d+%d+%d" % [ dst.width, dst.height, (self.width * scale - dst.width) / 2, 0 ] - end - end - end -end diff --git a/vendor/gems/paperclip-2.1.2/lib/paperclip/iostream.rb b/vendor/gems/paperclip-2.1.2/lib/paperclip/iostream.rb deleted file mode 100644 index af54c9f..0000000 --- a/vendor/gems/paperclip-2.1.2/lib/paperclip/iostream.rb +++ /dev/null @@ -1,43 +0,0 @@ -# Provides method that can be included on File-type objects (IO, StringIO, Tempfile, etc) to allow stream copying -# and Tempfile conversion. -module IOStream - - # Returns a Tempfile containing the contents of the readable object. - def to_tempfile - tempfile = Tempfile.new("stream") - tempfile.binmode - self.stream_to(tempfile) - end - - # Copies one read-able object from one place to another in blocks, obviating the need to load - # the whole thing into memory. Defaults to 8k blocks. If this module is included in both - # both StringIO and Tempfile, then either can have its data copied anywhere else without typing - # worries or memory overhead worries. Returns a File if a String is passed in as the destination - # and returns the IO or Tempfile as passed in if one is sent as the destination. - def stream_to path_or_file, in_blocks_of = 8192 - dstio = case path_or_file - when String then File.new(path_or_file, "wb+") - when IO then path_or_file - when Tempfile then path_or_file - end - buffer = "" - self.rewind - while self.read(in_blocks_of, buffer) do - dstio.write(buffer) - end - dstio.rewind - dstio - end -end - -class IO - include IOStream -end - -%w( Tempfile StringIO ).each do |klass| - if Object.const_defined? klass - Object.const_get(klass).class_eval do - include IOStream - end - end -end diff --git a/vendor/gems/paperclip-2.1.2/lib/paperclip/storage.rb b/vendor/gems/paperclip-2.1.2/lib/paperclip/storage.rb deleted file mode 100644 index 8df93c0..0000000 --- a/vendor/gems/paperclip-2.1.2/lib/paperclip/storage.rb +++ /dev/null @@ -1,132 +0,0 @@ -module Paperclip - module Storage - - module Filesystem - def self.extended base - end - - def exists?(style = default_style) - if original_filename - File.exist?(path(style)) - else - false - end - end - - # Returns representation of the data of the file assigned to the given - # style, in the format most representative of the current storage. 
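To make the Geometry arithmetic above concrete, here is what transformation_to returns for a 400x300 original being fitted to a centre-cropped 100x100 style, with the values worked out from the scaling and cropping helpers above:

    src = Paperclip::Geometry.new(400, 300)         # width, height of the original
    dst = Paperclip::Geometry.parse("100x100")
    scale, crop = src.transformation_to(dst, true)  # crop = true, as for a "100x100#" style
    # scale == "x100"         : resize so the height hits 100, the width becomes ~133
    # crop  == "100x100+16+0" : chop the ~33px of horizontal overhang, weighted at the centre
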
- def to_file style = default_style - @queued_for_write[style] || (File.new(path(style)) if exists?(style)) - end - alias_method :to_io, :to_file - - def flush_writes #:nodoc: - @queued_for_write.each do |style, file| - FileUtils.mkdir_p(File.dirname(path(style))) - result = file.stream_to(path(style)) - file.close - result.close - end - @queued_for_write = {} - end - - def flush_deletes #:nodoc: - @queued_for_delete.each do |path| - begin - FileUtils.rm(path) if File.exist?(path) - rescue Errno::ENOENT => e - # ignore file-not-found, let everything else pass - end - end - @queued_for_delete = [] - end - end - - module S3 - def self.extended base - require 'right_aws' - base.instance_eval do - @bucket = @options[:bucket] - @s3_credentials = parse_credentials(@options[:s3_credentials]) - @s3_options = @options[:s3_options] || {} - @s3_permissions = @options[:s3_permissions] || 'public-read' - @url = ":s3_url" - end - base.class.interpolations[:s3_url] = lambda do |attachment, style| - "https://s3.amazonaws.com/#{attachment.bucket_name}/#{attachment.path(style).gsub(%r{^/}, "")}" - end - end - - def s3 - @s3 ||= RightAws::S3.new(@s3_credentials[:access_key_id], - @s3_credentials[:secret_access_key], - @s3_options) - end - - def s3_bucket - @s3_bucket ||= s3.bucket(@bucket, true, @s3_permissions) - end - - def bucket_name - @bucket - end - - def parse_credentials creds - creds = find_credentials(creds).stringify_keys - (creds[ENV['RAILS_ENV']] || creds).symbolize_keys - end - - def exists?(style = default_style) - s3_bucket.key(path(style)) ? true : false - end - - # Returns representation of the data of the file assigned to the given - # style, in the format most representative of the current storage. - def to_file style = default_style - @queued_for_write[style] || s3_bucket.key(path(style)) - end - alias_method :to_io, :to_file - - def flush_writes #:nodoc: - @queued_for_write.each do |style, file| - begin - key = s3_bucket.key(path(style)) - key.data = file - key.put(nil, @s3_permissions) - rescue RightAws::AwsError => e - raise - end - end - @queued_for_write = {} - end - - def flush_deletes #:nodoc: - @queued_for_delete.each do |path| - begin - if file = s3_bucket.key(path) - file.delete - end - rescue RightAws::AwsError - # Ignore this. - end - end - @queued_for_delete = [] - end - - def find_credentials creds - case creds - when File: - YAML.load_file(creds.path) - when String: - YAML.load_file(creds) - when Hash: - creds - else - raise ArgumentError, "Credentials are not a path, file, or hash." - end - end - private :find_credentials - - end - end -end diff --git a/vendor/gems/paperclip-2.1.2/lib/paperclip/thumbnail.rb b/vendor/gems/paperclip-2.1.2/lib/paperclip/thumbnail.rb deleted file mode 100644 index baa5971..0000000 --- a/vendor/gems/paperclip-2.1.2/lib/paperclip/thumbnail.rb +++ /dev/null @@ -1,80 +0,0 @@ -module Paperclip - # Handles thumbnailing images that are uploaded. - class Thumbnail - - attr_accessor :file, :current_geometry, :target_geometry, :format, :whiny_thumbnails - - # Creates a Thumbnail object set to work on the +file+ given. It - # will attempt to transform the image into one defined by +target_geometry+ - # which is a "WxH"-style string. +format+ will be inferred from the +file+ - # unless specified. Thumbnail creation will raise no errors unless - # +whiny_thumbnails+ is true (which it is, by default. 
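Switching an attachment onto the Storage::S3 module above is done through has_attached_file options; a sketch (the model, bucket name and credentials path are hypothetical, and the right_aws gem it requires must be installed):

    class Photo < ActiveRecord::Base
      has_attached_file :image,
        :storage        => :s3,
        :bucket         => "my-app-uploads",
        :s3_credentials => "#{RAILS_ROOT}/config/s3.yml",  # a path, File or Hash (see find_credentials)
        :s3_permissions => "public-read"                   # the default used above
      # when the S3 module is extended, :url is switched to the ":s3_url" interpolation
    end
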
- def initialize file, target_geometry, format = nil, whiny_thumbnails = true - @file = file - @crop = target_geometry[-1,1] == '#' - @target_geometry = Geometry.parse target_geometry - @current_geometry = Geometry.from_file file - @whiny_thumbnails = whiny_thumbnails - - @current_format = File.extname(@file.path) - @basename = File.basename(@file.path, @current_format) - - @format = format - end - - # Creates a thumbnail, as specified in +initialize+, +make+s it, and returns the - # resulting Tempfile. - def self.make file, dimensions, format = nil, whiny_thumbnails = true - new(file, dimensions, format, whiny_thumbnails).make - end - - # Returns true if the +target_geometry+ is meant to crop. - def crop? - @crop - end - - # Performs the conversion of the +file+ into a thumbnail. Returns the Tempfile - # that contains the new image. - def make - src = @file - dst = Tempfile.new([@basename, @format].compact.join(".")) - dst.binmode - - command = <<-end_command - #{ Paperclip.path_for_command('convert') } - "#{ File.expand_path(src.path) }" - #{ transformation_command } - "#{ File.expand_path(dst.path) }" - end_command - success = system(command.gsub(/\s+/, " ")) - - if success && $?.exitstatus != 0 && @whiny_thumbnails - raise PaperclipError, "There was an error processing this thumbnail" - end - - dst - end - - # Returns the command ImageMagick's +convert+ needs to transform the image - # into the thumbnail. - def transformation_command - scale, crop = @current_geometry.transformation_to(@target_geometry, crop?) - trans = "-scale \"#{scale}\"" - trans << " -crop \"#{crop}\" +repage" if crop - trans - end - end - - # Due to how ImageMagick handles its image format conversion and how Tempfile - # handles its naming scheme, it is necessary to override how Tempfile makes - # its names so as to allow for file extensions. Idea taken from the comments - # on this blog post: - # http://marsorange.com/archives/of-mogrify-ruby-tempfile-dynamic-class-definitions - class Tempfile < ::Tempfile - # Replaces Tempfile's +make_tmpname+ with one that honors file extensions. - def make_tmpname(basename, n) - extension = File.extname(basename) - sprintf("%s,%d,%d%s", File.basename(basename, extension), $$, n, extension) - end - end -end diff --git a/vendor/gems/paperclip-2.1.2/lib/paperclip/upfile.rb b/vendor/gems/paperclip-2.1.2/lib/paperclip/upfile.rb deleted file mode 100644 index 54747c7..0000000 --- a/vendor/gems/paperclip-2.1.2/lib/paperclip/upfile.rb +++ /dev/null @@ -1,33 +0,0 @@ -module Paperclip - # The Upfile module is a convenience module for adding uploaded-file-type methods - # to the +File+ class. Useful for testing. - # user.avatar = File.new("test/test_avatar.jpg") - module Upfile - - # Infer the MIME-type of the file from the extension. - def content_type - type = self.path.match(/\.(\w+)$/)[1] rescue "octet-stream" - case type - when "jpg", "png", "gif" then "image/#{type}" - when "txt" then "text/plain" - when "csv", "xml", "html", "htm", "css", "js" then "text/#{type}" - else "x-application/#{type}" - end - end - - # Returns the file's normal name. - def original_filename - File.basename(self.path) - end - - # Returns the size of the file. 
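Used on its own, the Thumbnail class above shells out to ImageMagick's convert and hands back a Tempfile; roughly (the fixture path is one of the test files listed in the gemspec, and ImageMagick must be on the PATH):

    file  = File.new("test/fixtures/5k.png")
    thumb = Paperclip::Thumbnail.make(file, "100x100#", "png")  # the trailing "#" turns cropping on
    thumb.path  # Tempfile holding the converted, centre-cropped 100x100 PNG
    # Per the docs above, failures raise Paperclip::PaperclipError unless
    # whiny_thumbnails is passed in as false.
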
- def size - File.size(self) - end - end - -end - -class File #:nodoc: - include Paperclip::Upfile -end diff --git a/vendor/gems/paperclip-2.1.2/tasks/paperclip_tasks.rake b/vendor/gems/paperclip-2.1.2/tasks/paperclip_tasks.rake deleted file mode 100644 index 64b7310..0000000 --- a/vendor/gems/paperclip-2.1.2/tasks/paperclip_tasks.rake +++ /dev/null @@ -1,38 +0,0 @@ -def obtain_class - class_name = ENV['CLASS'] || ENV['class'] - raise "Must specify CLASS" unless class_name - @klass = Object.const_get(class_name) -end - -def obtain_attachments - name = ENV['ATTACHMENT'] || ENV['attachment'] - raise "Class #{@klass.name} has no attachments specified" unless @klass.respond_to?(:attachment_definitions) - if !name.blank? && @klass.attachment_definitions.keys.include?(name) - [ name ] - else - @klass.attachment_definitions.keys - end -end - -namespace :paperclip do - desc "Regenerates thumbnails for a given CLASS (and optional ATTACHMENT)" - task :refresh => :environment do - klass = obtain_class - names = obtain_attachments - instances = klass.find(:all) - - puts "Regenerating thumbnails for #{instances.length} instances of #{klass.name}:" - instances.each do |instance| - names.each do |name| - result = if instance.send("#{ name }?") - instance.send(name).reprocess! - instance.send(name).save - else - true - end - print result ? "." : "x"; $stdout.flush - end - end - puts " Done." - end -end diff --git a/vendor/gems/paperclip-2.1.2/test/database.yml b/vendor/gems/paperclip-2.1.2/test/database.yml deleted file mode 100644 index 551dbb1..0000000 --- a/vendor/gems/paperclip-2.1.2/test/database.yml +++ /dev/null @@ -1,5 +0,0 @@ -test: - adapter: sqlite3 - #dbfile: paperclip.db - database: ":memory:" - diff --git a/vendor/gems/paperclip-2.1.2/test/debug.log b/vendor/gems/paperclip-2.1.2/test/debug.log deleted file mode 100644 index 78f6669..0000000 --- a/vendor/gems/paperclip-2.1.2/test/debug.log +++ /dev/null @@ -1,1745 +0,0 @@ -# Logfile created on Tue May 13 16:18:14 -0400 2008 by / - SQL (0.000212) select sqlite_version(*) - SQL (0.000000) SQLite3::SQLException: no such table: dummies: DROP TABLE dummies - SQL (0.000245) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000385) DROP TABLE dummies - SQL (0.000194) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000156) DROP TABLE dummies - SQL (0.000170) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000155) DROP TABLE dummies - SQL (0.000157) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000112) DROP TABLE dummies - SQL (0.000173) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, 
"avatar_file_size" integer DEFAULT NULL)  - SQL (0.000119) DROP TABLE dummies - SQL (0.000151) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000107) DROP TABLE dummies - SQL (0.000159) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000117) DROP TABLE dummies - SQL (0.000345) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000114) DROP TABLE dummies - SQL (0.000165) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000127) DROP TABLE dummies - SQL (0.000159) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000115) DROP TABLE dummies - SQL (0.000159) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000123) DROP TABLE dummies - SQL (0.000152) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000120) DROP TABLE dummies - SQL (0.000231) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000122) DROP TABLE dummies - SQL (0.000191) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000243) DROP TABLE dummies - SQL (0.000172) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000148) DROP TABLE dummies - SQL (0.000160) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000143) DROP TABLE dummies - SQL (0.000181) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, 
"avatar_file_size" integer DEFAULT NULL)  - SQL (0.000123) DROP TABLE dummies - SQL (0.000154) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000721) DROP TABLE dummies - SQL (0.000301) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000137) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Update (0.000225) UPDATE dummies SET "other" = NULL, "avatar_file_size" = 4456, "avatar_file_name" = '5k.png', "avatar_content_type" = 'image/png' WHERE "id" = 1 - Dummy Load (0.000247) SELECT * FROM dummies WHERE (dummies."id" = 1)  - SQL (0.000171) DROP TABLE dummies - SQL (0.000175) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000132) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Load (0.000222) SELECT * FROM dummies WHERE (dummies."id" = 1)  - SQL (0.000362) DROP TABLE dummies - SQL (0.000185) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000130) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - SQL (0.000518) DROP TABLE dummies - SQL (0.000186) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000127) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Load (0.000270) SELECT * FROM dummies WHERE (dummies."id" = 1)  - Dummy Update (0.000104) UPDATE dummies SET "other" = NULL, "avatar_file_size" = NULL, "avatar_file_name" = NULL, "avatar_content_type" = NULL WHERE "id" = 1 - SQL (0.000149) DROP TABLE dummies - SQL (0.000168) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000125) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Load (0.000828) SELECT * FROM dummies WHERE (dummies."id" = 1)  - Dummy Update (0.000506) UPDATE dummies SET "other" = NULL, "avatar_file_size" = 4456, "avatar_file_name" = '5k.png', "avatar_content_type" = 'image/png' WHERE "id" = 1 - Dummy Update (0.000104) UPDATE dummies SET "other" = NULL, "avatar_file_size" = NULL, "avatar_file_name" = NULL, "avatar_content_type" = NULL 
WHERE "id" = 1 - Dummy Load (0.000202) SELECT * FROM dummies WHERE (dummies."id" = 1)  - SQL (0.000186) DROP TABLE dummies - SQL (0.000175) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000136) DROP TABLE dummies - SQL (0.000171) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000135) DROP TABLE dummies - SQL (0.000162) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000165) DROP TABLE dummies - SQL (0.000170) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000216) DROP TABLE dummies - SQL (0.000189) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000133) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Load (0.000224) SELECT * FROM dummies WHERE (dummies."id" = 1)  - Dummy Update (0.000141) UPDATE dummies SET "other" = NULL, "avatar_file_size" = 4456, "avatar_file_name" = '5k.png', "avatar_content_type" = 'image/png' WHERE "id" = 1 - SQL (0.000379) DROP TABLE dummies - SQL (0.000182) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000136) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - SQL (0.000369) DROP TABLE dummies - SQL (0.000187) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000100) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.063239) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000143) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000143) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000125) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000128) 
[Test debug log elided: this portion of the diff is several hundred near-identical log entries of the form

  Dummy Create (0.000111)   INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL)

interleaved with repeated table-rebuild cycles of the form

  SQL (0.000107)   DROP TABLE dummies
  SQL (0.000147)   CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)

plus occasional Dummy Load (SELECT * FROM dummies), Dummy Update, and SubDummy Create statements, and a single SQLite3::SQLException ("no such table: dummies") raised by a DROP TABLE against a fresh database.]
dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000087) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000089) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000070) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000092) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000097) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000093) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000089) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000086) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000088) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000093) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000099) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000086) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000094) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000124) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000086) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000092) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000094) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000093) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000091) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000113) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000097) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000097) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000094) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000104) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000093) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000086) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000089) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000510) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000088) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000416) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000092) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000091) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000093) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000131) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000104) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000070) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000086) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000089) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000087) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000093) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000117) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000152) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000129) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000100) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000110) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000091) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000087) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000098) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000099) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000091) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000093) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000099) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000096) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000090) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000090) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000087) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000103) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000100) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000091) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000088) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000106) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000092) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000134) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000091) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000097) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000095) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000100) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000144) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000096) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000095) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000094) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000090) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000109) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000094) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000109) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000091) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000101) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000103) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000105) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.002186) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000105) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000102) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000103) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000109) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000107) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000102) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000118) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000107) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000087) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000089) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.001407) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000100) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000089) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000093) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000110) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000098) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000086) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000087) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000102) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.001523) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000106) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000116) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000098) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Load (0.023801) SELECT * FROM dummies  - SQL (0.000196) DROP TABLE dummies - SQL (0.000224) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000119) DROP TABLE dummies - SQL (0.000268) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000116) DROP TABLE dummies - SQL (0.000151) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000218) DROP TABLE dummies - SQL (0.000155) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000169) DROP TABLE dummies - SQL (0.000160) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" 
integer DEFAULT NULL)  - SQL (0.000113) DROP TABLE dummies - SQL (0.000149) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SubDummy Create (0.000092) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - SQL (0.000115) DROP TABLE dummies - SQL (0.000161) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000148) DROP TABLE dummies - SQL (0.000159) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000135) DROP TABLE dummies - SQL (0.000156) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000834) DROP TABLE dummies - SQL (0.000362) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000151) DROP TABLE dummies - SQL (0.000167) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000124) DROP TABLE dummies - SQL (0.000154) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000132) DROP TABLE dummies - SQL (0.000156) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000131) DROP TABLE dummies - SQL (0.000156) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000127) DROP TABLE dummies - SQL (0.000158) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000123) DROP TABLE dummies - SQL (0.000153) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000108) DROP TABLE dummies - SQL (0.000148) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY 
[... committed test debug log omitted: the remainder of this file's diff is hundreds of near-identical SQLite log entries, repeated "DROP TABLE dummies" / "CREATE TABLE dummies" pairs, Dummy and SubDummy INSERTs of '5k.png' (4456 bytes, image/png) with the occasional UPDATE and SELECT, and two SQLite3::SQLException "no such table: dummies" errors raised when DROP ran before the table existed ...]
PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000106) DROP TABLE dummies - SQL (0.000146) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000108) DROP TABLE dummies - SQL (0.000178) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000117) DROP TABLE dummies - SQL (0.000146) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000107) DROP TABLE dummies - SQL (0.000145) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000127) DROP TABLE dummies - SQL (0.000149) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000106) DROP TABLE dummies - SQL (0.000190) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000105) DROP TABLE dummies - SQL (0.000145) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000501) DROP TABLE dummies - SQL (0.000197) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000147) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Update (0.000141) UPDATE dummies SET "other" = NULL, "avatar_file_size" = 4456, "avatar_file_name" = '5k.png', "avatar_content_type" = 'image/png' WHERE "id" = 1 - Dummy Load (0.000212) SELECT * FROM dummies WHERE (dummies."id" = 1)  - SQL (0.000165) DROP TABLE dummies - SQL (0.000161) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000128) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Load (0.000196) SELECT * FROM dummies WHERE (dummies."id" = 1)  - SQL (0.000621) DROP TABLE dummies - SQL 
(0.000193) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000123) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - SQL (0.000365) DROP TABLE dummies - SQL (0.000196) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000130) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Load (0.000218) SELECT * FROM dummies WHERE (dummies."id" = 1)  - Dummy Update (0.000090) UPDATE dummies SET "other" = NULL, "avatar_file_size" = NULL, "avatar_file_name" = NULL, "avatar_content_type" = NULL WHERE "id" = 1 - SQL (0.000172) DROP TABLE dummies - SQL (0.000174) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000137) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Load (0.000998) SELECT * FROM dummies WHERE (dummies."id" = 1)  - Dummy Update (0.000142) UPDATE dummies SET "other" = NULL, "avatar_file_size" = 4456, "avatar_file_name" = '5k.png', "avatar_content_type" = 'image/png' WHERE "id" = 1 - Dummy Update (0.000088) UPDATE dummies SET "other" = NULL, "avatar_file_size" = NULL, "avatar_file_name" = NULL, "avatar_content_type" = NULL WHERE "id" = 1 - Dummy Load (0.000205) SELECT * FROM dummies WHERE (dummies."id" = 1)  - SQL (0.000165) DROP TABLE dummies - SQL (0.000174) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000105) DROP TABLE dummies - SQL (0.000193) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000112) DROP TABLE dummies - SQL (0.000151) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000104) DROP TABLE dummies - SQL (0.000194) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000111) DROP TABLE dummies - SQL (0.000167) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000154) INSERT INTO 
dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Load (0.000337) SELECT * FROM dummies WHERE (dummies."id" = 1)  - Dummy Update (0.000138) UPDATE dummies SET "other" = NULL, "avatar_file_size" = 4456, "avatar_file_name" = '5k.png', "avatar_content_type" = 'image/png' WHERE "id" = 1 - SQL (0.000485) DROP TABLE dummies - SQL (0.000302) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000158) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - SQL (0.000376) DROP TABLE dummies - SQL (0.000275) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000120) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000108) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000109) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000099) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000096) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000087) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000100) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000144) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000111) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000104) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create 
(0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000090) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000094) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000113) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000087) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000084) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000087) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000092) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000092) 
INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000088) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000091) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000082) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000086) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO 
dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000097) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000087) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000081) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000085) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000098) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000090) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000090) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000095) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000070) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000117) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000104) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000088) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000070) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000070) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000160) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000150) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000115) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000112) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000133) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000101) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000120) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000080) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000076) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000115) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000078) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000115) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000083) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000105) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000116) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000074) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies 
("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000098) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000070) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000117) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000075) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000077) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000079) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000073) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000070) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000072) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Create (0.000071) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - Dummy Load (0.019244) SELECT * FROM dummies  - SQL (0.000163) DROP TABLE dummies - SQL (0.000162) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, 
"avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000126) DROP TABLE dummies - SQL (0.000152) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000104) DROP TABLE dummies - SQL (0.000146) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000108) DROP TABLE dummies - SQL (0.000149) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000112) DROP TABLE dummies - SQL (0.000149) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000106) DROP TABLE dummies - SQL (0.000148) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SubDummy Create (0.000085) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - SQL (0.000112) DROP TABLE dummies - SQL (0.000162) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000110) DROP TABLE dummies - SQL (0.000155) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000113) DROP TABLE dummies - SQL (0.000150) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000111) DROP TABLE dummies - SQL (0.000148) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000117) DROP TABLE dummies - SQL (0.000152) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000112) DROP TABLE dummies - SQL (0.000148) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT 
NULL)  - SQL (0.000113) DROP TABLE dummies - SQL (0.000147) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000107) DROP TABLE dummies - SQL (0.000147) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000118) DROP TABLE dummies - SQL (0.000153) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000111) DROP TABLE dummies - SQL (0.000149) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000109) DROP TABLE dummies - SQL (0.000614) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000115) DROP TABLE dummies - SQL (0.000151) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000134) DROP TABLE dummies - SQL (0.000151) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000111) DROP TABLE dummies - SQL (0.000150) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000119) DROP TABLE dummies - SQL (0.000151) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000111) DROP TABLE dummies - SQL (0.000150) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000110) DROP TABLE dummies - SQL (0.000264) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - Dummy Create (0.000101) INSERT INTO dummies ("avatar_file_name", "avatar_file_size", "avatar_content_type", "other") VALUES('5k.png', 4456, 'image/png', NULL) - SQL (0.000140) DROP TABLE dummies - SQL (0.000264) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, 
"other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000195) DROP TABLE dummies - SQL (0.000172) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000172) DROP TABLE dummies - SQL (0.000169) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  - SQL (0.000168) DROP TABLE dummies - SQL (0.000169) CREATE TABLE dummies ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "other" varchar(255) DEFAULT NULL, "avatar_file_name" varchar(255) DEFAULT NULL, "avatar_content_type" varchar(255) DEFAULT NULL, "avatar_file_size" integer DEFAULT NULL)  diff --git a/vendor/gems/paperclip-2.1.2/test/fixtures/12k.png b/vendor/gems/paperclip-2.1.2/test/fixtures/12k.png deleted file mode 100644 index f819d45..0000000 Binary files a/vendor/gems/paperclip-2.1.2/test/fixtures/12k.png and /dev/null differ diff --git a/vendor/gems/paperclip-2.1.2/test/fixtures/50x50.png b/vendor/gems/paperclip-2.1.2/test/fixtures/50x50.png deleted file mode 100644 index 63f5646..0000000 Binary files a/vendor/gems/paperclip-2.1.2/test/fixtures/50x50.png and /dev/null differ diff --git a/vendor/gems/paperclip-2.1.2/test/fixtures/5k.png b/vendor/gems/paperclip-2.1.2/test/fixtures/5k.png deleted file mode 100644 index 75d9f04..0000000 Binary files a/vendor/gems/paperclip-2.1.2/test/fixtures/5k.png and /dev/null differ diff --git a/vendor/gems/paperclip-2.1.2/test/fixtures/bad.png b/vendor/gems/paperclip-2.1.2/test/fixtures/bad.png deleted file mode 100644 index 7ba4f07..0000000 --- a/vendor/gems/paperclip-2.1.2/test/fixtures/bad.png +++ /dev/null @@ -1 +0,0 @@ -This is not an image. 
diff --git a/vendor/gems/paperclip-2.1.2/test/fixtures/text.txt b/vendor/gems/paperclip-2.1.2/test/fixtures/text.txt deleted file mode 100644 index e69de29..0000000 diff --git a/vendor/gems/paperclip-2.1.2/test/helper.rb b/vendor/gems/paperclip-2.1.2/test/helper.rb deleted file mode 100644 index c24040b..0000000 --- a/vendor/gems/paperclip-2.1.2/test/helper.rb +++ /dev/null @@ -1,44 +0,0 @@ -require 'rubygems' -require 'test/unit' -require 'shoulda' -require 'mocha' -require 'tempfile' - -require 'active_record' -begin - require 'ruby-debug' -rescue LoadError - puts "ruby-debug not loaded" -end - -ROOT = File.join(File.dirname(__FILE__), '..') -RAILS_ROOT = ROOT - -$LOAD_PATH << File.join(ROOT, 'lib') -$LOAD_PATH << File.join(ROOT, 'lib', 'paperclip') - -require File.join(ROOT, 'lib', 'paperclip.rb') - -ENV['RAILS_ENV'] ||= 'test' - -FIXTURES_DIR = File.join(File.dirname(__FILE__), "fixtures") -config = YAML::load(IO.read(File.dirname(__FILE__) + '/database.yml')) -ActiveRecord::Base.logger = Logger.new(File.dirname(__FILE__) + "/debug.log") -ActiveRecord::Base.establish_connection(config[ENV['RAILS_ENV'] || 'test']) - -def rebuild_model options = {} - ActiveRecord::Base.connection.create_table :dummies, :force => true do |table| - table.column :other, :string - table.column :avatar_file_name, :string - table.column :avatar_content_type, :string - table.column :avatar_file_size, :integer - end - - ActiveRecord::Base.send(:include, Paperclip) - Object.send(:remove_const, "Dummy") rescue nil - Object.const_set("Dummy", Class.new(ActiveRecord::Base)) - Dummy.class_eval do - include Paperclip - has_attached_file :avatar, options - end -end diff --git a/vendor/gems/paperclip-2.1.2/test/s3.yml b/vendor/gems/paperclip-2.1.2/test/s3.yml deleted file mode 100644 index 7d68578..0000000 --- a/vendor/gems/paperclip-2.1.2/test/s3.yml +++ /dev/null @@ -1,2 +0,0 @@ -access_key_id: 11196XQ35WGRKEVQN782 -secret_access_key: TczVRadcH5sealaWUMpbY/Hnex90iCmJ3LDM5w74 diff --git a/vendor/gems/paperclip-2.1.2/test/test_attachment.rb b/vendor/gems/paperclip-2.1.2/test/test_attachment.rb deleted file mode 100644 index 3bc2367..0000000 --- a/vendor/gems/paperclip-2.1.2/test/test_attachment.rb +++ /dev/null @@ -1,230 +0,0 @@ -require 'test/helper' - -class Dummy - # This is a dummy class -end - -class AttachmentTest < Test::Unit::TestCase - context "Attachment default_options" do - setup do - rebuild_model - @old_default_options = Paperclip::Attachment.default_options.dup - @new_default_options = @old_default_options.merge({ - :path => "argle/bargle", - :url => "fooferon", - :default_url => "not here.png" - }) - end - - teardown do - Paperclip::Attachment.default_options.merge! @old_default_options - end - - should "be overrideable" do - Paperclip::Attachment.default_options.merge!(@new_default_options) - @new_default_options.keys.each do |key| - assert_equal @new_default_options[key], - Paperclip::Attachment.default_options[key] - end - end - - context "without an Attachment" do - setup do - @dummy = Dummy.new - end - - should "return false when asked exists?" do - assert !@dummy.avatar.exists? 
- end - end - - context "on an Attachment" do - setup do - @dummy = Dummy.new - @attachment = @dummy.avatar - end - - Paperclip::Attachment.default_options.keys.each do |key| - should "be the default_options for #{key}" do - assert_equal @old_default_options[key], - @attachment.instance_variable_get("@#{key}"), - key - end - end - - context "when redefined" do - setup do - Paperclip::Attachment.default_options.merge!(@new_default_options) - @dummy = Dummy.new - @attachment = @dummy.avatar - end - - Paperclip::Attachment.default_options.keys.each do |key| - should "be the new default_options for #{key}" do - assert_equal @new_default_options[key], - @attachment.instance_variable_get("@#{key}"), - key - end - end - end - end - end - - context "An attachment with similarly named interpolations" do - setup do - rebuild_model :path => ":id.omg/:id-bbq/:idwhat/:id_partition.wtf" - @dummy = Dummy.new - @dummy.stubs(:id).returns(1024) - @file = File.new(File.join(File.dirname(__FILE__), - "fixtures", - "5k.png")) - @dummy.avatar = @file - end - - should "make sure that they are interpolated correctly" do - assert_equal "1024.omg/1024-bbq/1024what/000/001/024.wtf", @dummy.avatar.path - end - end - - context "An attachment" do - setup do - Paperclip::Attachment.default_options.merge!({ - :path => ":rails_root/tmp/:attachment/:class/:style/:id/:basename.:extension" - }) - FileUtils.rm_rf("tmp") - @instance = stub - @instance.stubs(:id).returns(41) - @instance.stubs(:class).returns(Dummy) - @instance.stubs(:[]).with(:test_file_name).returns(nil) - @instance.stubs(:[]).with(:test_content_type).returns(nil) - @instance.stubs(:[]).with(:test_file_size).returns(nil) - @attachment = Paperclip::Attachment.new(:test, - @instance) - @file = File.new(File.join(File.dirname(__FILE__), - "fixtures", - "5k.png")) - end - - should "return its default_url when no file assigned" do - assert @attachment.to_file.nil? 
- assert_equal "/tests/original/missing.png", @attachment.url - assert_equal "/tests/blah/missing.png", @attachment.url(:blah) - end - - context "with a file assigned in the database" do - setup do - @instance.stubs(:[]).with(:test_file_name).returns("5k.png") - @instance.stubs(:[]).with(:test_content_type).returns("image/png") - @instance.stubs(:[]).with(:test_file_size).returns(12345) - end - - should "return a correct url even if the file does not exist" do - assert_nil @attachment.to_file - assert_equal "/tests/41/blah/5k.png", @attachment.url(:blah) - end - - should "return the proper path when filename has a single .'s" do - assert_equal "./test/../tmp/tests/dummies/original/41/5k.png", @attachment.path - end - - should "return the proper path when filename has multiple .'s" do - @instance.stubs(:[]).with(:test_file_name).returns("5k.old.png") - assert_equal "./test/../tmp/tests/dummies/original/41/5k.old.png", @attachment.path - end - - context "when expecting three styles" do - setup do - styles = {:styles => { :large => ["400x400", :png], - :medium => ["100x100", :gif], - :small => ["32x32#", :jpg]}} - @attachment = Paperclip::Attachment.new(:test, - @instance, - styles) - end - - context "and assigned a file" do - setup do - @instance.expects(:[]=).with(:test_file_name, - File.basename(@file.path)) - @instance.expects(:[]=).with(:test_content_type, "image/png") - @instance.expects(:[]=).with(:test_file_size, @file.size) - @instance.expects(:[]=).with(:test_file_name, nil) - @instance.expects(:[]=).with(:test_content_type, nil) - @instance.expects(:[]=).with(:test_file_size, nil) - @attachment.assign(@file) - end - - should "be dirty" do - assert @attachment.dirty? - end - - context "and saved" do - setup do - @attachment.save - end - - should "return the real url" do - assert @attachment.to_file - assert_equal "/tests/41/original/5k.png", @attachment.url - assert_equal "/tests/41/small/5k.jpg", @attachment.url(:small) - end - - should "commit the files to disk" do - [:large, :medium, :small].each do |style| - io = @attachment.to_io(style) - assert File.exists?(io) - assert ! io.is_a?(::Tempfile) - end - end - - should "save the files as the right formats and sizes" do - [[:large, 400, 61, "PNG"], - [:medium, 100, 15, "GIF"], - [:small, 32, 32, "JPEG"]].each do |style| - cmd = "identify -format '%w %h %b %m' " + - "#{@attachment.to_io(style.first).path}" - out = `#{cmd}` - width, height, size, format = out.split(" ") - assert_equal style[1].to_s, width.to_s - assert_equal style[2].to_s, height.to_s - assert_equal style[3].to_s, format.to_s - end - end - - should "still have its #file attribute not be nil" do - assert ! @attachment.to_file.nil? - end - - context "and deleted" do - setup do - @existing_names = @attachment.styles.keys.collect do |style| - @attachment.path(style) - end - @instance.expects(:[]=).with(:test_file_name, nil) - @instance.expects(:[]=).with(:test_content_type, nil) - @instance.expects(:[]=).with(:test_file_size, nil) - @attachment.assign nil - @attachment.save - end - - should "delete the files" do - @existing_names.each{|f| assert ! 
File.exists?(f) } - end - end - end - end - end - - end - - context "when trying a nonexistant storage type" do - setup do - rebuild_model :storage => :not_here - end - - should "not be able to find the module" do - assert_raise(NameError){ Dummy.new.avatar } - end - end - end -end diff --git a/vendor/gems/paperclip-2.1.2/test/test_geometry.rb b/vendor/gems/paperclip-2.1.2/test/test_geometry.rb deleted file mode 100644 index c803e12..0000000 --- a/vendor/gems/paperclip-2.1.2/test/test_geometry.rb +++ /dev/null @@ -1,142 +0,0 @@ -require 'rubygems' -require 'test/unit' -require 'shoulda' - -require File.join(File.dirname(__FILE__), '..', 'lib', 'paperclip', 'geometry.rb') - -class GeometryTest < Test::Unit::TestCase - context "Paperclip::Geometry" do - should "correctly report its given dimensions" do - assert @geo = Paperclip::Geometry.new(1024, 768) - assert_equal 1024, @geo.width - assert_equal 768, @geo.height - end - - should "correctly create a square if the height dimension is missing" do - assert @geo = Paperclip::Geometry.new(1024) - assert_equal 1024, @geo.width - assert_equal 1024, @geo.height - end - - should "correctly create a square if the width dimension is missing" do - assert @geo = Paperclip::Geometry.new(nil, 768) - assert_equal 768, @geo.width - assert_equal 768, @geo.height - end - - should "be generated from a WxH-formatted string" do - assert @geo = Paperclip::Geometry.parse("800x600") - assert_equal 800, @geo.width - assert_equal 600, @geo.height - end - - should "be generated from a xH-formatted string" do - assert @geo = Paperclip::Geometry.parse("x600") - assert_equal 600, @geo.width - assert_equal 600, @geo.height - end - - should "be generated from a Wx-formatted string" do - assert @geo = Paperclip::Geometry.parse("800x") - assert_equal 800, @geo.width - assert_equal 800, @geo.height - end - - should "ensure the modifier is nil if only one dimension present" do - assert @geo = Paperclip::Geometry.parse("123x") - assert_nil @geo.modifier - end - - should "ensure the modifier is nil if not present" do - assert @geo = Paperclip::Geometry.parse("123x456") - assert_nil @geo.modifier - end - - ['>', '<', '#', '@', '%', '^', '!'].each do |mod| - should "ensure the modifier #{mod} is preserved" do - assert @geo = Paperclip::Geometry.parse("123x456#{mod}") - assert_equal mod, @geo.modifier - end - end - - should "make sure the modifier gets passed during transformation_to" do - assert @src = Paperclip::Geometry.parse("123x456") - assert @dst = Paperclip::Geometry.parse("123x456>") - assert_equal "123x456>", @src.transformation_to(@dst).to_s - end - - should "be generated from a file" do - file = File.join(File.dirname(__FILE__), "fixtures", "5k.png") - file = File.new(file) - assert_nothing_raised{ @geo = Paperclip::Geometry.from_file(file) } - assert @geo.height > 0 - assert @geo.width > 0 - end - - should "be generated from a file path" do - file = File.join(File.dirname(__FILE__), "fixtures", "5k.png") - assert_nothing_raised{ @geo = Paperclip::Geometry.from_file(file) } - assert @geo.height > 0 - assert @geo.width > 0 - end - - should "not generate from a bad file" do - file = "/home/This File Does Not Exist.omg" - assert_raise(Paperclip::NotIdentifiedByImageMagickError){ @geo = Paperclip::Geometry.from_file(file) } - end - - [['vertical', 900, 1440, true, false, false, 1440, 900, 0.625], - ['horizontal', 1024, 768, false, true, false, 1024, 768, 1.3333], - ['square', 100, 100, false, false, true, 100, 100, 1]].each do |args| - context "performing calculations on a 
#{args[0]} viewport" do - setup do - @geo = Paperclip::Geometry.new(args[1], args[2]) - end - - should "#{args[3] ? "" : "not"} be vertical" do - assert_equal args[3], @geo.vertical? - end - - should "#{args[4] ? "" : "not"} be horizontal" do - assert_equal args[4], @geo.horizontal? - end - - should "#{args[5] ? "" : "not"} be square" do - assert_equal args[5], @geo.square? - end - - should "report that #{args[6]} is the larger dimension" do - assert_equal args[6], @geo.larger - end - - should "report that #{args[7]} is the smaller dimension" do - assert_equal args[7], @geo.smaller - end - - should "have an aspect ratio of #{args[8]}" do - assert_in_delta args[8], @geo.aspect, 0.0001 - end - end - end - - [[ [1000, 100], [64, 64], "x64", "64x64+288+0" ], - [ [100, 1000], [50, 950], "x950", "50x950+22+0" ], - [ [100, 1000], [50, 25], "50x", "50x25+0+237" ]]. each do |args| - context "of #{args[0].inspect} and given a Geometry #{args[1].inspect} and sent transform_to" do - setup do - @geo = Paperclip::Geometry.new(*args[0]) - @dst = Paperclip::Geometry.new(*args[1]) - @scale, @crop = @geo.transformation_to @dst, true - end - - should "be able to return the correct scaling transformation geometry #{args[2]}" do - assert_equal args[2], @scale - end - - should "be able to return the correct crop transformation geometry #{args[3]}" do - assert_equal args[3], @crop - end - end - end - end -end diff --git a/vendor/gems/paperclip-2.1.2/test/test_integration.rb b/vendor/gems/paperclip-2.1.2/test/test_integration.rb deleted file mode 100644 index 8531c18..0000000 --- a/vendor/gems/paperclip-2.1.2/test/test_integration.rb +++ /dev/null @@ -1,331 +0,0 @@ -require 'test/helper.rb' - -class IntegrationTest < Test::Unit::TestCase - context "Many models at once" do - setup do - rebuild_model - @file = File.new(File.join(FIXTURES_DIR, "5k.png")) - 300.times do |i| - Dummy.create! :avatar => @file - end - end - - should "not exceed the open file limit" do - assert_nothing_raised do - dummies = Dummy.find(:all) - dummies.each { |dummy| dummy.avatar } - end - end - end - - context "An attachment" do - setup do - rebuild_model :styles => { :thumb => "50x50#" } - @dummy = Dummy.new - @file = File.new(File.join(File.dirname(__FILE__), - "fixtures", - "5k.png")) - @dummy.avatar = @file - assert @dummy.save - end - - should "create its thumbnails properly" do - assert_match /\b50x50\b/, `identify '#{@dummy.avatar.path(:thumb)}'` - end - - context "redefining its attachment styles" do - setup do - Dummy.class_eval do - has_attached_file :avatar, :styles => { :thumb => "150x25#" } - end - @d2 = Dummy.find(@dummy.id) - @d2.avatar.reprocess! - @d2.save - end - - should "create its thumbnails properly" do - assert_match /\b150x25\b/, `identify '#{@dummy.avatar.path(:thumb)}'` - end - end - end - - context "A model with no attachment validation" do - setup do - rebuild_model :styles => { :large => "300x300>", - :medium => "100x100", - :thumb => ["32x32#", :gif] }, - :default_style => :medium, - :url => "/:attachment/:class/:style/:id/:basename.:extension", - :path => ":rails_root/tmp/:attachment/:class/:style/:id/:basename.:extension" - @dummy = Dummy.new - end - - should "have its definition return false when asked about whiny_thumbnails" do - assert ! 
Dummy.attachment_definitions[:avatar][:whiny_thumbnails] - end - - context "when validates_attachment_thumbnails is called" do - setup do - Dummy.validates_attachment_thumbnails :avatar - end - - should "have its definition return true when asked about whiny_thumbnails" do - assert_equal true, Dummy.attachment_definitions[:avatar][:whiny_thumbnails] - end - end - - context "redefined to have attachment validations" do - setup do - rebuild_model :styles => { :large => "300x300>", - :medium => "100x100", - :thumb => ["32x32#", :gif] }, - :whiny_thumbnails => true, - :default_style => :medium, - :url => "/:attachment/:class/:style/:id/:basename.:extension", - :path => ":rails_root/tmp/:attachment/:class/:style/:id/:basename.:extension" - end - - should "have its definition return true when asked about whiny_thumbnails" do - assert_equal true, Dummy.attachment_definitions[:avatar][:whiny_thumbnails] - end - end - end - - context "A model with a filesystem attachment" do - setup do - rebuild_model :styles => { :large => "300x300>", - :medium => "100x100", - :thumb => ["32x32#", :gif] }, - :whiny_thumbnails => true, - :default_style => :medium, - :url => "/:attachment/:class/:style/:id/:basename.:extension", - :path => ":rails_root/tmp/:attachment/:class/:style/:id/:basename.:extension" - @dummy = Dummy.new - @file = File.new(File.join(FIXTURES_DIR, "5k.png")) - @bad_file = File.new(File.join(FIXTURES_DIR, "bad.png")) - - assert @dummy.avatar = @file - assert @dummy.valid? - assert @dummy.save - end - - should "write and delete its files" do - [["434x66", :original], - ["300x46", :large], - ["100x15", :medium], - ["32x32", :thumb]].each do |geo, style| - cmd = %Q[identify -format "%wx%h" #{@dummy.avatar.to_file(style).path}] - assert_equal geo, `#{cmd}`.chomp, cmd - end - - saved_paths = [:thumb, :medium, :large, :original].collect{|s| @dummy.avatar.to_file(s).path } - - @d2 = Dummy.find(@dummy.id) - assert_equal "100x15", `identify -format "%wx%h" #{@d2.avatar.to_file.path}`.chomp - assert_equal "434x66", `identify -format "%wx%h" #{@d2.avatar.to_file(:original).path}`.chomp - assert_equal "300x46", `identify -format "%wx%h" #{@d2.avatar.to_file(:large).path}`.chomp - assert_equal "100x15", `identify -format "%wx%h" #{@d2.avatar.to_file(:medium).path}`.chomp - assert_equal "32x32", `identify -format "%wx%h" #{@d2.avatar.to_file(:thumb).path}`.chomp - - @dummy.avatar = "not a valid file but not nil" - assert_equal File.basename(@file.path), @dummy.avatar_file_name - assert @dummy.valid? - assert @dummy.save - - saved_paths.each do |p| - assert File.exists?(p) - end - - @dummy.avatar = nil - assert_nil @dummy.avatar_file_name - assert @dummy.valid? - assert @dummy.save - - saved_paths.each do |p| - assert ! File.exists?(p) - end - - @d2 = Dummy.find(@dummy.id) - assert_nil @d2.avatar_file_name - end - - should "work exactly the same when new as when reloaded" do - @d2 = Dummy.find(@dummy.id) - - assert_equal @dummy.avatar_file_name, @d2.avatar_file_name - [:thumb, :medium, :large, :original].each do |style| - assert_equal @dummy.avatar.to_file(style).path, @d2.avatar.to_file(style).path - end - - saved_paths = [:thumb, :medium, :large, :original].collect{|s| @dummy.avatar.to_file(s).path } - - @d2.avatar = nil - assert @d2.save - - saved_paths.each do |p| - assert ! File.exists?(p) - end - end - - should "know the difference between good files, bad files, not files, and nil" do - expected = @dummy.avatar.to_file - @dummy.avatar = "not a file" - assert @dummy.valid? 
- assert_equal expected.path, @dummy.avatar.to_file.path - - @dummy.avatar = @bad_file - assert ! @dummy.valid? - @dummy.avatar = nil - assert @dummy.valid? - end - - should "know the difference between good files, bad files, not files, and nil when validating" do - Dummy.validates_attachment_presence :avatar - @d2 = Dummy.find(@dummy.id) - @d2.avatar = @file - assert @d2.valid? - @d2.avatar = @bad_file - assert ! @d2.valid? - @d2.avatar = nil - assert ! @d2.valid? - end - - should "be able to reload without saving an not have the file disappear" do - @dummy.avatar = @file - assert @dummy.save - @dummy.avatar = nil - assert_nil @dummy.avatar_file_name - @dummy.reload - assert_equal "5k.png", @dummy.avatar_file_name - end - end - - if ENV['S3_TEST_BUCKET'] - def s3_files_for attachment - [:thumb, :medium, :large, :original].inject({}) do |files, style| - data = `curl '#{attachment.url(style)}' 2>/dev/null`.chomp - t = Tempfile.new("paperclip-test") - t.write(data) - t.rewind - files[style] = t - files - end - end - - context "A model with an S3 attachment" do - setup do - rebuild_model :styles => { :large => "300x300>", - :medium => "100x100", - :thumb => ["32x32#", :gif] }, - :storage => :s3, - :whiny_thumbnails => true, - # :s3_options => {:logger => Logger.new(StringIO.new)}, - :s3_credentials => File.new(File.join(File.dirname(__FILE__), "s3.yml")), - :default_style => :medium, - :bucket => ENV['S3_TEST_BUCKET'], - :path => ":class/:attachment/:id/:style/:basename.:extension" - @dummy = Dummy.new - @file = File.new(File.join(FIXTURES_DIR, "5k.png")) - @bad_file = File.new(File.join(FIXTURES_DIR, "bad.png")) - - assert @dummy.avatar = @file - assert @dummy.valid? - assert @dummy.save - - @files_on_s3 = s3_files_for @dummy.avatar - end - - should "write and delete its files" do - [["434x66", :original], - ["300x46", :large], - ["100x15", :medium], - ["32x32", :thumb]].each do |geo, style| - cmd = %Q[identify -format "%wx%h" #{@files_on_s3[style].path}] - assert_equal geo, `#{cmd}`.chomp, cmd - end - - @d2 = Dummy.find(@dummy.id) - @d2_files = s3_files_for @d2.avatar - [["434x66", :original], - ["300x46", :large], - ["100x15", :medium], - ["32x32", :thumb]].each do |geo, style| - cmd = %Q[identify -format "%wx%h" #{@d2_files[style].path}] - assert_equal geo, `#{cmd}`.chomp, cmd - end - - @dummy.avatar = "not a valid file but not nil" - assert_equal File.basename(@file.path), @dummy.avatar_file_name - assert @dummy.valid? - assert @dummy.save - - saved_keys = [:thumb, :medium, :large, :original].collect{|s| @dummy.avatar.to_file(s) } - - saved_keys.each do |key| - assert key.exists? - end - - @dummy.avatar = nil - assert_nil @dummy.avatar_file_name - assert @dummy.valid? - assert @dummy.save - - saved_keys.each do |key| - assert ! key.exists? - end - - @d2 = Dummy.find(@dummy.id) - assert_nil @d2.avatar_file_name - end - - should "work exactly the same when new as when reloaded" do - @d2 = Dummy.find(@dummy.id) - - assert_equal @dummy.avatar_file_name, @d2.avatar_file_name - [:thumb, :medium, :large, :original].each do |style| - assert_equal @dummy.avatar.to_file(style).to_s, @d2.avatar.to_file(style).to_s - end - - saved_keys = [:thumb, :medium, :large, :original].collect{|s| @dummy.avatar.to_file(s) } - - @d2.avatar = nil - assert @d2.save - - saved_keys.each do |key| - assert ! key.exists? - end - end - - should "know the difference between good files, bad files, not files, and nil" do - expected = @dummy.avatar.to_file - @dummy.avatar = "not a file" - assert @dummy.valid? 
- assert_equal expected.full_name, @dummy.avatar.to_file.full_name - - @dummy.avatar = @bad_file - assert ! @dummy.valid? - @dummy.avatar = nil - assert @dummy.valid? - - Dummy.validates_attachment_presence :avatar - @d2 = Dummy.find(@dummy.id) - @d2.avatar = @file - assert @d2.valid? - @d2.avatar = @bad_file - assert ! @d2.valid? - @d2.avatar = nil - assert ! @d2.valid? - end - - should "be able to reload without saving an not have the file disappear" do - @dummy.avatar = @file - assert @dummy.save - @dummy.avatar = nil - assert_nil @dummy.avatar_file_name - @dummy.reload - assert_equal "5k.png", @dummy.avatar_file_name - end - end - end -end - diff --git a/vendor/gems/paperclip-2.1.2/test/test_iostream.rb b/vendor/gems/paperclip-2.1.2/test/test_iostream.rb deleted file mode 100644 index 39332fd..0000000 --- a/vendor/gems/paperclip-2.1.2/test/test_iostream.rb +++ /dev/null @@ -1,60 +0,0 @@ -require 'rubygems' -require 'test/unit' -require 'stringio' -require 'tempfile' -require 'shoulda' - -require File.join(File.dirname(__FILE__), '..', 'lib', 'paperclip', 'iostream.rb') - -class IOStreamTest < Test::Unit::TestCase - context "IOStream" do - should "be included in IO, File, Tempfile, and StringIO" do - [IO, File, Tempfile, StringIO].each do |klass| - assert klass.included_modules.include?(IOStream), "Not in #{klass}" - end - end - end - - context "A file" do - setup do - @file = File.new(File.join(File.dirname(__FILE__), "fixtures", "5k.png")) - end - - context "that is sent #stream_to" do - - [["/tmp/iostream.string.test", File], - [Tempfile.new('iostream.test'), Tempfile]].each do |args| - - context "and given a #{args[0].class.to_s}" do - setup do - assert @result = @file.stream_to(args[0]) - end - - should "return a #{args[1].to_s}" do - assert @result.is_a?(args[1]) - end - - should "contain the same data as the original file" do - @file.rewind; @result.rewind - assert_equal @file.read, @result.read - end - end - end - end - - context "that is sent #to_tempfile" do - setup do - assert @tempfile = @file.to_tempfile - end - - should "convert it to a Tempfile" do - assert @tempfile.is_a?(Tempfile) - end - - should "have the Tempfile contain the same data as the file" do - @file.rewind; @tempfile.rewind - assert_equal @file.read, @tempfile.read - end - end - end -end diff --git a/vendor/gems/paperclip-2.1.2/test/test_paperclip.rb b/vendor/gems/paperclip-2.1.2/test/test_paperclip.rb deleted file mode 100644 index b909879..0000000 --- a/vendor/gems/paperclip-2.1.2/test/test_paperclip.rb +++ /dev/null @@ -1,123 +0,0 @@ -require 'test/helper.rb' - -class PaperclipTest < Test::Unit::TestCase - context "An ActiveRecord model with an 'avatar' attachment" do - setup do - rebuild_model :path => "tmp/:class/omg/:style.:extension" - @file = File.new(File.join(FIXTURES_DIR, "5k.png")) - end - - context "that is attr_protected" do - setup do - Dummy.class_eval do - attr_protected :avatar - end - @dummy = Dummy.new - end - - should "not assign the avatar on mass-set" do - @dummy.logger.expects(:debug) - - @dummy.attributes = { :other => "I'm set!", - :avatar => @file } - - assert_equal "I'm set!", @dummy.other - assert ! @dummy.avatar? - end - - should "still allow assigment on normal set" do - @dummy.logger.expects(:debug).times(0) - - @dummy.other = "I'm set!" - @dummy.avatar = @file - - assert_equal "I'm set!", @dummy.other - assert @dummy.avatar? 
- end - end - - context "with a subclass" do - setup do - class ::SubDummy < Dummy; end - end - - should "be able to use the attachment from the subclass" do - assert_nothing_raised do - @subdummy = SubDummy.create(:avatar => @file) - end - end - - should "be able to see the attachment definition from the subclass's class" do - assert_equal "tmp/:class/omg/:style.:extension", SubDummy.attachment_definitions[:avatar][:path] - end - - teardown do - Object.send(:remove_const, "SubDummy") rescue nil - end - end - - should "have an #avatar method" do - assert Dummy.new.respond_to?(:avatar) - end - - should "have an #avatar= method" do - assert Dummy.new.respond_to?(:avatar=) - end - - [[:presence, nil, "5k.png", nil], - [:size, {:in => 1..10240}, "5k.png", "12k.png"], - [:size2, {:in => 1..10240}, nil, "12k.png"], - [:content_type1, {:content_type => "image/png"}, "5k.png", "text.txt"], - [:content_type2, {:content_type => "text/plain"}, "text.txt", "5k.png"], - [:content_type3, {:content_type => %r{image/.*}}, "5k.png", "text.txt"], - [:content_type4, {:content_type => "image/png"}, nil, "text.txt"]].each do |args| - context "with #{args[0]} validations" do - setup do - Dummy.class_eval do - send(*[:"validates_attachment_#{args[0].to_s[/[a-z_]*/]}", :avatar, args[1]].compact) - end - @dummy = Dummy.new - end - - context "and a valid file" do - setup do - @file = args[2] && File.new(File.join(FIXTURES_DIR, args[2])) - end - - should "not have any errors" do - @dummy.avatar = @file - assert @dummy.avatar.valid? - assert_equal 0, @dummy.avatar.errors.length - end - end - - context "and an invalid file" do - setup do - @file = args[3] && File.new(File.join(FIXTURES_DIR, args[3])) - end - - should "have errors" do - @dummy.avatar = @file - assert ! @dummy.avatar.valid? - assert_equal 1, @dummy.avatar.errors.length - end - end - -# context "and an invalid file with :message" do -# setup do -# @file = args[3] && File.new(File.join(FIXTURES_DIR, args[3])) -# end -# -# should "have errors" do -# if args[1] && args[1][:message] && args[4] -# @dummy.avatar = @file -# assert ! @dummy.avatar.valid? 
-# assert_equal 1, @dummy.avatar.errors.length -# assert_equal args[4], @dummy.avatar.errors[0] -# end -# end -# end - end - end - end -end diff --git a/vendor/gems/paperclip-2.1.2/test/test_storage.rb b/vendor/gems/paperclip-2.1.2/test/test_storage.rb deleted file mode 100644 index d0b7614..0000000 --- a/vendor/gems/paperclip-2.1.2/test/test_storage.rb +++ /dev/null @@ -1,136 +0,0 @@ -require 'rubygems' -require 'test/unit' -require 'shoulda' -require 'right_aws' - -require File.join(File.dirname(__FILE__), '..', 'lib', 'paperclip', 'geometry.rb') - -class StorageTest < Test::Unit::TestCase - context "Parsing S3 credentials" do - setup do - rebuild_model :storage => :s3, - :bucket => "testing", - :s3_credentials => {:not => :important} - - @dummy = Dummy.new - @avatar = @dummy.avatar - - @current_env = ENV['RAILS_ENV'] - end - - teardown do - ENV['RAILS_ENV'] = @current_env - end - - should "get the correct credentials when RAILS_ENV is production" do - ENV['RAILS_ENV'] = 'production' - assert_equal({:key => "12345"}, - @avatar.parse_credentials('production' => {:key => '12345'}, - :development => {:key => "54321"})) - end - - should "get the correct credentials when RAILS_ENV is development" do - ENV['RAILS_ENV'] = 'development' - assert_equal({:key => "54321"}, - @avatar.parse_credentials('production' => {:key => '12345'}, - :development => {:key => "54321"})) - end - - should "return the argument if the key does not exist" do - ENV['RAILS_ENV'] = "not really an env" - assert_equal({:test => "12345"}, @avatar.parse_credentials(:test => "12345")) - end - end - - context "An attachment with S3 storage" do - setup do - rebuild_model :storage => :s3, - :bucket => "testing", - :path => ":attachment/:style/:basename.:extension", - :s3_credentials => { - 'access_key_id' => "12345", - 'secret_access_key' => "54321" - } - end - - should "be extended by the S3 module" do - assert Dummy.new.avatar.is_a?(Paperclip::Storage::S3) - end - - should "not be extended by the Filesystem module" do - assert ! Dummy.new.avatar.is_a?(Paperclip::Storage::Filesystem) - end - - context "when assigned" do - setup do - @file = File.new(File.join(File.dirname(__FILE__), 'fixtures', '5k.png')) - @dummy = Dummy.new - @dummy.avatar = @file - end - - should "not get a bucket to get a URL" do - @dummy.avatar.expects(:s3).never - @dummy.avatar.expects(:s3_bucket).never - assert_equal "https://s3.amazonaws.com/testing/avatars/original/5k.png", @dummy.avatar.url - end - - context "and saved" do - setup do - @s3_mock = stub - @bucket_mock = stub - RightAws::S3.expects(:new).with("12345", "54321", {}).returns(@s3_mock) - @s3_mock.expects(:bucket).with("testing", true, "public-read").returns(@bucket_mock) - @key_mock = stub - @bucket_mock.expects(:key).returns(@key_mock) - @key_mock.expects(:data=) - @key_mock.expects(:put) - @dummy.save - end - - should "succeed" do - assert true - end - end - end - end - - unless ENV["S3_TEST_BUCKET"].blank? 
- context "Using S3 for real, an attachment with S3 storage" do - setup do - rebuild_model :styles => { :thumb => "100x100", :square => "32x32#" }, - :storage => :s3, - :bucket => ENV["S3_TEST_BUCKET"], - :path => ":class/:attachment/:id/:style.:extension", - :s3_credentials => File.new(File.join(File.dirname(__FILE__), "s3.yml")) - - Dummy.delete_all - @dummy = Dummy.new - end - - should "be extended by the S3 module" do - assert Dummy.new.avatar.is_a?(Paperclip::Storage::S3) - end - - context "when assigned" do - setup do - @file = File.new(File.join(File.dirname(__FILE__), 'fixtures', '5k.png')) - @dummy.avatar = @file - end - - should "still return a Tempfile when sent #to_io" do - assert_equal Tempfile, @dummy.avatar.to_io.class - end - - context "and saved" do - setup do - @dummy.save - end - - should "be on S3" do - assert true - end - end - end - end - end -end diff --git a/vendor/gems/paperclip-2.1.2/test/test_thumbnail.rb b/vendor/gems/paperclip-2.1.2/test/test_thumbnail.rb deleted file mode 100644 index 14f8c29..0000000 --- a/vendor/gems/paperclip-2.1.2/test/test_thumbnail.rb +++ /dev/null @@ -1,107 +0,0 @@ -require 'rubygems' -require 'test/unit' -require 'shoulda' -require 'mocha' -require 'tempfile' - -require File.join(File.dirname(__FILE__), '..', 'lib', 'paperclip', 'geometry.rb') -require File.join(File.dirname(__FILE__), '..', 'lib', 'paperclip', 'thumbnail.rb') - -class ThumbnailTest < Test::Unit::TestCase - - context "A Paperclip Tempfile" do - setup do - @tempfile = Paperclip::Tempfile.new("file.jpg") - end - - should "have its path contain a real extension" do - assert_equal ".jpg", File.extname(@tempfile.path) - end - - should "be a real Tempfile" do - assert @tempfile.is_a?(::Tempfile) - end - end - - context "Another Paperclip Tempfile" do - setup do - @tempfile = Paperclip::Tempfile.new("file") - end - - should "not have an extension if not given one" do - assert_equal "", File.extname(@tempfile.path) - end - - should "still be a real Tempfile" do - assert @tempfile.is_a?(::Tempfile) - end - end - - context "An image" do - setup do - @file = File.new(File.join(File.dirname(__FILE__), "fixtures", "5k.png")) - end - - [["600x600>", "434x66"], - ["400x400>", "400x61"], - ["32x32<", "434x66"] - ].each do |args| - context "being thumbnailed with a geometry of #{args[0]}" do - setup do - @thumb = Paperclip::Thumbnail.new(@file, args[0]) - end - - should "start with dimensions of 434x66" do - cmd = %Q[identify -format "%wx%h" #{@file.path}] - assert_equal "434x66", `#{cmd}`.chomp - end - - should "report the correct target geometry" do - assert_equal args[0], @thumb.target_geometry.to_s - end - - context "when made" do - setup do - @thumb_result = @thumb.make - end - - should "be the size we expect it to be" do - cmd = %Q[identify -format "%wx%h" #{@thumb_result.path}] - assert_equal args[1], `#{cmd}`.chomp - end - end - end - end - - context "being thumbnailed at 100x50 with cropping" do - setup do - @thumb = Paperclip::Thumbnail.new(@file, "100x50#") - end - - should "report its correct current and target geometries" do - assert_equal "100x50#", @thumb.target_geometry.to_s - assert_equal "434x66", @thumb.current_geometry.to_s - end - - should "report its correct format" do - assert_nil @thumb.format - end - - should "have whiny_thumbnails turned on by default" do - assert @thumb.whiny_thumbnails - end - - should "send the right command to convert when sent #make" do - @thumb.expects(:system).with do |arg| - arg.match 
%r{convert\s+"#{File.expand_path(@thumb.file.path)}"\s+-scale\s+\"x50\"\s+-crop\s+\"100x50\+114\+0\"\s+\+repage\s+".*?"} - end - @thumb.make - end - - should "create the thumbnail when sent #make" do - dst = @thumb.make - assert_match /100x50/, `identify #{dst.path}` - end - end - end -end diff --git a/vendor/gems/redgreen-1.2.2/.specification b/vendor/gems/redgreen-1.2.2/.specification deleted file mode 100644 index 3b23585..0000000 --- a/vendor/gems/redgreen-1.2.2/.specification +++ /dev/null @@ -1,63 +0,0 @@ ---- !ruby/object:Gem::Specification -name: redgreen -version: !ruby/object:Gem::Version - version: 1.2.2 -platform: ruby -authors: -- Chris Wanstrath and Pat Eyler -autorequire: redgreen -bindir: bin -cert_chain: [] - -date: 2007-04-17 00:00:00 -04:00 -default_executable: rg -dependencies: [] - -description: redgreen is an expanded version of Pat Eyler's RedGreen. It will install a 'rg' file in your bin directory. Use that as you would use 'ruby' when running a test. -email: -- chris@ozmm.org -- pat.eyler@gmail.com -executables: -- rg -extensions: [] - -extra_rdoc_files: [] - -files: -- README -- bin/rg -- lib/redgreen -- lib/redgreen.rb -- lib/redgreen/autotest.rb -- test/test_fake.rb -has_rdoc: false -homepage: http://errtheblog.com/post/15, http://on-ruby.blogspot.com/ -licenses: [] - -post_install_message: -rdoc_options: [] - -require_paths: -- bin -- lib -required_ruby_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -required_rubygems_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -requirements: [] - -rubyforge_project: -rubygems_version: 1.3.5 -signing_key: -specification_version: 2 -summary: redgreen is an expanded version of Pat Eyler's RedGreen -test_files: [] - diff --git a/vendor/gems/redgreen-1.2.2/README b/vendor/gems/redgreen-1.2.2/README deleted file mode 100644 index b472f19..0000000 --- a/vendor/gems/redgreen-1.2.2/README +++ /dev/null @@ -1,17 +0,0 @@ -== redgreen - -redgreen is an expanded version of Pat Eyler's RedGreen. Use it as you would -the ruby interpreter when running your unit test. - -Like so: - -$ rg test/test_units.rb - -Relevant bloggings: -- http://errtheblog.com/post/15 -- http://on-ruby.blogspot.com/2006/05/red-and-green-for-ruby.html - -Enjoy. - ->> Chris Wanstrath -=> chris[at]ozmm[dot]org diff --git a/vendor/gems/redgreen-1.2.2/bin/rg b/vendor/gems/redgreen-1.2.2/bin/rg deleted file mode 100644 index 760fb06..0000000 --- a/vendor/gems/redgreen-1.2.2/bin/rg +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env ruby -require 'rubygems' -require 'redgreen' -require 'win32console' if PLATFORM =~ /win32/ - -require $0 = ARGV.first diff --git a/vendor/gems/redgreen-1.2.2/lib/redgreen.rb b/vendor/gems/redgreen-1.2.2/lib/redgreen.rb deleted file mode 100644 index 9a4d707..0000000 --- a/vendor/gems/redgreen-1.2.2/lib/redgreen.rb +++ /dev/null @@ -1,64 +0,0 @@ -require 'test/unit' -require 'test/unit/ui/console/testrunner' - -# cute. 
-module Color - COLORS = { :clear => 0, :red => 31, :green => 32, :yellow => 33 } - def self.method_missing(color_name, *args) - color(color_name) + args.first + color(:clear) - end - def self.color(color) - "\e[#{COLORS[color.to_sym]}m" - end -end - -class Test::Unit::UI::Console::RedGreenTestRunner < Test::Unit::UI::Console::TestRunner - def initialize(suite, output_level=NORMAL, io=$stdout) - super - end - - def output_single(something, level=NORMAL) - return unless (output?(level)) - something = case something - when '.' then Color.green('.') - when 'F' then Color.red("F") - when 'E' then Color.yellow("E") - else something - end - @io.write(something) - @io.flush - end -end - -class Test::Unit::AutoRunner - alias :old_initialize :initialize - def initialize(standalone) - old_initialize(standalone) - @runner = proc do |r| - Test::Unit::UI::Console::RedGreenTestRunner - end - end -end - -class Test::Unit::TestResult - alias :old_to_s :to_s - def to_s - if old_to_s =~ /\d+ tests, \d+ assertions, (\d+) failures, (\d+) errors/ - Color.send($1.to_i != 0 || $2.to_i != 0 ? :red : :green, $&) - end - end -end - -class Test::Unit::Failure - alias :old_long_display :long_display - def long_display - old_long_display.sub('Failure', Color.red('Failure')) - end -end - -class Test::Unit::Error - alias :old_long_display :long_display - def long_display - old_long_display.sub('Error', Color.yellow('Error')) - end -end diff --git a/vendor/gems/redgreen-1.2.2/lib/redgreen/autotest.rb b/vendor/gems/redgreen-1.2.2/lib/redgreen/autotest.rb deleted file mode 100644 index 9117341..0000000 --- a/vendor/gems/redgreen-1.2.2/lib/redgreen/autotest.rb +++ /dev/null @@ -1,43 +0,0 @@ -Autotest.send(:alias_method, :real_ruby, :ruby) -Autotest.send(:define_method, :ruby) do |*args| - real_ruby + %[ -rrubygems -e "require 'redgreen'" ] -end - -if PLATFORM =~ /win32/ - require 'win32console' - Autotest.send(:define_method, :run_tests) do |*args| - find_files_to_test # failed + changed/affected - cmd = make_test_cmd @files_to_test - - hook :run_command - puts cmd - old_sync = $stdout.sync - $stdout.sync = true - @results = [] - line = [] - begin - open("| #{cmd}", "r") do |f| - until f.eof? 
do - c = f.getc - #~ putc c - line << c - if c == ?\n then - @results << line.pack("c*") - line.clear - end - end - end - ensure - $stdout.sync = old_sync - end - puts @results - hook :ran_command - @results = @results.join - handle_results(@results) - end - - Autotest.add_hook(:ran_command) do |at| p - include Term::ANSIColor - at.results = at.results.map{ |r| uncolored(r) } - end -end diff --git a/vendor/gems/redgreen-1.2.2/test/test_fake.rb b/vendor/gems/redgreen-1.2.2/test/test_fake.rb deleted file mode 100644 index a60491d..0000000 --- a/vendor/gems/redgreen-1.2.2/test/test_fake.rb +++ /dev/null @@ -1,33 +0,0 @@ -require 'test/unit' - -class TestFake < Test::Unit::TestCase - - def test_true - assert true - end - - def test_fail - assert false - end - - def test_true_2 - assert true - end - - def test_true_3 - assert true - end - - def test_error - assert method_dont_exist - end - - def test_true_4 - assert true - end - - def test_fail_again - assert false - end - -end diff --git a/vendor/gems/uuidtools-1.0.4/.specification b/vendor/gems/uuidtools-1.0.4/.specification deleted file mode 100644 index 75f4f7c..0000000 --- a/vendor/gems/uuidtools-1.0.4/.specification +++ /dev/null @@ -1,98 +0,0 @@ ---- !ruby/object:Gem::Specification -name: uuidtools -version: !ruby/object:Gem::Version - version: 1.0.4 -platform: ruby -authors: -- Bob Aman -autorequire: -bindir: bin -cert_chain: [] - -date: 2008-09-29 00:00:00 -04:00 -default_executable: -dependencies: -- !ruby/object:Gem::Dependency - name: rake - type: :runtime - version_requirement: - version_requirements: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: 0.7.3 - version: -- !ruby/object:Gem::Dependency - name: rspec - type: :runtime - version_requirement: - version_requirements: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: 1.0.8 - version: -description: A simple universally unique ID generation library. 
-email: bob@sporkmonger.com -executables: [] - -extensions: [] - -extra_rdoc_files: -- README -files: -- lib/uuidtools -- lib/uuidtools/version.rb -- lib/uuidtools.rb -- spec/spec.opts -- spec/spec_helper.rb -- spec/uuidtools -- spec/uuidtools/mac_address_spec.rb -- spec/uuidtools/uuid_creation_spec.rb -- spec/uuidtools/uuid_parsing_spec.rb -- tasks/benchmark.rake -- tasks/clobber.rake -- tasks/gem.rake -- tasks/git.rake -- tasks/metrics.rake -- tasks/rdoc.rake -- tasks/rubyforge.rake -- tasks/spec.rake -- website/index.html -- CHANGELOG -- LICENSE -- Rakefile -- README -has_rdoc: true -homepage: http://uuidtools.rubyforge.org/ -licenses: [] - -post_install_message: -rdoc_options: -- --main -- README -require_paths: -- bin -- bin -- lib -required_ruby_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -required_rubygems_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -requirements: [] - -rubyforge_project: uuidtools -rubygems_version: 1.3.5 -signing_key: -specification_version: 2 -summary: UUID generator -test_files: [] - diff --git a/vendor/gems/uuidtools-1.0.4/CHANGELOG b/vendor/gems/uuidtools-1.0.4/CHANGELOG deleted file mode 100644 index 0c1fc62..0000000 --- a/vendor/gems/uuidtools-1.0.4/CHANGELOG +++ /dev/null @@ -1,38 +0,0 @@ -== UUIDTools 1.0.4 - * calculates random node id with multicast bit if there is no MAC address - * uses RSpec instead of Test::Unit - * works in Ruby 1.9 - * cleaned up some code - * removed deprecated methods - * changed version constant - * new gem file structure -== UUIDTools 1.0.3 - * improved code for obtaining a MAC address within JRuby -== UUIDTools 1.0.2 - * improved code for obtaining a MAC address for Solaris and OpenBSD - * added hash and eql? methods -== UUIDTools 1.0.1 - * improved code for obtaining a MAC address for Solaris and NetBSD - * MAC addresses can now be set manually - * replaced random number generator, less effective on Windows, but faster - * fixed inheritance issues - * changed UUID#to_uri method to return a string instead of a URI object - * removed UUID#to_uri_string -== UUIDTools 1.0.0 - * slight improvements to the random number generator - * fixed issue with failing to obtain mac address in certain environments -== UUIDTools 0.1.4 - * improved speed when generating timestamp-based uuids - * fixed bug with rapid generation of timestamp uuids leading to duplicates - * improved code for detection of mac address -== UUIDTools 0.1.3 - * fixed issue with UUID#raw attempting to call protected class methods -== UUIDTools 0.1.2 - * fixed variant method - * added valid? 
method - * changed timestamp_create method to allow creation of UUIDs from - arbitrary timestamps -== UUIDTools 0.1.1 - * changed helper methods to be protected like they should have been -== UUIDTools 0.1.0 - * parsing and generation of UUIDs implemented diff --git a/vendor/gems/uuidtools-1.0.4/LICENSE b/vendor/gems/uuidtools-1.0.4/LICENSE deleted file mode 100644 index 9e71de7..0000000 --- a/vendor/gems/uuidtools-1.0.4/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -UUIDTools, Copyright (c) 2005-2008 Bob Aman - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/gems/uuidtools-1.0.4/README b/vendor/gems/uuidtools-1.0.4/README deleted file mode 100644 index 332a2c4..0000000 --- a/vendor/gems/uuidtools-1.0.4/README +++ /dev/null @@ -1,13 +0,0 @@ - UUIDTools was designed to be a simple library for generating any - of the various types of UUIDs. It conforms to RFC 4122 whenever - possible. - -== Example - UUID.md5_create(UUID_DNS_NAMESPACE, "www.widgets.com") - => # - UUID.sha1_create(UUID_DNS_NAMESPACE, "www.widgets.com") - => # - UUID.timestamp_create - => # - UUID.random_create - => # diff --git a/vendor/gems/uuidtools-1.0.4/Rakefile b/vendor/gems/uuidtools-1.0.4/Rakefile deleted file mode 100644 index 3713efd..0000000 --- a/vendor/gems/uuidtools-1.0.4/Rakefile +++ /dev/null @@ -1,44 +0,0 @@ -lib_dir = File.expand_path(File.join(File.dirname(__FILE__), "lib")) -$:.unshift(lib_dir) -$:.uniq! - -require 'rubygems' -require 'rake' -require 'rake/testtask' -require 'rake/rdoctask' -require 'rake/packagetask' -require 'rake/gempackagetask' -require 'rake/contrib/rubyforgepublisher' -require 'spec/rake/spectask' - -require File.join(File.dirname(__FILE__), 'lib/uuidtools', 'version') - -PKG_DISPLAY_NAME = 'UUIDTools' -PKG_NAME = PKG_DISPLAY_NAME.downcase -PKG_VERSION = UUID::VERSION::STRING -PKG_FILE_NAME = "#{PKG_NAME}-#{PKG_VERSION}" - -RELEASE_NAME = "REL #{PKG_VERSION}" - -RUBY_FORGE_PROJECT = PKG_NAME -RUBY_FORGE_USER = "sporkmonger" -RUBY_FORGE_PATH = "/var/www/gforge-projects/#{RUBY_FORGE_PROJECT}" -RUBY_FORGE_URL = "http://#{RUBY_FORGE_PROJECT}.rubyforge.org/" - -PKG_SUMMARY = "UUID generator" -PKG_DESCRIPTION = <<-TEXT -A simple universally unique ID generation library. -TEXT - -PKG_FILES = FileList[ - "lib/**/*", "spec/**/*", "vendor/**/*", - "tasks/**/*", "website/**/*", - "[A-Z]*", "Rakefile" -].exclude(/database\.yml/).exclude(/[_\.]git$/) - -task :default => "spec:verify" - -WINDOWS = (RUBY_PLATFORM =~ /mswin|win32|mingw|bccwin|cygwin/) rescue false -SUDO = WINDOWS ? 
'' : ('sudo' unless ENV['SUDOLESS']) - -Dir['tasks/**/*.rake'].each { |rake| load rake } diff --git a/vendor/gems/uuidtools-1.0.4/lib/uuidtools.rb b/vendor/gems/uuidtools-1.0.4/lib/uuidtools.rb deleted file mode 100644 index e1ee655..0000000 --- a/vendor/gems/uuidtools-1.0.4/lib/uuidtools.rb +++ /dev/null @@ -1,604 +0,0 @@ -#-- -# UUIDTools, Copyright (c) 2005-2008 Bob Aman -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -#++ - -$:.unshift(File.dirname(__FILE__)) - -require 'uri' -require 'time' -require 'thread' -require 'digest/sha1' -require 'digest/md5' - -require 'uuidtools/version' - -#= uuidtools.rb -# -# UUIDTools was designed to be a simple library for generating any -# of the various types of UUIDs. It conforms to RFC 4122 whenever -# possible. -# -#== Example -# UUID.md5_create(UUID_DNS_NAMESPACE, "www.widgets.com") -# => # -# UUID.sha1_create(UUID_DNS_NAMESPACE, "www.widgets.com") -# => # -# UUID.timestamp_create -# => # -# UUID.random_create -# => # -class UUID - @@last_timestamp = nil - @@last_node_id = nil - @@last_clock_sequence = nil - @@state_file = nil - @@mutex = Mutex.new - - def initialize(time_low, time_mid, time_hi_and_version, - clock_seq_hi_and_reserved, clock_seq_low, nodes) - unless time_low >= 0 && time_low < 4294967296 - raise ArgumentError, - "Expected unsigned 32-bit number for time_low, got #{time_low}." - end - unless time_mid >= 0 && time_mid < 65536 - raise ArgumentError, - "Expected unsigned 16-bit number for time_mid, got #{time_mid}." - end - unless time_hi_and_version >= 0 && time_hi_and_version < 65536 - raise ArgumentError, - "Expected unsigned 16-bit number for time_hi_and_version, " + - "got #{time_hi_and_version}." - end - unless clock_seq_hi_and_reserved >= 0 && clock_seq_hi_and_reserved < 256 - raise ArgumentError, - "Expected unsigned 8-bit number for clock_seq_hi_and_reserved, " + - "got #{clock_seq_hi_and_reserved}." - end - unless clock_seq_low >= 0 && clock_seq_low < 256 - raise ArgumentError, - "Expected unsigned 8-bit number for clock_seq_low, " + - "got #{clock_seq_low}." - end - unless nodes.respond_to? :size - raise ArgumentError, - "Expected nodes to respond to :size." - end - unless nodes.size == 6 - raise ArgumentError, - "Expected nodes to have size of 6." - end - for node in nodes - unless node >= 0 && node < 256 - raise ArgumentError, - "Expected unsigned 8-bit number for each node, " + - "got #{node}." 
- end - end - @time_low = time_low - @time_mid = time_mid - @time_hi_and_version = time_hi_and_version - @clock_seq_hi_and_reserved = clock_seq_hi_and_reserved - @clock_seq_low = clock_seq_low - @nodes = nodes - end - - attr_accessor :time_low - attr_accessor :time_mid - attr_accessor :time_hi_and_version - attr_accessor :clock_seq_hi_and_reserved - attr_accessor :clock_seq_low - attr_accessor :nodes - - # Parses a UUID from a string. - def self.parse(uuid_string) - unless uuid_string.kind_of? String - raise ArgumentError, - "Expected String, got #{uuid_string.class.name} instead." - end - uuid_components = uuid_string.downcase.scan( - Regexp.new("^([0-9a-f]{8})-([0-9a-f]{4})-([0-9a-f]{4})-" + - "([0-9a-f]{2})([0-9a-f]{2})-([0-9a-f]{12})$")).first - raise ArgumentError, "Invalid UUID format." if uuid_components.nil? - time_low = uuid_components[0].to_i(16) - time_mid = uuid_components[1].to_i(16) - time_hi_and_version = uuid_components[2].to_i(16) - clock_seq_hi_and_reserved = uuid_components[3].to_i(16) - clock_seq_low = uuid_components[4].to_i(16) - nodes = [] - for i in 0..5 - nodes << uuid_components[5][(i * 2)..(i * 2) + 1].to_i(16) - end - return self.new(time_low, time_mid, time_hi_and_version, - clock_seq_hi_and_reserved, clock_seq_low, nodes) - end - - # Parses a UUID from a raw byte string. - def self.parse_raw(raw_string) - unless raw_string.kind_of? String - raise ArgumentError, - "Expected String, got #{raw_string.class.name} instead." - end - integer = self.convert_byte_string_to_int(raw_string) - - time_low = (integer >> 96) & 0xFFFFFFFF - time_mid = (integer >> 80) & 0xFFFF - time_hi_and_version = (integer >> 64) & 0xFFFF - clock_seq_hi_and_reserved = (integer >> 56) & 0xFF - clock_seq_low = (integer >> 48) & 0xFF - nodes = [] - for i in 0..5 - nodes << ((integer >> (40 - (i * 8))) & 0xFF) - end - return self.new(time_low, time_mid, time_hi_and_version, - clock_seq_hi_and_reserved, clock_seq_low, nodes) - end - - # Creates a UUID from a random value. - def self.random_create() - new_uuid = self.parse_raw(self.random_bits) - new_uuid.time_hi_and_version &= 0x0FFF - new_uuid.time_hi_and_version |= (4 << 12) - new_uuid.clock_seq_hi_and_reserved &= 0x3F - new_uuid.clock_seq_hi_and_reserved |= 0x80 - return new_uuid - end - - # Creates a UUID from a timestamp. - def self.timestamp_create(timestamp=nil) - # We need a lock here to prevent two threads from ever - # getting the same timestamp. - @@mutex.synchronize do - # Always use GMT to generate UUIDs. - if timestamp.nil? - gmt_timestamp = Time.now.gmtime - else - gmt_timestamp = timestamp.gmtime - end - # Convert to 100 nanosecond blocks - gmt_timestamp_100_nanoseconds = (gmt_timestamp.tv_sec * 10000000) + - (gmt_timestamp.tv_usec * 10) + 0x01B21DD213814000 - mac_address = self.mac_address - node_id = 0 - if mac_address != nil - nodes = mac_address.split(":").collect do |octet| - octet.to_i(16) - end - else - nodes = self.random_bits(48).split("").map do |chr| - if chr.respond_to?(:ord) - # Ruby 1.9 - chr.ord - else - # Ruby 1.8 - chr.sum(8) - end - end - nodes[0] |= 0b00000001 - end - for i in 0..5 - node_id += (nodes[i] << (40 - (i * 8))) - end - clock_sequence = @@last_clock_sequence - if clock_sequence.nil? - clock_sequence = self.convert_byte_string_to_int(self.random_bits) - end - if @@last_node_id != nil && @@last_node_id != node_id - # The node id has changed. Change the clock id. 
- clock_sequence = self.convert_byte_string_to_int(self.random_bits) - elsif @@last_timestamp != nil && - gmt_timestamp_100_nanoseconds <= @@last_timestamp - clock_sequence = clock_sequence + 1 - end - @@last_timestamp = gmt_timestamp_100_nanoseconds - @@last_node_id = node_id - @@last_clock_sequence = clock_sequence - - time_low = gmt_timestamp_100_nanoseconds & 0xFFFFFFFF - time_mid = ((gmt_timestamp_100_nanoseconds >> 32) & 0xFFFF) - time_hi_and_version = ((gmt_timestamp_100_nanoseconds >> 48) & 0x0FFF) - time_hi_and_version |= (1 << 12) - clock_seq_low = clock_sequence & 0xFF; - clock_seq_hi_and_reserved = (clock_sequence & 0x3F00) >> 8 - clock_seq_hi_and_reserved |= 0x80 - - return self.new(time_low, time_mid, time_hi_and_version, - clock_seq_hi_and_reserved, clock_seq_low, nodes) - end - end - - # Creates a UUID using the MD5 hash. (Version 3) - def self.md5_create(namespace, name) - return self.create_from_hash(Digest::MD5, namespace, name) - end - - # Creates a UUID using the SHA1 hash. (Version 5) - def self.sha1_create(namespace, name) - return self.create_from_hash(Digest::SHA1, namespace, name) - end - - # This method applies only to version 1 UUIDs. - # Checks if the node ID was generated from a random number - # or from an IEEE 802 address (MAC address). - # Always returns false for UUIDs that aren't version 1. - # This should not be confused with version 4 UUIDs where - # more than just the node id is random. - def random_node_id? - return false if self.version != 1 - return ((self.nodes.first & 0x01) == 1) - end - - # Returns true if this UUID is the - # nil UUID (00000000-0000-0000-0000-000000000000). - def nil_uuid? - return false if self.time_low != 0 - return false if self.time_mid != 0 - return false if self.time_hi_and_version != 0 - return false if self.clock_seq_hi_and_reserved != 0 - return false if self.clock_seq_low != 0 - self.nodes.each do |node| - return false if node != 0 - end - return true - end - - # Returns the UUID version type. - # Possible values: - # 1 - Time-based with unique or random host identifier - # 2 - DCE Security version (with POSIX UIDs) - # 3 - Name-based (MD5 hash) - # 4 - Random - # 5 - Name-based (SHA-1 hash) - def version - return (time_hi_and_version >> 12) - end - - # Returns the UUID variant. - # Possible values: - # 0b000 - Reserved, NCS backward compatibility. - # 0b100 - The variant specified in this document. - # 0b110 - Reserved, Microsoft Corporation backward compatibility. - # 0b111 - Reserved for future definition. - def variant - variant_raw = (clock_seq_hi_and_reserved >> 5) - result = nil - if (variant_raw >> 2) == 0 - result = 0x000 - elsif (variant_raw >> 1) == 2 - result = 0x100 - else - result = variant_raw - end - return (result >> 6) - end - - # Returns true if this UUID is valid. - def valid? - if [0b000, 0b100, 0b110, 0b111].include?(self.variant) && - (1..5).include?(self.version) - return true - else - return false - end - end - - # Returns the IEEE 802 address used to generate this UUID or - # nil if a MAC address was not used. - def mac_address - return nil if self.version != 1 - return nil if self.random_node_id? 
- return (self.nodes.collect do |node| - sprintf("%2.2x", node) - end).join(":") - end - - # Returns the timestamp used to generate this UUID - def timestamp - return nil if self.version != 1 - gmt_timestamp_100_nanoseconds = 0 - gmt_timestamp_100_nanoseconds += - ((self.time_hi_and_version & 0x0FFF) << 48) - gmt_timestamp_100_nanoseconds += (self.time_mid << 32) - gmt_timestamp_100_nanoseconds += self.time_low - return Time.at( - (gmt_timestamp_100_nanoseconds - 0x01B21DD213814000) / 10000000.0) - end - - # Compares two UUIDs lexically - def <=>(other_uuid) - check = self.time_low <=> other_uuid.time_low - return check if check != 0 - check = self.time_mid <=> other_uuid.time_mid - return check if check != 0 - check = self.time_hi_and_version <=> other_uuid.time_hi_and_version - return check if check != 0 - check = self.clock_seq_hi_and_reserved <=> - other_uuid.clock_seq_hi_and_reserved - return check if check != 0 - check = self.clock_seq_low <=> other_uuid.clock_seq_low - return check if check != 0 - for i in 0..5 - if (self.nodes[i] < other_uuid.nodes[i]) - return -1 - end - if (self.nodes[i] > other_uuid.nodes[i]) - return 1 - end - end - return 0 - end - - # Returns a representation of the object's state - def inspect - return "#" - end - - # Returns the hex digest of the UUID object. - def hexdigest - return self.to_i.to_s(16) - end - - # Returns the raw bytes that represent this UUID. - def raw - return self.class.convert_int_to_byte_string(self.to_i, 16) - end - - # Returns a string representation for this UUID. - def to_s - result = sprintf("%8.8x-%4.4x-%4.4x-%2.2x%2.2x-", @time_low, @time_mid, - @time_hi_and_version, @clock_seq_hi_and_reserved, @clock_seq_low); - for i in 0..5 - result << sprintf("%2.2x", @nodes[i]) - end - return result.downcase - end - - # Returns an integer representation for this UUID. - def to_i - bytes = (time_low << 96) + (time_mid << 80) + - (time_hi_and_version << 64) + (clock_seq_hi_and_reserved << 56) + - (clock_seq_low << 48) - for i in 0..5 - bytes += (nodes[i] << (40 - (i * 8))) - end - return bytes - end - - # Returns a URI string for this UUID. - def to_uri - return "urn:uuid:#{self.to_s}" - end - - # Returns an integer hash value. - def hash - return self.to_i - end - - # Returns true if this UUID is exactly equal to the other UUID. - def eql?(other) - return (self <=> other) == 0 - end - - # Returns the MAC address of the current computer's network card. - # Returns nil if a MAC address could not be found. - def self.mac_address #:nodoc: - if !defined?(@@mac_address) - require 'rbconfig' - os_platform = Config::CONFIG['target_os'] - if os_platform =~ /win/ && !(os_platform =~ /darwin/) - script_in_path = true - else - script_in_path = !(`which ifconfig`.strip =~ /no .+ in/) - end - if os_platform =~ /solaris/ - begin - ifconfig_output = - (script_in_path ? `ifconfig -a` : `/sbin/ifconfig -a`) - ip_addresses = ifconfig_output.scan( - /inet\s?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})/) - ip = ip_addresses.find {|addr| addr[0] != '127.0.0.1'}[0] - @@mac_address = `/usr/sbin/arp #{ip}`.split(' ')[3] - rescue Exception - end - if @@mac_address == "" || @@mac_address == nil - begin - ifconfig_output = - (script_in_path ? 
- `ifconfig -a` : `/sbin/ifconfig -a`).split(' ') - index = ifconfig_output.index("inet") + 1 - ip = ifconfig_output[index] - @@mac_address = `arp #{ip}`.split(' ')[3] - rescue Exception - end - end - elsif os_platform =~ /win/ && !(os_platform =~ /darwin/) - begin - ifconfig_output = `ipconfig /all` - mac_addresses = ifconfig_output.scan( - Regexp.new("(#{(["[0-9a-fA-F]{2}"] * 6).join("-")})")) - if mac_addresses.size > 0 - @@mac_address = mac_addresses.first.first.downcase.gsub(/-/, ":") - end - rescue - end - else - begin - mac_addresses = [] - if os_platform =~ /netbsd/ - ifconfig_output = - (script_in_path ? `ifconfig -a 2>&1` : `/sbin/ifconfig -a 2>&1`) - mac_addresses = ifconfig_output.scan( - Regexp.new("address\: (#{(["[0-9a-fA-F]{2}"] * 6).join(":")})")) - elsif os_platform =~ /openbsd/ - ifconfig_output = `/sbin/ifconfig -a 2>&1` - ifconfig_output = - (script_in_path ? `ifconfig -a 2>&1` : `/sbin/ifconfig -a 2>&1`) - mac_addresses = ifconfig_output.scan( - Regexp.new("addr (#{(["[0-9a-fA-F]{2}"] * 6).join(":")})")) - elsif File.exists?('/sbin/ifconfig') - ifconfig_output = - (script_in_path ? `ifconfig 2>&1` : `/sbin/ifconfig 2>&1`) - mac_addresses = ifconfig_output.scan( - Regexp.new("ether (#{(["[0-9a-fA-F]{2}"] * 6).join(":")})")) - if mac_addresses.size == 0 - ifconfig_output = - (script_in_path ? - `ifconfig -a 2>&1` : `/sbin/ifconfig -a 2>&1`) - mac_addresses = ifconfig_output.scan( - Regexp.new("ether (#{(["[0-9a-fA-F]{2}"] * 6).join(":")})")) - end - if mac_addresses.size == 0 - ifconfig_output = - (script_in_path ? - `ifconfig | grep HWaddr | cut -c39- 2>&1` : - `/sbin/ifconfig | grep HWaddr | cut -c39- 2>&1`) - mac_addresses = ifconfig_output.scan( - Regexp.new("(#{(["[0-9a-fA-F]{2}"] * 6).join(":")})")) - end - else - ifconfig_output = - (script_in_path ? `ifconfig 2>&1` : `/sbin/ifconfig 2>&1`) - mac_addresses = ifconfig_output.scan( - Regexp.new("ether (#{(["[0-9a-fA-F]{2}"] * 6).join(":")})")) - if mac_addresses.size == 0 - ifconfig_output = - (script_in_path ? - `ifconfig -a 2>&1` : `/sbin/ifconfig -a 2>&1`) - mac_addresses = ifconfig_output.scan( - Regexp.new("ether (#{(["[0-9a-fA-F]{2}"] * 6).join(":")})")) - end - if mac_addresses.size == 0 - ifconfig_output = - (script_in_path ? - `ifconfig | grep HWaddr | cut -c39- 2>&1` : - `/sbin/ifconfig | grep HWaddr | cut -c39- 2>&1`) - mac_addresses = ifconfig_output.scan( - Regexp.new("(#{(["[0-9a-fA-F]{2}"] * 6).join(":")})")) - end - end - if mac_addresses.size > 0 - @@mac_address = mac_addresses.first.first - end - rescue - end - end - if @@mac_address != nil - if @@mac_address.respond_to?(:to_str) - @@mac_address = @@mac_address.to_str - else - @@mac_address = @@mac_address.to_s - end - @@mac_address.downcase! - @@mac_address.strip! - end - - # Verify that the MAC address is in the right format. - # Nil it out if it isn't. - unless @@mac_address.respond_to?(:scan) && - @@mac_address.scan(/#{(["[0-9a-f]{2}"] * 6).join(":")}/) - @@mac_address = nil - end - end - return @@mac_address - end - - # Allows users to set the MAC address manually in cases where the MAC - # address cannot be obtained programatically. - def self.mac_address=(new_mac_address) - @@mac_address = new_mac_address - end - -protected - # Creates a new UUID from a SHA1 or MD5 hash - def self.create_from_hash(hash_class, namespace, name) #:nodoc: - if hash_class == Digest::MD5 - version = 3 - elsif hash_class == Digest::SHA1 - version = 5 - else - raise ArgumentError, - "Expected Digest::SHA1 or Digest::MD5, got #{hash_class.name}." 
- end - hash = hash_class.new - hash.update(namespace.raw) - hash.update(name) - hash_string = hash.to_s[0..31] - new_uuid = self.parse("#{hash_string[0..7]}-#{hash_string[8..11]}-" + - "#{hash_string[12..15]}-#{hash_string[16..19]}-#{hash_string[20..31]}") - - new_uuid.time_hi_and_version &= 0x0FFF - new_uuid.time_hi_and_version |= (version << 12) - new_uuid.clock_seq_hi_and_reserved &= 0x3F - new_uuid.clock_seq_hi_and_reserved |= 0x80 - return new_uuid - end - - # N bits of unpredictable data. - def self.random_bits(size=128) #:nodoc: - if 128 % 16 != 0 - raise ArgumentError, "Value must be divisible by 16." - end - if !defined?(@random_device) || @random_device == nil - begin - @random_device = nil - if File.exists? "/dev/urandom" - @random_device = File.open "/dev/urandom", "r" - elsif File.exists? "/dev/random" - @random_device = File.open "/dev/random", "r" - end - rescue Exception - end - end - begin - return @random_device.read(size / 8) if @random_device != nil - rescue Exception - end - return (1..(size / 16)).to_a.map { rand(0x10000) }.pack("n#{size / 16}") - end - - def self.convert_int_to_byte_string(integer, size) #:nodoc: - byte_string = "" - for i in 0..(size - 1) - byte_string << ((integer >> (((size - 1) - i) * 8)) & 0xFF) - end - return byte_string - end - - def self.convert_byte_string_to_int(byte_string) #:nodoc: - integer = 0 - size = byte_string.size - for i in 0..(size - 1) - if byte_string[i].respond_to?(:ord) - # Ruby 1.9 - integer += (byte_string[i].ord << (((size - 1) - i) * 8)) - else - # Ruby 1.8 - integer += (byte_string[i] << (((size - 1) - i) * 8)) - end - end - return integer - end -end - -UUID_DNS_NAMESPACE = UUID.parse("6ba7b810-9dad-11d1-80b4-00c04fd430c8") -UUID_URL_NAMESPACE = UUID.parse("6ba7b811-9dad-11d1-80b4-00c04fd430c8") -UUID_OID_NAMESPACE = UUID.parse("6ba7b812-9dad-11d1-80b4-00c04fd430c8") -UUID_X500_NAMESPACE = UUID.parse("6ba7b814-9dad-11d1-80b4-00c04fd430c8") diff --git a/vendor/gems/uuidtools-1.0.4/lib/uuidtools/version.rb b/vendor/gems/uuidtools-1.0.4/lib/uuidtools/version.rb deleted file mode 100644 index 9afa2bc..0000000 --- a/vendor/gems/uuidtools-1.0.4/lib/uuidtools/version.rb +++ /dev/null @@ -1,32 +0,0 @@ -#-- -# UUIDTools, Copyright (c) 2005-2008 Bob Aman -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-#++ - -class UUID - module VERSION #:nodoc: - MAJOR = 1 - MINOR = 0 - TINY = 4 - - STRING = [MAJOR, MINOR, TINY].join('.') - end -end diff --git a/vendor/gems/uuidtools-1.0.4/spec/spec.opts b/vendor/gems/uuidtools-1.0.4/spec/spec.opts deleted file mode 100644 index 53607ea..0000000 --- a/vendor/gems/uuidtools-1.0.4/spec/spec.opts +++ /dev/null @@ -1 +0,0 @@ ---colour diff --git a/vendor/gems/uuidtools-1.0.4/spec/spec_helper.rb b/vendor/gems/uuidtools-1.0.4/spec/spec_helper.rb deleted file mode 100644 index 7336d7f..0000000 --- a/vendor/gems/uuidtools-1.0.4/spec/spec_helper.rb +++ /dev/null @@ -1,7 +0,0 @@ -spec_dir = File.expand_path(File.dirname(__FILE__)) -lib_dir = File.expand_path(File.join(spec_dir, "../lib")) - -$:.unshift(lib_dir) -$:.uniq! - -require "uuidtools" diff --git a/vendor/gems/uuidtools-1.0.4/spec/uuidtools/mac_address_spec.rb b/vendor/gems/uuidtools-1.0.4/spec/uuidtools/mac_address_spec.rb deleted file mode 100644 index d679fe4..0000000 --- a/vendor/gems/uuidtools-1.0.4/spec/uuidtools/mac_address_spec.rb +++ /dev/null @@ -1,15 +0,0 @@ -require File.join(File.dirname(__FILE__), "../spec_helper.rb") - -describe UUID, "when obtaining a MAC address" do - before do - @mac_address = UUID.mac_address - end - - it "should obtain a MAC address" do - @mac_address.should_not be_nil - end - - it "should cache the MAC address" do - @mac_address.object_id.should == UUID.mac_address.object_id - end -end diff --git a/vendor/gems/uuidtools-1.0.4/spec/uuidtools/uuid_creation_spec.rb b/vendor/gems/uuidtools-1.0.4/spec/uuidtools/uuid_creation_spec.rb deleted file mode 100644 index 5e1bdfd..0000000 --- a/vendor/gems/uuidtools-1.0.4/spec/uuidtools/uuid_creation_spec.rb +++ /dev/null @@ -1,37 +0,0 @@ -require File.join(File.dirname(__FILE__), "../spec_helper.rb") - -describe UUID, "when generating" do - it "should correctly generate SHA1 variant UUIDs" do - UUID.sha1_create( - UUID_URL_NAMESPACE, 'http://sporkmonger.com' - ).to_s.should == "f2d04685-b787-55da-8644-9bd28a6f5a53" - end - - it "should correctly generate MD5 variant UUIDs" do - UUID.md5_create( - UUID_URL_NAMESPACE, 'http://sporkmonger.com' - ).to_s.should == "15074785-9071-3fe3-89bd-876e4b9e919b" - end - - it "should correctly generate timestamp variant UUIDs" do - UUID.timestamp_create.to_s.should_not == UUID.timestamp_create.to_s - current_time = Time.now - UUID.timestamp_create(current_time).to_s.should_not == - UUID.timestamp_create(current_time).to_s - uuids = [] - 1000.times do - uuids << UUID.timestamp_create - end - # Check to make sure that none of the 10,000 UUIDs were duplicates - (uuids.map {|x| x.to_s}).uniq.size.should == uuids.size - end - - it "should correctly generate random number variant UUIDs" do - uuids = [] - 1000.times do - uuids << UUID.random_create - end - # Check to make sure that none of the 10,000 UUIDs were duplicates - (uuids.map {|x| x.to_s}).uniq.size.should == uuids.size - end -end diff --git a/vendor/gems/uuidtools-1.0.4/spec/uuidtools/uuid_parsing_spec.rb b/vendor/gems/uuidtools-1.0.4/spec/uuidtools/uuid_parsing_spec.rb deleted file mode 100644 index 191c7e6..0000000 --- a/vendor/gems/uuidtools-1.0.4/spec/uuidtools/uuid_parsing_spec.rb +++ /dev/null @@ -1,36 +0,0 @@ -require File.join(File.dirname(__FILE__), "../spec_helper.rb") - -describe UUID, "when parsing" do - it "should correctly parse the MAC address from a timestamp version UUID" do - UUID.timestamp_create.mac_address.should == UUID.mac_address - end - - it "should correctly parse the variant from a timestamp version UUID" do - 
UUID.timestamp_create.variant.should == 0b100 - end - - it "should correctly parse the version from a timestamp version UUID" do - UUID.timestamp_create.version.should == 1 - end - - it "should correctly parse the timestamp from a timestamp version UUID" do - UUID.timestamp_create.timestamp.should < Time.now + 1 - UUID.timestamp_create.timestamp.should > Time.now - 1 - end - - it "should not treat a timestamp version UUID as a nil UUID" do - UUID.timestamp_create.should_not be_nil_uuid - end - - it "should not treat a timestamp version UUID as a random node UUID" do - UUID.timestamp_create.should_not be_random_node_id - end - - it "should treat a timestamp version UUID as a random node UUID " + - "if there is no MAC address" do - old_mac_address = UUID.mac_address - UUID.mac_address = nil - UUID.timestamp_create.should be_random_node_id - UUID.mac_address = old_mac_address - end -end diff --git a/vendor/gems/uuidtools-1.0.4/tasks/benchmark.rake b/vendor/gems/uuidtools-1.0.4/tasks/benchmark.rake deleted file mode 100644 index b88bf12..0000000 --- a/vendor/gems/uuidtools-1.0.4/tasks/benchmark.rake +++ /dev/null @@ -1,38 +0,0 @@ -task :benchmark do - require 'lib/uuidtools' - require 'benchmark' - - # Version 1 - result = Benchmark.measure do - 10000.times do - UUID.timestamp_create.to_s - end - end - puts "#{(10000.0 / result.real)} version 1 per second." - - # Version 3 - result = Benchmark.measure do - 10000.times do - UUID.md5_create(UUID_URL_NAMESPACE, - "http://www.ietf.org/rfc/rfc4122.txt").to_s - end - end - puts "#{(10000.0 / result.real)} version 3 per second." - - # Version 4 - result = Benchmark.measure do - 10000.times do - UUID.random_create.to_s - end - end - puts "#{(10000.0 / result.real)} version 4 per second." - - # Version 5 - result = Benchmark.measure do - 10000.times do - UUID.sha1_create(UUID_URL_NAMESPACE, - "http://www.ietf.org/rfc/rfc4122.txt").to_s - end - end - puts "#{(10000.0 / result.real)} version 5 per second." 
-end diff --git a/vendor/gems/uuidtools-1.0.4/tasks/clobber.rake b/vendor/gems/uuidtools-1.0.4/tasks/clobber.rake deleted file mode 100644 index 093ce81..0000000 --- a/vendor/gems/uuidtools-1.0.4/tasks/clobber.rake +++ /dev/null @@ -1,2 +0,0 @@ -desc "Remove all build products" -task "clobber" diff --git a/vendor/gems/uuidtools-1.0.4/tasks/gem.rake b/vendor/gems/uuidtools-1.0.4/tasks/gem.rake deleted file mode 100644 index ec02a11..0000000 --- a/vendor/gems/uuidtools-1.0.4/tasks/gem.rake +++ /dev/null @@ -1,62 +0,0 @@ -require "rake/gempackagetask" - -namespace :gem do - GEM_SPEC = Gem::Specification.new do |s| - s.name = PKG_NAME - s.version = PKG_VERSION - s.summary = PKG_SUMMARY - s.description = PKG_DESCRIPTION - - s.files = PKG_FILES.to_a - - s.has_rdoc = true - s.extra_rdoc_files = %w( README ) - s.rdoc_options.concat ["--main", "README"] - - s.add_dependency("rake", ">= 0.7.3") - s.add_dependency("rspec", ">= 1.0.8") - - s.require_path = "lib" - - s.author = "Bob Aman" - s.email = "bob@sporkmonger.com" - s.homepage = "http://#{PKG_NAME}.rubyforge.org/" - s.rubyforge_project = RUBY_FORGE_PROJECT - end - - Rake::GemPackageTask.new(GEM_SPEC) do |p| - p.gem_spec = GEM_SPEC - p.need_tar = true - p.need_zip = true - end - - desc "Show information about the gem" - task :debug do - puts GEM_SPEC.to_ruby - end - - desc "Install the gem" - task :install => ["clobber", "gem:package"] do - sh "#{SUDO} gem install --local pkg/#{GEM_SPEC.full_name}" - end - - desc "Uninstall the gem" - task :uninstall do - installed_list = Gem.source_index.find_name(PKG_NAME) - if installed_list && - (installed_list.collect { |s| s.version.to_s}.include?(PKG_VERSION)) - sh( - "#{SUDO} gem uninstall --version '#{PKG_VERSION}' " + - "--ignore-dependencies --executables #{PKG_NAME}" - ) - end - end - - desc "Reinstall the gem" - task :reinstall => [:uninstall, :install] -end - -desc "Alias to gem:package" -task "gem" => "gem:package" - -task "clobber" => ["gem:clobber_package"] diff --git a/vendor/gems/uuidtools-1.0.4/tasks/git.rake b/vendor/gems/uuidtools-1.0.4/tasks/git.rake deleted file mode 100644 index 8c318cb..0000000 --- a/vendor/gems/uuidtools-1.0.4/tasks/git.rake +++ /dev/null @@ -1,40 +0,0 @@ -namespace :git do - namespace :tag do - desc "List tags from the Git repository" - task :list do - tags = `git tag -l` - tags.gsub!("\r", "") - tags = tags.split("\n").sort {|a, b| b <=> a } - puts tags.join("\n") - end - - desc "Create a new tag in the Git repository" - task :create do - changelog = File.open("CHANGELOG", "r") { |file| file.read } - puts "-" * 80 - puts changelog - puts "-" * 80 - puts - - v = ENV["VERSION"] or abort "Must supply VERSION=x.y.z" - abort "Versions don't match #{v} vs #{PKG_VERSION}" if v != PKG_VERSION - - tag = "#{PKG_NAME}-#{PKG_VERSION}" - msg = "Release #{PKG_NAME}-#{PKG_VERSION}" - - existing_tags = `git tag -l #{PKG_NAME}-*`.split("\n") - if existing_tags.include?(tag) - warn("Tag already exists, deleting...") - unless system "git tag -d #{tag}" - abort "Tag deletion failed." - end - end - puts "Creating git tag '#{tag}'..." - unless system "git tag -a -m \"#{msg}\" #{tag}" - abort "Tag creation failed." 
- end - end - end -end - -task "gem:release" => "git:tag:create" diff --git a/vendor/gems/uuidtools-1.0.4/tasks/metrics.rake b/vendor/gems/uuidtools-1.0.4/tasks/metrics.rake deleted file mode 100644 index 41fc5c2..0000000 --- a/vendor/gems/uuidtools-1.0.4/tasks/metrics.rake +++ /dev/null @@ -1,22 +0,0 @@ -namespace :metrics do - task :lines do - lines, codelines, total_lines, total_codelines = 0, 0, 0, 0 - for file_name in FileList["lib/**/*.rb"] - f = File.open(file_name) - while line = f.gets - lines += 1 - next if line =~ /^\s*$/ - next if line =~ /^\s*#/ - codelines += 1 - end - puts "L: #{sprintf("%4d", lines)}, " + - "LOC #{sprintf("%4d", codelines)} | #{file_name}" - total_lines += lines - total_codelines += codelines - - lines, codelines = 0, 0 - end - - puts "Total: Lines #{total_lines}, LOC #{total_codelines}" - end -end diff --git a/vendor/gems/uuidtools-1.0.4/tasks/rdoc.rake b/vendor/gems/uuidtools-1.0.4/tasks/rdoc.rake deleted file mode 100644 index 1f636c9..0000000 --- a/vendor/gems/uuidtools-1.0.4/tasks/rdoc.rake +++ /dev/null @@ -1,29 +0,0 @@ -require "rake/rdoctask" - -namespace :doc do - desc "Generate RDoc documentation" - Rake::RDocTask.new do |rdoc| - rdoc.rdoc_dir = "doc" - rdoc.title = "#{PKG_NAME}-#{PKG_VERSION} Documentation" - rdoc.options << "--line-numbers" << "--inline-source" << - "--accessor" << "cattr_accessor=object" << "--charset" << "utf-8" - rdoc.template = "#{ENV["template"]}.rb" if ENV["template"] - rdoc.rdoc_files.include("README", "CHANGELOG", "LICENSE") - rdoc.rdoc_files.include("lib/**/*.rb") - end - - desc "Generate ri locally for testing" - task :ri do - sh "rdoc --ri -o ri ." - end - - desc "Remove ri products" - task :clobber_ri do - rm_r "ri" rescue nil - end -end - -desc "Alias to doc:rdoc" -task "doc" => "doc:rdoc" - -task "clobber" => ["doc:clobber_rdoc", "doc:clobber_ri"] diff --git a/vendor/gems/uuidtools-1.0.4/tasks/rubyforge.rake b/vendor/gems/uuidtools-1.0.4/tasks/rubyforge.rake deleted file mode 100644 index a3abb7d..0000000 --- a/vendor/gems/uuidtools-1.0.4/tasks/rubyforge.rake +++ /dev/null @@ -1,78 +0,0 @@ -require 'rubyforge' -require 'rake/contrib/sshpublisher' - -namespace :gem do - desc 'Package and upload to RubyForge' - task :release => ["gem:package"] do |t| - v = ENV['VERSION'] or abort 'Must supply VERSION=x.y.z' - abort "Versions don't match #{v} vs #{PROJ.version}" if v != PKG_VERSION - pkg = "pkg/#{GEM_SPEC.full_name}" - - rf = RubyForge.new - rf.configure - puts 'Logging in...' - rf.login - - c = rf.userconfig - changelog = File.open("CHANGELOG") { |file| file.read } - c['release_changes'] = changelog - c['preformatted'] = true - - files = ["#{pkg}.tgz", "#{pkg}.zip", "#{pkg}.gem"] - - puts "Releasing #{PKG_NAME} v. 
#{PKG_VERSION}" - rf.add_release RUBY_FORGE_PROJECT, PKG_NAME, PKG_VERSION, *files - end -end - -namespace :doc do - desc "Publish RDoc to RubyForge" - task :release => ["doc:rdoc"] do - config = YAML.load( - File.read(File.expand_path('~/.rubyforge/user-config.yml')) - ) - host = "#{config['username']}@rubyforge.org" - remote_dir = RUBY_FORGE_PATH + "/api" - local_dir = "doc" - Rake::SshDirPublisher.new(host, remote_dir, local_dir).upload - end -end - -namespace :spec do - desc "Publish specdoc to RubyForge" - task :release => ["spec:specdoc"] do - config = YAML.load( - File.read(File.expand_path('~/.rubyforge/user-config.yml')) - ) - host = "#{config['username']}@rubyforge.org" - remote_dir = RUBY_FORGE_PATH + "/specdoc" - local_dir = "specdoc" - Rake::SshDirPublisher.new(host, remote_dir, local_dir).upload - end - - namespace :rcov do - desc "Publish coverage report to RubyForge" - task :release => ["spec:rcov"] do - config = YAML.load( - File.read(File.expand_path('~/.rubyforge/user-config.yml')) - ) - host = "#{config['username']}@rubyforge.org" - remote_dir = RUBY_FORGE_PATH + "/coverage" - local_dir = "coverage" - Rake::SshDirPublisher.new(host, remote_dir, local_dir).upload - end - end -end - -namespace :website do - desc "Publish website to RubyForge" - task :release => ["doc:release", "spec:release", "spec:rcov:release"] do - config = YAML.load( - File.read(File.expand_path('~/.rubyforge/user-config.yml')) - ) - host = "#{config['username']}@rubyforge.org" - remote_dir = RUBY_FORGE_PATH - local_dir = "website" - Rake::SshDirPublisher.new(host, remote_dir, local_dir).upload - end -end diff --git a/vendor/gems/uuidtools-1.0.4/tasks/spec.rake b/vendor/gems/uuidtools-1.0.4/tasks/spec.rake deleted file mode 100644 index d8126f5..0000000 --- a/vendor/gems/uuidtools-1.0.4/tasks/spec.rake +++ /dev/null @@ -1,89 +0,0 @@ -require 'spec/rake/verify_rcov' - -namespace :spec do - Spec::Rake::SpecTask.new(:rcov) do |t| - t.spec_files = FileList['spec/**/*_spec.rb'] - t.spec_opts = ['--color', '--format', 'specdoc'] - t.rcov = true - t.rcov_opts = [ - '--exclude', 'spec', - '--exclude', '1\\.8\\/gems', - '--exclude', '1\\.9\\/gems' - ] - end - - RCov::VerifyTask.new(:verify) do |t| - t.threshold = 100.0 - t.index_html = 'coverage/index.html' - end - - task :verify => :rcov - - desc "Generate HTML Specdocs for all specs" - Spec::Rake::SpecTask.new(:specdoc) do |t| - specdoc_path = File.expand_path( - File.join(File.dirname(__FILE__), '../specdoc/')) - Dir.mkdir(specdoc_path) if !File.exist?(specdoc_path) - - output_file = File.join(specdoc_path, 'index.html') - t.spec_files = FileList['spec/**/*_spec.rb'] - t.spec_opts = ["--format", "\"html:#{output_file}\"", "--diff"] - t.fail_on_error = false - end - - namespace :rcov do - desc "Browse the code coverage report." 
- task :browse => "spec:rcov" do - Rake.browse("coverage/index.html") - end - end -end - -desc "Alias to spec:verify" -task "spec" => "spec:verify" - -task "clobber" => ["spec:clobber_rcov"] - -module Rake - def self.browse(filepath) - if RUBY_PLATFORM =~ /mswin/ - system(filepath) - else - try_browsers = lambda do - result = true - if !(`which firefox 2>&1` =~ /no firefox/) - system("firefox #{filepath}") - elsif !(`which mozilla 2>&1` =~ /no mozilla/) - system("mozilla #{filepath}") - elsif !(`which netscape 2>&1` =~ /no netscape/) - system("netscape #{filepath}") - elsif !(`which links 2>&1` =~ /no links/) - system("links #{filepath}") - elsif !(`which lynx 2>&1` =~ /no lynx/) - system("lynx #{filepath}") - else - result = false - end - result - end - opened = false - if RUBY_PLATFORM =~ /darwin/ - opened = true - system("open #{filepath}") - elsif !(`which gnome-open 2>&1` =~ /no gnome-open/) - success = - !(`gnome-open #{filepath} 2>&1` =~ /There is no default action/) - if !success - opened = try_browsers.call() - else - opened = true - end - else - opened = try_browsers.call() - end - if !opened - puts "Don't know how to browse to location." - end - end - end -end diff --git a/vendor/gems/uuidtools-1.0.4/website/index.html b/vendor/gems/uuidtools-1.0.4/website/index.html deleted file mode 100644 index f329ad3..0000000 --- a/vendor/gems/uuidtools-1.0.4/website/index.html +++ /dev/null @@ -1,95 +0,0 @@ - - - - - UUIDTools - - - -

-  UUIDTools
-
-  A simple universally unique ID generation library.
-
-  You know what to do:
-
-    sudo gem install uuidtools
-
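Reviewer note: for reference while reading this removal, the vendored uuidtools 1.0.4 above exposed roughly the following API (pieced together from its README, lib/uuidtools.rb and specs in this diff; illustrative only, assuming the gem is on the load path):

    require 'uuidtools'

    random_uuid = UUID.random_create                       # version 4, random
    time_uuid   = UUID.timestamp_create                    # version 1, MAC address + timestamp
    name_uuid   = UUID.sha1_create(UUID_DNS_NAMESPACE, "www.widgets.com")  # version 5, deterministic

    random_uuid.to_s                                 # => hex string in 8-4-4-4-12 form
    time_uuid.version                                # => 1
    name_uuid.to_uri                                 # => "urn:uuid:..."
    UUID.parse(random_uuid.to_s).eql?(random_uuid)   # => true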
- - diff --git a/vendor/gems/will_paginate-2.2.2/.specification b/vendor/gems/will_paginate-2.2.2/.specification deleted file mode 100644 index 3dda756..0000000 --- a/vendor/gems/will_paginate-2.2.2/.specification +++ /dev/null @@ -1,103 +0,0 @@ ---- !ruby/object:Gem::Specification -name: will_paginate -version: !ruby/object:Gem::Version - version: 2.2.2 -platform: ruby -authors: -- "Mislav Marohni\xC4\x87" -autorequire: -bindir: bin -cert_chain: [] - -date: 2008-04-20 00:00:00 -04:00 -default_executable: -dependencies: [] - -description: -email: mislav.marohnic@gmail.com -executables: [] - -extensions: [] - -extra_rdoc_files: [] - -files: -- CHANGELOG -- LICENSE -- README.rdoc -- Rakefile -- examples -- examples/apple-circle.gif -- examples/index.haml -- examples/index.html -- examples/pagination.css -- examples/pagination.sass -- init.rb -- lib -- lib/will_paginate -- lib/will_paginate.rb -- lib/will_paginate/array.rb -- lib/will_paginate/collection.rb -- lib/will_paginate/core_ext.rb -- lib/will_paginate/finder.rb -- lib/will_paginate/named_scope.rb -- lib/will_paginate/named_scope_patch.rb -- lib/will_paginate/version.rb -- lib/will_paginate/view_helpers.rb -- test -- test/boot.rb -- test/collection_test.rb -- test/console -- test/database.yml -- test/finder_test.rb -- test/fixtures -- test/fixtures/admin.rb -- test/fixtures/developer.rb -- test/fixtures/developers_projects.yml -- test/fixtures/project.rb -- test/fixtures/projects.yml -- test/fixtures/replies.yml -- test/fixtures/reply.rb -- test/fixtures/schema.rb -- test/fixtures/topic.rb -- test/fixtures/topics.yml -- test/fixtures/user.rb -- test/fixtures/users.yml -- test/helper.rb -- test/lib -- test/lib/activerecord_test_case.rb -- test/lib/activerecord_test_connector.rb -- test/lib/load_fixtures.rb -- test/lib/view_test_process.rb -- test/view_test.rb -has_rdoc: true -homepage: http://github.com/mislav/will_paginate -licenses: [] - -post_install_message: -rdoc_options: [] - -require_paths: -- bin -- lib -required_ruby_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -required_rubygems_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -requirements: [] - -rubyforge_project: will-paginate -rubygems_version: 1.3.5 -signing_key: -specification_version: 2 -summary: Most awesome pagination solution for Rails -test_files: [] - diff --git a/vendor/gems/will_paginate-2.2.2/CHANGELOG b/vendor/gems/will_paginate-2.2.2/CHANGELOG deleted file mode 100644 index fc0b893..0000000 --- a/vendor/gems/will_paginate-2.2.2/CHANGELOG +++ /dev/null @@ -1,49 +0,0 @@ -== 2.2.2, released 2008-04-21 - -* Add support for page parameter in custom routes like "/foo/page/2" -* Change output of "page_entries_info" on single-page collection and erraneous - output with empty collection as reported by Tim Chater - -== 2.2.1, released 2008-04-08 - -* take less risky path when monkeypatching named_scope; fix that it no longer - requires ActiveRecord::VERSION -* use strings in "respond_to?" calls to work around a bug in acts_as_ferret - stable (ugh) -* add rake release task - - -== 2.2.0, released 2008-04-07 - -=== API changes -* Rename WillPaginate::Collection#page_count to "total_pages" for consistency. - If you implemented this interface, change your implementation accordingly. -* Remove old, deprecated style of calling Array#paginate as "paginate(page, - per_page)". 
If you want to specify :page, :per_page or :total_entries, use a - parameter hash. -* Rename LinkRenderer#url_options to "url_for" and drastically optimize it - -=== View changes -* Added "prev_page" and "next_page" CSS classes on previous/next page buttons -* Add examples of pagination links styling in "examples/index.html" -* Change gap in pagination links from "..." to - "". -* Add "paginated_section", a block helper that renders pagination both above and - below content in the block -* Add rel="prev|next|start" to page links - -=== Other - -* Add ability to opt-in for Rails 2.1 feature "named_scope" by calling - WillPaginate.enable_named_scope (tested in Rails 1.2.6 and 2.0.2) -* Support complex page parameters like "developers[page]" -* Move Array#paginate definition to will_paginate/array.rb. You can now easily - use pagination on arrays outside of Rails: - - gem 'will_paginate' - require 'will_paginate/array' - -* Add "paginated_each" method for iterating through every record by loading only - one page of records at the time -* Rails 2: Rescue from WillPaginate::InvalidPage error with 404 Not Found by - default diff --git a/vendor/gems/will_paginate-2.2.2/LICENSE b/vendor/gems/will_paginate-2.2.2/LICENSE deleted file mode 100644 index 96a48cb..0000000 --- a/vendor/gems/will_paginate-2.2.2/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright (c) 2007 PJ Hyett and Mislav Marohnić - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/gems/will_paginate-2.2.2/README.rdoc b/vendor/gems/will_paginate-2.2.2/README.rdoc deleted file mode 100644 index b04fafe..0000000 --- a/vendor/gems/will_paginate-2.2.2/README.rdoc +++ /dev/null @@ -1,135 +0,0 @@ -= WillPaginate - -Pagination is just limiting the number of records displayed. Why should you let -it get in your way while developing, then? This plugin makes magic happen. Did -you ever want to be able to do just this on a model: - - Post.paginate :page => 1, :order => 'created_at DESC' - -... and then render the page links with a single view helper? Well, now you -can. - -Some resources to get you started: - -* Your mind reels with questions? Join our Google - group[http://groups.google.com/group/will_paginate]. -* The will_paginate project page: http://github.com/mislav/will_paginate -* How to report bugs: http://github.com/mislav/will_paginate/wikis/report-bugs -* Ryan Bates made an awesome screencast[http://railscasts.com/episodes/51], - check it out. 
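Reviewer note: a minimal sketch of the pattern this README describes (illustrative only; Post and PostsController are hypothetical names, not part of this application):

    class Post < ActiveRecord::Base
      def self.per_page
        50   # will_paginate asks the model; the built-in default is 30
      end
    end

    class PostsController < ApplicationController
      def index
        # paginate behaves like find, but fetches only one page of records
        @posts = Post.paginate :page => params[:page], :order => 'created_at DESC'
      end
    end

    # in the view, <%= will_paginate @posts %> then renders the page links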
- -== Installation - -Previously, the plugin was available on the following SVN location: - - svn://errtheblog.com/svn/plugins/will_paginate - -In February 2008, it moved to GitHub to be tracked with git version control. -The SVN repo continued to have updates for some time, but now it doesn't. - -You should switch to using the gem: - - gem install will_paginate - -After that, you can remove the plugin from your application and add -a simple require to the end of config/environment.rb: - - require 'will_paginate' - -That's it, just remember to install the gem on all machines that -you are deploying to. - -There are extensive -installation[http://github.com/mislav/will_paginate/wikis/installation] -instructions on the wiki[http://github.com/mislav/will_paginate/wikis]. - - -== Example usage - -Use a paginate finder in the controller: - - @posts = Post.paginate_by_board_id @board.id, :page => params[:page], :order => 'updated_at DESC' - -Yeah, +paginate+ works just like +find+ -- it just doesn't fetch all the -records. Don't forget to tell it which page you want, or it will complain! -Read more on WillPaginate::Finder::ClassMethods. - -Render the posts in your view like you would normally do. When you need to render -pagination, just stick this in: - - <%= will_paginate @posts %> - -You're done. (Copy and paste the example fancy CSS styles from the bottom.) You -can find the option list at WillPaginate::ViewHelpers. - -How does it know how much items to fetch per page? It asks your model by calling -its per_page class method. You can define it like this: - - class Post < ActiveRecord::Base - cattr_reader :per_page - @@per_page = 50 - end - -... or like this: - - class Post < ActiveRecord::Base - def self.per_page - 50 - end - end - -... or don't worry about it at all. WillPaginate defines it to be 30 by default. -But you can always specify the count explicitly when calling +paginate+: - - @posts = Post.paginate :page => params[:page], :per_page => 50 - -The +paginate+ finder wraps the original finder and returns your resultset that now has -some new properties. You can use the collection as you would with any ActiveRecord -resultset. WillPaginate view helpers also need that object to be able to render pagination: - -
-    <% for post in @posts -%>
-      Render `post` in some nice way.
-    <% end -%>
-
-
-  Now let's render us some pagination!
-
- <%= will_paginate @posts %> - -More detailed documentation: - -* WillPaginate::Finder::ClassMethods for pagination on your models; -* WillPaginate::ViewHelpers for your views. - - -== Authors and credits - -Authors:: Mislav Marohnić, PJ Hyett -Original announcement:: http://errtheblog.com/post/929 -Original PHP source:: http://www.strangerstudios.com/sandbox/pagination/diggstyle.php - -All these people helped making will_paginate what it is now with their code -contributions or just simply awesome ideas: - -Chris Wanstrath, Dr. Nic Williams, K. Adam Christensen, Mike Garey, Bence -Golda, Matt Aimonetti, Charles Brian Quinn, Desi McAdam, James Coglan, Matijs -van Zuijlen, Maria, Brendan Ribera, Todd Willey, Bryan Helmkamp, Jan Berkel, -Lourens Naudé, Rick Olson, Russell Norris. - - -== Usable pagination in the UI - -There are some CSS styles to get you started in the "examples/" directory. They -are showcased in the "examples/index.html" file. - -More reading about pagination as design pattern: - -* Pagination 101: - http://kurafire.net/log/archive/2007/06/22/pagination-101 -* Pagination gallery: - http://www.smashingmagazine.com/2007/11/16/pagination-gallery-examples-and-good-practices/ -* Pagination on Yahoo Design Pattern Library: - http://developer.yahoo.com/ypatterns/parent.php?pattern=pagination - -Want to discuss, request features, ask questions? Join the Google group: -http://groups.google.com/group/will_paginate diff --git a/vendor/gems/will_paginate-2.2.2/Rakefile b/vendor/gems/will_paginate-2.2.2/Rakefile deleted file mode 100644 index fa39376..0000000 --- a/vendor/gems/will_paginate-2.2.2/Rakefile +++ /dev/null @@ -1,116 +0,0 @@ -require File.dirname(__FILE__) + '/lib/rake_hacks' - -desc 'Default: run unit tests.' -task :default => :test - -desc 'Test the will_paginate plugin.' -Rake::TestTask.new(:test) do |t| - t.pattern = 'test/**/*_test.rb' - t.verbose = true - t.libs << 'test' -end - -for configuration in %w( sqlite3 mysql postgres ) - EnvTestTask.new("test_#{configuration}") do |t| - t.pattern = 'test/finder_test.rb' - t.verbose = true - t.env = { 'DB' => configuration } - t.libs << 'test' - end -end - -task :test_databases => %w(test_mysql test_sqlite3 test_postgres) - -desc %{Test everything on SQLite3, MySQL and PostgreSQL} -task :test_full => %w(test test_mysql test_postgres) - -desc %{Test everything with Rails 1.2.x and 2.0.x gems} -task :test_all do - all = Rake::Task['test_full'] - ENV['RAILS_VERSION'] = '~>1.2.6' - all.invoke - # reset the invoked flag - %w( test_full test test_mysql test_postgres ).each do |name| - Rake::Task[name].instance_variable_set '@already_invoked', false - end - # do it again - ENV['RAILS_VERSION'] = '~>2.0.2' - all.invoke -end - -desc 'Generate RDoc documentation for the will_paginate plugin.' -Rake::RDocTask.new(:doc) do |rdoc| - files = ['README.rdoc', 'LICENSE', 'CHANGELOG'] - files << FileList.new('lib/**/*.rb'). - exclude('lib/will_paginate/named_scope*'). - exclude('lib/will_paginate/array.rb'). - exclude('lib/will_paginate/version.rb') - - rdoc.rdoc_files.add(files) - rdoc.main = "README.rdoc" # page to start on - rdoc.title = "will_paginate documentation" - - my_template = '/home/mislav/projects/rdoc_template/mislav.rb' - rdoc.template = my_template if File.exists? 
my_template - - rdoc.rdoc_dir = 'doc' # rdoc output folder - rdoc.options << '--inline-source' - rdoc.options << '--charset=UTF-8' - rdoc.options << '--webcvs=http://github.com/mislav/will_paginate/tree/master/' -end - -task :manifest do - list = Dir['**/*'] - - File.read('.gitignore').each_line do |glob| - glob = glob.chomp.sub(/^\//, '') - list -= Dir[glob] - list -= Dir["#{glob}/**/*"] if File.directory?(glob) and !File.symlink?(glob) - puts "excluding #{glob}" - end - - File.open('.manifest', 'w') do |file| - file.write list.sort.join("\n") - end -end - -desc 'Package and upload the release to rubyforge.' -task :release do - require 'yaml' - require 'rubyforge' - - meta = YAML::load open('.gemified') - version = meta[:version] - - v = ENV['VERSION'] or abort "Must supply VERSION=x.y.z" - abort "Version doesn't match #{version}" if v != version - - gem = "#{meta[:name]}-#{version}.gem" - project = meta[:rubyforge_project] - - rf = RubyForge.new - puts "Logging in to RubyForge" - rf.login - - c = rf.userconfig - c['release_notes'] = meta[:summary] - c['release_changes'] = File.read('CHANGELOG').split(/^== .+\n/)[1].strip - c['preformatted'] = true - - puts "Releasing #{meta[:name]} #{version}" - rf.add_release project, project, version, gem -end - -task :examples do - %x(haml examples/index.haml examples/index.html) - %x(sass examples/pagination.sass examples/pagination.css) -end - -task :rcov do - excludes = %w( lib/will_paginate/named_scope* - lib/will_paginate/core_ext.rb - lib/will_paginate.rb - rails* ) - - system %[rcov -Itest:lib test/*.rb -x #{excludes.join(',')}] -end diff --git a/vendor/gems/will_paginate-2.2.2/examples/apple-circle.gif b/vendor/gems/will_paginate-2.2.2/examples/apple-circle.gif deleted file mode 100644 index df8cbf7..0000000 Binary files a/vendor/gems/will_paginate-2.2.2/examples/apple-circle.gif and /dev/null differ diff --git a/vendor/gems/will_paginate-2.2.2/examples/index.haml b/vendor/gems/will_paginate-2.2.2/examples/index.haml deleted file mode 100644 index fb41ac8..0000000 --- a/vendor/gems/will_paginate-2.2.2/examples/index.haml +++ /dev/null @@ -1,69 +0,0 @@ -!!! -%html -%head - %title Samples of pagination styling for will_paginate - %link{ :rel => 'stylesheet', :type => 'text/css', :href => 'pagination.css' } - %style{ :type => 'text/css' } - :sass - html - :margin 0 - :padding 0 - :background #999 - :font normal 76% "Lucida Grande", Verdana, Helvetica, sans-serif - body - :margin 2em - :padding 2em - :border 2px solid gray - :background white - :color #222 - h1 - :font-size 2em - :font-weight normal - :margin 0 0 1em 0 - h2 - :font-size 1.4em - :margin 1em 0 .5em 0 - pre - :font-size 13px - :font-family Monaco, "DejaVu Sans Mono", "Bitstream Vera Mono", "Courier New", monospace - -- pagination = '« Previous 1 3 4 5 6 7 8 9 29 30 ' -- pagination_no_page_links = '« Previous ' - -%body - %h1 Samples of pagination styling for will_paginate - %p - Find these styles in "examples/pagination.css" of will_paginate library. - There is a Sass version of it for all you sassy people. - %p - Read about good rules for pagination: - %a{ :href => 'http://kurafire.net/log/archive/2007/06/22/pagination-101' } Pagination 101 - %p - %em Warning: - page links below don't lead anywhere (so don't click on them). - - %h2 Unstyled pagination (ewww!) 
- %div= pagination - - %h2 Digg.com - .digg_pagination= pagination - - %h2 Digg-style, no page links - .digg_pagination= pagination_no_page_links - %p Code that renders this: - %pre= '%s' % %[<%= will_paginate @posts, :page_links => false %>].gsub('<', '<').gsub('>', '>') - - %h2 Digg-style, extra content - .digg_pagination - .page_info Displaying entries 1 - 6 of 180 in total - = pagination - %p Code that renders this: - %pre= '%s' % %[
\n
\n <%= page_entries_info @posts %>\n
\n <%= will_paginate @posts, :container => false %>\n
].gsub('<', '<').gsub('>', '>') - - %h2 Apple.com store - .apple_pagination= pagination - - %h2 Flickr.com - .flickr_pagination - = pagination - .page_info (118 photos) diff --git a/vendor/gems/will_paginate-2.2.2/examples/index.html b/vendor/gems/will_paginate-2.2.2/examples/index.html deleted file mode 100644 index 858f7c6..0000000 --- a/vendor/gems/will_paginate-2.2.2/examples/index.html +++ /dev/null @@ -1,92 +0,0 @@ - - - - - Samples of pagination styling for will_paginate - - - - -

-  Samples of pagination styling for will_paginate
-
-  Find these styles in "examples/pagination.css" of will_paginate library.
-  There is a Sass version of it for all you sassy people.
-
-  Read about good rules for pagination: Pagination 101
-
-  Warning: page links below don't lead anywhere (so don't click on them).
-
-  Unstyled pagination (ewww!)
-  « Previous 1 3 4 5 6 7 8 9 29 30
-
-  Digg.com
-  « Previous 1 3 4 5 6 7 8 9 29 30
-
-  Digg-style, no page links
-  « Previous
-  Code that renders this:
-    <%= will_paginate @posts, :page_links => false %>
-
-  Digg-style, extra content
-  Displaying entries 1 - 6 of 180 in total
-  « Previous 1 3 4 5 6 7 8 9 29 30
-  Code that renders this:
-    <div class="digg_pagination">
-      <div clas="page_info">
-        <%= page_entries_info @posts %>
-      </div>
-      <%= will_paginate @posts, :container => false %>
-    </div>
-
-  Apple.com store
-  « Previous 1 3 4 5 6 7 8 9 29 30
-
-  Flickr.com
-  « Previous 1 3 4 5 6 7 8 9 29 30
-  (118 photos)
-
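Reviewer note: the pagination.css / pagination.sass files removed next define the digg_pagination, apple_pagination and flickr_pagination container styles demonstrated above. In a view they would typically be selected by passing the container class to the helper, e.g. (illustrative):

    <%= will_paginate @posts, :class => 'flickr_pagination' %>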
- diff --git a/vendor/gems/will_paginate-2.2.2/examples/pagination.css b/vendor/gems/will_paginate-2.2.2/examples/pagination.css deleted file mode 100644 index b55e977..0000000 --- a/vendor/gems/will_paginate-2.2.2/examples/pagination.css +++ /dev/null @@ -1,90 +0,0 @@ -.digg_pagination { - background: white; - /* self-clearing method: */ } - .digg_pagination a, .digg_pagination span { - padding: .2em .5em; - display: block; - float: left; - margin-right: 1px; } - .digg_pagination span.disabled { - color: #999; - border: 1px solid #DDD; } - .digg_pagination span.current { - font-weight: bold; - background: #2E6AB1; - color: white; - border: 1px solid #2E6AB1; } - .digg_pagination a { - text-decoration: none; - color: #105CB6; - border: 1px solid #9AAFE5; } - .digg_pagination a:hover, .digg_pagination a:focus { - color: #003; - border-color: #003; } - .digg_pagination .page_info { - background: #2E6AB1; - color: white; - padding: .4em .6em; - width: 22em; - margin-bottom: .3em; - text-align: center; } - .digg_pagination .page_info b { - color: #003; - background: #6aa6ed; - padding: .1em .25em; } - .digg_pagination:after { - content: "."; - display: block; - height: 0; - clear: both; - visibility: hidden; } - * html .digg_pagination { - height: 1%; } - *:first-child+html .digg_pagination { - overflow: hidden; } - -.apple_pagination { - background: #F1F1F1; - border: 1px solid #E5E5E5; - text-align: center; - padding: 1em; } - .apple_pagination a, .apple_pagination span { - padding: .2em .3em; } - .apple_pagination span.disabled { - color: #AAA; } - .apple_pagination span.current { - font-weight: bold; - background: transparent url(apple-circle.gif) no-repeat 50% 50%; } - .apple_pagination a { - text-decoration: none; - color: black; } - .apple_pagination a:hover, .apple_pagination a:focus { - text-decoration: underline; } - -.flickr_pagination { - text-align: center; - padding: .3em; } - .flickr_pagination a, .flickr_pagination span { - padding: .2em .5em; } - .flickr_pagination span.disabled { - color: #AAA; } - .flickr_pagination span.current { - font-weight: bold; - color: #FF0084; } - .flickr_pagination a { - border: 1px solid #DDDDDD; - color: #0063DC; - text-decoration: none; } - .flickr_pagination a:hover, .flickr_pagination a:focus { - border-color: #003366; - background: #0063DC; - color: white; } - .flickr_pagination .page_info { - color: #aaa; - padding-top: .8em; } - .flickr_pagination .prev_page, .flickr_pagination .next_page { - border-width: 2px; } - .flickr_pagination .prev_page { - margin-right: 1em; } - .flickr_pagination .next_page { - margin-left: 1em; } diff --git a/vendor/gems/will_paginate-2.2.2/examples/pagination.sass b/vendor/gems/will_paginate-2.2.2/examples/pagination.sass deleted file mode 100644 index 737a97b..0000000 --- a/vendor/gems/will_paginate-2.2.2/examples/pagination.sass +++ /dev/null @@ -1,91 +0,0 @@ -.digg_pagination - :background white - a, span - :padding .2em .5em - :display block - :float left - :margin-right 1px - span.disabled - :color #999 - :border 1px solid #DDD - span.current - :font-weight bold - :background #2E6AB1 - :color white - :border 1px solid #2E6AB1 - a - :text-decoration none - :color #105CB6 - :border 1px solid #9AAFE5 - &:hover, &:focus - :color #003 - :border-color #003 - .page_info - :background #2E6AB1 - :color white - :padding .4em .6em - :width 22em - :margin-bottom .3em - :text-align center - b - :color #003 - :background = #2E6AB1 + 60 - :padding .1em .25em - - /* self-clearing method: - &:after - :content "." 
- :display block - :height 0 - :clear both - :visibility hidden - * html & - :height 1% - *:first-child+html & - :overflow hidden - -.apple_pagination - :background #F1F1F1 - :border 1px solid #E5E5E5 - :text-align center - :padding 1em - a, span - :padding .2em .3em - span.disabled - :color #AAA - span.current - :font-weight bold - :background transparent url(apple-circle.gif) no-repeat 50% 50% - a - :text-decoration none - :color black - &:hover, &:focus - :text-decoration underline - -.flickr_pagination - :text-align center - :padding .3em - a, span - :padding .2em .5em - span.disabled - :color #AAA - span.current - :font-weight bold - :color #FF0084 - a - :border 1px solid #DDDDDD - :color #0063DC - :text-decoration none - &:hover, &:focus - :border-color #003366 - :background #0063DC - :color white - .page_info - :color #aaa - :padding-top .8em - .prev_page, .next_page - :border-width 2px - .prev_page - :margin-right 1em - .next_page - :margin-left 1em diff --git a/vendor/gems/will_paginate-2.2.2/init.rb b/vendor/gems/will_paginate-2.2.2/init.rb deleted file mode 100644 index 838d30e..0000000 --- a/vendor/gems/will_paginate-2.2.2/init.rb +++ /dev/null @@ -1 +0,0 @@ -require 'will_paginate' diff --git a/vendor/gems/will_paginate-2.2.2/lib/will_paginate.rb b/vendor/gems/will_paginate-2.2.2/lib/will_paginate.rb deleted file mode 100644 index 366e39c..0000000 --- a/vendor/gems/will_paginate-2.2.2/lib/will_paginate.rb +++ /dev/null @@ -1,86 +0,0 @@ -require 'active_support' - -# = You *will* paginate! -# -# First read about WillPaginate::Finder::ClassMethods, then see -# WillPaginate::ViewHelpers. The magical array you're handling in-between is -# WillPaginate::Collection. -# -# Happy paginating! -module WillPaginate - class << self - # shortcut for enable_actionpack; enable_activerecord - def enable - enable_actionpack - enable_activerecord - end - - # mixes in WillPaginate::ViewHelpers in ActionView::Base - def enable_actionpack - return if ActionView::Base.instance_methods.include? 'will_paginate' - require 'will_paginate/view_helpers' - ActionView::Base.class_eval { include ViewHelpers } - - if defined?(ActionController::Base) and ActionController::Base.respond_to? :rescue_responses - ActionController::Base.rescue_responses['WillPaginate::InvalidPage'] = :not_found - end - end - - # mixes in WillPaginate::Finder in ActiveRecord::Base and classes that deal - # with associations - def enable_activerecord - return if ActiveRecord::Base.respond_to? :paginate - require 'will_paginate/finder' - ActiveRecord::Base.class_eval { include Finder } - - # support pagination on associations - a = ActiveRecord::Associations - returning([ a::AssociationCollection ]) { |classes| - # detect http://dev.rubyonrails.org/changeset/9230 - unless a::HasManyThroughAssociation.superclass == a::HasManyAssociation - classes << a::HasManyThroughAssociation - end - }.each do |klass| - klass.class_eval do - include Finder::ClassMethods - alias_method_chain :method_missing, :paginate - end - end - end - - # Enable named_scope, a feature of Rails 2.1, even if you have older Rails - # (tested on Rails 2.0.2 and 1.2.6). - # - # You can pass +false+ for +patch+ parameter to skip monkeypatching - # *associations*. Use this if you feel that named_scope broke - # has_many, has_many :through or has_and_belongs_to_many associations in - # your app. By passing +false+, you can still use named_scope in - # your models, but not through associations. - def enable_named_scope(patch = true) - return if defined? 
ActiveRecord::NamedScope - require 'will_paginate/named_scope' - require 'will_paginate/named_scope_patch' if patch - - ActiveRecord::Base.class_eval do - include WillPaginate::NamedScope - end - end - end - - module Deprecation #:nodoc: - extend ActiveSupport::Deprecation - - def self.warn(message, callstack = caller) - message = 'WillPaginate: ' + message.strip.gsub(/\s+/, ' ') - behavior.call(message, callstack) if behavior && !silenced? - end - - def self.silenced? - ActiveSupport::Deprecation.silenced? - end - end -end - -if defined?(Rails) and defined?(ActiveRecord) and defined?(ActionController) - WillPaginate.enable -end diff --git a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/array.rb b/vendor/gems/will_paginate-2.2.2/lib/will_paginate/array.rb deleted file mode 100644 index d061d2b..0000000 --- a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/array.rb +++ /dev/null @@ -1,16 +0,0 @@ -require 'will_paginate/collection' - -# http://www.desimcadam.com/archives/8 -Array.class_eval do - def paginate(options = {}) - raise ArgumentError, "parameter hash expected (got #{options.inspect})" unless Hash === options - - WillPaginate::Collection.create( - options[:page] || 1, - options[:per_page] || 30, - options[:total_entries] || self.length - ) { |pager| - pager.replace self[pager.offset, pager.per_page].to_a - } - end -end diff --git a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/collection.rb b/vendor/gems/will_paginate-2.2.2/lib/will_paginate/collection.rb deleted file mode 100644 index 89d992f..0000000 --- a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/collection.rb +++ /dev/null @@ -1,145 +0,0 @@ -module WillPaginate - # = Invalid page number error - # This is an ArgumentError raised in case a page was requested that is either - # zero or negative number. You should decide how do deal with such errors in - # the controller. - # - # If you're using Rails 2, then this error will automatically get handled like - # 404 Not Found. The hook is in "will_paginate.rb": - # - # ActionController::Base.rescue_responses['WillPaginate::InvalidPage'] = :not_found - # - # If you don't like this, use your preffered method of rescuing exceptions in - # public from your controllers to handle this differently. The +rescue_from+ - # method is a nice addition to Rails 2. - # - # This error is *not* raised when a page further than the last page is - # requested. Use WillPaginate::Collection#out_of_bounds? method to - # check for those cases and manually deal with them as you see fit. - class InvalidPage < ArgumentError - def initialize(page, page_num) - super "#{page.inspect} given as value, which translates to '#{page_num}' as page number" - end - end - - # = The key to pagination - # Arrays returned from paginating finds are, in fact, instances of this little - # class. You may think of WillPaginate::Collection as an ordinary array with - # some extra properties. Those properties are used by view helpers to generate - # correct page links. - # - # WillPaginate::Collection also assists in rolling out your own pagination - # solutions: see +create+. 
- # - # If you are writing a library that provides a collection which you would like - # to conform to this API, you don't have to copy these methods over; simply - # make your plugin/gem dependant on the "will_paginate" gem: - # - # gem 'will_paginate' - # require 'will_paginate/collection' - # - # # now use WillPaginate::Collection directly or subclass it - class Collection < Array - attr_reader :current_page, :per_page, :total_entries, :total_pages - - # Arguments to the constructor are the current page number, per-page limit - # and the total number of entries. The last argument is optional because it - # is best to do lazy counting; in other words, count *conditionally* after - # populating the collection using the +replace+ method. - def initialize(page, per_page, total = nil) - @current_page = page.to_i - raise InvalidPage.new(page, @current_page) if @current_page < 1 - @per_page = per_page.to_i - raise ArgumentError, "`per_page` setting cannot be less than 1 (#{@per_page} given)" if @per_page < 1 - - self.total_entries = total if total - end - - # Just like +new+, but yields the object after instantiation and returns it - # afterwards. This is very useful for manual pagination: - # - # @entries = WillPaginate::Collection.create(1, 10) do |pager| - # result = Post.find(:all, :limit => pager.per_page, :offset => pager.offset) - # # inject the result array into the paginated collection: - # pager.replace(result) - # - # unless pager.total_entries - # # the pager didn't manage to guess the total count, do it manually - # pager.total_entries = Post.count - # end - # end - # - # The possibilities with this are endless. For another example, here is how - # WillPaginate used to define pagination for Array instances: - # - # Array.class_eval do - # def paginate(page = 1, per_page = 15) - # WillPaginate::Collection.create(page, per_page, size) do |pager| - # pager.replace self[pager.offset, pager.per_page].to_a - # end - # end - # end - # - # The Array#paginate API has since then changed, but this still serves as a - # fine example of WillPaginate::Collection usage. - def self.create(page, per_page, total = nil, &block) - pager = new(page, per_page, total) - yield pager - pager - end - - # Helper method that is true when someone tries to fetch a page with a - # larger number than the last page. Can be used in combination with flashes - # and redirecting. - def out_of_bounds? - current_page > total_pages - end - - # Current offset of the paginated collection. If we're on the first page, - # it is always 0. If we're on the 2nd page and there are 30 entries per page, - # the offset is 30. This property is useful if you want to render ordinals - # besides your records: simply start with offset + 1. - def offset - (current_page - 1) * per_page - end - - # current_page - 1 or nil if there is no previous page - def previous_page - current_page > 1 ? (current_page - 1) : nil - end - - # current_page + 1 or nil if there is no next page - def next_page - current_page < total_pages ? (current_page + 1) : nil - end - - def total_entries=(number) - @total_entries = number.to_i - @total_pages = (@total_entries / per_page.to_f).ceil - end - - # This is a magic wrapper for the original Array#replace method. It serves - # for populating the paginated collection after initialization. - # - # Why magic? Because it tries to guess the total number of entries judging - # by the size of given array. If it is shorter than +per_page+ limit, then we - # know we're on the last page. 
This trick is very useful for avoiding - # unnecessary hits to the database to do the counting after we fetched the - # data for the current page. - # - # However, after using +replace+ you should always test the value of - # +total_entries+ and set it to a proper value if it's +nil+. See the example - # in +create+. - def replace(array) - result = super - - # The collection is shorter then page limit? Rejoice, because - # then we know that we are on the last page! - if total_entries.nil? and length < per_page and (current_page == 1 or length > 0) - self.total_entries = offset + length - end - - result - end - end -end diff --git a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/core_ext.rb b/vendor/gems/will_paginate-2.2.2/lib/will_paginate/core_ext.rb deleted file mode 100644 index 32f10f5..0000000 --- a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/core_ext.rb +++ /dev/null @@ -1,32 +0,0 @@ -require 'set' -require 'will_paginate/array' - -unless Hash.instance_methods.include? 'except' - Hash.class_eval do - # Returns a new hash without the given keys. - def except(*keys) - rejected = Set.new(respond_to?(:convert_key) ? keys.map { |key| convert_key(key) } : keys) - reject { |key,| rejected.include?(key) } - end - - # Replaces the hash without only the given keys. - def except!(*keys) - replace(except(*keys)) - end - end -end - -unless Hash.instance_methods.include? 'slice' - Hash.class_eval do - # Returns a new hash with only the given keys. - def slice(*keys) - allowed = Set.new(respond_to?(:convert_key) ? keys.map { |key| convert_key(key) } : keys) - reject { |key,| !allowed.include?(key) } - end - - # Replaces the hash with only the given keys. - def slice!(*keys) - replace(slice(*keys)) - end - end -end diff --git a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/finder.rb b/vendor/gems/will_paginate-2.2.2/lib/will_paginate/finder.rb deleted file mode 100644 index 6aac7e6..0000000 --- a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/finder.rb +++ /dev/null @@ -1,239 +0,0 @@ -require 'will_paginate/core_ext' - -module WillPaginate - # A mixin for ActiveRecord::Base. Provides +per_page+ class method - # and hooks things up to provide paginating finders. - # - # Find out more in WillPaginate::Finder::ClassMethods - # - module Finder - def self.included(base) - base.extend ClassMethods - class << base - alias_method_chain :method_missing, :paginate - # alias_method_chain :find_every, :paginate - define_method(:per_page) { 30 } unless respond_to?(:per_page) - end - end - - # = Paginating finders for ActiveRecord models - # - # WillPaginate adds +paginate+, +per_page+ and other methods to - # ActiveRecord::Base class methods and associations. It also hooks into - # +method_missing+ to intercept pagination calls to dynamic finders such as - # +paginate_by_user_id+ and translate them to ordinary finders - # (+find_all_by_user_id+ in this case). - # - # In short, paginating finders are equivalent to ActiveRecord finders; the - # only difference is that we start with "paginate" instead of "find" and - # that :page is required parameter: - # - # @posts = Post.paginate :all, :page => params[:page], :order => 'created_at DESC' - # - # In paginating finders, "all" is implicit. There is no sense in paginating - # a single record, right? So, you can drop the :all argument: - # - # Post.paginate(...) 
=> Post.find :all - # Post.paginate_all_by_something => Post.find_all_by_something - # Post.paginate_by_something => Post.find_all_by_something - # - # == The importance of the :order parameter - # - # In ActiveRecord finders, :order parameter specifies columns for - # the ORDER BY clause in SQL. It is important to have it, since - # pagination only makes sense with ordered sets. Without the ORDER - # BY clause, databases aren't required to do consistent ordering when - # performing SELECT queries; this is especially true for - # PostgreSQL. - # - # Therefore, make sure you are doing ordering on a column that makes the - # most sense in the current context. Make that obvious to the user, also. - # For perfomance reasons you will also want to add an index to that column. - module ClassMethods - # This is the main paginating finder. - # - # == Special parameters for paginating finders - # * :page -- REQUIRED, but defaults to 1 if false or nil - # * :per_page -- defaults to CurrentModel.per_page (which is 30 if not overridden) - # * :total_entries -- use only if you manually count total entries - # * :count -- additional options that are passed on to +count+ - # * :finder -- name of the ActiveRecord finder used (default: "find") - # - # All other options (+conditions+, +order+, ...) are forwarded to +find+ - # and +count+ calls. - def paginate(*args, &block) - options = args.pop - page, per_page, total_entries = wp_parse_options(options) - finder = (options[:finder] || 'find').to_s - - if finder == 'find' - # an array of IDs may have been given: - total_entries ||= (Array === args.first and args.first.size) - # :all is implicit - args.unshift(:all) if args.empty? - end - - WillPaginate::Collection.create(page, per_page, total_entries) do |pager| - count_options = options.except :page, :per_page, :total_entries, :finder - find_options = count_options.except(:count).update(:offset => pager.offset, :limit => pager.per_page) - - args << find_options - # @options_from_last_find = nil - pager.replace send(finder, *args, &block) - - # magic counting for user convenience: - pager.total_entries = wp_count(count_options, args, finder) unless pager.total_entries - end - end - - # Iterates through all records by loading one page at a time. This is useful - # for migrations or any other use case where you don't want to load all the - # records in memory at once. - # - # It uses +paginate+ internally; therefore it accepts all of its options. - # You can specify a starting page with :page (default is 1). Default - # :order is "id", override if necessary. - # - # See http://weblog.jamisbuck.org/2007/4/6/faking-cursors-in-activerecord where - # Jamis Buck describes this and also uses a more efficient way for MySQL. - def paginated_each(options = {}, &block) - options = { :order => 'id', :page => 1 }.merge options - options[:page] = options[:page].to_i - options[:total_entries] = 0 # skip the individual count queries - total = 0 - - begin - collection = paginate(options) - total += collection.each(&block).size - options[:page] += 1 - end until collection.size < collection.per_page - - total - end - - # Wraps +find_by_sql+ by simply adding LIMIT and OFFSET to your SQL string - # based on the params otherwise used by paginating finds: +page+ and - # +per_page+. 
- # - # Example: - # - # @developers = Developer.paginate_by_sql ['select * from developers where salary > ?', 80000], - # :page => params[:page], :per_page => 3 - # - # A query for counting rows will automatically be generated if you don't - # supply :total_entries. If you experience problems with this - # generated SQL, you might want to perform the count manually in your - # application. - # - def paginate_by_sql(sql, options) - WillPaginate::Collection.create(*wp_parse_options(options)) do |pager| - query = sanitize_sql(sql) - original_query = query.dup - # add limit, offset - add_limit! query, :offset => pager.offset, :limit => pager.per_page - # perfom the find - pager.replace find_by_sql(query) - - unless pager.total_entries - count_query = original_query.sub /\bORDER\s+BY\s+[\w`,\s]+$/mi, '' - count_query = "SELECT COUNT(*) FROM (#{count_query})" - - unless ['oracle', 'oci'].include?(self.connection.adapter_name.downcase) - count_query << ' AS count_table' - end - # perform the count query - pager.total_entries = count_by_sql(count_query) - end - end - end - - def respond_to?(method, include_priv = false) #:nodoc: - case method.to_sym - when :paginate, :paginate_by_sql - true - else - super(method.to_s.sub(/^paginate/, 'find'), include_priv) - end - end - - protected - - def method_missing_with_paginate(method, *args, &block) #:nodoc: - # did somebody tried to paginate? if not, let them be - unless method.to_s.index('paginate') == 0 - return method_missing_without_paginate(method, *args, &block) - end - - # paginate finders are really just find_* with limit and offset - finder = method.to_s.sub('paginate', 'find') - finder.sub!('find', 'find_all') if finder.index('find_by_') == 0 - - options = args.pop - raise ArgumentError, 'parameter hash expected' unless options.respond_to? :symbolize_keys - options = options.dup - options[:finder] = finder - args << options - - paginate(*args, &block) - end - - # Does the not-so-trivial job of finding out the total number of entries - # in the database. It relies on the ActiveRecord +count+ method. - def wp_count(options, args, finder) - excludees = [:count, :order, :limit, :offset, :readonly] - unless options[:select] and options[:select] =~ /^\s*DISTINCT\b/i - excludees << :select # only exclude the select param if it doesn't begin with DISTINCT - end - # count expects (almost) the same options as find - count_options = options.except *excludees - - # merge the hash found in :count - # this allows you to specify :select, :order, or anything else just for the count query - count_options.update options[:count] if options[:count] - - # we may have to scope ... - counter = Proc.new { count(count_options) } - - # we may be in a model or an association proxy! - klass = (@owner and @reflection) ? @reflection.klass : self - - count = if finder.index('find_') == 0 and klass.respond_to?(scoper = finder.sub('find', 'with')) - # scope_out adds a 'with_finder' method which acts like with_scope, if it's present - # then execute the count with the scoping provided by the with_finder - send(scoper, &counter) - elsif match = /^find_(all_by|by)_([_a-zA-Z]\w*)$/.match(finder) - # extract conditions from calls like "paginate_by_foo_and_bar" - attribute_names = extract_attribute_names_from_match(match) - conditions = construct_attributes_from_arguments(attribute_names, args) - with_scope(:find => { :conditions => conditions }, &counter) - else - counter.call - end - - count.respond_to?(:length) ? 
count.length : count - end - - def wp_parse_options(options) #:nodoc: - raise ArgumentError, 'parameter hash expected' unless options.respond_to? :symbolize_keys - options = options.symbolize_keys - raise ArgumentError, ':page parameter required' unless options.key? :page - - if options[:count] and options[:total_entries] - raise ArgumentError, ':count and :total_entries are mutually exclusive' - end - - page = options[:page] || 1 - per_page = options[:per_page] || self.per_page - total = options[:total_entries] - [page, per_page, total] - end - - private - - # def find_every_with_paginate(options) - # @options_from_last_find = options - # find_every_without_paginate(options) - # end - end - end -end diff --git a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/named_scope.rb b/vendor/gems/will_paginate-2.2.2/lib/will_paginate/named_scope.rb deleted file mode 100644 index 6f00cf7..0000000 --- a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/named_scope.rb +++ /dev/null @@ -1,132 +0,0 @@ -## stolen from: http://dev.rubyonrails.org/browser/trunk/activerecord/lib/active_record/named_scope.rb?rev=9084 - -module WillPaginate - # This is a feature backported from Rails 2.1 because of its usefullness not only with will_paginate, - # but in other aspects when managing complex conditions that you want to be reusable. - module NamedScope - # All subclasses of ActiveRecord::Base have two named_scopes: - # * all, which is similar to a find(:all) query, and - # * scoped, which allows for the creation of anonymous scopes, on the fly: - # - # Shirt.scoped(:conditions => {:color => 'red'}).scoped(:include => :washing_instructions) - # - # These anonymous scopes tend to be useful when procedurally generating complex queries, where passing - # intermediate values (scopes) around as first-class objects is convenient. - def self.included(base) - base.class_eval do - extend ClassMethods - named_scope :all - named_scope :scoped, lambda { |scope| scope } - end - end - - module ClassMethods - def scopes #:nodoc: - read_inheritable_attribute(:scopes) || write_inheritable_attribute(:scopes, {}) - end - - # Adds a class method for retrieving and querying objects. A scope represents a narrowing of a database query, - # such as :conditions => {:color => :red}, :select => 'shirts.*', :include => :washing_instructions. - # - # class Shirt < ActiveRecord::Base - # named_scope :red, :conditions => {:color => 'red'} - # named_scope :dry_clean_only, :joins => :washing_instructions, :conditions => ['washing_instructions.dry_clean_only = ?', true] - # end - # - # The above calls to named_scope define class methods Shirt.red and Shirt.dry_clean_only. Shirt.red, - # in effect, represents the query Shirt.find(:all, :conditions => {:color => 'red'}). - # - # Unlike Shirt.find(...), however, the object returned by Shirt.red is not an Array; it resembles the association object - # constructed by a has_many declaration. For instance, you can invoke Shirt.red.find(:first), Shirt.red.count, - # Shirt.red.find(:all, :conditions => {:size => 'small'}). Also, just - # as with the association objects, name scopes acts like an Array, implementing Enumerable; Shirt.red.each(&block), - # Shirt.red.first, and Shirt.red.inject(memo, &block) all behave as if Shirt.red really were an Array. - # - # These named scopes are composable. For instance, Shirt.red.dry_clean_only will produce all shirts that are both red and dry clean only. 
- # Nested finds and calculations also work with these compositions: Shirt.red.dry_clean_only.count returns the number of garments - # for which these criteria obtain. Similarly with Shirt.red.dry_clean_only.average(:thread_count). - # - # All scopes are available as class methods on the ActiveRecord descendent upon which the scopes were defined. But they are also available to - # has_many associations. If, - # - # class Person < ActiveRecord::Base - # has_many :shirts - # end - # - # then elton.shirts.red.dry_clean_only will return all of Elton's red, dry clean - # only shirts. - # - # Named scopes can also be procedural. - # - # class Shirt < ActiveRecord::Base - # named_scope :colored, lambda { |color| - # { :conditions => { :color => color } } - # } - # end - # - # In this example, Shirt.colored('puce') finds all puce shirts. - # - # Named scopes can also have extensions, just as with has_many declarations: - # - # class Shirt < ActiveRecord::Base - # named_scope :red, :conditions => {:color => 'red'} do - # def dom_id - # 'red_shirts' - # end - # end - # end - # - def named_scope(name, options = {}, &block) - scopes[name] = lambda do |parent_scope, *args| - Scope.new(parent_scope, case options - when Hash - options - when Proc - options.call(*args) - end, &block) - end - (class << self; self end).instance_eval do - define_method name do |*args| - scopes[name].call(self, *args) - end - end - end - end - - class Scope #:nodoc: - attr_reader :proxy_scope, :proxy_options - [].methods.each { |m| delegate m, :to => :proxy_found unless m =~ /(^__|^nil\?|^send|class|extend|find|count|sum|average|maximum|minimum|paginate)/ } - delegate :scopes, :with_scope, :to => :proxy_scope - - def initialize(proxy_scope, options, &block) - [options[:extend]].flatten.each { |extension| extend extension } if options[:extend] - extend Module.new(&block) if block_given? 
- @proxy_scope, @proxy_options = proxy_scope, options.except(:extend) - end - - def reload - load_found; self - end - - protected - def proxy_found - @found || load_found - end - - private - def method_missing(method, *args, &block) - if scopes.include?(method) - scopes[method].call(self, *args) - else - with_scope :find => proxy_options do - proxy_scope.send(method, *args, &block) - end - end - end - - def load_found - @found = find(:all) - end - end - end -end diff --git a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/named_scope_patch.rb b/vendor/gems/will_paginate-2.2.2/lib/will_paginate/named_scope_patch.rb deleted file mode 100644 index bdc1997..0000000 --- a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/named_scope_patch.rb +++ /dev/null @@ -1,39 +0,0 @@ -## based on http://dev.rubyonrails.org/changeset/9084 - -ActiveRecord::Associations::AssociationProxy.class_eval do - protected - def with_scope(*args, &block) - @reflection.klass.send :with_scope, *args, &block - end -end - -[ ActiveRecord::Associations::AssociationCollection, - ActiveRecord::Associations::HasManyThroughAssociation ].each do |klass| - klass.class_eval do - protected - alias :method_missing_without_scopes :method_missing_without_paginate - def method_missing_without_paginate(method, *args, &block) - if @reflection.klass.scopes.include?(method) - @reflection.klass.scopes[method].call(self, *args, &block) - else - method_missing_without_scopes(method, *args, &block) - end - end - end -end - -# Rails 1.2.6 -ActiveRecord::Associations::HasAndBelongsToManyAssociation.class_eval do - protected - def method_missing(method, *args, &block) - if @target.respond_to?(method) || (!@reflection.klass.respond_to?(method) && Class.respond_to?(method)) - super - elsif @reflection.klass.scopes.include?(method) - @reflection.klass.scopes[method].call(self, *args) - else - @reflection.klass.with_scope(:find => { :conditions => @finder_sql, :joins => @join_sql, :readonly => false }) do - @reflection.klass.send(method, *args, &block) - end - end - end -end if ActiveRecord::Base.respond_to? :find_first diff --git a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/version.rb b/vendor/gems/will_paginate-2.2.2/lib/will_paginate/version.rb deleted file mode 100644 index bdd322a..0000000 --- a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/version.rb +++ /dev/null @@ -1,9 +0,0 @@ -module WillPaginate #:nodoc: - module VERSION #:nodoc: - MAJOR = 2 - MINOR = 2 - TINY = 2 - - STRING = [MAJOR, MINOR, TINY].join('.') - end -end diff --git a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/view_helpers.rb b/vendor/gems/will_paginate-2.2.2/lib/will_paginate/view_helpers.rb deleted file mode 100644 index 00d3627..0000000 --- a/vendor/gems/will_paginate-2.2.2/lib/will_paginate/view_helpers.rb +++ /dev/null @@ -1,328 +0,0 @@ -require 'will_paginate/core_ext' - -module WillPaginate - # = Will Paginate view helpers - # - # Currently there is only one view helper: +will_paginate+. It renders the - # pagination links for the given collection. The helper itself is lightweight - # and serves only as a wrapper around link renderer instantiation; the - # renderer then does all the hard work of generating the HTML. - # - # == Global options for helpers - # - # Options for pagination helpers are optional and get their default values from the - # WillPaginate::ViewHelpers.pagination_options hash. 
You can write to this hash to - # override default options on the global level: - # - # WillPaginate::ViewHelpers.pagination_options[:prev_label] = 'Previous page' - # - # By putting this into your environment.rb you can easily translate link texts to previous - # and next pages, as well as override some other defaults to your liking. - module ViewHelpers - # default options that can be overridden on the global level - @@pagination_options = { - :class => 'pagination', - :prev_label => '« Previous', - :next_label => 'Next »', - :inner_window => 4, # links around the current page - :outer_window => 1, # links around beginning and end - :separator => ' ', # single space is friendly to spiders and non-graphic browsers - :param_name => :page, - :params => nil, - :renderer => 'WillPaginate::LinkRenderer', - :page_links => true, - :container => true - } - mattr_reader :pagination_options - - # Renders Digg/Flickr-style pagination for a WillPaginate::Collection - # object. Nil is returned if there is only one page in total; no point in - # rendering the pagination in that case... - # - # ==== Options - # * :class -- CSS class name for the generated DIV (default: "pagination") - # * :prev_label -- default: "« Previous" - # * :next_label -- default: "Next »" - # * :inner_window -- how many links are shown around the current page (default: 4) - # * :outer_window -- how many links are around the first and the last page (default: 1) - # * :separator -- string separator for page HTML elements (default: single space) - # * :param_name -- parameter name for page number in URLs (default: :page) - # * :params -- additional parameters when generating pagination links - # (eg. :controller => "foo", :action => nil) - # * :renderer -- class name of the link renderer (default: WillPaginate::LinkRenderer) - # * :page_links -- when false, only previous/next links are rendered (default: true) - # * :container -- toggles rendering of the DIV container for pagination links, set to - # false only when you are rendering your own pagination markup (default: true) - # * :id -- HTML ID for the container (default: nil). Pass +true+ to have the ID automatically - # generated from the class name of objects in collection: for example, paginating - # ArticleComment models would yield an ID of "article_comments_pagination". - # - # All options beside listed ones are passed as HTML attributes to the container - # element for pagination links (the DIV). For example: - # - # <%= will_paginate @posts, :id => 'wp_posts' %> - # - # ... will result in: - # - # - # - # ==== Using the helper without arguments - # If the helper is called without passing in the collection object, it will - # try to read from the instance variable inferred by the controller name. - # For example, calling +will_paginate+ while the current controller is - # PostsController will result in trying to read from the @posts - # variable. Example: - # - # <%= will_paginate :id => true %> - # - # ... will result in @post collection getting paginated: - # - # - # - def will_paginate(collection = nil, options = {}) - options, collection = collection, nil if collection.is_a? Hash - unless collection or !controller - collection_name = "@#{controller.controller_name}" - collection = instance_variable_get(collection_name) - raise ArgumentError, "The #{collection_name} variable appears to be empty. Did you " + - "forget to pass the collection object for will_paginate?" 
unless collection - end - # early exit if there is nothing to render - return nil unless WillPaginate::ViewHelpers.total_pages_for_collection(collection) > 1 - - options = options.symbolize_keys.reverse_merge WillPaginate::ViewHelpers.pagination_options - # create the renderer instance - renderer_class = options[:renderer].to_s.constantize - renderer = renderer_class.new collection, options, self - # render HTML for pagination - renderer.to_html - end - - # Wrapper for rendering pagination links at both top and bottom of a block - # of content. - # - # <% paginated_section @posts do %> - #
    <ol id="posts"> - #       <% for post in @posts %> - #         <li> ... </li> - #       <% end %> - #     </ol> - #   <% end %> - #
- # will result in: - # - #   <div class="pagination"> ... </div> - #   <ol id="posts"> - #     ... - #   </ol> - #   <div class="pagination"> ... </div>
- # - # - # Arguments are passed to a will_paginate call, so the same options - # apply. Don't use the :id option; otherwise you'll finish with two - # blocks of pagination links sharing the same ID (which is invalid HTML). - def paginated_section(*args, &block) - pagination = will_paginate(*args).to_s - content = pagination + capture(&block) + pagination - concat content, block.binding - end - - # Renders a helpful message with numbers of displayed vs. total entries. - # You can use this as a blueprint for your own, similar helpers. - # - # <%= page_entries_info @posts %> - # #-> Displaying entries 6 - 10 of 26 in total - def page_entries_info(collection) - if collection.total_pages < 2 - case collection.size - when 0; 'No entries found' - when 1; 'Displaying 1 entry' - else; "Displaying all #{collection.size} entries" - end - else - %{Displaying entries %d - %d of %d in total} % [ - collection.offset + 1, - collection.offset + collection.length, - collection.total_entries - ] - end - end - - def self.total_pages_for_collection(collection) #:nodoc: - if collection.respond_to?('page_count') and !collection.respond_to?('total_pages') - WillPaginate::Deprecation.warn <<-MSG - You are using a paginated collection of class #{collection.class.name} - which conforms to the old API of WillPaginate::Collection by using - `page_count`, while the current method name is `total_pages`. Please - upgrade yours or 3rd-party code that provides the paginated collection. - MSG - class << collection - def total_pages; page_count; end - end - end - collection.total_pages - end - end - - # This class does the heavy lifting of actually building the pagination - # links. It is used by +will_paginate+ helper internally. - class LinkRenderer - # * +collection+ is a WillPaginate::Collection instance or any other object - # that conforms to that API - # * +options+ are forwarded from +will_paginate+ view helper - # * +template+ is the reference to the template being rendered - def initialize(collection, options, template) - @collection = collection - @options = options - @template = template - end - - # Process it! This method returns the complete HTML string which contains - # pagination links. Feel free to subclass LinkRenderer and change this - # method as you see fit. - def to_html - links = @options[:page_links] ? windowed_links : [] - # previous/next buttons - links.unshift page_link_or_span(@collection.previous_page, %w(disabled prev_page), @options[:prev_label]) - links.push page_link_or_span(@collection.next_page, %w(disabled next_page), @options[:next_label]) - - html = links.join(@options[:separator]) - @options[:container] ? @template.content_tag(:div, html, html_attributes) : html - end - - # Returns the subset of +options+ this instance was initialized with that - # represent HTML attributes for the container element of pagination links. - def html_attributes - return @html_attributes if @html_attributes - @html_attributes = @options.except *(WillPaginate::ViewHelpers.pagination_options.keys - [:class]) - # pagination of Post models will have the ID of "posts_pagination" - if @options[:container] and @options[:id] === true - @html_attributes[:id] = @collection.first.class.name.underscore.pluralize + '_pagination' - end - @html_attributes - end - - protected - - # The gap in page links is represented by: - # - # - def gap_marker - '' - end - - # Collects link items for visible page numbers. 
- def windowed_links - prev = nil - - visible_page_numbers.inject [] do |links, n| - # detect gaps: - links << gap_marker if prev and n > prev + 1 - links << page_link_or_span(n, 'current') - prev = n - links - end - end - - # Calculates visible page numbers using the :inner_window and - # :outer_window options. - def visible_page_numbers - inner_window, outer_window = @options[:inner_window].to_i, @options[:outer_window].to_i - window_from = current_page - inner_window - window_to = current_page + inner_window - - # adjust lower or upper limit if other is out of bounds - if window_to > total_pages - window_from -= window_to - total_pages - window_to = total_pages - end - if window_from < 1 - window_to += 1 - window_from - window_from = 1 - window_to = total_pages if window_to > total_pages - end - - visible = (1..total_pages).to_a - left_gap = (2 + outer_window)...window_from - right_gap = (window_to + 1)...(total_pages - outer_window) - visible -= left_gap.to_a if left_gap.last - left_gap.first > 1 - visible -= right_gap.to_a if right_gap.last - right_gap.first > 1 - - visible - end - - def page_link_or_span(page, span_class, text = nil) - text ||= page.to_s - classnames = Array[*span_class] - - if page and page != current_page - @template.link_to text, url_for(page), :rel => rel_value(page), :class => classnames[1] - else - @template.content_tag :span, text, :class => classnames.join(' ') - end - end - - # Returns URL params for +page_link_or_span+, taking the current GET params - # and :params option into account. - def url_for(page) - unless @url_string - @url_params = { :escape => false } - # page links should preserve GET parameters - stringified_merge @url_params, @template.params if @template.request.get? - stringified_merge @url_params, @options[:params] if @options[:params] - - if param_name.index(/[^\w-]/) - page_param = (defined?(CGIMethods) ? CGIMethods : ActionController::AbstractRequest). - parse_query_parameters("#{param_name}=#{page}") - - stringified_merge @url_params, page_param - else - @url_params[param_name] = page - end - - url = @template.url_for(@url_params) - @url_string = url.sub(%r!([?&/]#{CGI.escape param_name}[=/])#{page}!, '\1@') - return url - end - @url_string.sub '@', page.to_s - end - - private - - def rel_value(page) - case page - when @collection.previous_page; 'prev' + (page == 1 ? ' start' : '') - when @collection.next_page; 'next' - when 1; 'start' - end - end - - def current_page - @collection.current_page - end - - def total_pages - @total_pages ||= WillPaginate::ViewHelpers.total_pages_for_collection(@collection) - end - - def param_name - @param_name ||= @options[:param_name].to_s - end - - def stringified_merge(target, other) - other.each do |key, value| - key = key.to_s - existing = target[key] - - if value.is_a?(Hash) - target[key] = existing = {} if existing.nil? - if existing.is_a?(Hash) - stringified_merge(existing, value) - return - end - end - - target[key] = value - end - end - end -end diff --git a/vendor/gems/will_paginate-2.2.2/test/boot.rb b/vendor/gems/will_paginate-2.2.2/test/boot.rb deleted file mode 100644 index 622fc93..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/boot.rb +++ /dev/null @@ -1,21 +0,0 @@ -plugin_root = File.join(File.dirname(__FILE__), '..') -version = ENV['RAILS_VERSION'] -version = nil if version and version == "" - -# first look for a symlink to a copy of the framework -if !version and framework_root = ["#{plugin_root}/rails", "#{plugin_root}/../../rails"].find { |p| File.directory? 
p } - puts "found framework root: #{framework_root}" - # this allows for a plugin to be tested outside of an app and without Rails gems - $:.unshift "#{framework_root}/activesupport/lib", "#{framework_root}/activerecord/lib", "#{framework_root}/actionpack/lib" -else - # simply use installed gems if available - puts "using Rails#{version ? ' ' + version : nil} gems" - require 'rubygems' - - if version - gem 'rails', version - else - gem 'actionpack' - gem 'activerecord' - end -end diff --git a/vendor/gems/will_paginate-2.2.2/test/collection_test.rb b/vendor/gems/will_paginate-2.2.2/test/collection_test.rb deleted file mode 100644 index b336090..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/collection_test.rb +++ /dev/null @@ -1,140 +0,0 @@ -require 'helper' -require 'will_paginate/array' - -class ArrayPaginationTest < Test::Unit::TestCase - def test_simple - collection = ('a'..'e').to_a - - [{ :page => 1, :per_page => 3, :expected => %w( a b c ) }, - { :page => 2, :per_page => 3, :expected => %w( d e ) }, - { :page => 1, :per_page => 5, :expected => %w( a b c d e ) }, - { :page => 3, :per_page => 5, :expected => [] }, - ]. - each do |conditions| - expected = conditions.delete :expected - assert_equal expected, collection.paginate(conditions) - end - end - - def test_defaults - result = (1..50).to_a.paginate - assert_equal 1, result.current_page - assert_equal 30, result.size - end - - def test_deprecated_api - assert_raise(ArgumentError) { [].paginate(2) } - assert_raise(ArgumentError) { [].paginate(2, 10) } - end - - def test_total_entries_has_precedence - result = %w(a b c).paginate :total_entries => 5 - assert_equal 5, result.total_entries - end - - def test_argument_error_with_params_and_another_argument - assert_raise ArgumentError do - [].paginate({}, 5) - end - end - - def test_paginated_collection - entries = %w(a b c) - collection = create(2, 3, 10) do |pager| - assert_equal entries, pager.replace(entries) - end - - assert_equal entries, collection - assert_respond_to_all collection, %w(total_pages each offset size current_page per_page total_entries) - assert_kind_of Array, collection - assert_instance_of Array, collection.entries - assert_equal 3, collection.offset - assert_equal 4, collection.total_pages - assert !collection.out_of_bounds? - end - - def test_previous_next_pages - collection = create(1, 1, 3) - assert_nil collection.previous_page - assert_equal 2, collection.next_page - - collection = create(2, 1, 3) - assert_equal 1, collection.previous_page - assert_equal 3, collection.next_page - - collection = create(3, 1, 3) - assert_equal 2, collection.previous_page - assert_nil collection.next_page - end - - def test_out_of_bounds - entries = create(2, 3, 2){} - assert entries.out_of_bounds? - - entries = create(1, 3, 2){} - assert !entries.out_of_bounds? 
- end - - def test_guessing_total_count - entries = create do |pager| - # collection is shorter than limit - pager.replace array - end - assert_equal 8, entries.total_entries - - entries = create(2, 5, 10) do |pager| - # collection is shorter than limit, but we have an explicit count - pager.replace array - end - assert_equal 10, entries.total_entries - - entries = create do |pager| - # collection is the same as limit; we can't guess - pager.replace array(5) - end - assert_equal nil, entries.total_entries - - entries = create do |pager| - # collection is empty; we can't guess - pager.replace array(0) - end - assert_equal nil, entries.total_entries - - entries = create(1) do |pager| - # collection is empty and we're on page 1, - # so the whole thing must be empty, too - pager.replace array(0) - end - assert_equal 0, entries.total_entries - end - - def test_invalid_page - bad_inputs = [0, -1, nil, '', 'Schnitzel'] - - bad_inputs.each do |bad| - assert_raise(WillPaginate::InvalidPage) { create bad } - end - end - - def test_invalid_per_page_setting - assert_raise(ArgumentError) { create(1, -1) } - end - - def test_page_count_was_removed - assert_raise(NoMethodError) { create.page_count } - # It's `total_pages` now. - end - - private - def create(page = 2, limit = 5, total = nil, &block) - if block_given? - WillPaginate::Collection.create(page, limit, total, &block) - else - WillPaginate::Collection.new(page, limit, total) - end - end - - def array(size = 3) - Array.new(size) - end -end diff --git a/vendor/gems/will_paginate-2.2.2/test/console b/vendor/gems/will_paginate-2.2.2/test/console deleted file mode 100755 index 3f282f1..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/console +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env ruby -irb = RUBY_PLATFORM =~ /(:?mswin|mingw)/ ? 
'irb.bat' : 'irb' -libs = [] - -libs << 'irb/completion' -libs << File.join('lib', 'load_fixtures') - -exec "#{irb} -Ilib:test#{libs.map{ |l| " -r #{l}" }.join} --simple-prompt" diff --git a/vendor/gems/will_paginate-2.2.2/test/database.yml b/vendor/gems/will_paginate-2.2.2/test/database.yml deleted file mode 100644 index 7ef1e73..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/database.yml +++ /dev/null @@ -1,22 +0,0 @@ -sqlite3: - database: ":memory:" - adapter: sqlite3 - timeout: 500 - -sqlite2: - database: ":memory:" - adapter: sqlite2 - -mysql: - adapter: mysql - username: rails - password: mislav - encoding: utf8 - database: will_paginate_unittest - -postgres: - adapter: postgresql - username: mislav - password: mislav - database: will_paginate_unittest - min_messages: warning diff --git a/vendor/gems/will_paginate-2.2.2/test/finder_test.rb b/vendor/gems/will_paginate-2.2.2/test/finder_test.rb deleted file mode 100644 index 055109c..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/finder_test.rb +++ /dev/null @@ -1,416 +0,0 @@ -require 'helper' -require 'lib/activerecord_test_case' - -require 'will_paginate' -WillPaginate.enable_activerecord -WillPaginate.enable_named_scope - -class FinderTest < ActiveRecordTestCase - fixtures :topics, :replies, :users, :projects, :developers_projects - - def test_new_methods_presence - assert_respond_to_all Topic, %w(per_page paginate paginate_by_sql) - end - - def test_simple_paginate - assert_queries(1) do - entries = Topic.paginate :page => nil - assert_equal 1, entries.current_page - assert_equal 1, entries.total_pages - assert_equal 4, entries.size - end - - assert_queries(2) do - entries = Topic.paginate :page => 2 - assert_equal 1, entries.total_pages - assert entries.empty? - end - end - - def test_parameter_api - # :page parameter in options is required! 
- assert_raise(ArgumentError){ Topic.paginate } - assert_raise(ArgumentError){ Topic.paginate({}) } - - # explicit :all should not break anything - assert_equal Topic.paginate(:page => nil), Topic.paginate(:all, :page => 1) - - # :count could be nil and we should still not cry - assert_nothing_raised { Topic.paginate :page => 1, :count => nil } - end - - def test_paginate_with_per_page - entries = Topic.paginate :page => 1, :per_page => 1 - assert_equal 1, entries.size - assert_equal 4, entries.total_pages - - # Developer class has explicit per_page at 10 - entries = Developer.paginate :page => 1 - assert_equal 10, entries.size - assert_equal 2, entries.total_pages - - entries = Developer.paginate :page => 1, :per_page => 5 - assert_equal 11, entries.total_entries - assert_equal 5, entries.size - assert_equal 3, entries.total_pages - end - - def test_paginate_with_order - entries = Topic.paginate :page => 1, :order => 'created_at desc' - expected = [topics(:futurama), topics(:harvey_birdman), topics(:rails), topics(:ar)].reverse - assert_equal expected, entries.to_a - assert_equal 1, entries.total_pages - end - - def test_paginate_with_conditions - entries = Topic.paginate :page => 1, :conditions => ["created_at > ?", 30.minutes.ago] - expected = [topics(:rails), topics(:ar)] - assert_equal expected, entries.to_a - assert_equal 1, entries.total_pages - end - - def test_paginate_with_include_and_conditions - entries = Topic.paginate \ - :page => 1, - :include => :replies, - :conditions => "replies.content LIKE 'Bird%' ", - :per_page => 10 - - expected = Topic.find :all, - :include => 'replies', - :conditions => "replies.content LIKE 'Bird%' ", - :limit => 10 - - assert_equal expected, entries.to_a - assert_equal 1, entries.total_entries - end - - def test_paginate_with_include_and_order - entries = nil - assert_queries(2) do - entries = Topic.paginate \ - :page => 1, - :include => :replies, - :order => 'replies.created_at asc, topics.created_at asc', - :per_page => 10 - end - - expected = Topic.find :all, - :include => 'replies', - :order => 'replies.created_at asc, topics.created_at asc', - :limit => 10 - - assert_equal expected, entries.to_a - assert_equal 4, entries.total_entries - end - - def test_paginate_associations_with_include - entries, project = nil, projects(:active_record) - - assert_nothing_raised "THIS IS A BUG in Rails 1.2.3 that was fixed in [7326]. " + - "Please upgrade to a newer version of Rails." 
do - entries = project.topics.paginate \ - :page => 1, - :include => :replies, - :conditions => "replies.content LIKE 'Nice%' ", - :per_page => 10 - end - - expected = Topic.find :all, - :include => 'replies', - :conditions => "project_id = #{project.id} AND replies.content LIKE 'Nice%' ", - :limit => 10 - - assert_equal expected, entries.to_a - end - - def test_paginate_associations - dhh = users :david - expected_name_ordered = [projects(:action_controller), projects(:active_record)] - expected_id_ordered = [projects(:active_record), projects(:action_controller)] - - assert_queries(2) do - # with association-specified order - entries = dhh.projects.paginate(:page => 1) - assert_equal expected_name_ordered, entries - assert_equal 2, entries.total_entries - end - - # with explicit order - entries = dhh.projects.paginate(:page => 1, :order => 'projects.id') - assert_equal expected_id_ordered, entries - assert_equal 2, entries.total_entries - - assert_nothing_raised { dhh.projects.find(:all, :order => 'projects.id', :limit => 4) } - entries = dhh.projects.paginate(:page => 1, :order => 'projects.id', :per_page => 4) - assert_equal expected_id_ordered, entries - - # has_many with implicit order - topic = Topic.find(1) - expected = [replies(:spam), replies(:witty_retort)] - assert_equal expected.map(&:id).sort, topic.replies.paginate(:page => 1).map(&:id).sort - assert_equal expected.reverse, topic.replies.paginate(:page => 1, :order => 'replies.id ASC') - end - - def test_paginate_association_extension - project = Project.find(:first) - - assert_queries(2) do - entries = project.replies.paginate_recent :page => 1 - assert_equal [replies(:brave)], entries - end - end - - def test_paginate_with_joins - entries = nil - - assert_queries(1) do - entries = Developer.paginate :page => 1, - :joins => 'LEFT JOIN developers_projects ON users.id = developers_projects.developer_id', - :conditions => 'project_id = 1' - assert_equal 2, entries.size - developer_names = entries.map &:name - assert developer_names.include?('David') - assert developer_names.include?('Jamis') - end - - assert_queries(1) do - expected = entries.to_a - entries = Developer.paginate :page => 1, - :joins => 'LEFT JOIN developers_projects ON users.id = developers_projects.developer_id', - :conditions => 'project_id = 1', :count => { :select => "users.id" } - assert_equal expected, entries.to_a - assert_equal 2, entries.total_entries - end - end - - def test_paginate_with_group - entries = nil - assert_queries(1) do - entries = Developer.paginate :page => 1, :per_page => 10, - :group => 'salary', :select => 'salary', :order => 'salary' - end - - expected = [ users(:david), users(:jamis), users(:dev_10), users(:poor_jamis) ].map(&:salary).sort - assert_equal expected, entries.map(&:salary) - end - - def test_paginate_with_dynamic_finder - expected = [replies(:witty_retort), replies(:spam)] - assert_equal expected, Reply.paginate_by_topic_id(1, :page => 1) - - entries = Developer.paginate :conditions => { :salary => 100000 }, :page => 1, :per_page => 5 - assert_equal 8, entries.total_entries - assert_equal entries, Developer.paginate_by_salary(100000, :page => 1, :per_page => 5) - - # dynamic finder + conditions - entries = Developer.paginate_by_salary(100000, :page => 1, - :conditions => ['id > ?', 6]) - assert_equal 4, entries.total_entries - assert_equal (7..10).to_a, entries.map(&:id) - - assert_raises NoMethodError do - Developer.paginate_by_inexistent_attribute 100000, :page => 1 - end - end - - def test_scoped_paginate - entries 
= Developer.with_poor_ones { Developer.paginate :page => 1 } - - assert_equal 2, entries.size - assert_equal 2, entries.total_entries - end - - ## named_scope ## - - def test_paginate_in_named_scope - entries = Developer.poor.paginate :page => 1, :per_page => 1 - - assert_equal 1, entries.size - assert_equal 2, entries.total_entries - end - - def test_paginate_in_named_scope_on_habtm_association - project = projects(:active_record) - assert_queries(2) do - entries = project.developers.poor.paginate :page => 1, :per_page => 1 - - assert_equal 1, entries.size, 'one developer should be found' - assert_equal 1, entries.total_entries, 'only one developer should be found' - end - end - - def test_paginate_in_named_scope_on_hmt_association - project = projects(:active_record) - expected = [replies(:brave)] - - assert_queries(2) do - entries = project.replies.recent.paginate :page => 1, :per_page => 1 - assert_equal expected, entries - assert_equal 1, entries.total_entries, 'only one reply should be found' - end - end - - def test_paginate_in_named_scope_on_has_many_association - project = projects(:active_record) - expected = [topics(:ar)] - - assert_queries(2) do - entries = project.topics.mentions_activerecord.paginate :page => 1, :per_page => 1 - assert_equal expected, entries - assert_equal 1, entries.total_entries, 'only one topic should be found' - end - end - - ## misc ## - - def test_count_and_total_entries_options_are_mutually_exclusive - e = assert_raise ArgumentError do - Developer.paginate :page => 1, :count => {}, :total_entries => 1 - end - assert_match /exclusive/, e.to_s - end - - def test_readonly - assert_nothing_raised { Developer.paginate :readonly => true, :page => 1 } - end - - # this functionality is temporarily removed - def xtest_pagination_defines_method - pager = "paginate_by_created_at" - assert !User.methods.include?(pager), "User methods should not include `#{pager}` method" - # paginate! - assert 0, User.send(pager, nil, :page => 1).total_entries - # the paging finder should now be defined - assert User.methods.include?(pager), "`#{pager}` method should be defined on User" - end - - # Is this Rails 2.0? Find out by testing find_all which was removed in [6998] - unless ActiveRecord::Base.respond_to? 
:find_all - def test_paginate_array_of_ids - # AR finders also accept arrays of IDs - # (this was broken in Rails before [6912]) - assert_queries(1) do - entries = Developer.paginate((1..8).to_a, :per_page => 3, :page => 2, :order => 'id') - assert_equal (4..6).to_a, entries.map(&:id) - assert_equal 8, entries.total_entries - end - end - end - - uses_mocha 'internals' do - def test_implicit_all_with_dynamic_finders - Topic.expects(:find_all_by_foo).returns([]) - Topic.expects(:count).returns(0) - Topic.paginate_by_foo :page => 2 - end - - def test_guessing_the_total_count - Topic.expects(:find).returns(Array.new(2)) - Topic.expects(:count).never - - entries = Topic.paginate :page => 2, :per_page => 4 - assert_equal 6, entries.total_entries - end - - def test_guessing_that_there_are_no_records - Topic.expects(:find).returns([]) - Topic.expects(:count).never - - entries = Topic.paginate :page => 1, :per_page => 4 - assert_equal 0, entries.total_entries - end - - def test_extra_parameters_stay_untouched - Topic.expects(:find).with(:all, {:foo => 'bar', :limit => 4, :offset => 0 }).returns(Array.new(5)) - Topic.expects(:count).with({:foo => 'bar'}).returns(1) - - Topic.paginate :foo => 'bar', :page => 1, :per_page => 4 - end - - def test_count_skips_select - Developer.stubs(:find).returns([]) - Developer.expects(:count).with({}).returns(0) - Developer.paginate :select => 'salary', :page => 2 - end - - def test_count_select_when_distinct - Developer.stubs(:find).returns([]) - Developer.expects(:count).with(:select => 'DISTINCT salary').returns(0) - Developer.paginate :select => 'DISTINCT salary', :page => 2 - end - - def test_should_use_scoped_finders_if_present - # scope-out compatibility - Topic.expects(:find_best).returns(Array.new(5)) - Topic.expects(:with_best).returns(1) - - Topic.paginate_best :page => 1, :per_page => 4 - end - - def test_paginate_by_sql - assert_respond_to Developer, :paginate_by_sql - Developer.expects(:find_by_sql).with(regexp_matches(/sql LIMIT 3(,| OFFSET) 3/)).returns([]) - Developer.expects(:count_by_sql).with('SELECT COUNT(*) FROM (sql) AS count_table').returns(0) - - entries = Developer.paginate_by_sql 'sql', :page => 2, :per_page => 3 - end - - def test_paginate_by_sql_respects_total_entries_setting - Developer.expects(:find_by_sql).returns([]) - Developer.expects(:count_by_sql).never - - entries = Developer.paginate_by_sql 'sql', :page => 1, :total_entries => 999 - assert_equal 999, entries.total_entries - end - - def test_paginate_by_sql_strips_order_by_when_counting - Developer.expects(:find_by_sql).returns([]) - Developer.expects(:count_by_sql).with("SELECT COUNT(*) FROM (sql\n ) AS count_table").returns(0) - - Developer.paginate_by_sql "sql\n ORDER\nby foo, bar, `baz` ASC", :page => 2 - end - - # TODO: counts are still wrong - def test_ability_to_use_with_custom_finders - # acts_as_taggable defines find_tagged_with(tag, options) - Topic.expects(:find_tagged_with).with('will_paginate', :offset => 5, :limit => 5).returns([]) - Topic.expects(:count).with({}).returns(0) - - Topic.paginate_tagged_with 'will_paginate', :page => 2, :per_page => 5 - end - - def test_array_argument_doesnt_eliminate_count - ids = (1..8).to_a - Developer.expects(:find_all_by_id).returns([]) - Developer.expects(:count).returns(0) - - Developer.paginate_by_id(ids, :per_page => 3, :page => 2, :order => 'id') - end - - def test_paginating_finder_doesnt_mangle_options - Developer.expects(:find).returns([]) - options = { :page => 1 } - options.expects(:delete).never - options_before = 
options.dup - - Developer.paginate(options) - assert_equal options, options_before - end - - def test_paginated_each - collection = stub('collection', :size => 5, :empty? => false, :per_page => 5) - collection.expects(:each).times(2).returns(collection) - last_collection = stub('collection', :size => 4, :empty? => false, :per_page => 5) - last_collection.expects(:each).returns(last_collection) - - params = { :order => 'id', :total_entries => 0 } - - Developer.expects(:paginate).with(params.merge(:page => 2)).returns(collection) - Developer.expects(:paginate).with(params.merge(:page => 3)).returns(collection) - Developer.expects(:paginate).with(params.merge(:page => 4)).returns(last_collection) - - assert_equal 14, Developer.paginated_each(:page => '2') { } - end - end -end diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/admin.rb b/vendor/gems/will_paginate-2.2.2/test/fixtures/admin.rb deleted file mode 100644 index 1d5e7f3..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/admin.rb +++ /dev/null @@ -1,3 +0,0 @@ -class Admin < User - has_many :companies, :finder_sql => 'SELECT * FROM companies' -end diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/developer.rb b/vendor/gems/will_paginate-2.2.2/test/fixtures/developer.rb deleted file mode 100644 index 7105355..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/developer.rb +++ /dev/null @@ -1,13 +0,0 @@ -class Developer < User - has_and_belongs_to_many :projects, :include => :topics, :order => 'projects.name' - - def self.with_poor_ones(&block) - with_scope :find => { :conditions => ['salary <= ?', 80000], :order => 'salary' } do - yield - end - end - - named_scope :poor, :conditions => ['salary <= ?', 80000], :order => 'salary' - - def self.per_page() 10 end -end diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/developers_projects.yml b/vendor/gems/will_paginate-2.2.2/test/fixtures/developers_projects.yml deleted file mode 100644 index cee359c..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/developers_projects.yml +++ /dev/null @@ -1,13 +0,0 @@ -david_action_controller: - developer_id: 1 - project_id: 2 - joined_on: 2004-10-10 - -david_active_record: - developer_id: 1 - project_id: 1 - joined_on: 2004-10-10 - -jamis_active_record: - developer_id: 2 - project_id: 1 \ No newline at end of file diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/project.rb b/vendor/gems/will_paginate-2.2.2/test/fixtures/project.rb deleted file mode 100644 index 0f85ef5..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/project.rb +++ /dev/null @@ -1,15 +0,0 @@ -class Project < ActiveRecord::Base - has_and_belongs_to_many :developers, :uniq => true - - has_many :topics - # :finder_sql => 'SELECT * FROM topics WHERE (topics.project_id = #{id})', - # :counter_sql => 'SELECT COUNT(*) FROM topics WHERE (topics.project_id = #{id})' - - has_many :replies, :through => :topics do - def find_recent(params = {}) - with_scope :find => { :conditions => ['replies.created_at > ?', 15.minutes.ago] } do - find :all, params - end - end - end -end diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/projects.yml b/vendor/gems/will_paginate-2.2.2/test/fixtures/projects.yml deleted file mode 100644 index 74f3c32..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/projects.yml +++ /dev/null @@ -1,6 +0,0 @@ -active_record: - id: 1 - name: Active Record -action_controller: - id: 2 - name: Active Controller diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/replies.yml 
b/vendor/gems/will_paginate-2.2.2/test/fixtures/replies.yml deleted file mode 100644 index 9a83c00..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/replies.yml +++ /dev/null @@ -1,29 +0,0 @@ -witty_retort: - id: 1 - topic_id: 1 - content: Birdman is better! - created_at: <%= 6.hours.ago.to_s(:db) %> - -another: - id: 2 - topic_id: 2 - content: Nuh uh! - created_at: <%= 1.hour.ago.to_s(:db) %> - -spam: - id: 3 - topic_id: 1 - content: Nice site! - created_at: <%= 1.hour.ago.to_s(:db) %> - -decisive: - id: 4 - topic_id: 4 - content: "I'm getting to the bottom of this" - created_at: <%= 30.minutes.ago.to_s(:db) %> - -brave: - id: 5 - topic_id: 4 - content: "AR doesn't scare me a bit" - created_at: <%= 10.minutes.ago.to_s(:db) %> diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/reply.rb b/vendor/gems/will_paginate-2.2.2/test/fixtures/reply.rb deleted file mode 100644 index ecaf3c1..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/reply.rb +++ /dev/null @@ -1,7 +0,0 @@ -class Reply < ActiveRecord::Base - belongs_to :topic, :include => [:replies] - - named_scope :recent, :conditions => ['replies.created_at > ?', 15.minutes.ago] - - validates_presence_of :content -end diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/schema.rb b/vendor/gems/will_paginate-2.2.2/test/fixtures/schema.rb deleted file mode 100644 index 8831aad..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/schema.rb +++ /dev/null @@ -1,38 +0,0 @@ -ActiveRecord::Schema.define do - - create_table "users", :force => true do |t| - t.column "name", :text - t.column "salary", :integer, :default => 70000 - t.column "created_at", :datetime - t.column "updated_at", :datetime - t.column "type", :text - end - - create_table "projects", :force => true do |t| - t.column "name", :text - end - - create_table "developers_projects", :id => false, :force => true do |t| - t.column "developer_id", :integer, :null => false - t.column "project_id", :integer, :null => false - t.column "joined_on", :date - t.column "access_level", :integer, :default => 1 - end - - create_table "topics", :force => true do |t| - t.column "project_id", :integer - t.column "title", :string - t.column "subtitle", :string - t.column "content", :text - t.column "created_at", :datetime - t.column "updated_at", :datetime - end - - create_table "replies", :force => true do |t| - t.column "content", :text - t.column "created_at", :datetime - t.column "updated_at", :datetime - t.column "topic_id", :integer - end - -end diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/topic.rb b/vendor/gems/will_paginate-2.2.2/test/fixtures/topic.rb deleted file mode 100644 index 77be0dd..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/topic.rb +++ /dev/null @@ -1,6 +0,0 @@ -class Topic < ActiveRecord::Base - has_many :replies, :dependent => :destroy, :order => 'replies.created_at DESC' - belongs_to :project - - named_scope :mentions_activerecord, :conditions => ['topics.title LIKE ?', '%ActiveRecord%'] -end diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/topics.yml b/vendor/gems/will_paginate-2.2.2/test/fixtures/topics.yml deleted file mode 100644 index 0a26904..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/topics.yml +++ /dev/null @@ -1,30 +0,0 @@ -futurama: - id: 1 - title: Isnt futurama awesome? - subtitle: It really is, isnt it. 
- content: I like futurama - created_at: <%= 1.day.ago.to_s(:db) %> - updated_at: - -harvey_birdman: - id: 2 - title: Harvey Birdman is the king of all men - subtitle: yup - content: He really is - created_at: <%= 2.hours.ago.to_s(:db) %> - updated_at: - -rails: - id: 3 - project_id: 1 - title: Rails is nice - subtitle: It makes me happy - content: except when I have to hack internals to fix pagination. even then really. - created_at: <%= 20.minutes.ago.to_s(:db) %> - -ar: - id: 4 - project_id: 1 - title: ActiveRecord sometimes freaks me out - content: "I mean, what's the deal with eager loading?" - created_at: <%= 15.minutes.ago.to_s(:db) %> diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/user.rb b/vendor/gems/will_paginate-2.2.2/test/fixtures/user.rb deleted file mode 100644 index 4a57cf0..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/user.rb +++ /dev/null @@ -1,2 +0,0 @@ -class User < ActiveRecord::Base -end diff --git a/vendor/gems/will_paginate-2.2.2/test/fixtures/users.yml b/vendor/gems/will_paginate-2.2.2/test/fixtures/users.yml deleted file mode 100644 index ed2c03a..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/fixtures/users.yml +++ /dev/null @@ -1,35 +0,0 @@ -david: - id: 1 - name: David - salary: 80000 - type: Developer - -jamis: - id: 2 - name: Jamis - salary: 150000 - type: Developer - -<% for digit in 3..10 %> -dev_<%= digit %>: - id: <%= digit %> - name: fixture_<%= digit %> - salary: 100000 - type: Developer -<% end %> - -poor_jamis: - id: 11 - name: Jamis - salary: 9000 - type: Developer - -admin: - id: 12 - name: admin - type: Admin - -goofy: - id: 13 - name: Goofy - type: Admin diff --git a/vendor/gems/will_paginate-2.2.2/test/helper.rb b/vendor/gems/will_paginate-2.2.2/test/helper.rb deleted file mode 100644 index ad52b1b..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/helper.rb +++ /dev/null @@ -1,37 +0,0 @@ -require 'test/unit' -require 'rubygems' - -# gem install redgreen for colored test output -begin require 'redgreen'; rescue LoadError; end - -require 'boot' unless defined?(ActiveRecord) - -class Test::Unit::TestCase - protected - def assert_respond_to_all object, methods - methods.each do |method| - [method.to_s, method.to_sym].each { |m| assert_respond_to object, m } - end - end - - def collect_deprecations - old_behavior = WillPaginate::Deprecation.behavior - deprecations = [] - WillPaginate::Deprecation.behavior = Proc.new do |message, callstack| - deprecations << message - end - result = yield - [result, deprecations] - ensure - WillPaginate::Deprecation.behavior = old_behavior - end -end - -# Wrap tests that use Mocha and skip if unavailable. -def uses_mocha(test_name) - require 'mocha' unless Object.const_defined?(:Mocha) -rescue LoadError => load_error - $stderr.puts "Skipping #{test_name} tests. `gem install mocha` and try again." 
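# Method-level rescue/else: if the `require 'mocha'` above raised LoadError,
# the warning has just been printed and the wrapped tests are skipped;
# otherwise the `else` branch below yields the block so the Mocha-based
# tests get defined.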
-else - yield -end diff --git a/vendor/gems/will_paginate-2.2.2/test/lib/activerecord_test_case.rb b/vendor/gems/will_paginate-2.2.2/test/lib/activerecord_test_case.rb deleted file mode 100644 index 8f66ebe..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/lib/activerecord_test_case.rb +++ /dev/null @@ -1,36 +0,0 @@ -require 'lib/activerecord_test_connector' - -class ActiveRecordTestCase < Test::Unit::TestCase - # Set our fixture path - if ActiveRecordTestConnector.able_to_connect - self.fixture_path = File.join(File.dirname(__FILE__), '..', 'fixtures') - self.use_transactional_fixtures = true - end - - def self.fixtures(*args) - super if ActiveRecordTestConnector.connected - end - - def run(*args) - super if ActiveRecordTestConnector.connected - end - - # Default so Test::Unit::TestCase doesn't complain - def test_truth - end - - protected - - def assert_queries(num = 1) - $query_count = 0 - yield - ensure - assert_equal num, $query_count, "#{$query_count} instead of #{num} queries were executed." - end - - def assert_no_queries(&block) - assert_queries(0, &block) - end -end - -ActiveRecordTestConnector.setup diff --git a/vendor/gems/will_paginate-2.2.2/test/lib/activerecord_test_connector.rb b/vendor/gems/will_paginate-2.2.2/test/lib/activerecord_test_connector.rb deleted file mode 100644 index 0decd8a..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/lib/activerecord_test_connector.rb +++ /dev/null @@ -1,69 +0,0 @@ -require 'active_record' -require 'active_record/version' -require 'active_record/fixtures' - -class ActiveRecordTestConnector - cattr_accessor :able_to_connect - cattr_accessor :connected - - FIXTURES_PATH = File.join(File.dirname(__FILE__), '..', 'fixtures') - - # Set our defaults - self.connected = false - self.able_to_connect = true - - def self.setup - unless self.connected || !self.able_to_connect - setup_connection - load_schema - Dependencies.load_paths.unshift FIXTURES_PATH - self.connected = true - end - rescue Exception => e # errors from ActiveRecord setup - $stderr.puts "\nSkipping ActiveRecord tests: #{e}" - $stderr.puts "Install SQLite3 to run the full test suite for will_paginate.\n\n" - self.able_to_connect = false - end - - private - - def self.setup_connection - db = ENV['DB'].blank?? 'sqlite3' : ENV['DB'] - - configurations = YAML.load_file(File.join(File.dirname(__FILE__), '..', 'database.yml')) - raise "no configuration for '#{db}'" unless configurations.key? db - configuration = configurations[db] - - ActiveRecord::Base.logger = Logger.new(STDOUT) if $0 == 'irb' - puts "using #{configuration['adapter']} adapter" unless ENV['DB'].blank? - - ActiveRecord::Base.establish_connection(configuration) - ActiveRecord::Base.configurations = { db => configuration } - prepare ActiveRecord::Base.connection - - unless Object.const_defined?(:QUOTED_TYPE) - Object.send :const_set, :QUOTED_TYPE, ActiveRecord::Base.connection.quote_column_name('type') - end - end - - def self.load_schema - ActiveRecord::Base.silence do - ActiveRecord::Migration.verbose = false - load File.join(FIXTURES_PATH, 'schema.rb') - end - end - - def self.prepare(conn) - class << conn - IGNORED_SQL = [/^PRAGMA/, /^SELECT currval/, /^SELECT CAST/, /^SELECT @@IDENTITY/, /^SELECT @@ROWCOUNT/, /^SHOW FIELDS /] - - def execute_with_counting(sql, name = nil, &block) - $query_count ||= 0 - $query_count += 1 unless IGNORED_SQL.any? 
{ |r| sql =~ r } - execute_without_counting(sql, name, &block) - end - - alias_method_chain :execute, :counting - end - end -end diff --git a/vendor/gems/will_paginate-2.2.2/test/lib/load_fixtures.rb b/vendor/gems/will_paginate-2.2.2/test/lib/load_fixtures.rb deleted file mode 100644 index 10d6f42..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/lib/load_fixtures.rb +++ /dev/null @@ -1,11 +0,0 @@ -require 'boot' -require 'lib/activerecord_test_connector' - -# setup the connection -ActiveRecordTestConnector.setup - -# load all fixtures -Fixtures.create_fixtures(ActiveRecordTestConnector::FIXTURES_PATH, ActiveRecord::Base.connection.tables) - -require 'will_paginate' -WillPaginate.enable_activerecord diff --git a/vendor/gems/will_paginate-2.2.2/test/lib/view_test_process.rb b/vendor/gems/will_paginate-2.2.2/test/lib/view_test_process.rb deleted file mode 100644 index d117d9f..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/lib/view_test_process.rb +++ /dev/null @@ -1,73 +0,0 @@ -require 'action_controller' -require 'action_controller/test_process' - -require 'will_paginate' -WillPaginate.enable_actionpack - -ActionController::Routing::Routes.draw do |map| - map.connect 'dummy/page/:page', :controller => 'dummy' - map.connect ':controller/:action/:id' -end - -ActionController::Base.perform_caching = false - -class DummyRequest - attr_accessor :symbolized_path_parameters - - def initialize - @get = true - @params = {} - @symbolized_path_parameters = { :controller => 'foo', :action => 'bar' } - end - - def get? - @get - end - - def post - @get = false - end - - def relative_url_root - '' - end - - def params(more = nil) - @params.update(more) if more - @params - end -end - -class DummyController - attr_reader :request - attr_accessor :controller_name - - def initialize - @request = DummyRequest.new - @url = ActionController::UrlRewriter.new(@request, @request.params) - end - - def url_for(params) - @url.rewrite(params) - end -end - -module HTML - Node.class_eval do - def inner_text - children.map(&:inner_text).join('') - end - end - - Text.class_eval do - def inner_text - self.to_s - end - end - - Tag.class_eval do - def inner_text - childless?? 
'' : super - end - end -end diff --git a/vendor/gems/will_paginate-2.2.2/test/view_test.rb b/vendor/gems/will_paginate-2.2.2/test/view_test.rb deleted file mode 100644 index 5686194..0000000 --- a/vendor/gems/will_paginate-2.2.2/test/view_test.rb +++ /dev/null @@ -1,344 +0,0 @@ -require 'helper' -require 'action_controller' -require 'lib/view_test_process' - -class ViewTest < Test::Unit::TestCase - - def setup - super - @controller = DummyController.new - @request = @controller.request - @html_result = nil - @template = '<%= will_paginate collection, options %>' - - @view = ActionView::Base.new - @view.assigns['controller'] = @controller - @view.assigns['_request'] = @request - @view.assigns['_params'] = @request.params - end - - ## basic pagination ## - - def test_will_paginate - paginate do |pagination| - assert_select 'a[href]', 3 do |elements| - validate_page_numbers [2,3,2], elements - assert_select elements.last, ':last-child', "Next »" - end - assert_select 'span', 2 - assert_select 'span.disabled:first-child', '« Previous' - assert_select 'span.current', '1' - assert_equal '« Previous 1 2 3 Next »', pagination.first.inner_text - end - end - - def test_no_pagination_when_page_count_is_one - paginate :per_page => 30 - assert_equal '', @html_result - end - - def test_will_paginate_with_options - paginate({ :page => 2 }, - :class => 'will_paginate', :prev_label => 'Prev', :next_label => 'Next') do - assert_select 'a[href]', 4 do |elements| - validate_page_numbers [1,1,3,3], elements - # test rel attribute values: - assert_select elements[1], 'a', '1' do |link| - assert_equal 'prev start', link.first['rel'] - end - assert_select elements.first, 'a', "Prev" do |link| - assert_equal 'prev start', link.first['rel'] - end - assert_select elements.last, 'a', "Next" do |link| - assert_equal 'next', link.first['rel'] - end - end - assert_select 'span.current', '2' - end - end - - def test_prev_next_links_have_classnames - paginate do |pagination| - assert_select 'span.disabled.prev_page:first-child' - assert_select 'a.next_page[href]:last-child' - end - end - - def test_full_output - paginate - expected = <<-HTML - - HTML - expected.strip!.gsub!(/\s{2,}/, ' ') - - assert_dom_equal expected, @html_result - end - - ## advanced options for pagination ## - - def test_will_paginate_without_container - paginate({}, :container => false) - assert_select 'div.pagination', 0, 'main DIV present when it shouldn\'t' - assert_select 'a[href]', 3 - end - - def test_will_paginate_without_page_links - paginate({ :page => 2 }, :page_links => false) do - assert_select 'a[href]', 2 do |elements| - validate_page_numbers [1,3], elements - end - end - end - - def test_will_paginate_windows - paginate({ :page => 6, :per_page => 1 }, :inner_window => 1) do |pagination| - assert_select 'a[href]', 8 do |elements| - validate_page_numbers [5,1,2,5,7,10,11,7], elements - assert_select elements.first, 'a', '« Previous' - assert_select elements.last, 'a', 'Next »' - end - assert_select 'span.current', '6' - assert_equal '« Previous 1 2 … 5 6 7 … 10 11 Next »', pagination.first.inner_text - end - end - - def test_will_paginate_eliminates_small_gaps - paginate({ :page => 6, :per_page => 1 }, :inner_window => 2) do - assert_select 'a[href]', 12 do |elements| - validate_page_numbers [5,1,2,3,4,5,7,8,9,10,11,7], elements - end - end - end - - def test_container_id - paginate do |div| - assert_nil div.first['id'] - end - - # magic ID - paginate({}, :id => true) do |div| - assert_equal 'fixnums_pagination', div.first['id'] - end - 
- # explicit ID - paginate({}, :id => 'custom_id') do |div| - assert_equal 'custom_id', div.first['id'] - end - end - - ## other helpers ## - - def test_paginated_section - @template = <<-ERB - <% paginated_section collection, options do %> - <%= content_tag :div, '', :id => "developers" %> - <% end %> - ERB - - paginate - assert_select 'div.pagination', 2 - assert_select 'div.pagination + div#developers', 1 - end - - def test_page_entries_info - @template = '<%= page_entries_info collection %>' - array = ('a'..'z').to_a - - paginate array.paginate(:page => 2, :per_page => 5) - assert_equal %{Displaying entries 6 - 10 of 26 in total}, - @html_result - - paginate array.paginate(:page => 7, :per_page => 4) - assert_equal %{Displaying entries 25 - 26 of 26 in total}, - @html_result - end - - def test_page_entries_info_with_single_page_collection - @template = '<%= page_entries_info collection %>' - - paginate(('a'..'d').to_a.paginate(:page => 1, :per_page => 5)) - assert_equal %{Displaying all 4 entries}, @html_result - - paginate(['a'].paginate(:page => 1, :per_page => 5)) - assert_equal %{Displaying 1 entry}, @html_result - - paginate([].paginate(:page => 1, :per_page => 5)) - assert_equal %{No entries found}, @html_result - end - - ## parameter handling in page links ## - - def test_will_paginate_preserves_parameters_on_get - @request.params :foo => { :bar => 'baz' } - paginate - assert_links_match /foo%5Bbar%5D=baz/ - end - - def test_will_paginate_doesnt_preserve_parameters_on_post - @request.post - @request.params :foo => 'bar' - paginate - assert_no_links_match /foo=bar/ - end - - def test_adding_additional_parameters - paginate({}, :params => { :foo => 'bar' }) - assert_links_match /foo=bar/ - end - - def test_removing_arbitrary_parameters - @request.params :foo => 'bar' - paginate({}, :params => { :foo => nil }) - assert_no_links_match /foo=bar/ - end - - def test_adding_additional_route_parameters - paginate({}, :params => { :controller => 'baz', :action => 'list' }) - assert_links_match %r{\Wbaz/list\W} - end - - def test_will_paginate_with_custom_page_param - paginate({ :page => 2 }, :param_name => :developers_page) do - assert_select 'a[href]', 4 do |elements| - validate_page_numbers [1,1,3,3], elements, :developers_page - end - end - end - - def test_complex_custom_page_param - @request.params :developers => { :page => 2 } - - paginate({ :page => 2 }, :param_name => 'developers[page]') do - assert_select 'a[href]', 4 do |links| - assert_links_match /\?developers%5Bpage%5D=\d+$/, links - validate_page_numbers [1,1,3,3], links, 'developers[page]' - end - end - end - - def test_custom_routing_page_param - @request.symbolized_path_parameters.update :controller => 'dummy', :action => nil - paginate :per_page => 2 do - assert_select 'a[href]', 6 do |links| - assert_links_match %r{/page/(\d+)$}, links, [2, 3, 4, 5, 6, 2] - end - end - end - - ## internal hardcore stuff ## - - class LegacyCollection < WillPaginate::Collection - alias :page_count :total_pages - undef :total_pages - end - - def test_deprecation_notices_with_page_count - collection = LegacyCollection.new(1, 1, 2) - - assert_deprecated collection.class.name do - paginate collection - end - end - - uses_mocha 'view internals' do - def test_collection_name_can_be_guessed - collection = mock - collection.expects(:total_pages).returns(1) - - @template = '<%= will_paginate options %>' - @controller.controller_name = 'developers' - @view.assigns['developers'] = collection - - paginate(nil) - end - end - - def 
test_inferred_collection_name_raises_error_when_nil - @template = '<%= will_paginate options %>' - @controller.controller_name = 'developers' - - e = assert_raise ArgumentError do - paginate(nil) - end - assert e.message.include?('@developers') - end - - if ActionController::Base.respond_to? :rescue_responses - # only on Rails 2 - def test_rescue_response_hook_presence - assert_equal :not_found, - ActionController::Base.rescue_responses['WillPaginate::InvalidPage'] - end - end - - - protected - - def paginate(collection = {}, options = {}, &block) - if collection.instance_of? Hash - page_options = { :page => 1, :total_entries => 11, :per_page => 4 }.merge(collection) - collection = [1].paginate(page_options) - end - - locals = { :collection => collection, :options => options } - - if defined? ActionView::Template - # Rails 2.1 - args = [ ActionView::Template.new(@view, @template, false, locals, true, nil) ] - else - # older Rails versions - args = [nil, @template, nil, locals] - end - - @html_result = @view.render_template(*args) - @html_document = HTML::Document.new(@html_result, true, false) - - if block_given? - classname = options[:class] || WillPaginate::ViewHelpers.pagination_options[:class] - assert_select("div.#{classname}", 1, 'no main DIV', &block) - end - end - - def response_from_page_or_rjs - @html_document.root - end - - def validate_page_numbers expected, links, param_name = :page - param_pattern = /\W#{CGI.escape(param_name.to_s)}=([^&]*)/ - - assert_equal(expected, links.map { |e| - e['href'] =~ param_pattern - $1 ? $1.to_i : $1 - }) - end - - def assert_links_match pattern, links = nil, numbers = nil - links ||= assert_select 'div.pagination a[href]' do |elements| - elements - end - - pages = [] if numbers - - links.each do |el| - assert_match pattern, el['href'] - if numbers - el['href'] =~ pattern - pages << $1.to_i - end - end - - assert_equal pages, numbers, "page numbers don't match" if numbers - end - - def assert_no_links_match pattern - assert_select 'div.pagination a[href]' do |elements| - elements.each do |el| - assert_no_match pattern, el['href'] - end - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/.specification b/vendor/gems/youtube-g-0.4.9.9/.specification deleted file mode 100644 index f9afae2..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/.specification +++ /dev/null @@ -1,91 +0,0 @@ ---- !ruby/object:Gem::Specification -name: youtube-g -version: !ruby/object:Gem::Version - version: 0.4.9.9 -platform: ruby -authors: -- Shane Vitarana -- Walter Korman -- Aman Gupta -- Filip H.F. 
Slagter -autorequire: -bindir: bin -cert_chain: [] - -date: 2008-09-01 00:00:00 -04:00 -default_executable: -dependencies: [] - -description: An object-oriented Ruby wrapper for the YouTube GData API -email: ruby-youtube-library@googlegroups.com -executables: [] - -extensions: [] - -extra_rdoc_files: -- History.txt -- README.txt -files: -- History.txt -- lib/youtube_g/client.rb -- lib/youtube_g/logger.rb -- lib/youtube_g/model/author.rb -- lib/youtube_g/model/category.rb -- lib/youtube_g/model/contact.rb -- lib/youtube_g/model/content.rb -- lib/youtube_g/model/playlist.rb -- lib/youtube_g/model/rating.rb -- lib/youtube_g/model/thumbnail.rb -- lib/youtube_g/model/user.rb -- lib/youtube_g/model/video.rb -- lib/youtube_g/parser.rb -- lib/youtube_g/record.rb -- lib/youtube_g/request/base_search.rb -- lib/youtube_g/request/standard_search.rb -- lib/youtube_g/request/user_search.rb -- lib/youtube_g/request/video_search.rb -- lib/youtube_g/request/video_upload.rb -- lib/youtube_g/response/video_search.rb -- lib/youtube_g.rb -- Manifest.txt -- README.txt -- test/test_client.rb -- test/test_video.rb -- test/test_video_search.rb -- TODO.txt -- youtube-g.gemspec -has_rdoc: true -homepage: http://youtube-g.rubyforge.org/ -licenses: [] - -post_install_message: -rdoc_options: -- --main -- README.txt -require_paths: -- bin -- bin -- lib -required_ruby_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -required_rubygems_version: !ruby/object:Gem::Requirement - requirements: - - - ">=" - - !ruby/object:Gem::Version - version: "0" - version: -requirements: [] - -rubyforge_project: -rubygems_version: 1.3.5 -signing_key: -specification_version: 2 -summary: An object-oriented Ruby wrapper for the YouTube GData API -test_files: -- test/test_client.rb -- test/test_video.rb -- test/test_video_search.rb diff --git a/vendor/gems/youtube-g-0.4.9.9/History.txt b/vendor/gems/youtube-g-0.4.9.9/History.txt deleted file mode 100644 index 3aa169c..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/History.txt +++ /dev/null @@ -1,41 +0,0 @@ -== 0.4.9.9 / 2008-09-01 - -* Add Geodata information (thanks Jose Galisteo) -* Added :page and :per_page options, this allows easier usage of the will_paginate - plugin with the library. The :offset and :max_results options are no longer available. [Daniel Insley] -* Added ability to get video responses on the instances of the YouTube::Model::Video object. [Daniel Insley] -* Added and improved the existing documentation [Daniel Insley] -* Fixed usage of deprecated yt:racy, now using media:rating [Daniel Insley] -* Renamed can_embed? method to embeddable? [Daniel Insley] -* Added ability for padingation and ordering on standard feeds. [Daniel Insley] -* Add error-handling for video upload errors. [FiXato] -* Add error-handling for authentication errors from YouTube during video upload. [FiXato] -* Add support for making videos private upon video upload. [FiXato] -* Fix issue with REXML parsing of video upload response. [FiXato] -* Fix issue with response code comparison. [FiXato] -* Authcode is now retrieved for video uploads. 
[FiXato] -* Add basic support for uploading videos [thanks Joe Damato] -* Add basic support for related videos [tmm1] -* Improve docs for order_by attribute [thanks Jason Arora] -* Added support for the "racy" parameter (choices are "include" or "exclude") [thanks Jason Arora] -* Add missing attribute reader for description [tmm1] -* Fix issue with missing yt:statistics and viewCount [tmm1] -* Allow Client#video_by to take either a url or a video id [tmm1] - -== 0.4.1 / 2008-02-11 - -* Added 3GPP video format [shane] -* Fixed tests [shane] - -== 0.4.0 / 2007-12-18 - -* Fixed API projection in search URL [Pete Higgins] -* Fixed embeddable video searching [Pete Higgins] -* Fixed video embeddable detection [Pete Higgins] -* Fixed unique id hyphen detection [Pete Higgins, Chris Taggart] - -== 0.3.0 / 2007-09-17 - -* Initial public release - * Birthday! - diff --git a/vendor/gems/youtube-g-0.4.9.9/Manifest.txt b/vendor/gems/youtube-g-0.4.9.9/Manifest.txt deleted file mode 100644 index 6bea4f0..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/Manifest.txt +++ /dev/null @@ -1,28 +0,0 @@ -History.txt -Manifest.txt -README.txt -Rakefile -TODO.txt -lib/youtube_g.rb -lib/youtube_g/client.rb -lib/youtube_g/logger.rb -lib/youtube_g/model/author.rb -lib/youtube_g/model/category.rb -lib/youtube_g/model/contact.rb -lib/youtube_g/model/content.rb -lib/youtube_g/model/playlist.rb -lib/youtube_g/model/rating.rb -lib/youtube_g/model/thumbnail.rb -lib/youtube_g/model/user.rb -lib/youtube_g/model/video.rb -lib/youtube_g/parser.rb -lib/youtube_g/record.rb -lib/youtube_g/request/base_search.rb -lib/youtube_g/request/standard_search.rb -lib/youtube_g/request/user_search.rb -lib/youtube_g/request/video_search.rb -lib/youtube_g/request/video_upload.rb -lib/youtube_g/response/video_search.rb -test/test_client.rb -test/test_video.rb -test/test_video_search.rb diff --git a/vendor/gems/youtube-g-0.4.9.9/README.txt b/vendor/gems/youtube-g-0.4.9.9/README.txt deleted file mode 100644 index ef749f9..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/README.txt +++ /dev/null @@ -1,83 +0,0 @@ -youtube-g - by Shane Vitarana and Walter Korman - - Rubyforge: http://rubyforge.org/projects/youtube-g/ - RDoc: http://youtube-g.rubyforge.org/ - Google Group: http://groups.google.com/group/ruby-youtube-library - -== DESCRIPTION: - -youtube-g is a pure Ruby client for the YouTube GData API. It provides an easy -way to access the latest YouTube video search results from your own programs. -In comparison with the earlier Youtube search interfaces, this new API and -library offers much-improved flexibility around executing complex search -queries to obtain well-targeted video search results. - -More detail on the underlying source Google-provided API is available at: - -http://code.google.com/apis/youtube/overview.html - -== FEATURES/PROBLEMS: - -* Aims to be in parity with Google's YouTube GData API. Core functionality - is currently present -- work is in progress to fill in the rest. 
- -== SYNOPSIS: - -Create a client: - - require 'youtube_g' - client = YouTubeG::Client.new - -Basic queries: - - client.videos_by(:query => "penguin") - client.videos_by(:query => "penguin", :page => 2, :per_page => 15) - client.videos_by(:tags => ['tiger', 'leopard']) - client.videos_by(:categories => [:news, :sports]) - client.videos_by(:categories => [:news, :sports], :tags => ['soccer', 'football']) - client.videos_by(:user => 'liz') - -Standard feeds: - - client.videos_by(:most_viewed) - client.videos_by(:most_linked, :page => 3) - client.videos_by(:top_rated, :time => :today) - -Advanced queries (with boolean operators OR (either), AND (include), NOT (exclude)): - - client.videos_by(:categories => { :either => [:news, :sports], :exclude => [:comedy] }, :tags => { :include => ['football'], :exclude => ['soccer'] }) - - -== REQUIREMENTS: - -* None - -== INSTALL: - -* sudo gem install youtube-g - -== LICENSE: - -MIT License - -Copyright (c) 2007 Shane Vitarana and Walter Korman - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/gems/youtube-g-0.4.9.9/TODO.txt b/vendor/gems/youtube-g-0.4.9.9/TODO.txt deleted file mode 100644 index 86f15c6..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/TODO.txt +++ /dev/null @@ -1,16 +0,0 @@ -[ ] stub out http request/response cycle for tests -[ ] allow specifying values as single items where you don't need to wrap in a list, e.g. 
:tags => :chickens instead of :tags => [ 'chickens' ] -[ ] make sure symbols will work as well as tags everywhere (again, :tags => :chickens is same as :tags => 'chickens') -[ ] figure out better structure for class/file (either rename request/video_search.rb or split into one class per file again) -[ ] restore spaces after method def names -[ ] use a proxy for testing with static sample result xml so we have repeatable tests -[ ] Clean up tests using Shoulda to define contexts -[ ] Allow :category and :categories for query DSL -[ ] Exception handling - -== API Features TODO - -[ ] Profile feed parsing -[ ] Playlist feeds -[ ] User subscriptions -[ ] Video comments \ No newline at end of file diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g.rb deleted file mode 100644 index dd9024f..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g.rb +++ /dev/null @@ -1,29 +0,0 @@ -require 'logger' -require 'open-uri' -require 'net/https' -require 'digest/md5' -require 'rexml/document' -require 'cgi' - -require File.dirname(__FILE__) + '/youtube_g/client' -require File.dirname(__FILE__) + '/youtube_g/record' -require File.dirname(__FILE__) + '/youtube_g/parser' -require File.dirname(__FILE__) + '/youtube_g/model/author' -require File.dirname(__FILE__) + '/youtube_g/model/category' -require File.dirname(__FILE__) + '/youtube_g/model/contact' -require File.dirname(__FILE__) + '/youtube_g/model/content' -require File.dirname(__FILE__) + '/youtube_g/model/playlist' -require File.dirname(__FILE__) + '/youtube_g/model/rating' -require File.dirname(__FILE__) + '/youtube_g/model/thumbnail' -require File.dirname(__FILE__) + '/youtube_g/model/user' -require File.dirname(__FILE__) + '/youtube_g/model/video' -require File.dirname(__FILE__) + '/youtube_g/request/base_search' -require File.dirname(__FILE__) + '/youtube_g/request/user_search' -require File.dirname(__FILE__) + '/youtube_g/request/standard_search' -require File.dirname(__FILE__) + '/youtube_g/request/video_upload' -require File.dirname(__FILE__) + '/youtube_g/request/video_search' -require File.dirname(__FILE__) + '/youtube_g/response/video_search' - -class YouTubeG #:nodoc: - VERSION = '0.4.9.9' -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/client.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/client.rb deleted file mode 100644 index c1b7e5e..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/client.rb +++ /dev/null @@ -1,85 +0,0 @@ -class YouTubeG - class Client - attr_accessor :logger - - def initialize(logger=false) - @logger = Logger.new(STDOUT) if logger - end - - # Retrieves an array of standard feed, custom query, or user videos. - # - # === Parameters - # If fetching videos for a standard feed: - # params:: Accepts a symbol of :top_rated, :top_favorites, :most_viewed, - # :most_popular, :most_recent, :most_discussed, :most_linked, - # :most_responded, :recently_featured, and :watch_on_mobile. - # - # You can find out more specific information about what each standard feed provides - # by visiting: http://code.google.com/apis/youtube/reference.html#Standard_feeds - # - # options (optional):: Accepts the options of :time, :page (default is 1), - # and :per_page (default is 25). :offset and :max_results - # can also be passed for a custom offset. 
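# A short usage sketch (taken from the gem's README synopsis above, shown here
# for illustration only):
#
#   client = YouTubeG::Client.new
#   client.videos_by(:most_viewed)                                     # standard feed
#   client.videos_by(:top_rated, :time => :today)
#   client.videos_by(:query => "penguin", :page => 2, :per_page => 15)
#
# :page and :per_page are translated into the GData start-index/max-results
# parameters via calculate_offset below, i.e. start-index = (page - 1) * per_page + 1.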
- # - # If fetching videos by tags, categories, query: - # params:: Accepts the keys :tags, :categories, :query, :order_by, - # :author, :racy, :response_format, :video_format, :page (default is 1), - # and :per_page(default is 25) - # - # options:: Not used. (Optional) - # - # If fetching videos for a particular user: - # params:: Key of :user with a value of the username. - # options:: Not used. (Optional) - # === Returns - # YouTubeG::Response::VideoSearch - def videos_by(params, options={}) - request_params = params.respond_to?(:to_hash) ? params : options - request_params[:page] = integer_or_default(request_params[:page], 1) - - unless request_params[:max_results] - request_params[:max_results] = integer_or_default(request_params[:per_page], 25) - end - - unless request_params[:offset] - request_params[:offset] = calculate_offset(request_params[:page], request_params[:max_results] ) - end - - if params.respond_to?(:to_hash) and not params[:user] - request = YouTubeG::Request::VideoSearch.new(request_params) - elsif (params.respond_to?(:to_hash) && params[:user]) || (params == :favorites) - request = YouTubeG::Request::UserSearch.new(request_params, options) - else - request = YouTubeG::Request::StandardSearch.new(params, request_params) - end - - logger.debug "Submitting request [url=#{request.url}]." if logger - parser = YouTubeG::Parser::VideosFeedParser.new(request.url) - parser.parse - end - - # Retrieves a single YouTube video. - # - # === Parameters - # vid:: The ID or URL of the video that you'd like to retrieve. - # - # === Returns - # YouTubeG::Model::Video - def video_by(vid) - video_id = vid =~ /^http/ ? vid : "http://gdata.youtube.com/feeds/videos/#{vid}" - parser = YouTubeG::Parser::VideoFeedParser.new(video_id) - parser.parse - end - - private - - def calculate_offset(page, per_page) - page == 1 ? 1 : ((per_page * page) - per_page + 1) - end - - def integer_or_default(value, default) - value = value.to_i - value > 0 ? value : default - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/logger.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/logger.rb deleted file mode 100644 index ea941d6..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/logger.rb +++ /dev/null @@ -1,25 +0,0 @@ -class YouTubeG - - # TODO: Why is this needed? Does this happen if running standalone w/o Rails? - # Anyway, isn't it easier to debug w/o the really long timestamp & log level? - # How often do you look at the timestamp and log level? Wouldn't it be nice to - # see your logger output first? - - # Extension of the base ruby Logger class to restore the default log - # level and timestamp formatting which is so rudely taken forcibly - # away from us by the Rails app's use of the ActiveSupport library - # that wholesale-ly modifies the Logger's format_message method. 
- # - class Logger < ::Logger - private - begin - # restore original log formatting to un-screw the screwage that is - # foisted upon us by the activesupport library's clean_logger.rb - alias format_message old_format_message - - rescue NameError - # nothing for now -- this means we didn't need to alias since the - # method wasn't overridden - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/author.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/author.rb deleted file mode 100644 index d811dcc..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/author.rb +++ /dev/null @@ -1,11 +0,0 @@ -class YouTubeG - module Model - class Author < YouTubeG::Record - # *String*: Author's YouTube username. - attr_reader :name - - # *String*: Feed URL of the author. - attr_reader :uri - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/category.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/category.rb deleted file mode 100644 index caed4b1..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/category.rb +++ /dev/null @@ -1,11 +0,0 @@ -class YouTubeG - module Model - class Category < YouTubeG::Record - # *String*:: Name of the YouTube category - attr_reader :label - - # *String*:: Identifies the type of item described. - attr_reader :term - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/contact.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/contact.rb deleted file mode 100644 index 11fedbb..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/contact.rb +++ /dev/null @@ -1,16 +0,0 @@ -class YouTubeG - module Model - class Contact < YouTubeG::Record - # *String*:: Identifies the status of a contact. - # - # * The tag's value will be accepted if the authenticated user and the contact have marked each other as friends. - # * The tag's value will be requested if the contact has asked to be added to the authenticated user's contact list, but the request has not yet been accepted (or rejected). - # * The tag's value will be pending if the authenticated user has asked to be added to the contact's contact list, but the request has not yet been accepted or rejected. - # - attr_reader :status - - # *String*:: The Youtube username of the contact. - attr_reader :username - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/content.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/content.rb deleted file mode 100644 index d9de833..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/content.rb +++ /dev/null @@ -1,18 +0,0 @@ -class YouTubeG - module Model - class Content < YouTubeG::Record - # *Boolean*:: Description of the video. - attr_reader :default - # *Fixnum*:: Length of the video in seconds. - attr_reader :duration - # YouTubeG::Model::Video::Format:: Specifies the video format of the video object - attr_reader :format - # *String*:: Specifies the MIME type of the media object. - attr_reader :mime_type - # *String*:: Specifies the URL for the media object. - attr_reader :url - - alias :is_default? :default - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/playlist.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/playlist.rb deleted file mode 100644 index 8f43c5c..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/playlist.rb +++ /dev/null @@ -1,8 +0,0 @@ -class YouTubeG - module Model - class Playlist < YouTubeG::Record - # *String*:: User entered description for the playlist. 
- attr_reader :description - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/rating.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/rating.rb deleted file mode 100644 index c8381c3..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/rating.rb +++ /dev/null @@ -1,17 +0,0 @@ -class YouTubeG - module Model - class Rating < YouTubeG::Record - # *Float*:: Average rating given to the video - attr_reader :average - - # *Fixnum*:: Maximum rating that can be assigned to the video - attr_reader :max - - # *Fixnum*:: Minimum rating that can be assigned to the video - attr_reader :min - - # *Fixnum*:: Indicates how many people have rated the video - attr_reader :rater_count - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/thumbnail.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/thumbnail.rb deleted file mode 100644 index c05c3c8..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/thumbnail.rb +++ /dev/null @@ -1,17 +0,0 @@ -class YouTubeG - module Model - class Thumbnail < YouTubeG::Record - # *String*:: URL for the thumbnail image. - attr_reader :url - - # *Fixnum*:: Height of the thumbnail image. - attr_reader :height - - # *Fixnum*:: Width of the thumbnail image. - attr_reader :width - - # *String*:: Specifies the time offset at which the frame shown in the thumbnail image appears in the video. - attr_reader :time - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/user.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/user.rb deleted file mode 100644 index e9a1fd8..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/user.rb +++ /dev/null @@ -1,20 +0,0 @@ -class YouTubeG - module Model - class User < YouTubeG::Record - attr_reader :age - attr_reader :books - attr_reader :company - attr_reader :gender - attr_reader :hobbies - attr_reader :hometown - attr_reader :location - attr_reader :movies - attr_reader :music - attr_reader :occupation - attr_reader :relationship - attr_reader :school - attr_reader :description - attr_reader :username - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/video.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/video.rb deleted file mode 100644 index f475cfa..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/model/video.rb +++ /dev/null @@ -1,190 +0,0 @@ -# TODO -# * self atom feed -# * alternate youtube watch url -# * comments feedLink - -class YouTubeG - module Model - class Video < YouTubeG::Record - # Describes the various file formats in which a Youtube video may be - # made available and allows looking them up by format code number. - class Format - @@formats = Hash.new - - # Instantiates a new video format object. - # - # == Parameters - # :format_code:: The Youtube Format code of the object. - # :name:: The name of the format - # - # == Returns - # YouTubeG::Model::Video::Format: Video format object - def initialize(format_code, name) - @format_code = format_code - @name = name - - @@formats[format_code] = self - end - - # Allows you to get the video format for a specific format code. - # - # A full list of format codes is available at: - # - # http://code.google.com/apis/youtube/reference.html#youtube_data_api_tag_media:content - # - # == Parameters - # :format_code:: The Youtube Format code of the object. 
- # - # == Returns - # YouTubeG::Model::Video::Format: Video format object - def self.by_code(format_code) - @@formats[format_code] - end - - # Flash format on YouTube site. All videos are available in this format. - FLASH = YouTubeG::Model::Video::Format.new(0, :flash) - - # RTSP streaming URL for mobile video playback. H.263 video (176x144) and AMR audio. - RTSP = YouTubeG::Model::Video::Format.new(1, :rtsp) - - # HTTP URL to the embeddable player (SWF) for this video. This format - # is not available for a video that is not embeddable. - SWF = YouTubeG::Model::Video::Format.new(5, :swf) - - # RTSP streaming URL for mobile video playback. MPEG-4 SP video (up to 176x144) and AAC audio. - THREE_GPP = YouTubeG::Model::Video::Format.new(6, :three_gpp) - end - - # *Fixnum*:: Duration of a video in seconds. - attr_reader :duration - - # *Boolean*:: Specifies that a video may or may not be embedded on other websites. - attr_reader :noembed - - # *Fixnum*:: Specifies the order in which the video appears in a playlist. - attr_reader :position - - # *Boolean*:: Specifies that a video is flagged as adult or not. - attr_reader :racy - - # *String*: Specifies a URI that uniquely and permanently identifies the video. - attr_reader :video_id - - # *Time*:: When the video was published on Youtube. - attr_reader :published_at - - # *Time*:: When the video's data was last updated. - attr_reader :updated_at - - # *Array*:: A array of YouTubeG::Model::Category objects that describe the videos categories. - attr_reader :categories - - # *Array*:: An array of words associated with the video. - attr_reader :keywords - - # *String*:: Description of the video. - attr_reader :description - - # *String*:: Title for the video. - attr_reader :title - - # *String*:: Description of the video. - attr_reader :html_content - - # YouTubeG::Model::Author:: Information about the YouTube user who owns a piece of video content. - attr_reader :author - - # *Array*:: An array of YouTubeG::Model::Content objects describing the individual media content data available for this video. Most, but not all, videos offer this. - attr_reader :media_content - - # *Array*:: An array of YouTubeG::Model::Thumbnail objects that contain information regarding the videos thumbnail images. - attr_reader :thumbnails - - # *String*:: The link to watch the URL on YouTubes website. - attr_reader :player_url - - # YouTubeG::Model::Rating:: Information about the videos rating. - attr_reader :rating - - # *Fixnum*:: Number of times that the video has been viewed - attr_reader :view_count - - # Geodata - attr_reader :where - attr_reader :position - attr_reader :latitude - attr_reader :longitude - - attr_reader :statistics - - # Videos related to the current video. - # - # === Returns - # YouTubeG::Response::VideoSearch - def related - YouTubeG::Parser::VideosFeedParser.new("http://gdata.youtube.com/feeds/api/videos/#{unique_id}/related").parse - end - - # Video responses to the current video. - # - # === Returns - # YouTubeG::Response::VideoSearch - def responses - YouTubeG::Parser::VideosFeedParser.new("http://gdata.youtube.com/feeds/api/videos/#{unique_id}/responses").parse - end - - # The ID of the video, useful for searching for the video again without having to store it anywhere. - # A regular query search, with this id will return the same video. - # - # === Example - # >> video.unique_id - # => "ZTUVgYoeN_o" - # - # === Returns - # String: The Youtube video id. 
- def unique_id - video_id[/videos\/([^<]+)/, 1] - end - - # Allows you to check whether the video can be embedded on a webpage. - # - # === Returns - # Boolean: True if the video can be embedded, false if not. - def embeddable? - not @noembed - end - - # Provides a URL and various other types of information about a video. - # - # === Returns - # YouTubeG::Model::Content: Data about the embeddable video. - def default_media_content - @media_content.find { |c| c.is_default? } - end - - # Gives you the HTML to embed the video on your website. - # - # === Returns - # String: The HTML for embedding the video on your website. - def embed_html(width = 425, height = 350) - < - - - - -EDOC - end - - # The URL needed for embedding the video in a page. - # - # === Returns - # String: Absolute URL for embedding video - def embed_url - @player_url.sub('watch?', '').sub('=', '/') - end - - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/parser.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/parser.rb deleted file mode 100755 index 3a5c2de..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/parser.rb +++ /dev/null @@ -1,169 +0,0 @@ -class YouTubeG - module Parser #:nodoc: - class FeedParser #:nodoc: - def initialize(url) - @url = url - end - - def parse - parse_content open(@url).read - end - end - - class VideoFeedParser < FeedParser #:nodoc: - - def parse_content(content) - doc = REXML::Document.new(content) - entry = doc.elements["entry"] - parse_entry(entry) - end - - protected - def parse_entry(entry) - video_id = entry.elements["id"].text - published_at = Time.parse(entry.elements["published"].text) - updated_at = Time.parse(entry.elements["updated"].text) - - # parse the category and keyword lists - categories = [] - keywords = [] - entry.elements.each("category") do |category| - # determine if it's really a category, or just a keyword - scheme = category.attributes["scheme"] - if (scheme =~ /\/categories\.cat$/) - # it's a category - categories << YouTubeG::Model::Category.new( - :term => category.attributes["term"], - :label => category.attributes["label"]) - - elsif (scheme =~ /\/keywords\.cat$/) - # it's a keyword - keywords << category.attributes["term"] - end - end - - title = entry.elements["title"].text - html_content = entry.elements["content"].text - - # parse the author - author_element = entry.elements["author"] - author = nil - if author_element - author = YouTubeG::Model::Author.new( - :name => author_element.elements["name"].text, - :uri => author_element.elements["uri"].text) - end - - media_group = entry.elements["media:group"] - description = media_group.elements["media:description"].text - duration = media_group.elements["yt:duration"].attributes["seconds"].to_i - - media_content = [] - media_group.elements.each("media:content") do |mce| - media_content << parse_media_content(mce) - end - - player_url = media_group.elements["media:player"].attributes["url"] - - # parse thumbnails - thumbnails = [] - media_group.elements.each("media:thumbnail") do |thumb_element| - # TODO: convert time HH:MM:ss string to seconds? 
- thumbnails << YouTubeG::Model::Thumbnail.new( - :url => thumb_element.attributes["url"], - :height => thumb_element.attributes["height"].to_i, - :width => thumb_element.attributes["width"].to_i, - :time => thumb_element.attributes["time"]) - end - - rating_element = entry.elements["gd:rating"] - rating = nil - if rating_element - rating = YouTubeG::Model::Rating.new( - :min => rating_element.attributes["min"].to_i, - :max => rating_element.attributes["max"].to_i, - :rater_count => rating_element.attributes["numRaters"].to_i, - :average => rating_element.attributes["average"].to_f) - end - - view_count = (el = entry.elements["yt:statistics"]) ? el.attributes["viewCount"].to_i : 0 - - noembed = entry.elements["yt:noembed"] ? true : false - racy = entry.elements["media:rating"] ? true : false - - if where = entry.elements["georss:where"] - position = where.elements["gml:Point"].elements["gml:pos"].text - latitude, longitude = position.split(" ") - end - - YouTubeG::Model::Video.new( - :video_id => video_id, - :published_at => published_at, - :updated_at => updated_at, - :categories => categories, - :keywords => keywords, - :title => title, - :html_content => html_content, - :author => author, - :description => description, - :duration => duration, - :media_content => media_content, - :player_url => player_url, - :thumbnails => thumbnails, - :rating => rating, - :view_count => view_count, - :noembed => noembed, - :racy => racy, - :where => where, - :position => position, - :latitude => latitude, - :longitude => longitude) - end - - def parse_media_content (media_content_element) - content_url = media_content_element.attributes["url"] - format_code = media_content_element.attributes["yt:format"].to_i - format = YouTubeG::Model::Video::Format.by_code(format_code) - duration = media_content_element.attributes["duration"].to_i - mime_type = media_content_element.attributes["type"] - default = (media_content_element.attributes["isDefault"] == "true") - - YouTubeG::Model::Content.new( - :url => content_url, - :format => format, - :duration => duration, - :mime_type => mime_type, - :default => default) - end - end - - class VideosFeedParser < VideoFeedParser #:nodoc: - - private - def parse_content(content) #:nodoc: - doc = REXML::Document.new(content) - feed = doc.elements["feed"] - - feed_id = feed.elements["id"].text - updated_at = Time.parse(feed.elements["updated"].text) - total_result_count = feed.elements["openSearch:totalResults"].text.to_i - offset = feed.elements["openSearch:startIndex"].text.to_i - max_result_count = feed.elements["openSearch:itemsPerPage"].text.to_i - - videos = [] - feed.elements.each("entry") do |entry| - videos << parse_entry(entry) - end - - YouTubeG::Response::VideoSearch.new( - :feed_id => feed_id, - :updated_at => updated_at, - :total_result_count => total_result_count, - :offset => offset, - :max_result_count => max_result_count, - :videos => videos) - end - end - - end -end \ No newline at end of file diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/record.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/record.rb deleted file mode 100644 index e1167ea..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/record.rb +++ /dev/null @@ -1,12 +0,0 @@ -class YouTubeG - class Record #:nodoc: - def initialize (params) - return if params.nil? 
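# The loop below mass-assigns each params entry to an instance variable of the
# same name, but only when the record class declares a matching reader, so
# unrecognised keys are silently ignored. For example (hypothetical values):
#
#   YouTubeG::Model::Rating.new(:average => 4.5, :rater_count => 10, :bogus => 1)
#   # sets @average and @rater_count; :bogus is dropped because Rating defines
#   # no reader for it.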
- - params.each do |key, value| - name = key.to_s - instance_variable_set("@#{name}", value) if respond_to?(name) - end - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/base_search.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/base_search.rb deleted file mode 100644 index cb6d606..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/base_search.rb +++ /dev/null @@ -1,43 +0,0 @@ -class YouTubeG - module Request #:nodoc: - class BaseSearch #:nodoc: - attr_reader :url - - private - - def base_url #:nodoc: - "http://gdata.youtube.com/feeds/api/" - end - - def set_instance_variables( variables ) #:nodoc: - variables.each do |key, value| - name = key.to_s - instance_variable_set("@#{name}", value) if respond_to?(name) - end - end - - def build_query_params(params) #:nodoc: - # nothing to do if there are no params - return '' if (!params || params.empty?) - - # build up the query param string, tacking on every key/value - # pair for which the value is non-nil - u = '?' - item_count = 0 - params.keys.sort.each do |key| - value = params[key] - next if value.nil? - - u << '&' if (item_count > 0) - u << "#{CGI.escape(key.to_s)}=#{CGI.escape(value.to_s)}" - item_count += 1 - end - - # if we found no non-nil values, we've got no params so just - # return an empty string - (item_count == 0) ? '' : u - end - end - - end -end \ No newline at end of file diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/standard_search.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/standard_search.rb deleted file mode 100644 index 3fb6581..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/standard_search.rb +++ /dev/null @@ -1,40 +0,0 @@ -class YouTubeG - module Request #:nodoc: - class StandardSearch < BaseSearch #:nodoc: - attr_reader :max_results # max_results - attr_reader :order_by # orderby, ([relevance], viewCount, published, rating) - attr_reader :offset # start-index - attr_reader :time # time - - TYPES = [ :top_rated, :top_favorites, :most_viewed, :most_popular, - :most_recent, :most_discussed, :most_linked, :most_responded, - :recently_featured, :watch_on_mobile ] - - def initialize(type, options={}) - if TYPES.include?(type) - @max_results, @order_by, @offset, @time = nil - set_instance_variables(options) - @url = base_url + type.to_s << build_query_params(to_youtube_params) - else - raise "Invalid type, must be one of: #{ TYPES.map { |t| t.to_s }.join(", ") }" - end - end - - private - - def base_url #:nodoc: - super << "standardfeeds/" - end - - def to_youtube_params #:nodoc: - { - 'max-results' => @max_results, - 'orderby' => @order_by, - 'start-index' => @offset, - 'time' => @time - } - end - end - - end -end \ No newline at end of file diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/user_search.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/user_search.rb deleted file mode 100644 index d4e214a..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/user_search.rb +++ /dev/null @@ -1,39 +0,0 @@ -class YouTubeG - module Request #:nodoc: - class UserSearch < BaseSearch #:nodoc: - attr_reader :max_results # max_results - attr_reader :order_by # orderby, ([relevance], viewCount, published, rating) - attr_reader :offset # start-index - - def initialize(params, options={}) - @max_results, @order_by, @offset = nil - @url = base_url - - if params == :favorites - @url << "#{options[:user]}/favorites" - set_instance_variables(options) - elsif params[:user] - @url << 
"#{params[:user]}/uploads" - set_instance_variables(params) - end - - @url << build_query_params(to_youtube_params) - end - - private - - def base_url #:nodoc: - super << "users/" - end - - def to_youtube_params #:nodoc: - { - 'max-results' => @max_results, - 'orderby' => @order_by, - 'start-index' => @offset - } - end - end - - end -end \ No newline at end of file diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/video_search.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/video_search.rb deleted file mode 100644 index e6b016d..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/video_search.rb +++ /dev/null @@ -1,93 +0,0 @@ -class YouTubeG - module Request #:nodoc: - class VideoSearch < BaseSearch #:nodoc: - # From here: http://code.google.com/apis/youtube/reference.html#yt_format - ONLY_EMBEDDABLE = 5 - - attr_reader :max_results # max_results - attr_reader :order_by # orderby, ([relevance], viewCount, published, rating) - attr_reader :offset # start-index - attr_reader :query # vq - attr_reader :response_format # alt, ([atom], rss, json) - attr_reader :tags # /-/tag1/tag2 - attr_reader :categories # /-/Category1/Category2 - attr_reader :video_format # format (1=mobile devices) - attr_reader :racy # racy ([exclude], include) - attr_reader :author - - def initialize(params={}) - # Initialize our various member data to avoid warnings and so we'll - # automatically fall back to the youtube api defaults - @max_results, @order_by, - @offset, @query, - @response_format, @video_format, - @racy, @author = nil - @url = base_url - - # Return a single video (base_url + /T7YazwP8GtY) - return @url << "/" << params[:video_id] if params[:video_id] - - @url << "/-/" if (params[:categories] || params[:tags]) - @url << categories_to_params(params.delete(:categories)) if params[:categories] - @url << tags_to_params(params.delete(:tags)) if params[:tags] - - set_instance_variables(params) - - if( params[ :only_embeddable ] ) - @video_format = ONLY_EMBEDDABLE - end - - @url << build_query_params(to_youtube_params) - end - - private - - def base_url #:nodoc: - super << "videos" - end - - def to_youtube_params #:nodoc: - { - 'max-results' => @max_results, - 'orderby' => @order_by, - 'start-index' => @offset, - 'vq' => @query, - 'alt' => @response_format, - 'format' => @video_format, - 'racy' => @racy, - 'author' => @author - } - end - - # Convert category symbols into strings and build the URL. GData requires categories to be capitalized. - # Categories defined like: categories => { :include => [:news], :exclude => [:sports], :either => [..] 
} - # or like: categories => [:news, :sports] - def categories_to_params(categories) #:nodoc: - if categories.respond_to?(:keys) and categories.respond_to?(:[]) - s = "" - s << categories[:either].map { |c| c.to_s.capitalize }.join("%7C") << '/' if categories[:either] - s << categories[:include].map { |c| c.to_s.capitalize }.join("/") << '/' if categories[:include] - s << ("-" << categories[:exclude].map { |c| c.to_s.capitalize }.join("/-")) << '/' if categories[:exclude] - s - else - categories.map { |c| c.to_s.capitalize }.join("/") << '/' - end - end - - # Tags defined like: tags => { :include => [:football], :exclude => [:soccer], :either => [:polo, :tennis] } - # or tags => [:football, :soccer] - def tags_to_params(tags) #:nodoc: - if tags.respond_to?(:keys) and tags.respond_to?(:[]) - s = "" - s << tags[:either].map { |t| CGI.escape(t.to_s) }.join("%7C") << '/' if tags[:either] - s << tags[:include].map { |t| CGI.escape(t.to_s) }.join("/") << '/' if tags[:include] - s << ("-" << tags[:exclude].map { |t| CGI.escape(t.to_s) }.join("/-")) << '/' if tags[:exclude] - s - else - tags.map { |t| CGI.escape(t.to_s) }.join("/") << '/' - end - end - - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/video_upload.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/video_upload.rb deleted file mode 100644 index 558dd09..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/request/video_upload.rb +++ /dev/null @@ -1,130 +0,0 @@ -class YouTubeG - - module Upload - class UploadError < Exception; end - class AuthenticationError < Exception; end - - # require 'youtube_g' - # - # uploader = YouTubeG::Upload::VideoUpload.new("user", "pass", "dev-key") - # uploader.upload File.open("test.m4v"), :title => 'test', - # :description => 'cool vid d00d', - # :category => 'People', - # :keywords => %w[cool blah test] - - class VideoUpload - - def initialize user, pass, dev_key, client_id = 'youtube_g' - @user, @pass, @dev_key, @client_id = user, pass, dev_key, client_id - end - - # - # Upload "data" to youtube, where data is either an IO object or - # raw file data. - # The hash keys for opts (which specify video info) are as follows: - # :mime_type - # :filename - # :title - # :description - # :category - # :keywords - # :private - # Specifying :private will make the video private, otherwise it will be public. - # - # When one of the fields is invalid according to YouTube, - # an UploadError will be returned. Its message contains a list of newline separated - # errors, containing the key and its error code. - # - # When the authentication credentials are incorrect, an AuthenticationError will be raised. - def upload data, opts = {} - data = data.respond_to?(:read) ? 
data.read : data - @opts = { :mime_type => 'video/mp4', - :filename => Digest::MD5.hexdigest(data), - :title => '', - :description => '', - :category => '', - :keywords => [] }.merge(opts) - - uploadBody = generate_upload_body(boundary, video_xml, data) - - uploadHeader = { - "Authorization" => "GoogleLogin auth=#{auth_token}", - "X-GData-Client" => "#{@client_id}", - "X-GData-Key" => "key=#{@dev_key}", - "Slug" => "#{@opts[:filename]}", - "Content-Type" => "multipart/related; boundary=#{boundary}", - "Content-Length" => "#{uploadBody.length}" - } - - Net::HTTP.start(base_url) do |upload| - response = upload.post('/feeds/api/users/' << @user << '/uploads', uploadBody, uploadHeader) - if response.code.to_i == 403 - raise AuthenticationError, response.body[/(.+)<\/TITLE>/, 1] - elsif response.code.to_i != 201 - upload_error = '' - xml = REXML::Document.new(response.body) - errors = xml.elements["//errors"] - errors.each do |error| - location = error.elements["location"].text[/media:group\/media:(.*)\/text\(\)/,1] - code = error.elements["code"].text - upload_error << sprintf("%s: %s\r\n", location, code) - end - raise UploadError, upload_error - end - xml = REXML::Document.new(response.body) - return xml.elements["//id"].text[/videos\/(.+)/, 1] - end - - end - - private - - def base_url #:nodoc: - "uploads.gdata.youtube.com" - end - - def boundary #:nodoc: - "An43094fu" - end - - def auth_token #:nodoc: - unless @auth_token - http = Net::HTTP.new("www.google.com", 443) - http.use_ssl = true - body = "Email=#{CGI::escape @user}&Passwd=#{CGI::escape @pass}&service=youtube&source=#{CGI::escape @client_id}" - response = http.post("/youtube/accounts/ClientLogin", body, "Content-Type" => "application/x-www-form-urlencoded") - raise UploadError, response.body[/Error=(.+)/,1] if response.code.to_i != 200 - @auth_token = response.body[/Auth=(.+)/, 1] - - end - @auth_token - end - - def video_xml #:nodoc: - video_xml = '' - video_xml << '<?xml version="1.0"?>' - video_xml << '<entry xmlns="http://www.w3.org/2005/Atom" xmlns:media="http://search.yahoo.com/mrss/" xmlns:yt="http://gdata.youtube.com/schemas/2007">' - video_xml << '<media:group>' - video_xml << '<media:title type="plain">%s</media:title>' % @opts[:title] - video_xml << '<media:description type="plain">%s</media:description>' % @opts[:description] - video_xml << '<media:keywords>%s</media:keywords>' % @opts[:keywords].join(",") - video_xml << '<media:category scheme="http://gdata.youtube.com/schemas/2007/categories.cat">%s</media:category>' % @opts[:category] - video_xml << '<yt:private/>' if @opts[:private] - video_xml << '</media:group>' - video_xml << '</entry>' - end - - def generate_upload_body(boundary, video_xml, data) #:nodoc: - uploadBody = "" - uploadBody << "--#{boundary}\r\n" - uploadBody << "Content-Type: application/atom+xml; charset=UTF-8\r\n\r\n" - uploadBody << video_xml - uploadBody << "\r\n--#{boundary}\r\n" - uploadBody << "Content-Type: #{@opts[:mime_type]}\r\nContent-Transfer-Encoding: binary\r\n\r\n" - uploadBody << data - uploadBody << "\r\n--#{boundary}--\r\n" - end - - end - end -end \ No newline at end of file diff --git a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/response/video_search.rb b/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/response/video_search.rb deleted file mode 100644 index e5df504..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/lib/youtube_g/response/video_search.rb +++ /dev/null @@ -1,41 +0,0 @@ -class YouTubeG - module Response - class VideoSearch < YouTubeG::Record - # *String*:: Unique feed 
identifying url. - attr_reader :feed_id - - # *Fixnum*:: Number of results per page. - attr_reader :max_result_count - - # *Fixnum*:: 1-based offset index into the full result set. - attr_reader :offset - - # *Fixnum*:: Total number of results available for the original request. - attr_reader :total_result_count - - # *Time*:: Date and time at which the feed was last updated - attr_reader :updated_at - - # *Array*:: Array of YouTubeG::Model::Video records - attr_reader :videos - - def current_page - ((offset - 1) / max_result_count) + 1 - end - - # current_page + 1 or nil if there is no next page - def next_page - current_page < total_pages ? (current_page + 1) : nil - end - - # current_page - 1 or nil if there is no previous page - def previous_page - current_page > 1 ? (current_page - 1) : nil - end - - def total_pages - (total_result_count / max_result_count.to_f).ceil - end - end - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/test/test_client.rb b/vendor/gems/youtube-g-0.4.9.9/test/test_client.rb deleted file mode 100644 index c24abc9..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/test/test_client.rb +++ /dev/null @@ -1,262 +0,0 @@ -require 'rubygems' -require 'test/unit' -require 'pp' - -require 'youtube_g' - -class TestClient < Test::Unit::TestCase - def setup - @client = YouTubeG::Client.new - end - - def test_should_respond_to_a_basic_query - response = @client.videos_by(:query => "penguin") - - assert_equal "http://gdata.youtube.com/feeds/api/videos", response.feed_id - assert_equal 25, response.max_result_count - assert_equal 25, response.videos.length - assert_equal 1, response.offset - assert(response.total_result_count > 100) - assert_instance_of Time, response.updated_at - - response.videos.each { |v| assert_valid_video v } - end - - def test_should_respond_to_a_basic_query_with_offset_and_max_results - response = @client.videos_by(:query => "penguin", :offset => 15, :max_results => 30) - - assert_equal "http://gdata.youtube.com/feeds/api/videos", response.feed_id - assert_equal 30, response.max_result_count - assert_equal 30, response.videos.length - assert_equal 15, response.offset - assert(response.total_result_count > 100) - assert_instance_of Time, response.updated_at - - response.videos.each { |v| assert_valid_video v } - end - - def test_should_respond_to_a_basic_query_with_paging - response = @client.videos_by(:query => "penguin") - assert_equal "http://gdata.youtube.com/feeds/api/videos", response.feed_id - assert_equal 25, response.max_result_count - assert_equal 1, response.offset - - response = @client.videos_by(:query => "penguin", :page => 2) - assert_equal "http://gdata.youtube.com/feeds/api/videos", response.feed_id - assert_equal 25, response.max_result_count - assert_equal 26, response.offset - - response2 = @client.videos_by(:query => "penguin", :page => 3) - assert_equal "http://gdata.youtube.com/feeds/api/videos", response2.feed_id - assert_equal 25, response2.max_result_count - assert_equal 51, response2.offset - end - - def test_should_get_videos_for_multiword_metasearch_query - response = @client.videos_by(:query => 'christina ricci') - - assert_equal "http://gdata.youtube.com/feeds/api/videos", response.feed_id - assert_equal 25, response.max_result_count - assert_equal 25, response.videos.length - assert_equal 1, response.offset - assert(response.total_result_count > 100) - assert_instance_of Time, response.updated_at - - response.videos.each { |v| assert_valid_video v } - end - - def test_should_handle_video_not_yet_viewed - response = 
@client.videos_by(:query => "YnqHZDh_t2Q") - - assert_equal 1, response.videos.length - response.videos.each { |v| assert_valid_video v } - end - - # TODO: this doesn't work because the returned feed is in an unknown format - # def test_should_get_video_for_search_by_video_id - # response = @client.videos_by(:video_id => "T7YazwP8GtY") - # response.videos.each { |v| assert_valid_video v } - # end - - def test_should_get_videos_for_one_tag - response = @client.videos_by(:tags => ['panther']) - response.videos.each { |v| assert_valid_video v } - end - - def test_should_get_videos_for_multiple_tags - response = @client.videos_by(:tags => ['tiger', 'leopard']) - response.videos.each { |v| assert_valid_video v } - end - - def test_should_get_videos_for_one_category - response = @client.videos_by(:categories => [:news]) - response.videos.each { |v| assert_valid_video v } - end - - def test_should_get_videos_for_multiple_categories - response = @client.videos_by(:categories => [:news, :sports]) - response.videos.each { |v| assert_valid_video v } - end - - # TODO: Need to do more specific checking in these tests - # Currently, if a URL is valid, and videos are found, the test passes regardless of search criteria - def test_should_get_videos_for_categories_and_tags - response = @client.videos_by(:categories => [:news, :sports], :tags => ['soccer', 'football']) - response.videos.each { |v| assert_valid_video v } - end - - def test_should_get_most_viewed_videos - response = @client.videos_by(:most_viewed) - response.videos.each { |v| assert_valid_video v } - end - - def test_should_get_top_rated_videos_for_today - response = @client.videos_by(:top_rated, :time => :today) - response.videos.each { |v| assert_valid_video v } - end - - def test_should_get_videos_for_categories_and_tags_with_category_boolean_operators - response = @client.videos_by(:categories => { :either => [:news, :sports], :exclude => [:comedy] }, - :tags => { :include => ['football'], :exclude => ['soccer'] }) - response.videos.each { |v| assert_valid_video v } - end - - def test_should_get_videos_for_categories_and_tags_with_tag_boolean_operators - response = @client.videos_by(:categories => { :either => [:news, :sports], :exclude => [:comedy] }, - :tags => { :either => ['football', 'soccer', 'polo'] }) - response.videos.each { |v| assert_valid_video v } - end - - def test_should_get_videos_by_user - response = @client.videos_by(:user => 'liz') - response.videos.each { |v| assert_valid_video v } - end - - def test_should_get_videos_by_user_with_pagination_and_ordering - response = @client.videos_by(:user => 'liz', :page => 2, :per_page => '2', :order_by => 'published') - response.videos.each { |v| assert_valid_video v } - assert_equal 3, response.offset - assert_equal 2, response.max_result_count - end - - # HTTP 403 Error - # def test_should_get_favorite_videos_by_user - # response = @client.videos_by(:favorites, :user => 'liz') - # response.videos.each { |v| assert_valid_video v } - # end - - def test_should_get_videos_for_query_search_with_categories_excluded - video = @client.video_by("EkF4JD2rO3Q") - assert_equal "<object width=\"425\" height=\"350\">\n <param name=\"movie\" value=\"http://www.youtube.com/v/EkF4JD2rO3Q\"></param>\n <param name=\"wmode\" value=\"transparent\"></param>\n <embed src=\"http://www.youtube.com/v/EkF4JD2rO3Q\" type=\"application/x-shockwave-flash\" \n wmode=\"transparent\" width=\"425\" height=\"350\"></embed>\n</object>\n", video.embed_html - assert_valid_video video - end - - def 
test_should_disable_debug_if_debug_is_set_to_false - @client = YouTubeG::Client.new - assert_nil @client.logger - end - - def test_should_enable_logger_if_debug_is_true - @client = YouTubeG::Client.new(true) - assert_not_nil @client.logger - end - - def test_should_determine_if_nonembeddable_video_is_embeddable - response = @client.videos_by(:query => "avril lavigne girlfriend") - - video = response.videos.first - assert !video.embeddable? - end - - def test_should_determine_if_embeddable_video_is_embeddable - response = @client.videos_by(:query => "strongbad") - - video = response.videos.first - assert video.embeddable? - end - - def test_should_retrieve_video_by_id - video = @client.video_by("http://gdata.youtube.com/feeds/videos/EkF4JD2rO3Q") - assert_valid_video video - - video = @client.video_by("EkF4JD2rO3Q") - assert_valid_video video - end - - private - - def assert_valid_video (video) - # pp video - - # check general attributes - assert_instance_of YouTubeG::Model::Video, video - assert_instance_of Fixnum, video.duration - assert(video.duration > 0) - #assert_match(/^<div style=.*?<\/div>/m, video.html_content) - assert_instance_of String, video.html_content - - # validate media content records - video.media_content.each do |media_content| - # http://www.youtube.com/v/IHVaXG1thXM - assert_valid_url media_content.url - assert(media_content.duration > 0) - assert_instance_of YouTubeG::Model::Video::Format, media_content.format - assert_instance_of String, media_content.mime_type - assert_match(/^[^\/]+\/[^\/]+$/, media_content.mime_type) - end - - default_content = video.default_media_content - if default_content - assert_instance_of YouTubeG::Model::Content, default_content - assert default_content.is_default? - end - - # validate keywords - video.keywords.each { |kw| assert_instance_of(String, kw) } - - # http://www.youtube.com/watch?v=IHVaXG1thXM - assert_valid_url video.player_url - assert_instance_of Time, video.published_at - - # validate optionally-present rating - if video.rating - assert_instance_of YouTubeG::Model::Rating, video.rating - assert_instance_of Float, video.rating.average - assert_instance_of Fixnum, video.rating.max - assert_instance_of Fixnum, video.rating.min - assert_instance_of Fixnum, video.rating.rater_count - end - - # validate thumbnails - assert(video.thumbnails.size > 0) - - assert_not_nil video.title - assert_instance_of String, video.title - assert(video.title.length > 0) - - assert_instance_of Time, video.updated_at - # http://gdata.youtube.com/feeds/videos/IHVaXG1thXM - assert_valid_url video.video_id - assert_instance_of Fixnum, video.view_count - - # validate author - assert_instance_of YouTubeG::Model::Author, video.author - assert_instance_of String, video.author.name - assert(video.author.name.length > 0) - assert_valid_url video.author.uri - - # validate categories - video.categories.each do |cat| - assert_instance_of YouTubeG::Model::Category, cat - assert_instance_of String, cat.label - assert_instance_of String, cat.term - end - end - - def assert_valid_url (url) - URI::parse(url) - return true - rescue - return false - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/test/test_video.rb b/vendor/gems/youtube-g-0.4.9.9/test/test_video.rb deleted file mode 100644 index 86ffdbf..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/test/test_video.rb +++ /dev/null @@ -1,42 +0,0 @@ -require 'rubygems' -require 'test/unit' -require 'pp' - -require 'youtube_g' - -class TestVideo < Test::Unit::TestCase - def test_should_extract_unique_id_from_video_id 
- video = YouTubeG::Model::Video.new(:video_id => "http://gdata.youtube.com/feeds/videos/ZTUVgYoeN_o") - assert_equal "ZTUVgYoeN_o", video.unique_id - end - - def test_should_extract_unique_id_with_hypen_from_video_id - video = YouTubeG::Model::Video.new(:video_id => "http://gdata.youtube.com/feeds/videos/BDqs-OZWw9o") - assert_equal "BDqs-OZWw9o", video.unique_id - end - - def test_should_have_related_videos - video = YouTubeG::Model::Video.new(:video_id => "http://gdata.youtube.com/feeds/videos/BDqs-OZWw9o") - response = video.related - - assert_equal "http://gdata.youtube.com/feeds/api/videos/BDqs-OZWw9o/related", response.feed_id - assert_equal 25, response.max_result_count - assert_equal 25, response.videos.length - assert_equal 1, response.offset - assert(response.total_result_count > 0) - assert_instance_of Time, response.updated_at - end - - def test_should_have_response_videos - video = YouTubeG::Model::Video.new(:video_id => "http://gdata.youtube.com/feeds/videos/BDqs-OZWw9o") - response = video.responses - - assert_equal "http://gdata.youtube.com/feeds/api/videos/BDqs-OZWw9o/responses", response.feed_id - assert_equal 25, response.max_result_count - assert_equal 25, response.videos.length - assert_equal 1, response.offset - assert(response.total_result_count > 0) - assert_instance_of Time, response.updated_at - end - -end diff --git a/vendor/gems/youtube-g-0.4.9.9/test/test_video_search.rb b/vendor/gems/youtube-g-0.4.9.9/test/test_video_search.rb deleted file mode 100644 index d70205c..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/test/test_video_search.rb +++ /dev/null @@ -1,138 +0,0 @@ -require 'rubygems' -require 'test/unit' -require 'pp' - -require 'youtube_g' - -class TestVideoSearch < Test::Unit::TestCase - - def test_should_build_basic_query_url - request = YouTubeG::Request::VideoSearch.new(:query => "penguin") - assert_equal "http://gdata.youtube.com/feeds/api/videos?vq=penguin", request.url - end - - def test_should_build_multiword_metasearch_query_url - request = YouTubeG::Request::VideoSearch.new(:query => 'christina ricci') - assert_equal "http://gdata.youtube.com/feeds/api/videos?vq=christina+ricci", request.url - end - - def test_should_build_video_id_url - request = YouTubeG::Request::VideoSearch.new(:video_id => 'T7YazwP8GtY') - assert_equal "http://gdata.youtube.com/feeds/api/videos/T7YazwP8GtY", request.url - end - - def test_should_build_one_tag_querl_url - request = YouTubeG::Request::VideoSearch.new(:tags => ['panther']) - assert_equal "http://gdata.youtube.com/feeds/api/videos/-/panther/", request.url - end - - def test_should_build_multiple_tags_query_url - request = YouTubeG::Request::VideoSearch.new(:tags => ['tiger', 'leopard']) - assert_equal "http://gdata.youtube.com/feeds/api/videos/-/tiger/leopard/", request.url - end - - def test_should_build_one_category_query_url - request = YouTubeG::Request::VideoSearch.new(:categories => [:news]) - assert_equal "http://gdata.youtube.com/feeds/api/videos/-/News/", request.url - end - - def test_should_build_multiple_categories_query_url - request = YouTubeG::Request::VideoSearch.new(:categories => [:news, :sports]) - assert_equal "http://gdata.youtube.com/feeds/api/videos/-/News/Sports/", request.url - end - - def test_should_build_categories_and_tags_query_url - request = YouTubeG::Request::VideoSearch.new(:categories => [:news, :sports], :tags => ['soccer', 'football']) - assert_equal "http://gdata.youtube.com/feeds/api/videos/-/News/Sports/soccer/football/", request.url - end - - def 
test_should_build_categories_and_tags_url_with_max_results - request = YouTubeG::Request::VideoSearch.new(:categories => [:music], :tags => ['classic', 'rock'], :max_results => 2) - assert_equal "http://gdata.youtube.com/feeds/api/videos/-/Music/classic/rock/?max-results=2", request.url - end - - def test_should_build_author_query_url - request = YouTubeG::Request::VideoSearch.new(:author => "davidguetta") - assert_equal "http://gdata.youtube.com/feeds/api/videos?author=davidguetta", request.url - end - # -- Standard Feeds -------------------------------------------------------------------------------- - - def test_should_build_url_for_most_viewed - request = YouTubeG::Request::StandardSearch.new(:most_viewed) - assert_equal "http://gdata.youtube.com/feeds/api/standardfeeds/most_viewed", request.url - end - - def test_should_build_url_for_top_rated_for_today - request = YouTubeG::Request::StandardSearch.new(:top_rated, :time => :today) - assert_equal "http://gdata.youtube.com/feeds/api/standardfeeds/top_rated?time=today", request.url - end - - def test_should_build_url_for_most_viewed_offset_and_max_results_without_time - request = YouTubeG::Request::StandardSearch.new(:top_rated, :offset => 5, :max_results => 10) - assert_equal "http://gdata.youtube.com/feeds/api/standardfeeds/top_rated?max-results=10&start-index=5", request.url - end - - def test_should_build_url_for_most_viewed_offset_and_max_results_with_time - request = YouTubeG::Request::StandardSearch.new(:top_rated, :offset => 5, :max_results => 10, :time => :today) - assert_equal "http://gdata.youtube.com/feeds/api/standardfeeds/top_rated?max-results=10&start-index=5&time=today", request.url - end - - def test_should_raise_exception_for_invalid_type - assert_raise RuntimeError do - request = YouTubeG::Request::StandardSearch.new(:most_viewed_yo) - end - end - - # -- Complex Video Queries ------------------------------------------------------------------------- - - def test_should_build_url_for_boolean_or_case_for_categories - request = YouTubeG::Request::VideoSearch.new(:categories => { :either => [:news, :sports] }) - assert_equal "http://gdata.youtube.com/feeds/api/videos/-/News%7CSports/", request.url - end - - def test_should_build_url_for_boolean_or_and_exclude_case_for_categories - request = YouTubeG::Request::VideoSearch.new(:categories => { :either => [:news, :sports], :exclude => [:comedy] }) - assert_equal "http://gdata.youtube.com/feeds/api/videos/-/News%7CSports/-Comedy/", request.url - end - - def test_should_build_url_for_exclude_case_for_tags - request = YouTubeG::Request::VideoSearch.new(:categories => { :either => [:news, :sports], :exclude => [:comedy] }, - :tags => { :include => ['football'], :exclude => ['soccer'] }) - assert_equal "http://gdata.youtube.com/feeds/api/videos/-/News%7CSports/-Comedy/football/-soccer/", request.url - end - - def test_should_build_url_for_either_case_for_tags - request = YouTubeG::Request::VideoSearch.new(:categories => { :either => [:news, :sports], :exclude => [:comedy] }, - :tags => { :either => ['soccer', 'football', 'donkey'] }) - assert_equal "http://gdata.youtube.com/feeds/api/videos/-/News%7CSports/-Comedy/soccer%7Cfootball%7Cdonkey/", request.url - end - - def test_should_build_url_for_query_search_with_categories_excluded - request = YouTubeG::Request::VideoSearch.new(:query => 'bench press', - :categories => { :exclude => [:comedy, :entertainment] }, - :max_results => 10) - assert_equal 
"http://gdata.youtube.com/feeds/api/videos/-/-Comedy/-Entertainment/?max-results=10&vq=bench+press", request.url - end - - # -- User Queries --------------------------------------------------------------------------------- - - def test_should_build_url_for_videos_by_user - request = YouTubeG::Request::UserSearch.new(:user => 'liz') - assert_equal "http://gdata.youtube.com/feeds/api/users/liz/uploads", request.url - end - - def test_should_build_url_for_videos_by_user_paginate_and_order - request = YouTubeG::Request::UserSearch.new(:user => 'liz', :offset => 20, :max_results => 10, :order_by => 'published') - assert_equal "http://gdata.youtube.com/feeds/api/users/liz/uploads?max-results=10&orderby=published&start-index=20", request.url - end - - def test_should_build_url_for_favorite_videos_by_user - request = YouTubeG::Request::UserSearch.new(:favorites, :user => 'liz') - assert_equal "http://gdata.youtube.com/feeds/api/users/liz/favorites", request.url - end - - def test_should_build_url_for_favorite_videos_by_user_paginate - request = YouTubeG::Request::UserSearch.new(:favorites, :user => 'liz', :offset => 20, :max_results => 10) - assert_equal "http://gdata.youtube.com/feeds/api/users/liz/favorites?max-results=10&start-index=20", request.url - end -end diff --git a/vendor/gems/youtube-g-0.4.9.9/youtube-g.gemspec b/vendor/gems/youtube-g-0.4.9.9/youtube-g.gemspec deleted file mode 100644 index 8ac4e89..0000000 --- a/vendor/gems/youtube-g-0.4.9.9/youtube-g.gemspec +++ /dev/null @@ -1,52 +0,0 @@ -spec = Gem::Specification.new do |s| - s.name = 'youtube-g' - s.version = '0.4.9.9' - s.date = '2008-09-01' - s.summary = 'An object-oriented Ruby wrapper for the YouTube GData API' - s.email = "ruby-youtube-library@googlegroups.com" - s.homepage = "http://youtube-g.rubyforge.org/" - s.description = "An object-oriented Ruby wrapper for the YouTube GData API" - s.has_rdoc = true - s.authors = ["Shane Vitarana", "Walter Korman", "Aman Gupta", "Filip H.F. 
Slagter"] - - # ruby -rpp -e "pp Dir['**/*.*'].map" - s.files = [ - "History.txt", - "lib/youtube_g/client.rb", - "lib/youtube_g/logger.rb", - "lib/youtube_g/model/author.rb", - "lib/youtube_g/model/category.rb", - "lib/youtube_g/model/contact.rb", - "lib/youtube_g/model/content.rb", - "lib/youtube_g/model/playlist.rb", - "lib/youtube_g/model/rating.rb", - "lib/youtube_g/model/thumbnail.rb", - "lib/youtube_g/model/user.rb", - "lib/youtube_g/model/video.rb", - "lib/youtube_g/parser.rb", - "lib/youtube_g/record.rb", - "lib/youtube_g/request/base_search.rb", - "lib/youtube_g/request/standard_search.rb", - "lib/youtube_g/request/user_search.rb", - "lib/youtube_g/request/video_search.rb", - "lib/youtube_g/request/video_upload.rb", - "lib/youtube_g/response/video_search.rb", - "lib/youtube_g.rb", - "Manifest.txt", - "README.txt", - "test/test_client.rb", - "test/test_video.rb", - "test/test_video_search.rb", - "TODO.txt", - "youtube-g.gemspec" - ] - - s.test_files = [ - "test/test_client.rb", - "test/test_video.rb", - "test/test_video_search.rb" - ] - - s.rdoc_options = ["--main", "README.txt"] - s.extra_rdoc_files = ["History.txt", "README.txt"] -end diff --git a/vendor/plugins/flashback/MIT-LICENSE b/vendor/plugins/flashback/MIT-LICENSE deleted file mode 100644 index fe90bce..0000000 --- a/vendor/plugins/flashback/MIT-LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2008 Justin Knowlden - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/plugins/flashback/README b/vendor/plugins/flashback/README deleted file mode 100644 index b6a8ff4..0000000 --- a/vendor/plugins/flashback/README +++ /dev/null @@ -1,62 +0,0 @@ -= Flashback - -Calling _flashback_ in your functional test sometime after the TestRequest is -instantiated and before your first call to an action, will allow you to -access the discarded flash variables (those that were _flashed_) during -the request processing. Specifically, it will allow you to access the -<em>Flash.now</em> variables by name. - -You will access these discarded variables similar to how you would access them -in <em>Flash.now</em>, but this time via a _flashed_ method. For example: - - class FooController < ApplicationController - def create - ... - flash.now[:error] = 'Whoops!' unless params[:foo][:baz] - ... 
- end - end - - class FooControllerTest < ActionController::TestCase - def test_create_should_set_some_flash_now_variable - flashback - get :create, :foo => {:bar => 'hello'} - assert_equal 'Whoops!', flash.flashed[:error] - end - end - -What you will not have access to via _flashed_ are the normal, -inter-request Flash variables. This is because Flashback is only tracking -those flash variables that are _discarded_ during the transaction, which -includes all variables passed through <em>Flash.now</em>. - -If you want _flashed_ available all of the time, then simply call _flashback_ -in the _setup_ method of your TestCase. There are likely better ways that I hope -someone will tell me about, but I just wanted to get this plugin out-the-door. - -The only caveat to Flashback is that if you define your own Flash instance and -pass that to your various process methods (get, post, head, etc.), your flash -will override Flashback's, rendering it useless. - -== Installation - - ./script/plugin install http://glomp.rubyforge.org/svn/plugins/flashback - -== Notes - -I built this plugin using Ruby 1.8.6 on Rails 2.0. It will likely work for Rails -1.2, but it has not been tested. - -== Acknowledgements - -If no one else, then DHH - -== Contact - -Justin Knowlden <justin@goglomp.net> - -== License - -Copyright (c) 2008 Justin Knowlden, released under the MIT license - -See MIT-LICENSE for more detail diff --git a/vendor/plugins/flashback/Rakefile b/vendor/plugins/flashback/Rakefile deleted file mode 100644 index db2cb16..0000000 --- a/vendor/plugins/flashback/Rakefile +++ /dev/null @@ -1,22 +0,0 @@ -require 'rake' -require 'rake/testtask' -require 'rake/rdoctask' - -desc 'Default: run unit tests.' -task :default => :test - -desc 'Test the flashback plugin.' -Rake::TestTask.new(:test) do |t| - t.libs << 'lib' - t.pattern = 'test/**/*_test.rb' - t.verbose = true -end - -desc 'Generate documentation for the flashback plugin.' -Rake::RDocTask.new(:rdoc) do |rdoc| - rdoc.rdoc_dir = 'rdoc' - rdoc.title = 'Flashback' - rdoc.options << '--line-numbers' << '--inline-source' - rdoc.rdoc_files.include('README') - rdoc.rdoc_files.include('lib/**/*.rb') -end diff --git a/vendor/plugins/flashback/init.rb b/vendor/plugins/flashback/init.rb deleted file mode 100644 index 442c84f..0000000 --- a/vendor/plugins/flashback/init.rb +++ /dev/null @@ -1 +0,0 @@ -require 'flashback' \ No newline at end of file diff --git a/vendor/plugins/flashback/install.rb b/vendor/plugins/flashback/install.rb deleted file mode 100644 index f7732d3..0000000 --- a/vendor/plugins/flashback/install.rb +++ /dev/null @@ -1 +0,0 @@ -# Install hook code here diff --git a/vendor/plugins/flashback/lib/flashback.rb b/vendor/plugins/flashback/lib/flashback.rb deleted file mode 100644 index 6224ae8..0000000 --- a/vendor/plugins/flashback/lib/flashback.rb +++ /dev/null @@ -1,58 +0,0 @@ -module Glomp - module Flashback - class FlashedHash < ActionController::Flash::FlashHash - def flashed - @flashed ||= {} - end - - def discard(k=nil) - flashed[k] = self[k] - super(k) - end - end - - # Calling this method in your test sometime after the TestRequest is - # instantiated and before your first call to an action, will allow you to - # access the discarded flash variables (those that were _flashed_) during - # the request processing. Specifically, it will allow you to access the - # <em>Flash.now</em> variables by name. 
- # - # You will access these discarded variables similar to how you would access - # <em>Flash.now</em>, but this time via a _flashed_ method. For example: - # - # class FooController < ApplicationController - # def create - # ... - # flash.now[:error] = 'Whoops!' unless params[:foo][:baz] - # ... - # end - # end - # - # class FooControllerTest < ActionController::TestCase - # def test_create_should_set_some_flash_now_variable - # flashback - # get :create, :foo => {:bar => 'hello'} - # assert_equal 'Whoops!', flash.flashed[:error] - # end - # end - # - # What you will not have access to via _flashed_ are the normal, - # inter-request Flash variables. This is because Flashback is only tracking - # those flash variables that are _discarded_ during the transaction, which - # includes all variables passed through <em>Flash.now</em>. - # - # If you want _flashed_ available all of the time, then simply call - # _flashback_ in the _setup_ method of your TestCase. There are likely - # better ways that I hope someone will tell me about, but I just wanted to - # get this plugin out-the-door. - # - # The only caveat to Flashback is that if you define your own Flash instance - # and pass that to your various process methods (get, post, head, etc.), - # your flash will override Flashback's, rendering it useless. - def flashback - @request.session['flash'] = FlashedHash.new - end - end -end - -Test::Unit::TestCase.send(:include, Glomp::Flashback) diff --git a/vendor/plugins/flashback/tasks/flashback_tasks.rake b/vendor/plugins/flashback/tasks/flashback_tasks.rake deleted file mode 100644 index acad647..0000000 --- a/vendor/plugins/flashback/tasks/flashback_tasks.rake +++ /dev/null @@ -1,4 +0,0 @@ -# desc "Explaining what the task does" -# task :flashback do -# # Task goes here -# end diff --git a/vendor/plugins/flashback/test/app_root/app/controllers/application.rb b/vendor/plugins/flashback/test/app_root/app/controllers/application.rb deleted file mode 100644 index 09705d1..0000000 --- a/vendor/plugins/flashback/test/app_root/app/controllers/application.rb +++ /dev/null @@ -1,2 +0,0 @@ -class ApplicationController < ActionController::Base -end diff --git a/vendor/plugins/flashback/test/app_root/app/controllers/flashers_controller.rb b/vendor/plugins/flashback/test/app_root/app/controllers/flashers_controller.rb deleted file mode 100644 index 19c66f0..0000000 --- a/vendor/plugins/flashback/test/app_root/app/controllers/flashers_controller.rb +++ /dev/null @@ -1,7 +0,0 @@ -class FlashersController < ApplicationController - def index - flash[:actual_flash] = params[:actual_flash] - flash.now[:actual_flash_now] = params[:actual_flash_now] - render :text => 'blah' - end -end diff --git a/vendor/plugins/flashback/test/app_root/config/boot.rb b/vendor/plugins/flashback/test/app_root/config/boot.rb deleted file mode 100644 index a526753..0000000 --- a/vendor/plugins/flashback/test/app_root/config/boot.rb +++ /dev/null @@ -1,47 +0,0 @@ -unless defined?(RAILS_ROOT) - root_path = File.join(File.expand_path('.'), 'test/app_root') - - unless RUBY_PLATFORM =~ /(:?mswin|mingw)/ - require 'pathname' - root_path = Pathname.new(root_path).cleanpath(true).to_s - end - - RAILS_ROOT = root_path -end - -unless defined?(RAILS_FRAMEWORK_ROOT) - RAILS_FRAMEWORK_ROOT = ENV['RAILS_FRAMEWORK_ROOT'] || "#{RAILS_ROOT}/vendor/rails" -end - -unless defined?(Rails::Initializer) - if File.directory?(RAILS_FRAMEWORK_ROOT) - require "#{RAILS_FRAMEWORK_ROOT}/railties/lib/initializer" - else - require 'rubygems' - - 
environment_without_comments = IO.readlines(File.dirname(__FILE__) + '/environment.rb').reject { |l| l =~ /^#/ }.join - environment_without_comments =~ /[^#]RAILS_GEM_VERSION = '([\d.]+)'/ - rails_gem_version = $1 - - if version = defined?(RAILS_GEM_VERSION) ? RAILS_GEM_VERSION : rails_gem_version - # Asking for 1.1.6 will give you 1.1.6.5206, if available -- makes it easier to use beta gems - rails_gem = Gem.cache.search('rails', "~>#{version}.0").sort_by { |g| g.version.version }.last - - if rails_gem - gem "rails", "=#{rails_gem.version.version}" - require rails_gem.full_gem_path + '/lib/initializer' - else - STDERR.puts %(Cannot find gem for Rails ~>#{version}.0: - Install the missing gem with 'gem install -v=#{version} rails', or - change environment.rb to define RAILS_GEM_VERSION with your desired version. - ) - exit 1 - end - else - gem "rails" - require 'initializer' - end - end - - Rails::Initializer.run(:set_load_path) -end diff --git a/vendor/plugins/flashback/test/app_root/config/database.yml b/vendor/plugins/flashback/test/app_root/config/database.yml deleted file mode 100644 index 2895ec1..0000000 --- a/vendor/plugins/flashback/test/app_root/config/database.yml +++ /dev/null @@ -1,5 +0,0 @@ -postgresql: - adapter: postgresql - database: plugin_test - host: localhost - min_messages: ERROR diff --git a/vendor/plugins/flashback/test/app_root/config/environment.rb b/vendor/plugins/flashback/test/app_root/config/environment.rb deleted file mode 100644 index c84c299..0000000 --- a/vendor/plugins/flashback/test/app_root/config/environment.rb +++ /dev/null @@ -1,12 +0,0 @@ -require 'config/boot' - -Rails::Initializer.run do |config| - config.log_level = :debug - config.cache_classes = false - config.whiny_nils = true - config.load_paths << "#{RAILS_ROOT}/../../lib" - config.plugin_paths << '..' 
- config.plugins = [File.basename(File.expand_path('.'))] -end - -Dependencies.log_activity = true diff --git a/vendor/plugins/flashback/test/app_root/config/environments/in_memory.rb b/vendor/plugins/flashback/test/app_root/config/environments/in_memory.rb deleted file mode 100644 index e69de29..0000000 diff --git a/vendor/plugins/flashback/test/app_root/config/environments/mysql.rb b/vendor/plugins/flashback/test/app_root/config/environments/mysql.rb deleted file mode 100644 index e69de29..0000000 diff --git a/vendor/plugins/flashback/test/app_root/config/environments/postgresql.rb b/vendor/plugins/flashback/test/app_root/config/environments/postgresql.rb deleted file mode 100644 index e69de29..0000000 diff --git a/vendor/plugins/flashback/test/app_root/config/environments/sqlite.rb b/vendor/plugins/flashback/test/app_root/config/environments/sqlite.rb deleted file mode 100644 index e69de29..0000000 diff --git a/vendor/plugins/flashback/test/app_root/config/environments/sqlite3.rb b/vendor/plugins/flashback/test/app_root/config/environments/sqlite3.rb deleted file mode 100644 index e69de29..0000000 diff --git a/vendor/plugins/flashback/test/app_root/config/routes.rb b/vendor/plugins/flashback/test/app_root/config/routes.rb deleted file mode 100644 index 8221115..0000000 --- a/vendor/plugins/flashback/test/app_root/config/routes.rb +++ /dev/null @@ -1,3 +0,0 @@ -ActionController::Routing::Routes.draw do |map| - map.resources :flashers -end diff --git a/vendor/plugins/flashback/test/flashers_controller_test.rb b/vendor/plugins/flashback/test/flashers_controller_test.rb deleted file mode 100644 index 0a77d30..0000000 --- a/vendor/plugins/flashback/test/flashers_controller_test.rb +++ /dev/null @@ -1,35 +0,0 @@ -require File.expand_path(File.dirname(__FILE__) + "/test_helper") - -class FlashersControllerTest < ActionController::TestCase - - def test_flash_available_after_request - get :index, :actual_flash => 'hello' - assert_equal 'hello', flash[:actual_flash] - end - - def test_flash_now_not_available_after_request - get :index, :actual_flash_now => 'world' - assert_nil flash.now[:actual_flash_now] - end - - def test_no_flashback_means_flash_now_not_available_after_request_via_flashed - get :index, :actual_flash_now => 'world' - assert_raise(NoMethodError) {flash.flashed[:actual_flash_now]} - end - - def test_flash_now_is_available_after_request_via_flashed - flashback - get :index, :actual_flash_now => 'world' - assert_equal 'world', flash.flashed[:actual_flash_now] - end - - def test_flash_not_available_after_request_via_flashed - # Flash variables are not available via _flashed_ after the request has - # finished because they have not yet been discarded. 
Only discarded - # key/value pairs are available via _flashed_ - flashback - get :index, :actual_flash => 'hello' - assert_nil flash.flashed[:actual_flash] - end - -end diff --git a/vendor/plugins/flashback/test/test_helper.rb b/vendor/plugins/flashback/test/test_helper.rb deleted file mode 100644 index be1d811..0000000 --- a/vendor/plugins/flashback/test/test_helper.rb +++ /dev/null @@ -1,9 +0,0 @@ -# If you want to change the default rails environment -ENV['RAILS_ENV'] ||= 'postgresql' - -# Load the plugin testing framework -require 'rubygems' -require 'plugin_test_helper' - -# Run the migrations (optional) -#ActiveRecord::Migrator.migrate("#{RAILS_ROOT}/db/migrate") \ No newline at end of file diff --git a/vendor/plugins/flashback/uninstall.rb b/vendor/plugins/flashback/uninstall.rb deleted file mode 100644 index 9738333..0000000 --- a/vendor/plugins/flashback/uninstall.rb +++ /dev/null @@ -1 +0,0 @@ -# Uninstall hook code here diff --git a/vendor/plugins/rows_logger/README b/vendor/plugins/rows_logger/README deleted file mode 100644 index 47833bd..0000000 --- a/vendor/plugins/rows_logger/README +++ /dev/null @@ -1,105 +0,0 @@ -RowsLogger -========== - -This plugin offers rich information about result sets to AR logs. - - -Usage -===== - -Just enjoy Rails as usual. - - -Example -======= - -Consider some read operations like this. - - Member.count - Member.find(:all) - -That usually makes following log. - - SQL (0.000300) SELECT count(*) AS count_all FROM members - Member Load (0.000482) SELECT * FROM members - -RowsLogger appends information about rows count to the log. - - SQL (0.000301) (1 Row) SELECT count(*) AS count_all FROM members - Member Load (0.000415) (3 Rows) SELECT * FROM members - - -For Developpers -=============== - -This plugin modfies following methods. - 'ConnectionAdapters::AbstractAdapter#log' - 'ConnectionAdapters::AbstractAdapter#log_info' - -<before> -ConnectionAdapters::AbstractAdapter#log - --> log_info(sql, name, seconds) - -<after> -ConnectionAdapters::AbstractAdapter#log - --> log_info(sql, name, seconds, result = nil) - --> log_result_info(result) - --> ConcreteAdapter#count_result(result) - - -<an example for concrete adapter> -ConnectionAdapters::MysqlAdapter#count_result - protected - def count_result(result) - result.num_rows - end - - -Count Result Method -=================== - -The 'count_result' method of Adapter class should return count of result set -from 'result' object, where 'result' is an object generated by 'log' method. -This is used as result information. - -For exmaple, although this is nonsencial definition, - - def count_result(result) - 0 - end - -this code always appends "(0 Rows)" to the log. The returned value is directly -used even if it is not a numeric value. But no information will be appended -in following cases. - -1) when 'count_result' method returns nil -2) when 'count_result' method is not defined in current adapter - - -Note -==== - -'count_result' method should be defined as 'protected' or 'public' because -we check whether it is implemented or not in current adapter by using -'respond_to?' method. - - -Supported Databases -=================== - -Currently these databases are supported. - -* PostgreSQL -* MySQL - - -For Other Databases -=================== - -Put a new adapter file that contains 'count_result' method for -your database adapter into adatpers directory. 
- - -Author -====== -Maiha <maiha@wota.jp> \ No newline at end of file diff --git a/vendor/plugins/rows_logger/Rakefile b/vendor/plugins/rows_logger/Rakefile deleted file mode 100644 index ae0f083..0000000 --- a/vendor/plugins/rows_logger/Rakefile +++ /dev/null @@ -1,22 +0,0 @@ -require 'rake' -require 'rake/testtask' -require 'rake/rdoctask' - -desc 'Default: run unit tests.' -task :default => :test - -desc 'Test the rows_logger plugin.' -Rake::TestTask.new(:test) do |t| - t.libs << 'lib' - t.pattern = 'test/**/*_test.rb' - t.verbose = true -end - -desc 'Generate documentation for the rows_logger plugin.' -Rake::RDocTask.new(:rdoc) do |rdoc| - rdoc.rdoc_dir = 'rdoc' - rdoc.title = 'RowsLogger' - rdoc.options << '--line-numbers' << '--inline-source' - rdoc.rdoc_files.include('README') - rdoc.rdoc_files.include('lib/**/*.rb') -end diff --git a/vendor/plugins/rows_logger/adapters/mysql.rb b/vendor/plugins/rows_logger/adapters/mysql.rb deleted file mode 100644 index f002c06..0000000 --- a/vendor/plugins/rows_logger/adapters/mysql.rb +++ /dev/null @@ -1,6 +0,0 @@ -ActiveRecord::ConnectionAdapters::MysqlAdapter.class_eval do - protected - def count_result(result) - result.num_rows - end -end \ No newline at end of file diff --git a/vendor/plugins/rows_logger/adapters/postgresql.rb b/vendor/plugins/rows_logger/adapters/postgresql.rb deleted file mode 100644 index 40e6aac..0000000 --- a/vendor/plugins/rows_logger/adapters/postgresql.rb +++ /dev/null @@ -1,10 +0,0 @@ -ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.class_eval do - protected - def count_result(result) - if result.is_a?(PGresult) - result.num_tuples - else - nil - end - end -end \ No newline at end of file diff --git a/vendor/plugins/rows_logger/init.rb b/vendor/plugins/rows_logger/init.rb deleted file mode 100644 index b938b39..0000000 --- a/vendor/plugins/rows_logger/init.rb +++ /dev/null @@ -1,57 +0,0 @@ -# Include hook code here - -Dir["#{ File.dirname(__FILE__) }/adapters/*.rb"].each do |path| - adapter = File.basename(path, '.rb') - if ActiveRecord::Base.respond_to?("#{adapter}_connection") - ActiveRecord::Base.logger.debug "RowsLogger plugin enables #{adapter}" - require path - end -end - - -ActiveRecord::ConnectionAdapters::AbstractAdapter.class_eval do - protected - def log(sql, name) - if block_given? - if @logger and @logger.level <= Logger::INFO - result = nil - seconds = Benchmark.realtime { result = yield } - @runtime += seconds - log_info(sql, name, seconds, result) - result - else - yield - end - else - log_info(sql, name, 0) - nil - end - rescue Exception => e - # Log message and raise exception. - # Set last_verfication to 0, so that connection gets verified - # upon reentering the request loop - @last_verification = 0 - message = "#{e.class.name}: #{e.message}: #{sql}" - log_info(message, name, 0) - raise ActiveRecord::StatementInvalid, message - end - - def log_info(sql, name, runtime, result = nil) - return unless @logger - - @logger.debug( - format_log_entry( - "#{name.nil? ? "SQL" : name} (#{sprintf("%f", runtime)})#{log_result_info(result)}", - sql.gsub(/ +/, " ") - ) - ) - end - - def log_result_info(result) - return nil if result.nil? or !respond_to?(:count_result) - - count = count_result(result) rescue '?' or return nil - unit = (count == 1) ? 
'Row' : 'Rows' - " (%s %s)" % [count, unit] - end -end \ No newline at end of file diff --git a/vendor/plugins/rows_logger/install.rb b/vendor/plugins/rows_logger/install.rb deleted file mode 100644 index f7732d3..0000000 --- a/vendor/plugins/rows_logger/install.rb +++ /dev/null @@ -1 +0,0 @@ -# Install hook code here diff --git a/vendor/plugins/rows_logger/lib/rows_logger.rb b/vendor/plugins/rows_logger/lib/rows_logger.rb deleted file mode 100644 index 80d64d6..0000000 --- a/vendor/plugins/rows_logger/lib/rows_logger.rb +++ /dev/null @@ -1 +0,0 @@ -# RowsLogger \ No newline at end of file diff --git a/vendor/plugins/rows_logger/tasks/rows_logger_tasks.rake b/vendor/plugins/rows_logger/tasks/rows_logger_tasks.rake deleted file mode 100644 index 251ec9a..0000000 --- a/vendor/plugins/rows_logger/tasks/rows_logger_tasks.rake +++ /dev/null @@ -1,4 +0,0 @@ -# desc "Explaining what the task does" -# task :rows_logger do -# # Task goes here -# end \ No newline at end of file diff --git a/vendor/plugins/rows_logger/test/rows_logger_test.rb b/vendor/plugins/rows_logger/test/rows_logger_test.rb deleted file mode 100644 index 77baaa3..0000000 --- a/vendor/plugins/rows_logger/test/rows_logger_test.rb +++ /dev/null @@ -1,8 +0,0 @@ -require 'test/unit' - -class RowsLoggerTest < Test::Unit::TestCase - # Replace this with your real tests. - def test_this_plugin - flunk - end -end diff --git a/vendor/plugins/shoulda/.autotest b/vendor/plugins/shoulda/.autotest deleted file mode 100644 index c378379..0000000 --- a/vendor/plugins/shoulda/.autotest +++ /dev/null @@ -1,13 +0,0 @@ -Autotest.add_hook :initialize do |at| - at.add_mapping(%r{^lib/\w.*\.rb}) do - at.files_matching(%r{^test/*/\w.*_test\.rb}) - end - - at.add_mapping(%r{^test/rails_root/\w.*}) do - at.files_matching(%r{^test/*/\w.*_test\.rb}) - end - - at.add_exception(%r{.svn}) - at.add_exception(%r{.log$}) - at.add_exception(%r{^.autotest$}) -end diff --git a/vendor/plugins/shoulda/MIT-LICENSE b/vendor/plugins/shoulda/MIT-LICENSE deleted file mode 100644 index f8e9154..0000000 --- a/vendor/plugins/shoulda/MIT-LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2007, Tammer Saleh, Thoughtbot, Inc. - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/plugins/shoulda/README b/vendor/plugins/shoulda/README deleted file mode 100644 index f35348e..0000000 --- a/vendor/plugins/shoulda/README +++ /dev/null @@ -1,123 +0,0 @@ -= Shoulda - Making tests easy on the fingers and eyes - -Shoulda makes it easy to write elegant, understandable, and maintainable tests. 
Shoulda consists of test macros, assertions, and helpers added on to the Test::Unit framework. It's fully compatible with your existing tests, and requires no retooling to use. - -Helpers:: #context and #should give you rSpec like test blocks. - In addition, you get nested contexts and a much more readable syntax. -Macros:: Generate hundreds of lines of Controller and ActiveRecord tests with these powerful macros. - They get you started quickly, and can help you ensure that your application is conforming to best practices. -Assertions:: Many common rails testing idioms have been distilled into a set of useful assertions. - -= Usage - -=== Context Helpers (ThoughtBot::Shoulda::Context) - -Stop killing your fingers with all of those underscores... Name your tests with plain sentences! - - class UserTest << Test::Unit::TestCase - context "A User instance" do - setup do - @user = User.find(:first) - end - - should "return its full name" - assert_equal 'John Doe', @user.full_name - end - - context "with a profile" do - setup do - @user.profile = Profile.find(:first) - end - - should "return true when sent #has_profile?" - assert @user.has_profile? - end - end - end - end - -Produces the following test methods: - - "test: A User instance should return its full name." - "test: A User instance with a profile should return true when sent #has_profile?." - -So readable! - -=== ActiveRecord Tests (ThoughtBot::Shoulda::ActiveRecord) - -Quick macro tests for your ActiveRecord associations and validations: - - class PostTest < Test::Unit::TestCase - load_all_fixtures - - should_belong_to :user - should_have_many :tags, :through => :taggings - - should_require_unique_attributes :title - should_require_attributes :body, :message => /wtf/ - should_require_attributes :title - should_only_allow_numeric_values_for :user_id - end - - class UserTest < Test::Unit::TestCase - load_all_fixtures - - should_have_many :posts - - should_not_allow_values_for :email, "blah", "b lah" - should_allow_values_for :email, "a@b.com", "asdf@asdf.com" - should_ensure_length_in_range :email, 1..100 - should_ensure_value_in_range :age, 1..100 - should_protect_attributes :password - end - -Makes TDD so much easier. - -=== Controller Tests (ThoughtBot::Shoulda::Controller::ClassMethods) - -Macros to test the most common controller patterns... - - context "on GET to :show for first record" do - setup do - get :show, :id => 1 - end - - should_assign_to :user - should_respond_with :success - should_render_template :show - should_not_set_the_flash - - should "do something else really cool" do - assert_equal 1, assigns(:user).id - end - end - -Test entire controllers in a few lines... - - class PostsControllerTest < Test::Unit::TestCase - should_be_restful do |resource| - resource.parent = :user - - resource.create.params = { :title => "first post", :body => 'blah blah blah'} - resource.update.params = { :title => "changed" } - end - end - -should_be_restful generates 40 tests on the fly, for both html and xml requests. - -=== Helpful Assertions (ThoughtBot::Shoulda::General) - -More to come here, but have fun with what's there. - - load_all_fixtures - assert_same_elements([:a, :b, :c], [:c, :a, :b]) - assert_contains(['a', '1'], /\d/) - assert_contains(['a', '1'], 'a') - -= Credits - -Shoulda is maintained by {Tammer Saleh}[mailto:tsaleh@thoughtbot.com], and is funded by Thoughtbot[http://www.thoughtbot.com], inc. - -= License - -Shoulda is Copyright © 2006-2007 Tammer Saleh, Thoughtbot. 
It is free software, and may be redistributed under the terms specified in the MIT-LICENSE file. diff --git a/vendor/plugins/shoulda/Rakefile b/vendor/plugins/shoulda/Rakefile deleted file mode 100644 index 8c7cf29..0000000 --- a/vendor/plugins/shoulda/Rakefile +++ /dev/null @@ -1,32 +0,0 @@ -require 'rake' -require 'rake/testtask' -require 'rake/rdoctask' - -# Test::Unit::UI::VERBOSE - -Rake::TestTask.new do |t| - t.libs << 'lib' - t.pattern = 'test/{unit,functional,other}/**/*_test.rb' - t.verbose = false -end - -Rake::RDocTask.new { |rdoc| - rdoc.rdoc_dir = 'doc' - rdoc.title = "Shoulda -- Making tests easy on the fingers and eyes" - rdoc.options << '--line-numbers' << '--inline-source' - rdoc.template = "#{ENV['template']}.rb" if ENV['template'] - rdoc.rdoc_files.include('README', 'lib/**/*.rb') -} - -desc 'Update documentation on website' -task :sync_docs => 'rdoc' do - `rsync -ave ssh doc/ dev@dev.thoughtbot.com:/home/dev/www/dev.thoughtbot.com/shoulda` -end - -desc 'Default: run tests.' -task :default => ['test'] - -Dir['tasks/*.rake'].each do |f| - load f -end - diff --git a/vendor/plugins/shoulda/bin/convert_to_should_syntax b/vendor/plugins/shoulda/bin/convert_to_should_syntax deleted file mode 100755 index ca5b94d..0000000 --- a/vendor/plugins/shoulda/bin/convert_to_should_syntax +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env ruby -require 'fileutils' - -def usage(msg = nil) - puts "Error: #{msg}" if msg - puts if msg - puts "Usage: #{File.basename(__FILE__)} normal_test_file.rb" - puts - puts "Will convert an existing test file with names like " - puts - puts " def test_should_do_stuff" - puts " ..." - puts " end" - puts - puts "to one using the new syntax: " - puts - puts " should \"be super cool\" do" - puts " ..." - puts " end" - puts - puts "A copy of the old file will be left under /tmp/ in case this script just seriously screws up" - puts - exit (msg ? 2 : 0) -end - -usage("Wrong number of arguments.") unless ARGV.size == 1 -usage("This system doesn't have a /tmp directory. wtf?") unless File.directory?('/tmp') - -file = ARGV.shift -tmpfile = "/tmp/#{File.basename(file)}" -usage("File '#{file}' doesn't exist") unless File.exists?(file) - -FileUtils.cp(file, tmpfile) -contents = File.read(tmpfile) -contents.gsub!(/def test_should_(.*)\s*$/, 'should "\1" do') -contents.gsub!(/def test_(.*)\s*$/, 'should "RENAME ME: test \1" do') -contents.gsub!(/should ".*" do$/) {|line| line.tr!('_', ' ')} -File.open(file, 'w') { |f| f.write(contents) } - -puts "File '#{file}' has been converted to 'should' syntax. Old version has been stored in '#{tmpfile}'" diff --git a/vendor/plugins/shoulda/doc/created.rid b/vendor/plugins/shoulda/doc/created.rid deleted file mode 100644 index 86f6583..0000000 --- a/vendor/plugins/shoulda/doc/created.rid +++ /dev/null @@ -1 +0,0 @@ -Sun, 15 Jun 2008 17:11:42 -0400 diff --git a/vendor/plugins/shoulda/doc/files/README.html b/vendor/plugins/shoulda/doc/files/README.html deleted file mode 100644 index e13550d..0000000 --- a/vendor/plugins/shoulda/doc/files/README.html +++ /dev/null @@ -1,256 +0,0 @@ -<?xml version="1.0" encoding="iso-8859-1"?> -<!DOCTYPE html - PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" - "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> - -<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> -<head> - <title>File: README - - - - - - - - - - -
[doc/files/README.html: remainder of the 256-line RDoc-generated XHTML page removed; the rendered text duplicates the README shown above]
diff --git a/vendor/plugins/shoulda/doc/fr_class_index.html b/vendor/plugins/shoulda/doc/fr_class_index.html deleted file mode 100644 index 49ce9e2..0000000 --- a/vendor/plugins/shoulda/doc/fr_class_index.html +++ /dev/null @@ -1,26 +0,0 @@ [26 lines of RDoc class-index frame HTML removed]
diff --git a/vendor/plugins/shoulda/doc/fr_file_index.html b/vendor/plugins/shoulda/doc/fr_file_index.html deleted file mode 100644 index 94ecb65..0000000 --- a/vendor/plugins/shoulda/doc/fr_file_index.html +++ /dev/null @@ -1,27 +0,0 @@ [27 lines of RDoc file-index frame HTML removed; it links only to README]
diff --git a/vendor/plugins/shoulda/doc/fr_method_index.html b/vendor/plugins/shoulda/doc/fr_method_index.html deleted file mode 100644 index cdfab83..0000000 --- a/vendor/plugins/shoulda/doc/fr_method_index.html +++ /dev/null @@ -1,26 +0,0 @@ [26 lines of RDoc method-index frame HTML removed]
- - \ No newline at end of file diff --git a/vendor/plugins/shoulda/doc/index.html b/vendor/plugins/shoulda/doc/index.html deleted file mode 100644 index fa5d583..0000000 --- a/vendor/plugins/shoulda/doc/index.html +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - RDoc Documentation - - - - - - - - - - - \ No newline at end of file diff --git a/vendor/plugins/shoulda/doc/rdoc-style.css b/vendor/plugins/shoulda/doc/rdoc-style.css deleted file mode 100644 index 44c7b3d..0000000 --- a/vendor/plugins/shoulda/doc/rdoc-style.css +++ /dev/null @@ -1,208 +0,0 @@ - -body { - font-family: Verdana,Arial,Helvetica,sans-serif; - font-size: 90%; - margin: 0; - margin-left: 40px; - padding: 0; - background: white; -} - -h1,h2,h3,h4 { margin: 0; color: #efefef; background: transparent; } -h1 { font-size: 150%; } -h2,h3,h4 { margin-top: 1em; } - -a { background: #eef; color: #039; text-decoration: none; } -a:hover { background: #039; color: #eef; } - -/* Override the base stylesheet's Anchor inside a table cell */ -td > a { - background: transparent; - color: #039; - text-decoration: none; -} - -/* and inside a section title */ -.section-title > a { - background: transparent; - color: #eee; - text-decoration: none; -} - -/* === Structural elements =================================== */ - -div#index { - margin: 0; - margin-left: -40px; - padding: 0; - font-size: 90%; -} - - -div#index a { - margin-left: 0.7em; -} - -div#index .section-bar { - margin-left: 0px; - padding-left: 0.7em; - background: #ccc; - font-size: small; -} - - -div#classHeader, div#fileHeader { - width: auto; - color: white; - padding: 0.5em 1.5em 0.5em 1.5em; - margin: 0; - margin-left: -40px; - border-bottom: 3px solid #006; -} - -div#classHeader a, div#fileHeader a { - background: inherit; - color: white; -} - -div#classHeader td, div#fileHeader td { - background: inherit; - color: white; -} - - -div#fileHeader { - background: #057; -} - -div#classHeader { - background: #048; -} - - -.class-name-in-header { - font-size: 180%; - font-weight: bold; -} - - -div#bodyContent { - padding: 0 1.5em 0 1.5em; -} - -div#description { - padding: 0.5em 1.5em; - background: #efefef; - border: 1px dotted #999; -} - -div#description h1,h2,h3,h4,h5,h6 { - color: #125;; - background: transparent; -} - -div#validator-badges { - text-align: center; -} -div#validator-badges img { border: 0; } - -div#copyright { - color: #333; - background: #efefef; - font: 0.75em sans-serif; - margin-top: 5em; - margin-bottom: 0; - padding: 0.5em 2em; -} - - -/* === Classes =================================== */ - -table.header-table { - color: white; - font-size: small; -} - -.type-note { - font-size: small; - color: #DEDEDE; -} - -.xxsection-bar { - background: #eee; - color: #333; - padding: 3px; -} - -.section-bar { - color: #333; - border-bottom: 1px solid #999; - margin-left: -20px; -} - - -.section-title { - background: #79a; - color: #eee; - padding: 3px; - margin-top: 2em; - margin-left: -30px; - border: 1px solid #999; -} - -.top-aligned-row { vertical-align: top } -.bottom-aligned-row { vertical-align: bottom } - -/* --- Context section classes ----------------------- */ - -.context-row { } -.context-item-name { font-family: monospace; font-weight: bold; color: black; } -.context-item-value { font-size: small; color: #448; } -.context-item-desc { color: #333; padding-left: 2em; } - -/* --- Method classes -------------------------- */ -.method-detail { - background: #efefef; - padding: 0; - margin-top: 0.5em; - margin-bottom: 1em; - border: 1px dotted #ccc; -} 
-.method-heading { - color: black; - background: #ccc; - border-bottom: 1px solid #666; - padding: 0.2em 0.5em 0 0.5em; -} -.method-signature { color: black; background: inherit; } -.method-name { font-weight: bold; } -.method-args { font-style: italic; } -.method-description { padding: 0 0.5em 0 0.5em; } - -/* --- Source code sections -------------------- */ - -a.source-toggle { font-size: 90%; } -div.method-source-code { - background: #262626; - color: #ffdead; - margin: 1em; - padding: 0.5em; - border: 1px dashed #999; - overflow: hidden; -} - -div.method-source-code pre { color: #ffdead; overflow: hidden; } - -/* --- Ruby keyword styles --------------------- */ - -.standalone-code { background: #221111; color: #ffdead; overflow: hidden; } - -.ruby-constant { color: #7fffd4; background: transparent; } -.ruby-keyword { color: #00ffff; background: transparent; } -.ruby-ivar { color: #eedd82; background: transparent; } -.ruby-operator { color: #00ffee; background: transparent; } -.ruby-identifier { color: #ffdead; background: transparent; } -.ruby-node { color: #ffa07a; background: transparent; } -.ruby-comment { color: #b22222; font-weight: bold; background: transparent; } -.ruby-regexp { color: #ffa07a; background: transparent; } -.ruby-value { color: #7fffd4; background: transparent; } \ No newline at end of file diff --git a/vendor/plugins/shoulda/init.rb b/vendor/plugins/shoulda/init.rb deleted file mode 100644 index 736e004..0000000 --- a/vendor/plugins/shoulda/init.rb +++ /dev/null @@ -1,3 +0,0 @@ -require 'rubygems' -require 'active_support' -require 'shoulda' \ No newline at end of file diff --git a/vendor/plugins/shoulda/lib/shoulda.rb b/vendor/plugins/shoulda/lib/shoulda.rb deleted file mode 100644 index 5c006bc..0000000 --- a/vendor/plugins/shoulda/lib/shoulda.rb +++ /dev/null @@ -1,45 +0,0 @@ -require 'yaml' -require 'shoulda/private_helpers' -require 'shoulda/general' -require 'shoulda/gem/shoulda' -require 'shoulda/active_record_helpers' -require 'shoulda/controller_tests/controller_tests.rb' - -shoulda_options = {} - -possible_config_paths = [] -possible_config_paths << File.join(ENV["HOME"], ".shoulda.conf") if ENV["HOME"] -possible_config_paths << "shoulda.conf" -possible_config_paths << File.join("test", "shoulda.conf") -possible_config_paths << File.join(RAILS_ROOT, "test", "shoulda.conf") if defined?(RAILS_ROOT) - -possible_config_paths.each do |config_file| - if File.exists? config_file - shoulda_options = YAML.load_file(config_file).symbolize_keys - break - end -end - -require 'shoulda/color' if shoulda_options[:color] - -module Test # :nodoc: all - module Unit - class TestCase - - include ThoughtBot::Shoulda::Controller - include ThoughtBot::Shoulda::General - - class << self - include ThoughtBot::Shoulda::ActiveRecord - end - end - end -end - -module ActionController #:nodoc: all - module Integration - class Session - include ThoughtBot::Shoulda::General - end - end -end diff --git a/vendor/plugins/shoulda/lib/shoulda/active_record_helpers.rb b/vendor/plugins/shoulda/lib/shoulda/active_record_helpers.rb deleted file mode 100644 index 9dfe4e8..0000000 --- a/vendor/plugins/shoulda/lib/shoulda/active_record_helpers.rb +++ /dev/null @@ -1,489 +0,0 @@ -module ThoughtBot # :nodoc: - module Shoulda # :nodoc: - # = Macro test helpers for your active record models - # - # These helpers will test most of the validations and associations for your ActiveRecord models. 
- # - # class UserTest < Test::Unit::TestCase - # should_require_attributes :name, :phone_number - # should_not_allow_values_for :phone_number, "abcd", "1234" - # should_allow_values_for :phone_number, "(123) 456-7890" - # - # should_protect_attributes :password - # - # should_have_one :profile - # should_have_many :dogs - # should_have_many :messes, :through => :dogs - # should_belong_to :lover - # end - # - # For all of these helpers, the last parameter may be a hash of options. - # - module ActiveRecord - # Ensures that the model cannot be saved if one of the attributes listed is not present. - # Requires an existing record. - # - # Options: - # * :message - value the test expects to find in errors.on(:attribute). - # Regexp or string. Default = /blank/ - # - # Example: - # should_require_attributes :name, :phone_number - # - def should_require_attributes(*attributes) - message = get_options!(attributes, :message) - message ||= /blank/ - klass = model_class - - attributes.each do |attribute| - should "require #{attribute} to be set" do - object = klass.new - object.send("#{attribute}=", nil) - assert !object.valid?, "#{klass.name} does not require #{attribute}." - assert object.errors.on(attribute), "#{klass.name} does not require #{attribute}." - assert_contains(object.errors.on(attribute), message) - end - end - end - - # Ensures that the model cannot be saved if one of the attributes listed is not unique. - # Requires an existing record - # - # Options: - # * :message - value the test expects to find in errors.on(:attribute). - # Regexp or string. Default = /taken/ - # - # Example: - # should_require_unique_attributes :keyword, :username - # - def should_require_unique_attributes(*attributes) - message, scope = get_options!(attributes, :message, :scoped_to) - message ||= /taken/ - - klass = model_class - attributes.each do |attribute| - attribute = attribute.to_sym - should "require unique value for #{attribute}#{" scoped to #{scope}" if scope}" do - assert existing = klass.find(:first), "Can't find first #{klass}" - object = klass.new - - object.send(:"#{attribute}=", existing.send(attribute)) - if scope - assert_respond_to object, :"#{scope}=", "#{klass.name} doesn't seem to have a #{scope} attribute." - object.send(:"#{scope}=", existing.send(scope)) - end - - assert !object.valid?, "#{klass.name} does not require a unique value for #{attribute}." - assert object.errors.on(attribute), "#{klass.name} does not require a unique value for #{attribute}." - - assert_contains(object.errors.on(attribute), message) - - if scope - # Now test that the object is valid when changing the scoped attribute - # TODO: actually find all values for scope and create a unique one. - object.send(:"#{scope}=", existing.send(scope).nil? ? 1 : existing.send(scope).next) - object.errors.clear - object.valid? 
- assert_does_not_contain(object.errors.on(attribute), message, - "after :#{scope} set to #{object.send(scope.to_sym)}") - end - end - end - end - - # Ensures that the attribute cannot be set on update - # Requires an existing record - # - # should_protect_attributes :password, :admin_flag - # - def should_protect_attributes(*attributes) - get_options!(attributes) - klass = model_class - attributes.each do |attribute| - attribute = attribute.to_sym - should "not allow #{attribute} to be changed by update" do - assert object = klass.find(:first), "Can't find first #{klass}" - value = object[attribute] - # TODO: 1 may not be a valid value for the attribute (due to validations) - assert object.update_attributes({ attribute => 1 }), - "Cannot update #{klass} with { :#{attribute} => 1 }, #{object.errors.full_messages.to_sentence}" - assert object.valid?, "#{klass} isn't valid after changing #{attribute}" - assert_equal value, object[attribute], "Was able to change #{klass}##{attribute}" - end - end - end - - # Ensures that the attribute cannot be set to the given values - # Requires an existing record - # - # Options: - # * :message - value the test expects to find in errors.on(:attribute). - # Regexp or string. Default = /invalid/ - # - # Example: - # should_not_allow_values_for :isbn, "bad 1", "bad 2" - # - def should_not_allow_values_for(attribute, *bad_values) - message = get_options!(bad_values, :message) - message ||= /invalid/ - klass = model_class - bad_values.each do |v| - should "not allow #{attribute} to be set to \"#{v}\"" do - assert object = klass.find(:first), "Can't find first #{klass}" - object.send("#{attribute}=", v) - assert !object.save, "Saved #{klass} with #{attribute} set to \"#{v}\"" - assert object.errors.on(attribute), "There are no errors set on #{attribute} after being set to \"#{v}\"" - assert_contains(object.errors.on(attribute), message, "when set to \"#{v}\"") - end - end - end - - # Ensures that the attribute can be set to the given values. - # Requires an existing record - # - # Options: - # * :message - value the test expects to find in errors.on(:attribute). - # Regexp or string. Default = /invalid/ - # - # Example: - # should_allow_values_for :isbn, "isbn 1 2345 6789 0", "ISBN 1-2345-6789-0" - # - def should_allow_values_for(attribute, *good_values) - message = get_options!(good_values, :message) - message ||= /invalid/ - klass = model_class - good_values.each do |v| - should "allow #{attribute} to be set to \"#{v}\"" do - assert object = klass.find(:first), "Can't find first #{klass}" - object.send("#{attribute}=", v) - object.save - assert_does_not_contain(object.errors.on(attribute), message, "when set to \"#{v}\"") - end - end - end - - # Ensures that the length of the attribute is in the given range - # Requires an existing record - # - # Options: - # * :short_message - value the test expects to find in errors.on(:attribute). - # Regexp or string. Default = /short/ - # * :long_message - value the test expects to find in errors.on(:attribute). - # Regexp or string. 
Default = /long/ - # - # Example: - # should_ensure_length_in_range :password, (6..20) - # - def should_ensure_length_in_range(attribute, range, opts = {}) - short_message, long_message = get_options!([opts], :short_message, :long_message) - short_message ||= /short/ - long_message ||= /long/ - - klass = model_class - min_length = range.first - max_length = range.last - - if min_length > 0 - min_value = "x" * (min_length - 1) - should "not allow #{attribute} to be less than #{min_length} chars long" do - assert object = klass.find(:first), "Can't find first #{klass}" - object.send("#{attribute}=", min_value) - assert !object.save, "Saved #{klass} with #{attribute} set to \"#{min_value}\"" - assert object.errors.on(attribute), "There are no errors set on #{attribute} after being set to \"#{min_value}\"" - assert_contains(object.errors.on(attribute), short_message, "when set to \"#{min_value}\"") - end - end - - max_value = "x" * (max_length + 1) - should "not allow #{attribute} to be more than #{max_length} chars long" do - assert object = klass.find(:first), "Can't find first #{klass}" - object.send("#{attribute}=", max_value) - assert !object.save, "Saved #{klass} with #{attribute} set to \"#{max_value}\"" - assert object.errors.on(attribute), "There are no errors set on #{attribute} after being set to \"#{max_value}\"" - assert_contains(object.errors.on(attribute), long_message, "when set to \"#{max_value}\"") - end - end - - # Ensure that the attribute is in the range specified - # Requires an existing record - # - # Options: - # * :low_message - value the test expects to find in errors.on(:attribute). - # Regexp or string. Default = /included/ - # * :high_message - value the test expects to find in errors.on(:attribute). - # Regexp or string. Default = /included/ - # - # Example: - # should_ensure_value_in_range :age, (0..100) - # - def should_ensure_value_in_range(attribute, range, opts = {}) - low_message, high_message = get_options!([opts], :low_message, :high_message) - low_message ||= /included/ - high_message ||= /included/ - - klass = model_class - min = range.first - max = range.last - - should "not allow #{attribute} to be less than #{min}" do - v = min - 1 - assert object = klass.find(:first), "Can't find first #{klass}" - object.send("#{attribute}=", v) - assert !object.save, "Saved #{klass} with #{attribute} set to \"#{v}\"" - assert object.errors.on(attribute), "There are no errors set on #{attribute} after being set to \"#{v}\"" - assert_contains(object.errors.on(attribute), low_message, "when set to \"#{v}\"") - end - - should "allow #{attribute} to be #{min}" do - v = min - assert object = klass.find(:first), "Can't find first #{klass}" - object.send("#{attribute}=", v) - object.save - assert_does_not_contain(object.errors.on(attribute), low_message, "when set to \"#{v}\"") - end - - should "not allow #{attribute} to be more than #{max}" do - v = max + 1 - assert object = klass.find(:first), "Can't find first #{klass}" - object.send("#{attribute}=", v) - assert !object.save, "Saved #{klass} with #{attribute} set to \"#{v}\"" - assert object.errors.on(attribute), "There are no errors set on #{attribute} after being set to \"#{v}\"" - assert_contains(object.errors.on(attribute), high_message, "when set to \"#{v}\"") - end - - should "allow #{attribute} to be #{max}" do - v = max - assert object = klass.find(:first), "Can't find first #{klass}" - object.send("#{attribute}=", v) - object.save - assert_does_not_contain(object.errors.on(attribute), high_message, "when set to 
\"#{v}\"") - end - end - - # Ensure that the attribute is numeric - # Requires an existing record - # - # Options: - # * :message - value the test expects to find in errors.on(:attribute). - # Regexp or string. Default = /number/ - # - # Example: - # should_only_allow_numeric_values_for :age - # - def should_only_allow_numeric_values_for(*attributes) - message = get_options!(attributes, :message) - message ||= /number/ - klass = model_class - attributes.each do |attribute| - attribute = attribute.to_sym - should "only allow numeric values for #{attribute}" do - assert object = klass.find(:first), "Can't find first #{klass}" - object.send(:"#{attribute}=", "abcd") - assert !object.valid?, "Instance is still valid" - assert_contains(object.errors.on(attribute), message) - end - end - end - - # Ensures that the has_many relationship exists. - # - # Options: - # * :through - association name for has_many :through - # - # Example: - # should_have_many :friends - # should_have_many :enemies, :through => :friends - # - def should_have_many(*associations) - through = get_options!(associations, :through) - klass = model_class - associations.each do |association| - name = "have many #{association}" - name += " through #{through}" if through - should name do - reflection = klass.reflect_on_association(association) - assert reflection, "#{klass.name} does not have any relationship to #{association}" - assert_equal :has_many, reflection.macro - - if through - through_reflection = klass.reflect_on_association(through) - assert through_reflection, "#{klass.name} does not have any relationship to #{through}" - assert_equal(through, reflection.options[:through]) - end - - unless reflection.options[:through] - # This is not a through association, so check for the existence of the foreign key on the other table - if reflection.options[:foreign_key] - fk = reflection.options[:foreign_key] - elsif reflection.options[:as] - fk = reflection.options[:as].to_s.foreign_key - else - fk = reflection.primary_key_name - end - associated_klass = (reflection.options[:class_name] || association.to_s.classify).constantize - assert associated_klass.column_names.include?(fk.to_s), "#{associated_klass.name} does not have a #{fk} foreign key." - end - end - end - end - - # Ensures that the has_and_belongs_to_many relationship exists. - # - # should_have_and_belong_to_many :posts, :cars - # - def should_have_and_belong_to_many(*associations) - get_options!(associations) - klass = model_class - associations.each do |association| - should "should have and belong to many #{association}" do - assert klass.reflect_on_association(association), "#{klass.name} does not have any relationship to #{association}" - assert_equal :has_and_belongs_to_many, klass.reflect_on_association(association).macro - end - end - end - - # Ensure that the has_one relationship exists. 
- # - # should_have_one :god # unless hindu - # - def should_have_one(*associations) - get_options!(associations) - klass = model_class - associations.each do |association| - should "have one #{association}" do - reflection = klass.reflect_on_association(association) - assert reflection, "#{klass.name} does not have any relationship to #{association}" - assert_equal :has_one, reflection.macro - - if reflection.options[:foreign_key] - fk = reflection.options[:foreign_key] - elsif reflection.options[:as] - fk = reflection.options[:as].to_s.foreign_key - else - fk = klass.name.foreign_key - end - associated_klass = (reflection.options[:class_name] || association.to_s.classify).constantize - assert associated_klass.column_names.include?(fk.to_s), "#{associated_klass.name} does not have a #{fk} foreign key." - end - end - end - - # Ensure that the belongs_to relationship exists. - # - # should_belong_to :parent - # - def should_belong_to(*associations) - get_options!(associations) - klass = model_class - associations.each do |association| - should "belong_to #{association}" do - reflection = klass.reflect_on_association(association) - assert reflection, "#{klass.name} does not have any relationship to #{association}" - assert_equal :belongs_to, reflection.macro - - unless reflection.options[:polymorphic] - associated_klass = (reflection.options[:class_name] || association.to_s.classify).constantize - fk = reflection.options[:foreign_key] || reflection.primary_key_name - assert klass.column_names.include?(fk.to_s), "#{klass.name} does not have a #{fk} foreign key." - end - end - end - end - - # Ensure that the given class methods are defined on the model. - # - # should_have_class_methods :find, :destroy - # - def should_have_class_methods(*methods) - get_options!(methods) - klass = model_class - methods.each do |method| - should "respond to class method #{method}" do - assert_respond_to klass, method, "#{klass.name} does not have class method #{method}" - end - end - end - - # Ensure that the given instance methods are defined on the model. - # - # should_have_instance_methods :email, :name, :name= - # - def should_have_instance_methods(*methods) - get_options!(methods) - klass = model_class - methods.each do |method| - should "respond to instance method #{method}" do - assert_respond_to klass.new, method, "#{klass.name} does not have instance method #{method}" - end - end - end - - # Ensure that the given columns are defined on the models backing SQL table. - # - # should_have_db_columns :id, :email, :name, :created_at - # - def should_have_db_columns(*columns) - column_type = get_options!(columns, :type) - klass = model_class - columns.each do |name| - test_name = "have column #{name}" - test_name += " of type #{column_type}" if column_type - should test_name do - column = klass.columns.detect {|c| c.name == name.to_s } - assert column, "#{klass.name} does not have column #{name}" - end - end - end - - # Ensure that the given column is defined on the models backing SQL table. The options are the same as - # the instance variables defined on the column definition: :precision, :limit, :default, :null, - # :primary, :type, :scale, and :sql_type. 
- # - # should_have_db_column :email, :type => "string", :default => nil, :precision => nil, :limit => 255, - # :null => true, :primary => false, :scale => nil, :sql_type => 'varchar(255)' - # - def should_have_db_column(name, opts = {}) - klass = model_class - test_name = "have column named :#{name}" - test_name += " with options " + opts.inspect unless opts.empty? - should test_name do - column = klass.columns.detect {|c| c.name == name.to_s } - assert column, "#{klass.name} does not have column #{name}" - opts.each do |k, v| - assert_equal column.instance_variable_get("@#{k}").to_s, v.to_s, ":#{name} column on table for #{klass} does not match option :#{k}" - end - end - end - - # Ensures that the model cannot be saved if one of the attributes listed is not accepted. - # - # Options: - # * :message - value the test expects to find in errors.on(:attribute). - # Regexp or string. Default = /must be accepted/ - # - # Example: - # should_require_acceptance_of :eula - # - def should_require_acceptance_of(*attributes) - message = get_options!(attributes, :message) - message ||= /must be accepted/ - klass = model_class - - attributes.each do |attribute| - should "require #{attribute} to be accepted" do - object = klass.new - object.send("#{attribute}=", false) - - assert !object.valid?, "#{klass.name} does not require acceptance of #{attribute}." - assert object.errors.on(attribute), "#{klass.name} does not require acceptance of #{attribute}." - assert_contains(object.errors.on(attribute), message) - end - end - end - - private - - include ThoughtBot::Shoulda::Private - end - end -end diff --git a/vendor/plugins/shoulda/lib/shoulda/color.rb b/vendor/plugins/shoulda/lib/shoulda/color.rb deleted file mode 100644 index 1ccfad2..0000000 --- a/vendor/plugins/shoulda/lib/shoulda/color.rb +++ /dev/null @@ -1,77 +0,0 @@ -require 'test/unit/ui/console/testrunner' - -# Completely stolen from redgreen gem -# -# Adds colored output to your tests. Specify color: true in -# your ~/.shoulda.conf file to enable. -# -# *Bug*: for some reason, this adds another line of output to the end of -# every rake task, as though there was another (empty) set of tests. -# A fix would be most welcome. -# -module ThoughtBot::Shoulda::Color - COLORS = { :clear => 0, :red => 31, :green => 32, :yellow => 33 } # :nodoc: - def self.method_missing(color_name, *args) # :nodoc: - color(color_name) + args.first + color(:clear) - end - def self.color(color) # :nodoc: - "\e[#{COLORS[color.to_sym]}m" - end -end - -module Test # :nodoc: - module Unit # :nodoc: - class TestResult # :nodoc: - alias :old_to_s :to_s - def to_s - if old_to_s =~ /\d+ tests, \d+ assertions, (\d+) failures, (\d+) errors/ - ThoughtBot::Shoulda::Color.send($1.to_i != 0 || $2.to_i != 0 ? 
:red : :green, $&) - end - end - end - - class AutoRunner # :nodoc: - alias :old_initialize :initialize - def initialize(standalone) - old_initialize(standalone) - @runner = proc do |r| - Test::Unit::UI::Console::RedGreenTestRunner - end - end - end - - class Failure # :nodoc: - alias :old_long_display :long_display - def long_display - # old_long_display.sub('Failure', ThoughtBot::Shoulda::Color.red('Failure')) - ThoughtBot::Shoulda::Color.red(old_long_display) - end - end - - class Error # :nodoc: - alias :old_long_display :long_display - def long_display - # old_long_display.sub('Error', ThoughtBot::Shoulda::Color.yellow('Error')) - ThoughtBot::Shoulda::Color.yellow(old_long_display) - end - end - - module UI # :nodoc: - module Console # :nodoc: - class RedGreenTestRunner < Test::Unit::UI::Console::TestRunner # :nodoc: - def output_single(something, level=NORMAL) - return unless (output?(level)) - something = case something - when '.' then ThoughtBot::Shoulda::Color.green('.') - when 'F' then ThoughtBot::Shoulda::Color.red("F") - when 'E' then ThoughtBot::Shoulda::Color.yellow("E") - else something - end - @io.write(something) - @io.flush - end - end - end - end - end -end diff --git a/vendor/plugins/shoulda/lib/shoulda/controller_tests/controller_tests.rb b/vendor/plugins/shoulda/lib/shoulda/controller_tests/controller_tests.rb deleted file mode 100644 index 41ae92b..0000000 --- a/vendor/plugins/shoulda/lib/shoulda/controller_tests/controller_tests.rb +++ /dev/null @@ -1,465 +0,0 @@ -module ThoughtBot # :nodoc: - module Shoulda # :nodoc: - module Controller - def self.included(other) # :nodoc: - other.class_eval do - extend ThoughtBot::Shoulda::Controller::ClassMethods - include ThoughtBot::Shoulda::Controller::InstanceMethods - ThoughtBot::Shoulda::Controller::ClassMethods::VALID_FORMATS.each do |format| - include "ThoughtBot::Shoulda::Controller::#{format.to_s.upcase}".constantize - end - end - end - - # = Macro test helpers for your controllers - # - # By using the macro helpers you can quickly and easily create concise and easy to read test suites. - # - # This code segment: - # context "on GET to :show for first record" do - # setup do - # get :show, :id => 1 - # end - # - # should_assign_to :user - # should_respond_with :success - # should_render_template :show - # should_not_set_the_flash - # - # should "do something else really cool" do - # assert_equal 1, assigns(:user).id - # end - # end - # - # Would produce 5 tests for the +show+ action - # - # Furthermore, the should_be_restful helper will create an entire set of tests which will verify that your - # controller responds restfully to a variety of requested formats. - module ClassMethods - # Formats tested by #should_be_restful. Defaults to [:html, :xml] - VALID_FORMATS = Dir.glob(File.join(File.dirname(__FILE__), 'formats', '*.rb')).map { |f| File.basename(f, '.rb') }.map(&:to_sym) # :doc: - VALID_FORMATS.each {|f| require "shoulda/controller_tests/formats/#{f}.rb"} - - # Actions tested by #should_be_restful - VALID_ACTIONS = [:index, :show, :new, :edit, :create, :update, :destroy] # :doc: - - # A ResourceOptions object is passed into should_be_restful in order to configure the tests for your controller. - # - # Example: - # class UsersControllerTest < Test::Unit::TestCase - # load_all_fixtures - # - # def setup - # ...normal setup code... 
- # @user = User.find(:first) - # end - # - # should_be_restful do |resource| - # resource.identifier = :id - # resource.klass = User - # resource.object = :user - # resource.parent = [] - # resource.actions = [:index, :show, :new, :edit, :update, :create, :destroy] - # resource.formats = [:html, :xml] - # - # resource.create.params = { :name => "bob", :email => 'bob@bob.com', :age => 13} - # resource.update.params = { :name => "sue" } - # - # resource.create.redirect = "user_url(@user)" - # resource.update.redirect = "user_url(@user)" - # resource.destroy.redirect = "users_url" - # - # resource.create.flash = /created/i - # resource.update.flash = /updated/i - # resource.destroy.flash = /removed/i - # end - # end - # - # Whenever possible, the resource attributes will be set to sensible defaults. - # - class ResourceOptions - # Configuration options for the create, update, destroy actions under should_be_restful - class ActionOptions - # String evaled to get the target of the redirection. - # All of the instance variables set by the controller will be available to the - # evaled code. - # - # Example: - # resource.create.redirect = "user_url(@user.company, @user)" - # - # Defaults to a generated url based on the name of the controller, the action, and the resource.parents list. - attr_accessor :redirect - - # String or Regexp describing a value expected in the flash. Will match against any flash key. - # - # Defaults: - # destroy:: /removed/ - # create:: /created/ - # update:: /updated/ - attr_accessor :flash - - # Hash describing the params that should be sent in with this action. - attr_accessor :params - end - - # Configuration options for the denied actions under should_be_restful - # - # Example: - # context "The public" do - # setup do - # @request.session[:logged_in] = false - # end - # - # should_be_restful do |resource| - # resource.parent = :user - # - # resource.denied.actions = [:index, :show, :edit, :new, :create, :update, :destroy] - # resource.denied.flash = /get outta here/i - # resource.denied.redirect = 'new_session_url' - # end - # end - # - class DeniedOptions - # String evaled to get the target of the redirection. - # All of the instance variables set by the controller will be available to the - # evaled code. - # - # Example: - # resource.create.redirect = "user_url(@user.company, @user)" - attr_accessor :redirect - - # String or Regexp describing a value expected in the flash. Will match against any flash key. - # - # Example: - # resource.create.flash = /created/ - attr_accessor :flash - - # Actions that should be denied (only used by resource.denied). Note that these actions will - # only be tested if they are also listed in +resource.actions+ - # The special value of :all will deny all of the REST actions. - attr_accessor :actions - end - - # Name of key in params that references the primary key. - # Will almost always be :id (default), unless you are using a plugin or have patched rails. - attr_accessor :identifier - - # Name of the ActiveRecord class this resource is responsible for. Automatically determined from - # test class if not explicitly set. UserTest => :user - attr_accessor :klass - - # Name of the instantiated ActiveRecord object that should be used by some of the tests. - # Defaults to the underscored name of the AR class. CompanyManager => :company_manager - attr_accessor :object - - # Name of the parent AR objects. - # - # Example: - # # in the routes... 
- # map.resources :companies do - # map.resources :people do - # map.resources :limbs - # end - # end - # - # # in the tests... - # class PeopleControllerTest < Test::Unit::TestCase - # should_be_restful do |resource| - # resource.parent = :companies - # end - # end - # - # class LimbsControllerTest < Test::Unit::TestCase - # should_be_restful do |resource| - # resource.parents = [:companies, :people] - # end - # end - attr_accessor :parent - alias parents parent - alias parents= parent= - - # Actions that should be tested. Must be a subset of VALID_ACTIONS (default). - # Tests for each actionw will only be generated if the action is listed here. - # The special value of :all will test all of the REST actions. - # - # Example (for a read-only controller): - # resource.actions = [:show, :index] - attr_accessor :actions - - # Formats that should be tested. Must be a subset of VALID_FORMATS (default). - # Each action will be tested against the formats listed here. The special value - # of :all will test all of the supported formats. - # - # Example: - # resource.actions = [:html, :xml] - attr_accessor :formats - - # ActionOptions object specifying options for the create action. - attr_accessor :create - - # ActionOptions object specifying options for the update action. - attr_accessor :update - - # ActionOptions object specifying options for the desrtoy action. - attr_accessor :destroy - - # DeniedOptions object specifying which actions should return deny a request, and what should happen in that case. - attr_accessor :denied - - def initialize # :nodoc: - @create = ActionOptions.new - @update = ActionOptions.new - @destroy = ActionOptions.new - @denied = DeniedOptions.new - - @create.flash ||= /created/i - @update.flash ||= /updated/i - @destroy.flash ||= /removed/i - @denied.flash ||= /denied/i - - @create.params ||= {} - @update.params ||= {} - - @actions = VALID_ACTIONS - @formats = VALID_FORMATS - @denied.actions = [] - end - - def normalize!(target) # :nodoc: - @denied.actions = VALID_ACTIONS if @denied.actions == :all - @actions = VALID_ACTIONS if @actions == :all - @formats = VALID_FORMATS if @formats == :all - - @denied.actions = @denied.actions.map(&:to_sym) - @actions = @actions.map(&:to_sym) - @formats = @formats.map(&:to_sym) - - ensure_valid_members(@actions, VALID_ACTIONS, 'actions') - ensure_valid_members(@denied.actions, VALID_ACTIONS, 'denied.actions') - ensure_valid_members(@formats, VALID_FORMATS, 'formats') - - @identifier ||= :id - @klass ||= target.name.gsub(/ControllerTest$/, '').singularize.constantize - @object ||= @klass.name.tableize.singularize - @parent ||= [] - @parent = [@parent] unless @parent.is_a? Array - - collection_helper = [@parent, @object.to_s.pluralize, 'url'].flatten.join('_') - collection_args = @parent.map {|n| "@#{object}.#{n}"}.join(', ') - @destroy.redirect ||= "#{collection_helper}(#{collection_args})" - - member_helper = [@parent, @object, 'url'].flatten.join('_') - member_args = [@parent.map {|n| "@#{object}.#{n}"}, "@#{object}"].flatten.join(', ') - @create.redirect ||= "#{member_helper}(#{member_args})" - @update.redirect ||= "#{member_helper}(#{member_args})" - @denied.redirect ||= "new_session_url" - end - - private - - def ensure_valid_members(ary, valid_members, name) # :nodoc: - invalid = ary - valid_members - raise ArgumentError, "Unsupported #{name}: #{invalid.inspect}" unless invalid.empty? - end - end - - # :section: should_be_restful - # Generates a full suite of tests for a restful controller. 
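The defaults derived by ResourceOptions#normalize! above are easiest to see with a concrete case. The sketch below is illustrative only and is not part of this diff; Post and User are assumed example models, and the values simply follow the helper-name construction in normalize!:

  # Hypothetical PostsControllerTest using the vendored macro above:
  #
  #   should_be_restful do |resource|
  #     resource.parent = :user
  #   end
  #
  # After resource.normalize!(PostsControllerTest), the derived defaults would be:
  #
  #   resource.klass            # => Post   (test class name minus "ControllerTest", singularized)
  #   resource.object           # => "post"
  #   resource.destroy.redirect # => "user_posts_url(@post.user)"        collection helper + parent args
  #   resource.create.redirect  # => "user_post_url(@post.user, @post)"  member helper + parent args + record
  #   resource.update.redirect  # => "user_post_url(@post.user, @post)"
  #   resource.denied.redirect  # => "new_session_url"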
- # - # The following definition will generate tests for the +index+, +show+, +new+, - # +edit+, +create+, +update+ and +destroy+ actions, in both +html+ and +xml+ formats: - # - # should_be_restful do |resource| - # resource.parent = :user - # - # resource.create.params = { :title => "first post", :body => 'blah blah blah'} - # resource.update.params = { :title => "changed" } - # end - # - # This generates about 40 tests, all of the format: - # "on GET to :show should assign @user." - # "on GET to :show should not set the flash." - # "on GET to :show should render 'show' template." - # "on GET to :show should respond with success." - # "on GET to :show as xml should assign @user." - # "on GET to :show as xml should have ContentType set to 'application/xml'." - # "on GET to :show as xml should respond with success." - # "on GET to :show as xml should return as the root element." - # The +resource+ parameter passed into the block is a ResourceOptions object, and - # is used to configure the tests for the details of your resources. - # - def should_be_restful(&blk) # :yields: resource - resource = ResourceOptions.new - blk.call(resource) - resource.normalize!(self) - - resource.formats.each do |format| - resource.actions.each do |action| - if self.respond_to? :"make_#{action}_#{format}_tests" - self.send(:"make_#{action}_#{format}_tests", resource) - else - should "test #{action} #{format}" do - flunk "Test for #{action} as #{format} not implemented" - end - end - end - end - end - - # :section: Test macros - - # Macro that creates a test asserting that the flash contains the given value. - # val can be a String, a Regex, or nil (indicating that the flash should not be set) - # - # Example: - # - # should_set_the_flash_to "Thank you for placing this order." - # should_set_the_flash_to /created/i - # should_set_the_flash_to nil - def should_set_the_flash_to(val) - if val - should "have #{val.inspect} in the flash" do - assert_contains flash.values, val, ", Flash: #{flash.inspect}" - end - else - should "not set the flash" do - assert_equal({}, flash, "Flash was set to:\n#{flash.inspect}") - end - end - end - - # Macro that creates a test asserting that the flash is empty. Same as - # @should_set_the_flash_to nil@ - def should_not_set_the_flash - should_set_the_flash_to nil - end - - # Macro that creates a test asserting that the controller assigned to @name - # - # Example: - # - # should_assign_to :user - def should_assign_to(name) - should "assign @#{name}" do - assert assigns(name.to_sym), "The action isn't assigning to @#{name}" - end - end - - # Macro that creates a test asserting that the controller did not assign to @name - # - # Example: - # - # should_not_assign_to :user - def should_not_assign_to(name) - should "not assign to @#{name}" do - assert !assigns(name.to_sym), "@#{name} was visible" - end - end - - # Macro that creates a test asserting that the controller responded with a 'response' status code. - # Example: - # - # should_respond_with :success - def should_respond_with(response) - should "respond with #{response}" do - assert_response response - end - end - - # Macro that creates a test asserting that the controller rendered the given template. - # Example: - # - # should_render_template :new - def should_render_template(template) - should "render '#{template}' template" do - assert_template template.to_s - end - end - - # Macro that creates a test asserting that the controller returned a redirect to the given path. 
- # The given string is evaled to produce the resulting redirect path. All of the instance variables - # set by the controller are available to the evaled string. - # Example: - # - # should_redirect_to '"/"' - # should_redirect_to "users_url(@user)" - def should_redirect_to(url) - should "redirect to \"#{url}\"" do - instantiate_variables_from_assigns do - assert_redirected_to eval(url, self.send(:binding), __FILE__, __LINE__) - end - end - end - - # Macro that creates a test asserting that the rendered view contains a
element. - def should_render_a_form - should "display a form" do - assert_select "form", true, "The template doesn't contain a element" - end - end - end - - module InstanceMethods # :nodoc: - - private # :enddoc: - - SPECIAL_INSTANCE_VARIABLES = %w{ - _cookies - _flash - _headers - _params - _request - _response - _session - action_name - before_filter_chain_aborted - cookies - flash - headers - ignore_missing_templates - logger - params - request - request_origin - response - session - template - template_class - template_root - url - variables_added - }.map(&:to_s) - - def instantiate_variables_from_assigns(*names, &blk) - old = {} - names = (@response.template.assigns.keys - SPECIAL_INSTANCE_VARIABLES) if names.empty? - names.each do |name| - old[name] = instance_variable_get("@#{name}") - instance_variable_set("@#{name}", assigns(name.to_sym)) - end - blk.call - names.each do |name| - instance_variable_set("@#{name}", old[name]) - end - end - - def get_existing_record(res) # :nodoc: - returning(instance_variable_get("@#{res.object}")) do |record| - assert(record, "This test requires you to set @#{res.object} in your setup block") - end - end - - def make_parent_params(resource, record = nil, parent_names = nil) # :nodoc: - parent_names ||= resource.parents.reverse - return {} if parent_names == [] # Base case - parent_name = parent_names.shift - parent = record ? record.send(parent_name) : parent_name.to_s.classify.constantize.find(:first) - - { :"#{parent_name}_id" => parent.id }.merge(make_parent_params(resource, parent, parent_names)) - end - - end - end - end -end - diff --git a/vendor/plugins/shoulda/lib/shoulda/controller_tests/formats/html.rb b/vendor/plugins/shoulda/lib/shoulda/controller_tests/formats/html.rb deleted file mode 100644 index 1cc9f72..0000000 --- a/vendor/plugins/shoulda/lib/shoulda/controller_tests/formats/html.rb +++ /dev/null @@ -1,197 +0,0 @@ -module ThoughtBot # :nodoc: - module Shoulda # :nodoc: - module Controller # :nodoc: - module HTML # :nodoc: all - def self.included(other) - other.class_eval do - extend ThoughtBot::Shoulda::Controller::HTML::ClassMethods - end - end - - module ClassMethods - def make_show_html_tests(res) - context "on GET to :show" do - setup do - record = get_existing_record(res) - parent_params = make_parent_params(res, record) - get :show, parent_params.merge({ res.identifier => record.to_param }) - end - - if res.denied.actions.include?(:show) - should_not_assign_to res.object - should_redirect_to res.denied.redirect - should_set_the_flash_to res.denied.flash - else - should_assign_to res.object - should_respond_with :success - should_render_template :show - should_not_set_the_flash - end - end - end - - def make_edit_html_tests(res) - context "on GET to :edit" do - setup do - @record = get_existing_record(res) - parent_params = make_parent_params(res, @record) - get :edit, parent_params.merge({ res.identifier => @record.to_param }) - end - - if res.denied.actions.include?(:edit) - should_not_assign_to res.object - should_redirect_to res.denied.redirect - should_set_the_flash_to res.denied.flash - else - should_assign_to res.object - should_respond_with :success - should_render_template :edit - should_not_set_the_flash - should_render_a_form - should "set @#{res.object} to requested instance" do - assert_equal @record, assigns(res.object) - end - end - end - end - - def make_index_html_tests(res) - context "on GET to :index" do - setup do - record = get_existing_record(res) rescue nil - parent_params = make_parent_params(res, 
record) - get(:index, parent_params) - end - - if res.denied.actions.include?(:index) - should_not_assign_to res.object.to_s.pluralize - should_redirect_to res.denied.redirect - should_set_the_flash_to res.denied.flash - else - should_respond_with :success - should_assign_to res.object.to_s.pluralize - should_render_template :index - should_not_set_the_flash - end - end - end - - def make_new_html_tests(res) - context "on GET to :new" do - setup do - record = get_existing_record(res) rescue nil - parent_params = make_parent_params(res, record) - get(:new, parent_params) - end - - if res.denied.actions.include?(:new) - should_not_assign_to res.object - should_redirect_to res.denied.redirect - should_set_the_flash_to res.denied.flash - else - should_respond_with :success - should_assign_to res.object - should_not_set_the_flash - should_render_template :new - should_render_a_form - end - end - end - - def make_destroy_html_tests(res) - context "on DELETE to :destroy" do - setup do - @record = get_existing_record(res) - parent_params = make_parent_params(res, @record) - delete :destroy, parent_params.merge({ res.identifier => @record.to_param }) - end - - if res.denied.actions.include?(:destroy) - should_redirect_to res.denied.redirect - should_set_the_flash_to res.denied.flash - - should "not destroy record" do - assert_nothing_raised { assert @record.reload } - end - else - should_set_the_flash_to res.destroy.flash - if res.destroy.redirect.is_a? Symbol - should_respond_with res.destroy.redirect - else - should_redirect_to res.destroy.redirect - end - - should "destroy record" do - assert_raises(::ActiveRecord::RecordNotFound, "@#{res.object} was not destroyed.") do - @record.reload - end - end - end - end - end - - def make_create_html_tests(res) - context "on POST to :create with #{res.create.params.inspect}" do - setup do - record = get_existing_record(res) rescue nil - parent_params = make_parent_params(res, record) - @count = res.klass.count - post :create, parent_params.merge(res.object => res.create.params) - end - - if res.denied.actions.include?(:create) - should_redirect_to res.denied.redirect - should_set_the_flash_to res.denied.flash - should_not_assign_to res.object - - should "not create new record" do - assert_equal @count, res.klass.count - end - else - should_assign_to res.object - should_set_the_flash_to res.create.flash - if res.create.redirect.is_a? Symbol - should_respond_with res.create.redirect - else - should_redirect_to res.create.redirect - end - - should "not have errors on @#{res.object}" do - assert_equal [], pretty_error_messages(assigns(res.object)), "@#{res.object} has errors:" - end - end - end - end - - def make_update_html_tests(res) - context "on PUT to :update with #{res.create.params.inspect}" do - setup do - @record = get_existing_record(res) - parent_params = make_parent_params(res, @record) - put :update, parent_params.merge(res.identifier => @record.to_param, res.object => res.update.params) - end - - if res.denied.actions.include?(:update) - should_not_assign_to res.object - should_redirect_to res.denied.redirect - should_set_the_flash_to res.denied.flash - else - should_assign_to res.object - should_set_the_flash_to(res.update.flash) - if res.update.redirect.is_a? 
Symbol - should_respond_with res.update.redirect - else - should_redirect_to res.update.redirect - end - - should "not have errors on @#{res.object}" do - assert_equal [], pretty_error_messages(assigns(res.object)), "@#{res.object} has errors:" - end - end - end - end - end - end - end - end -end diff --git a/vendor/plugins/shoulda/lib/shoulda/controller_tests/formats/xml.rb b/vendor/plugins/shoulda/lib/shoulda/controller_tests/formats/xml.rb deleted file mode 100644 index 8a9bf8a..0000000 --- a/vendor/plugins/shoulda/lib/shoulda/controller_tests/formats/xml.rb +++ /dev/null @@ -1,170 +0,0 @@ -module ThoughtBot # :nodoc: - module Shoulda # :nodoc: - module Controller # :nodoc: - module XML - def self.included(other) #:nodoc: - other.class_eval do - extend ThoughtBot::Shoulda::Controller::XML::ClassMethods - end - end - - module ClassMethods - # Macro that creates a test asserting that the controller responded with an XML content-type - # and that the XML contains ++ as the root element. - def should_respond_with_xml_for(name = nil) - should "have ContentType set to 'application/xml'" do - assert_xml_response - end - - if name - should "return <#{name}/> as the root element" do - body = @response.body.first(100).map {|l| " #{l}"} - assert_select name.to_s.dasherize, 1, "Body:\n#{body}...\nDoes not have <#{name}/> as the root element." - end - end - end - alias should_respond_with_xml should_respond_with_xml_for - - protected - - def make_show_xml_tests(res) # :nodoc: - context "on GET to :show as xml" do - setup do - request_xml - record = get_existing_record(res) - parent_params = make_parent_params(res, record) - get :show, parent_params.merge({ res.identifier => record.to_param }) - end - - if res.denied.actions.include?(:show) - should_not_assign_to res.object - should_respond_with 401 - else - should_assign_to res.object - should_respond_with :success - should_respond_with_xml_for res.object - end - end - end - - def make_edit_xml_tests(res) # :nodoc: - # XML doesn't need an :edit action - end - - def make_new_xml_tests(res) # :nodoc: - # XML doesn't need a :new action - end - - def make_index_xml_tests(res) # :nodoc: - context "on GET to :index as xml" do - setup do - request_xml - parent_params = make_parent_params(res) - get(:index, parent_params) - end - - if res.denied.actions.include?(:index) - should_not_assign_to res.object.to_s.pluralize - should_respond_with 401 - else - should_respond_with :success - should_respond_with_xml_for res.object.to_s.pluralize - should_assign_to res.object.to_s.pluralize - end - end - end - - def make_destroy_xml_tests(res) # :nodoc: - context "on DELETE to :destroy as xml" do - setup do - request_xml - @record = get_existing_record(res) - parent_params = make_parent_params(res, @record) - delete :destroy, parent_params.merge({ res.identifier => @record.to_param }) - end - - if res.denied.actions.include?(:destroy) - should_respond_with 401 - - should "not destroy record" do - assert @record.reload - end - else - should "destroy record" do - assert_raises(::ActiveRecord::RecordNotFound, "@#{res.object} was not destroyed.") do - @record.reload - end - end - end - end - end - - def make_create_xml_tests(res) # :nodoc: - context "on POST to :create as xml" do - setup do - request_xml - parent_params = make_parent_params(res) - @count = res.klass.count - post :create, parent_params.merge(res.object => res.create.params) - end - - if res.denied.actions.include?(:create) - should_respond_with 401 - should_not_assign_to res.object - - should "not create 
new record" do - assert_equal @count, res.klass.count - end - else - should_assign_to res.object - - should "not have errors on @#{res.object}" do - assert_equal [], pretty_error_messages(assigns(res.object)), "@#{res.object} has errors:" - end - end - end - end - - def make_update_xml_tests(res) # :nodoc: - context "on PUT to :update as xml" do - setup do - request_xml - @record = get_existing_record(res) - parent_params = make_parent_params(res, @record) - put :update, parent_params.merge(res.identifier => @record.to_param, res.object => res.update.params) - end - - if res.denied.actions.include?(:update) - should_not_assign_to res.object - should_respond_with 401 - else - should_assign_to res.object - - should "not have errors on @#{res.object}" do - assert_equal [], assigns(res.object).errors.full_messages, "@#{res.object} has errors:" - end - end - end - end - end - - # Sets the next request's format to 'application/xml' - def request_xml - @request.accept = "application/xml" - end - - # Asserts that the controller's response was 'application/xml' - def assert_xml_response - content_type = (@response.headers["Content-Type"] || @response.headers["type"]).to_s - regex = %r{\bapplication/xml\b} - - msg = "Content Type '#{content_type.inspect}' doesn't match '#{regex.inspect}'\n" - msg += "Body: #{@response.body.first(100).chomp} ..." - - assert_match regex, content_type, msg - end - - end - end - end -end diff --git a/vendor/plugins/shoulda/lib/shoulda/gem/proc_extensions.rb b/vendor/plugins/shoulda/lib/shoulda/gem/proc_extensions.rb deleted file mode 100644 index 0d577df..0000000 --- a/vendor/plugins/shoulda/lib/shoulda/gem/proc_extensions.rb +++ /dev/null @@ -1,14 +0,0 @@ -# Stolen straight from ActiveSupport - -class Proc #:nodoc: - def bind(object) - block, time = self, Time.now - (class << object; self end).class_eval do - method_name = "__bind_#{time.to_i}_#{time.usec}" - define_method(method_name, &block) - method = instance_method(method_name) - remove_method(method_name) - method - end.bind(object) - end -end diff --git a/vendor/plugins/shoulda/lib/shoulda/gem/shoulda.rb b/vendor/plugins/shoulda/lib/shoulda/gem/shoulda.rb deleted file mode 100644 index e10660f..0000000 --- a/vendor/plugins/shoulda/lib/shoulda/gem/shoulda.rb +++ /dev/null @@ -1,240 +0,0 @@ -require File.join(File.dirname(__FILE__), 'proc_extensions') - -module Thoughtbot - module Shoulda - class << self - attr_accessor :current_context - end - - VERSION = '1.1.0' - - # = Should statements - # - # Should statements are just syntactic sugar over normal Test::Unit test methods. A should block - # contains all the normal code and assertions you're used to seeing, with the added benefit that - # they can be wrapped inside context blocks (see below). - # - # == Example: - # - # class UserTest << Test::Unit::TestCase - # - # def setup - # @user = User.new("John", "Doe") - # end - # - # should "return its full name" - # assert_equal 'John Doe', @user.full_name - # end - # - # end - # - # ...will produce the following test: - # * "test: User should return its full name. " - # - # Note: The part before should in the test name is gleamed from the name of the Test::Unit class. 
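# Illustrative sketch, not part of the deleted plugin source: the context/should
# DSL documented above, written out as a complete test class. Assumes a User
# model with a full_name method, as in the comment's own example.
class UserTest < Test::Unit::TestCase
  context "A User instance" do
    setup do
      @user = User.new("John", "Doe")
    end

    should "return its full name" do
      assert_equal 'John Doe', @user.full_name
    end
  end
end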
- - def should(name, &blk) - if Shoulda.current_context - Shoulda.current_context.should(name, &blk) - else - context_name = self.name.gsub(/Test/, "") - context = Thoughtbot::Shoulda::Context.new(context_name, self) do - should(name, &blk) - end - context.build - end - end - - # Just like should, but never runs, and instead prints an 'X' in the Test::Unit output. - def should_eventually(name, &blk) - context_name = self.name.gsub(/Test/, "") - context = Thoughtbot::Shoulda::Context.new(context_name, self) do - should_eventually(name, &blk) - end - context.build - end - - # = Contexts - # - # A context block groups should statements under a common set of setup/teardown methods. - # Context blocks can be arbitrarily nested, and can do wonders for improving the maintainability - # and readability of your test code. - # - # A context block can contain setup, should, should_eventually, and teardown blocks. - # - # class UserTest << Test::Unit::TestCase - # context "A User instance" do - # setup do - # @user = User.find(:first) - # end - # - # should "return its full name" - # assert_equal 'John Doe', @user.full_name - # end - # end - # end - # - # This code will produce the method "test: A User instance should return its full name. ". - # - # Contexts may be nested. Nested contexts run their setup blocks from out to in before each - # should statement. They then run their teardown blocks from in to out after each should statement. - # - # class UserTest << Test::Unit::TestCase - # context "A User instance" do - # setup do - # @user = User.find(:first) - # end - # - # should "return its full name" - # assert_equal 'John Doe', @user.full_name - # end - # - # context "with a profile" do - # setup do - # @user.profile = Profile.find(:first) - # end - # - # should "return true when sent :has_profile?" - # assert @user.has_profile? - # end - # end - # end - # end - # - # This code will produce the following methods - # * "test: A User instance should return its full name. " - # * "test: A User instance with a profile should return true when sent :has_profile?. " - # - # Just like should statements, a context block can exist next to normal def test_the_old_way; end - # tests. This means you do not have to fully commit to the context/should syntax in a test file. - - def context(name, &blk) - if Shoulda.current_context - Shoulda.current_context.context(name, &blk) - else - context = Thoughtbot::Shoulda::Context.new(name, self, &blk) - context.build - end - end - - class Context # :nodoc: - - attr_accessor :name # my name - attr_accessor :parent # may be another context, or the original test::unit class. 
- attr_accessor :subcontexts # array of contexts nested under myself - attr_accessor :setup_block # block given via a setup method - attr_accessor :teardown_block # block given via a teardown method - attr_accessor :shoulds # array of hashes representing the should statements - attr_accessor :should_eventuallys # array of hashes representing the should eventually statements - - def initialize(name, parent, &blk) - Shoulda.current_context = self - self.name = name - self.parent = parent - self.setup_block = nil - self.teardown_block = nil - self.shoulds = [] - self.should_eventuallys = [] - self.subcontexts = [] - - blk.bind(self).call - Shoulda.current_context = nil - end - - def context(name, &blk) - subcontexts << Context.new(name, self, &blk) - Shoulda.current_context = self - end - - def setup(&blk) - self.setup_block = blk - end - - def teardown(&blk) - self.teardown_block = blk - end - - def should(name, &blk) - self.shoulds << { :name => name, :block => blk } - end - - def should_eventually(name, &blk) - self.should_eventuallys << { :name => name, :block => blk } - end - - def full_name - parent_name = parent.full_name if am_subcontext? - return [parent_name, name].join(" ").strip - end - - def am_subcontext? - parent.is_a?(self.class) # my parent is the same class as myself. - end - - def test_unit_class - am_subcontext? ? parent.test_unit_class : parent - end - - def create_test_from_should_hash(should) - test_name = ["test:", full_name, "should", "#{should[:name]}. "].flatten.join(' ').to_sym - - if test_unit_class.instance_methods.include?(test_name.to_s) - puts "'#{test_name}' is already defined" - #raise ArgumentError, "'#{test_name}' is already defined" - end - - context = self - test_unit_class.send(:define_method, test_name) do |*args| - begin - context.run_all_setup_blocks(self) - should[:block].bind(self).call - ensure - context.run_all_teardown_blocks(self) - end - end - end - - def run_all_setup_blocks(binding) - self.parent.run_all_setup_blocks(binding) if am_subcontext? - setup_block.bind(binding).call if setup_block - end - - def run_all_teardown_blocks(binding) - teardown_block.bind(binding).call if teardown_block - self.parent.run_all_teardown_blocks(binding) if am_subcontext? - end - - def print_should_eventuallys - should_eventuallys.each do |should| - test_name = [full_name, "should", "#{should[:name]}. 
"].flatten.join(' ') - puts " * DEFERRED: " + test_name - end - subcontexts.each { |context| context.print_should_eventuallys } - end - - def build - shoulds.each do |should| - create_test_from_should_hash(should) - end - - subcontexts.each { |context| context.build } - - print_should_eventuallys - end - - def method_missing(method, *args, &blk) - test_unit_class.send(method, *args, &blk) - end - - end - end -end - -module Test # :nodoc: all - module Unit - class TestCase - extend Thoughtbot::Shoulda - end - end -end - diff --git a/vendor/plugins/shoulda/lib/shoulda/general.rb b/vendor/plugins/shoulda/lib/shoulda/general.rb deleted file mode 100644 index a785a31..0000000 --- a/vendor/plugins/shoulda/lib/shoulda/general.rb +++ /dev/null @@ -1,105 +0,0 @@ -module ThoughtBot # :nodoc: - module Shoulda # :nodoc: - module General - def self.included(other) # :nodoc: - other.class_eval do - extend ThoughtBot::Shoulda::General::ClassMethods - # include ThoughtBot::Shoulda::General::InstanceMethods - end - end - - module ClassMethods - # Loads all fixture files (test/fixtures/*.yml) - def load_all_fixtures - all_fixtures = Dir.glob(File.join(Test::Unit::TestCase.fixture_path, "*.yml")).collect do |f| - File.basename(f, '.yml').to_sym - end - fixtures *all_fixtures - end - end - - # Prints a message to stdout, tagged with the name of the calling method. - def report!(msg = "") - puts("#{caller.first}: #{msg}") - end - - # Asserts that two arrays contain the same elements, the same number of times. Essentially ==, but unordered. - # - # assert_same_elements([:a, :b, :c], [:c, :a, :b]) => passes - def assert_same_elements(a1, a2, msg = nil) - [:select, :inject, :size].each do |m| - [a1, a2].each {|a| assert_respond_to(a, m, "Are you sure that #{a.inspect} is an array? It doesn't respond to #{m}.") } - end - - assert a1h = a1.inject({}) { |h,e| h[e] = a1.select { |i| i == e }.size; h } - assert a2h = a2.inject({}) { |h,e| h[e] = a2.select { |i| i == e }.size; h } - - assert_equal(a1h, a2h, msg) - end - - # Asserts that the given collection contains item x. If x is a regular expression, ensure that - # at least one element from the collection matches x. +extra_msg+ is appended to the error message if the assertion fails. - # - # assert_contains(['a', '1'], /\d/) => passes - # assert_contains(['a', '1'], 'a') => passes - # assert_contains(['a', '1'], /not there/) => fails - def assert_contains(collection, x, extra_msg = "") - collection = [collection] unless collection.is_a?(Array) - msg = "#{x.inspect} not found in #{collection.to_a.inspect} " + extra_msg - case x - when Regexp: assert(collection.detect { |e| e =~ x }, msg) - else assert(collection.include?(x), msg) - end - end - - # Asserts that the given collection does not contain item x. If x is a regular expression, ensure that - # none of the elements from the collection match x. 
- def assert_does_not_contain(collection, x, extra_msg = "") - collection = [collection] unless collection.is_a?(Array) - msg = "#{x.inspect} found in #{collection.to_a.inspect} " + extra_msg - case x - when Regexp: assert(!collection.detect { |e| e =~ x }, msg) - else assert(!collection.include?(x), msg) - end - end - - # Asserts that the given object can be saved - # - # assert_save User.new(params) - def assert_save(obj) - assert obj.save, "Errors: #{pretty_error_messages obj}" - obj.reload - end - - # Asserts that the given object is valid - # - # assert_valid User.new(params) - def assert_valid(obj) - assert obj.valid?, "Errors: #{pretty_error_messages obj}" - end - - # Asserts that the block uses ActionMailer to send emails - # - # assert_sends_email(2) { Mailer.deliver_messages } - def assert_sends_email(num = 1, &blk) - ActionMailer::Base.deliveries.clear - blk.call - msg = "Sent #{ActionMailer::Base.deliveries.size} emails, when #{num} expected:\n" - ActionMailer::Base.deliveries.each { |m| msg << " '#{m.subject}' sent to #{m.to.to_sentence}\n" } - assert(num == ActionMailer::Base.deliveries.size, msg) - end - - # Asserts that the block does not send emails thorough ActionMailer - # - # assert_does_not_send_email { # do nothing } - def assert_does_not_send_email(&blk) - assert_sends_email 0, &blk - end - - def pretty_error_messages(obj) - obj.errors.map { |a, m| "#{m} (#{obj.send(a).inspect})" } - end - - end - end -end diff --git a/vendor/plugins/shoulda/lib/shoulda/private_helpers.rb b/vendor/plugins/shoulda/lib/shoulda/private_helpers.rb deleted file mode 100644 index 73118c6..0000000 --- a/vendor/plugins/shoulda/lib/shoulda/private_helpers.rb +++ /dev/null @@ -1,17 +0,0 @@ -module ThoughtBot # :nodoc: - module Shoulda # :nodoc: - module Private # :nodoc: - def get_options!(args, *wanted) - ret = [] - opts = (args.last.is_a?(Hash) ? args.pop : {}) - wanted.each {|w| ret << opts.delete(w)} - raise ArgumentError, "Unsuported options given: #{opts.keys.join(', ')}" unless opts.keys.empty? - return *ret - end - - def model_class - self.name.gsub(/Test$/, '').constantize - end - end - end -end diff --git a/vendor/plugins/shoulda/tasks/list_tests.rake b/vendor/plugins/shoulda/tasks/list_tests.rake deleted file mode 100644 index cad270f..0000000 --- a/vendor/plugins/shoulda/tasks/list_tests.rake +++ /dev/null @@ -1,40 +0,0 @@ -namespace :shoulda do - desc "List the names of the test methods in a specification like format" - task :list do - - require 'test/unit' - require 'rubygems' - require 'active_support' - - # bug in test unit. Set to true to stop from running. 
- Test::Unit.run = true - - test_files = Dir.glob(File.join('test', '**', '*_test.rb')) - test_files.each do |file| - load file - klass = File.basename(file, '.rb').classify.constantize - - puts - puts "#{klass.name.gsub(/Test$/, '')}" - test_methods = klass.instance_methods.grep(/^test/).map {|s| s.gsub(/^test: /, '')}.sort - test_methods.each {|m| puts " - #{m}" } - # puts "#{klass.name.gsub(/Test$/, '')}" - # test_methods = klass.instance_methods.grep(/^test/).sort - # - # method_hash = test_methods.inject({}) do |h, name| - # header = name.gsub(/^test: (.*)should.*$/, '\1') - # test = name.gsub(/^test:.*should (.*)$/, '\1') - # h[header] ||= [] - # h[header] << test - # h - # end - # - # method_hash.keys.sort.each do |header| - # puts " #{header.chomp} should" - # method_hash[header].each do |test| - # puts " - #{test}" - # end - # end - end - end -end diff --git a/vendor/plugins/shoulda/tasks/yaml_to_shoulda.rake b/vendor/plugins/shoulda/tasks/yaml_to_shoulda.rake deleted file mode 100644 index 8303011..0000000 --- a/vendor/plugins/shoulda/tasks/yaml_to_shoulda.rake +++ /dev/null @@ -1,28 +0,0 @@ -namespace :shoulda do - # From http://blog.internautdesign.com/2007/11/2/a-yaml_to_shoulda-rake-task - # David.Lowenfels@gmail.com - desc "Converts a YAML file (FILE=./path/to/yaml) into a Shoulda skeleton" - task :from_yaml do - require 'yaml' - - def yaml_to_context(hash, indent = 0) - indent1 = ' ' * indent - indent2 = ' ' * (indent + 1) - hash.each_pair do |context, shoulds| - puts indent1 + "context \"#{context}\" do" - puts - shoulds.each do |should| - yaml_to_context( should, indent + 1 ) and next if should.is_a?( Hash ) - puts indent2 + "should_eventually \"" + should.gsub(/^should +/,'') + "\" do" - puts indent2 + "end" - puts - end - puts indent1 + "end" - end - end - - puts("Please pass in a FILE argument.") and exit unless ENV['FILE'] - - yaml_to_context( YAML.load_file( ENV['FILE'] ) ) - end -end \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/README b/vendor/plugins/shoulda/test/README deleted file mode 100644 index 3906de5..0000000 --- a/vendor/plugins/shoulda/test/README +++ /dev/null @@ -1,8 +0,0 @@ -The tests for should have two dependencies that I know of: - -* Rails version 1.2.3 -* A working sqlite3 installation. - -If you have problems running these tests, please notify the shoulda mailing list: shoulda@googlegroups.com - -- Tammer Saleh \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/fixtures/posts.yml b/vendor/plugins/shoulda/test/fixtures/posts.yml deleted file mode 100644 index f5cd7b6..0000000 --- a/vendor/plugins/shoulda/test/fixtures/posts.yml +++ /dev/null @@ -1,5 +0,0 @@ -first: - id: 1 - title: My Cute Kitten! 
- body: This is totally a cute kitten - user_id: 1 diff --git a/vendor/plugins/shoulda/test/fixtures/taggings.yml b/vendor/plugins/shoulda/test/fixtures/taggings.yml deleted file mode 100644 index e69de29..0000000 diff --git a/vendor/plugins/shoulda/test/fixtures/tags.yml b/vendor/plugins/shoulda/test/fixtures/tags.yml deleted file mode 100644 index 3ef6292..0000000 --- a/vendor/plugins/shoulda/test/fixtures/tags.yml +++ /dev/null @@ -1,9 +0,0 @@ -first: - id: 1 - name: Stuff -second: - id: 2 - name: Rails -third: - id: 3 - name: Nothing \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/fixtures/users.yml b/vendor/plugins/shoulda/test/fixtures/users.yml deleted file mode 100644 index cfe60c3..0000000 --- a/vendor/plugins/shoulda/test/fixtures/users.yml +++ /dev/null @@ -1,5 +0,0 @@ -first: - id: 1 - name: Some dude - age: 2 - email: none@none.com diff --git a/vendor/plugins/shoulda/test/functional/posts_controller_test.rb b/vendor/plugins/shoulda/test/functional/posts_controller_test.rb deleted file mode 100644 index 6fdeb58..0000000 --- a/vendor/plugins/shoulda/test/functional/posts_controller_test.rb +++ /dev/null @@ -1,43 +0,0 @@ -require File.dirname(__FILE__) + '/../test_helper' -require 'posts_controller' - -# Re-raise errors caught by the controller. -class PostsController; def rescue_action(e) raise e end; end - -class PostsControllerTest < Test::Unit::TestCase - load_all_fixtures - - def setup - @controller = PostsController.new - @request = ActionController::TestRequest.new - @response = ActionController::TestResponse.new - @post = Post.find(:first) - end - - context "The public" do - setup do - @request.session[:logged_in] = false - end - - should_be_restful do |resource| - resource.parent = :user - - resource.denied.actions = [:index, :show, :edit, :new, :create, :update, :destroy] - resource.denied.flash = /what/i - resource.denied.redirect = '"/"' - end - end - - context "Logged in" do - setup do - @request.session[:logged_in] = true - end - - should_be_restful do |resource| - resource.parent = :user - - resource.create.params = { :title => "first post", :body => 'blah blah blah'} - resource.update.params = { :title => "changed" } - end - end -end diff --git a/vendor/plugins/shoulda/test/functional/users_controller_test.rb b/vendor/plugins/shoulda/test/functional/users_controller_test.rb deleted file mode 100644 index 6b48e26..0000000 --- a/vendor/plugins/shoulda/test/functional/users_controller_test.rb +++ /dev/null @@ -1,36 +0,0 @@ -require File.dirname(__FILE__) + '/../test_helper' -require 'users_controller' - -# Re-raise errors caught by the controller. 
-class UsersController; def rescue_action(e) raise e end; end - -class UsersControllerTest < Test::Unit::TestCase - load_all_fixtures - - def setup - @controller = UsersController.new - @request = ActionController::TestRequest.new - @response = ActionController::TestResponse.new - @user = User.find(:first) - end - - should_be_restful do |resource| - resource.identifier = :id - resource.klass = User - resource.object = :user - resource.parent = [] - resource.actions = [:index, :show, :new, :edit, :update, :create, :destroy] - resource.formats = [:html, :xml] - - resource.create.params = { :name => "bob", :email => 'bob@bob.com', :age => 13} - resource.update.params = { :name => "sue" } - - resource.create.redirect = "user_url(@user)" - resource.update.redirect = "user_url(@user)" - resource.destroy.redirect = "users_url" - - resource.create.flash = /created/i - resource.update.flash = /updated/i - resource.destroy.flash = /removed/i - end -end diff --git a/vendor/plugins/shoulda/test/other/context_test.rb b/vendor/plugins/shoulda/test/other/context_test.rb deleted file mode 100644 index 6d93153..0000000 --- a/vendor/plugins/shoulda/test/other/context_test.rb +++ /dev/null @@ -1,71 +0,0 @@ -require File.join(File.dirname(__FILE__), '..', 'test_helper') - -class ContextTest < Test::Unit::TestCase # :nodoc: - - context "context with setup block" do - setup do - @blah = "blah" - end - - should "have @blah == 'blah'" do - assert_equal "blah", @blah - end - - should "have name set right" do - assert_match(/^test: context with setup block/, self.to_s) - end - - context "and a subcontext" do - setup do - @blah = "#{@blah} twice" - end - - should "be named correctly" do - assert_match(/^test: context with setup block and a subcontext should be named correctly/, self.to_s) - end - - should "run the setup methods in order" do - assert_equal @blah, "blah twice" - end - end - end - - context "another context with setup block" do - setup do - @blah = "foo" - end - - should "have @blah == 'foo'" do - assert_equal "foo", @blah - end - - should "have name set right" do - assert_match(/^test: another context with setup block/, self.to_s) - end - end - - context "context with method definition" do - setup do - def hello; "hi"; end - end - - should "be able to read that method" do - assert_equal "hi", hello - end - - should "have name set right" do - assert_match(/^test: context with method definition/, self.to_s) - end - end - - context "another context" do - should "not define @blah" do - assert_nil @blah - end - end - - should_eventually "should pass, since it's unimplemented" do - flunk "what?" 
- end - -end diff --git a/vendor/plugins/shoulda/test/other/helpers_test.rb b/vendor/plugins/shoulda/test/other/helpers_test.rb deleted file mode 100644 index a225393..0000000 --- a/vendor/plugins/shoulda/test/other/helpers_test.rb +++ /dev/null @@ -1,40 +0,0 @@ -require File.join(File.dirname(__FILE__), '..', 'test_helper') - -class Val - @@val = 0 - def self.val; @@val; end - def self.inc(i=1); @@val += i; end -end - -class HelpersTest < Test::Unit::TestCase # :nodoc: - - context "an array of values" do - setup do - @a = ['abc', 'def', 3] - end - - [/b/, 'abc', 3].each do |x| - should "contain #{x.inspect}" do - assert_raises(Test::Unit::AssertionFailedError) do - assert_does_not_contain @a, x - end - assert_contains @a, x - end - end - - should "not contain 'wtf'" do - assert_raises(Test::Unit::AssertionFailedError) {assert_contains @a, 'wtf'} - assert_does_not_contain @a, 'wtf' - end - - should "be the same as another array, ordered differently" do - assert_same_elements(@a, [3, "def", "abc"]) - assert_raises(Test::Unit::AssertionFailedError) do - assert_same_elements(@a, [3, 3, "def", "abc"]) - end - assert_raises(Test::Unit::AssertionFailedError) do - assert_same_elements([@a, "abc"].flatten, [3, 3, "def", "abc"]) - end - end - end -end diff --git a/vendor/plugins/shoulda/test/other/private_helpers_test.rb b/vendor/plugins/shoulda/test/other/private_helpers_test.rb deleted file mode 100644 index 9c35999..0000000 --- a/vendor/plugins/shoulda/test/other/private_helpers_test.rb +++ /dev/null @@ -1,26 +0,0 @@ -require File.join(File.dirname(__FILE__), '..', 'test_helper') - -class PrivateHelpersTest < Test::Unit::TestCase # :nodoc: - include ThoughtBot::Shoulda::ActiveRecord - context "get_options!" do - should "remove opts from args" do - args = [:a, :b, {}] - get_options!(args) - assert_equal [:a, :b], args - end - - should "return wanted opts in order" do - args = [{:one => 1, :two => 2}] - one, two = get_options!(args, :one, :two) - assert_equal 1, one - assert_equal 2, two - end - - should "raise ArgumentError if given unwanted option" do - args = [{:one => 1, :two => 2}] - assert_raises ArgumentError do - get_options!(args, :one) - end - end - end -end diff --git a/vendor/plugins/shoulda/test/rails_root/Rakefile b/vendor/plugins/shoulda/test/rails_root/Rakefile deleted file mode 100644 index 3bb0e85..0000000 --- a/vendor/plugins/shoulda/test/rails_root/Rakefile +++ /dev/null @@ -1,10 +0,0 @@ -# Add your own tasks in files placed in lib/tasks ending in .rake, -# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake. - -require(File.join(File.dirname(__FILE__), 'config', 'boot')) - -require 'rake' -require 'rake/testtask' -require 'rake/rdoctask' - -require 'tasks/rails' diff --git a/vendor/plugins/shoulda/test/rails_root/app/controllers/application.rb b/vendor/plugins/shoulda/test/rails_root/app/controllers/application.rb deleted file mode 100644 index 10fa987..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/controllers/application.rb +++ /dev/null @@ -1,25 +0,0 @@ -# Filters added to this controller apply to all controllers in the application. -# Likewise, all the methods added will be available for all controllers. 
- -class ApplicationController < ActionController::Base - # Pick a unique cookie name to distinguish our session data from others' - session :session_key => '_rails_root_session_id' - - def ensure_logged_in - unless session[:logged_in] - respond_to do |accepts| - accepts.html do - flash[:error] = 'What do you think you\'re doing?' - redirect_to '/' - end - accepts.xml do - headers["Status"] = "Unauthorized" - headers["WWW-Authenticate"] = %(Basic realm="Web Password") - render :text => "Couldn't authenticate you", :status => '401 Unauthorized' - end - end - return false - end - return true - end -end diff --git a/vendor/plugins/shoulda/test/rails_root/app/controllers/posts_controller.rb b/vendor/plugins/shoulda/test/rails_root/app/controllers/posts_controller.rb deleted file mode 100644 index 3a7d578..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/controllers/posts_controller.rb +++ /dev/null @@ -1,78 +0,0 @@ -class PostsController < ApplicationController - before_filter :ensure_logged_in - before_filter :load_user - - def index - @posts = @user.posts - - respond_to do |format| - format.html # index.rhtml - format.xml { render :xml => @posts.to_xml } - end - end - - def show - @post = @user.posts.find(params[:id]) - - respond_to do |format| - format.html # show.rhtml - format.xml { render :xml => @post.to_xml } - end - end - - def new - @post = @user.posts.build - end - - def edit - @post = @user.posts.find(params[:id]) - end - - def create - @post = @user.posts.build(params[:post]) - - respond_to do |format| - if @post.save - flash[:notice] = 'Post was successfully created.' - format.html { redirect_to user_post_url(@post.user, @post) } - format.xml { head :created, :location => user_post_url(@post.user, @post) } - else - format.html { render :action => "new" } - format.xml { render :xml => @post.errors.to_xml } - end - end - end - - def update - @post = @user.posts.find(params[:id]) - - respond_to do |format| - if @post.update_attributes(params[:post]) - flash[:notice] = 'Post was successfully updated.' 
- format.html { redirect_to user_post_url(@post.user, @post) } - format.xml { head :ok } - else - format.html { render :action => "edit" } - format.xml { render :xml => @post.errors.to_xml } - end - end - end - - def destroy - @post = @user.posts.find(params[:id]) - @post.destroy - - flash[:notice] = "Post was removed" - - respond_to do |format| - format.html { redirect_to user_posts_url(@post.user) } - format.xml { head :ok } - end - end - - private - - def load_user - @user = User.find(params[:user_id]) - end -end diff --git a/vendor/plugins/shoulda/test/rails_root/app/controllers/users_controller.rb b/vendor/plugins/shoulda/test/rails_root/app/controllers/users_controller.rb deleted file mode 100644 index 4fdef93..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/controllers/users_controller.rb +++ /dev/null @@ -1,81 +0,0 @@ -class UsersController < ApplicationController - # GET /users - # GET /users.xml - def index - @users = User.find(:all) - - respond_to do |format| - format.html # index.rhtml - format.xml { render :xml => @users.to_xml } - end - end - - # GET /users/1 - # GET /users/1.xml - def show - @user = User.find(params[:id]) - - respond_to do |format| - format.html # show.rhtml - format.xml { render :xml => @user.to_xml } - end - end - - # GET /users/new - def new - @user = User.new - end - - # GET /users/1;edit - def edit - @user = User.find(params[:id]) - end - - # POST /users - # POST /users.xml - def create - @user = User.new(params[:user]) - - respond_to do |format| - if @user.save - flash[:notice] = 'User was successfully created.' - format.html { redirect_to user_url(@user) } - format.xml { head :created, :location => user_url(@user) } - else - format.html { render :action => "new" } - format.xml { render :xml => @user.errors.to_xml } - end - end - end - - # PUT /users/1 - # PUT /users/1.xml - def update - @user = User.find(params[:id]) - - respond_to do |format| - if @user.update_attributes(params[:user]) - flash[:notice] = 'User was successfully updated.' - format.html { redirect_to user_url(@user) } - format.xml { head :ok } - else - format.html { render :action => "edit" } - format.xml { render :xml => @user.errors.to_xml } - end - end - end - - # DELETE /users/1 - # DELETE /users/1.xml - def destroy - @user = User.find(params[:id]) - @user.destroy - - flash[:notice] = "User was removed" - - respond_to do |format| - format.html { redirect_to users_url } - format.xml { head :ok } - end - end -end diff --git a/vendor/plugins/shoulda/test/rails_root/app/helpers/application_helper.rb b/vendor/plugins/shoulda/test/rails_root/app/helpers/application_helper.rb deleted file mode 100644 index 22a7940..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/helpers/application_helper.rb +++ /dev/null @@ -1,3 +0,0 @@ -# Methods added to this helper will be available to all templates in the application. 
-module ApplicationHelper -end diff --git a/vendor/plugins/shoulda/test/rails_root/app/helpers/posts_helper.rb b/vendor/plugins/shoulda/test/rails_root/app/helpers/posts_helper.rb deleted file mode 100644 index a7b8cec..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/helpers/posts_helper.rb +++ /dev/null @@ -1,2 +0,0 @@ -module PostsHelper -end diff --git a/vendor/plugins/shoulda/test/rails_root/app/helpers/users_helper.rb b/vendor/plugins/shoulda/test/rails_root/app/helpers/users_helper.rb deleted file mode 100644 index 2310a24..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/helpers/users_helper.rb +++ /dev/null @@ -1,2 +0,0 @@ -module UsersHelper -end diff --git a/vendor/plugins/shoulda/test/rails_root/app/models/dog.rb b/vendor/plugins/shoulda/test/rails_root/app/models/dog.rb deleted file mode 100644 index 2532fee..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/models/dog.rb +++ /dev/null @@ -1,3 +0,0 @@ -class Dog < ActiveRecord::Base - belongs_to :user, :foreign_key => :owner_id -end diff --git a/vendor/plugins/shoulda/test/rails_root/app/models/post.rb b/vendor/plugins/shoulda/test/rails_root/app/models/post.rb deleted file mode 100644 index 5b840b9..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/models/post.rb +++ /dev/null @@ -1,11 +0,0 @@ -class Post < ActiveRecord::Base - belongs_to :user - belongs_to :owner, :foreign_key => :user_id, :class_name => 'User' - has_many :taggings - has_many :tags, :through => :taggings - - validates_uniqueness_of :title - validates_presence_of :title - validates_presence_of :body, :message => 'Seriously... wtf' - validates_numericality_of :user_id -end diff --git a/vendor/plugins/shoulda/test/rails_root/app/models/tag.rb b/vendor/plugins/shoulda/test/rails_root/app/models/tag.rb deleted file mode 100644 index 9e52fdf..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/models/tag.rb +++ /dev/null @@ -1,4 +0,0 @@ -class Tag < ActiveRecord::Base - has_many :taggings - has_many :posts, :through => :taggings -end diff --git a/vendor/plugins/shoulda/test/rails_root/app/models/tagging.rb b/vendor/plugins/shoulda/test/rails_root/app/models/tagging.rb deleted file mode 100644 index 9b8fb6b..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/models/tagging.rb +++ /dev/null @@ -1,4 +0,0 @@ -class Tagging < ActiveRecord::Base - belongs_to :post - belongs_to :tag -end diff --git a/vendor/plugins/shoulda/test/rails_root/app/models/user.rb b/vendor/plugins/shoulda/test/rails_root/app/models/user.rb deleted file mode 100644 index 883ca5c..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/models/user.rb +++ /dev/null @@ -1,11 +0,0 @@ -class User < ActiveRecord::Base - has_many :posts - has_many :dogs, :foreign_key => :owner_id - - attr_protected :password - - validates_format_of :email, :with => /\w*@\w*.com/ - validates_length_of :email, :in => 1..100 - validates_inclusion_of :age, :in => 1..100 - validates_acceptance_of :eula -end diff --git a/vendor/plugins/shoulda/test/rails_root/app/views/layouts/posts.rhtml b/vendor/plugins/shoulda/test/rails_root/app/views/layouts/posts.rhtml deleted file mode 100644 index 9a0e16b..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/views/layouts/posts.rhtml +++ /dev/null @@ -1,17 +0,0 @@ - - - - - - Posts: <%= controller.action_name %> - <%= stylesheet_link_tag 'scaffold' %> - - - -

<%= flash[:notice] %>

- -<%= yield %> - - - diff --git a/vendor/plugins/shoulda/test/rails_root/app/views/layouts/users.rhtml b/vendor/plugins/shoulda/test/rails_root/app/views/layouts/users.rhtml deleted file mode 100644 index 23757aa..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/views/layouts/users.rhtml +++ /dev/null @@ -1,17 +0,0 @@ - - - - - - Users: <%= controller.action_name %> - <%= stylesheet_link_tag 'scaffold' %> - - - -

<%= flash[:notice] %>

- -<%= yield %> - - - diff --git a/vendor/plugins/shoulda/test/rails_root/app/views/posts/edit.rhtml b/vendor/plugins/shoulda/test/rails_root/app/views/posts/edit.rhtml deleted file mode 100644 index 65bd4fe..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/views/posts/edit.rhtml +++ /dev/null @@ -1,27 +0,0 @@ -

-Editing post
-<%= error_messages_for :post %>
-<% form_for(:post, :url => user_post_path(@post.user, @post), :html => { :method => :put }) do |f| %>
-  User  <%= f.text_field :user_id %>
-  Title <%= f.text_field :title %>
-  Body  <%= f.text_area :body %>
-  <%= submit_tag "Update" %>
-<% end %> - -<%= link_to 'Show', user_post_path(@post.user, @post) %> | -<%= link_to 'Back', user_posts_path(@post.user) %> diff --git a/vendor/plugins/shoulda/test/rails_root/app/views/posts/index.rhtml b/vendor/plugins/shoulda/test/rails_root/app/views/posts/index.rhtml deleted file mode 100644 index f6bb3bd..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/views/posts/index.rhtml +++ /dev/null @@ -1,25 +0,0 @@ -

-Listing posts
-  User  Title  Body
-<% for post in @posts %>
-  <%=h post.user_id %>  <%=h post.title %>  <%=h post.body %>
-  <%= link_to 'Show', user_post_path(post.user, post) %>
-  <%= link_to 'Edit', edit_user_post_path(post.user, post) %>
-  <%= link_to 'Destroy', user_post_path(post.user, post), :confirm => 'Are you sure?', :method => :delete %>
-<% end %>
- -<%= link_to 'New post', new_user_post_path(post.user) %> diff --git a/vendor/plugins/shoulda/test/rails_root/app/views/posts/new.rhtml b/vendor/plugins/shoulda/test/rails_root/app/views/posts/new.rhtml deleted file mode 100644 index 216f1e0..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/views/posts/new.rhtml +++ /dev/null @@ -1,26 +0,0 @@ -

-New post
-<%= error_messages_for :post %>
-<% form_for(:post, :url => user_posts_path(@user)) do |f| %>
-  User  <%= f.text_field :user_id %>
-  Title <%= f.text_field :title %>
-  Body  <%= f.text_area :body %>
-  <%= submit_tag "Create" %>
-<% end %> - -<%= link_to 'Back', user_posts_path(@user) %> diff --git a/vendor/plugins/shoulda/test/rails_root/app/views/posts/show.rhtml b/vendor/plugins/shoulda/test/rails_root/app/views/posts/show.rhtml deleted file mode 100644 index b9d3791..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/views/posts/show.rhtml +++ /dev/null @@ -1,18 +0,0 @@ -

-  User:  <%=h @post.user_id %>
-  Title: <%=h @post.title %>
-  Body:  <%=h @post.body %>
- - -<%= link_to 'Edit', edit_user_post_path(@post.user, @post) %> | -<%= link_to 'Back', user_posts_path(@post.user) %> diff --git a/vendor/plugins/shoulda/test/rails_root/app/views/users/edit.rhtml b/vendor/plugins/shoulda/test/rails_root/app/views/users/edit.rhtml deleted file mode 100644 index 9c47ad5..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/views/users/edit.rhtml +++ /dev/null @@ -1,22 +0,0 @@ -

-Editing user
-<%= error_messages_for :user %>
-<% form_for(:user, :url => user_path(@user), :html => { :method => :put }) do |f| %>
-  Email <%= f.text_field :email %>
-  Age   <%= f.text_field :age %>
-  <%= submit_tag "Update" %>
-<% end %> - -<%= link_to 'Show', user_path(@user) %> | -<%= link_to 'Back', users_path %> \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/app/views/users/index.rhtml b/vendor/plugins/shoulda/test/rails_root/app/views/users/index.rhtml deleted file mode 100644 index 83f2e91..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/views/users/index.rhtml +++ /dev/null @@ -1,22 +0,0 @@ -

-Listing users
-  Email  Age
-<% for user in @users %>
-  <%=h user.email %>  <%=h user.age %>
-  <%= link_to 'Show', user_path(user) %>
-  <%= link_to 'Edit', edit_user_path(user) %>
-  <%= link_to 'Destroy', user_path(user), :confirm => 'Are you sure?', :method => :delete %>
-<% end %>
- -<%= link_to 'New user', new_user_path %> \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/app/views/users/new.rhtml b/vendor/plugins/shoulda/test/rails_root/app/views/users/new.rhtml deleted file mode 100644 index 6f2c3a4..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/views/users/new.rhtml +++ /dev/null @@ -1,21 +0,0 @@ -

-New user
-<%= error_messages_for :user %>
-<% form_for(:user, :url => users_path) do |f| %>
-  Email <%= f.text_field :email %>
-  Age   <%= f.text_field :age %>
-  <%= submit_tag "Create" %>
-<% end %> - -<%= link_to 'Back', users_path %> \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/app/views/users/show.rhtml b/vendor/plugins/shoulda/test/rails_root/app/views/users/show.rhtml deleted file mode 100644 index bdcad8a..0000000 --- a/vendor/plugins/shoulda/test/rails_root/app/views/users/show.rhtml +++ /dev/null @@ -1,13 +0,0 @@ -

-  Email: <%=h @user.email %>
-  Age:   <%=h @user.age %>
- - -<%= link_to 'Edit', edit_user_path(@user) %> | -<%= link_to 'Back', users_path %> \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/config/boot.rb b/vendor/plugins/shoulda/test/rails_root/config/boot.rb deleted file mode 100644 index b7af0c3..0000000 --- a/vendor/plugins/shoulda/test/rails_root/config/boot.rb +++ /dev/null @@ -1,45 +0,0 @@ -# Don't change this file. Configuration is done in config/environment.rb and config/environments/*.rb - -unless defined?(RAILS_ROOT) - root_path = File.join(File.dirname(__FILE__), '..') - - unless RUBY_PLATFORM =~ /(:?mswin|mingw)/ - require 'pathname' - root_path = Pathname.new(root_path).cleanpath(true).to_s - end - - RAILS_ROOT = root_path -end - -unless defined?(Rails::Initializer) - if File.directory?("#{RAILS_ROOT}/vendor/rails") - require "#{RAILS_ROOT}/vendor/rails/railties/lib/initializer" - else - require 'rubygems' - - environment_without_comments = IO.readlines(File.dirname(__FILE__) + '/environment.rb').reject { |l| l =~ /^#/ }.join - environment_without_comments =~ /[^#]RAILS_GEM_VERSION = '([\d.]+)'/ - rails_gem_version = $1 - - if version = defined?(RAILS_GEM_VERSION) ? RAILS_GEM_VERSION : rails_gem_version - # Asking for 1.1.6 will give you 1.1.6.5206, if available -- makes it easier to use beta gems - rails_gem = Gem.cache.search('rails', "~>#{version}.0").sort_by { |g| g.version.version }.last - - if rails_gem - gem "rails", "=#{rails_gem.version.version}" - require rails_gem.full_gem_path + '/lib/initializer' - else - STDERR.puts %(Cannot find gem for Rails ~>#{version}.0: - Install the missing gem with 'gem install -v=#{version} rails', or - change environment.rb to define RAILS_GEM_VERSION with your desired version. - ) - exit 1 - end - else - gem "rails" - require 'initializer' - end - end - - Rails::Initializer.run(:set_load_path) -end diff --git a/vendor/plugins/shoulda/test/rails_root/config/database.yml b/vendor/plugins/shoulda/test/rails_root/config/database.yml deleted file mode 100644 index dd7027f..0000000 --- a/vendor/plugins/shoulda/test/rails_root/config/database.yml +++ /dev/null @@ -1,4 +0,0 @@ -sqlite3: - :adapter: sqlite3 - # :dbfile: db/sqlite3.db - :dbfile: ":memory:" diff --git a/vendor/plugins/shoulda/test/rails_root/config/environment.rb b/vendor/plugins/shoulda/test/rails_root/config/environment.rb deleted file mode 100644 index 0e737ec..0000000 --- a/vendor/plugins/shoulda/test/rails_root/config/environment.rb +++ /dev/null @@ -1,18 +0,0 @@ -# Specifies gem version of Rails to use when vendor/rails is not present -old_verbose, $VERBOSE = $VERBOSE, nil -RAILS_GEM_VERSION = '2.0.2' -$VERBOSE = old_verbose - -require File.join(File.dirname(__FILE__), 'boot') - -Rails::Initializer.run do |config| - # Someday, I'm going to find a way of getting rid of that symlink... - # config.plugin_paths = ['../../../'] - # config.plugins = [:shoulda] - config.log_level = :debug - config.cache_classes = false - config.whiny_nils = true - # config.load_paths << File.join(File.dirname(__FILE__), *%w{.. .. .. 
lib}) -end - -# Dependencies.log_activity = true \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/config/environments/sqlite3.rb b/vendor/plugins/shoulda/test/rails_root/config/environments/sqlite3.rb deleted file mode 100644 index e69de29..0000000 diff --git a/vendor/plugins/shoulda/test/rails_root/config/routes.rb b/vendor/plugins/shoulda/test/rails_root/config/routes.rb deleted file mode 100644 index ae2bddd..0000000 --- a/vendor/plugins/shoulda/test/rails_root/config/routes.rb +++ /dev/null @@ -1,6 +0,0 @@ -ActionController::Routing::Routes.draw do |map| - - map.resources :posts - map.resources :users, :has_many => :posts - -end diff --git a/vendor/plugins/shoulda/test/rails_root/db/migrate/001_create_users.rb b/vendor/plugins/shoulda/test/rails_root/db/migrate/001_create_users.rb deleted file mode 100644 index 3c6423e..0000000 --- a/vendor/plugins/shoulda/test/rails_root/db/migrate/001_create_users.rb +++ /dev/null @@ -1,13 +0,0 @@ -class CreateUsers < ActiveRecord::Migration - def self.up - create_table :users do |t| - t.column :name, :string - t.column :email, :string - t.column :age, :integer - end - end - - def self.down - drop_table :users - end -end diff --git a/vendor/plugins/shoulda/test/rails_root/db/migrate/002_create_posts.rb b/vendor/plugins/shoulda/test/rails_root/db/migrate/002_create_posts.rb deleted file mode 100644 index 9ed4deb..0000000 --- a/vendor/plugins/shoulda/test/rails_root/db/migrate/002_create_posts.rb +++ /dev/null @@ -1,13 +0,0 @@ -class CreatePosts < ActiveRecord::Migration - def self.up - create_table :posts do |t| - t.column :user_id, :integer - t.column :title, :string - t.column :body, :text - end - end - - def self.down - drop_table :posts - end -end diff --git a/vendor/plugins/shoulda/test/rails_root/db/migrate/003_create_taggings.rb b/vendor/plugins/shoulda/test/rails_root/db/migrate/003_create_taggings.rb deleted file mode 100644 index e163a0a..0000000 --- a/vendor/plugins/shoulda/test/rails_root/db/migrate/003_create_taggings.rb +++ /dev/null @@ -1,12 +0,0 @@ -class CreateTaggings < ActiveRecord::Migration - def self.up - create_table :taggings do |t| - t.column :post_id, :integer - t.column :tag_id, :integer - end - end - - def self.down - drop_table :taggings - end -end diff --git a/vendor/plugins/shoulda/test/rails_root/db/migrate/004_create_tags.rb b/vendor/plugins/shoulda/test/rails_root/db/migrate/004_create_tags.rb deleted file mode 100644 index dc58c4f..0000000 --- a/vendor/plugins/shoulda/test/rails_root/db/migrate/004_create_tags.rb +++ /dev/null @@ -1,11 +0,0 @@ -class CreateTags < ActiveRecord::Migration - def self.up - create_table :tags do |t| - t.column :name, :string - end - end - - def self.down - drop_table :tags - end -end diff --git a/vendor/plugins/shoulda/test/rails_root/db/migrate/005_create_dogs.rb b/vendor/plugins/shoulda/test/rails_root/db/migrate/005_create_dogs.rb deleted file mode 100644 index a0d8e03..0000000 --- a/vendor/plugins/shoulda/test/rails_root/db/migrate/005_create_dogs.rb +++ /dev/null @@ -1,11 +0,0 @@ -class CreateDogs < ActiveRecord::Migration - def self.up - create_table :dogs do |t| - t.column :owner_id, :integer - end - end - - def self.down - drop_table :dogs - end -end diff --git a/vendor/plugins/shoulda/test/rails_root/db/schema.rb b/vendor/plugins/shoulda/test/rails_root/db/schema.rb deleted file mode 100644 index e69de29..0000000 diff --git a/vendor/plugins/shoulda/test/rails_root/doc/README_FOR_APP b/vendor/plugins/shoulda/test/rails_root/doc/README_FOR_APP deleted 
file mode 100644 index ac6c149..0000000 --- a/vendor/plugins/shoulda/test/rails_root/doc/README_FOR_APP +++ /dev/null @@ -1,2 +0,0 @@ -Use this README file to introduce your application and point to useful places in the API for learning more. -Run "rake appdoc" to generate API documentation for your models and controllers. \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/public/.htaccess b/vendor/plugins/shoulda/test/rails_root/public/.htaccess deleted file mode 100644 index d3c9983..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/.htaccess +++ /dev/null @@ -1,40 +0,0 @@ -# General Apache options -AddHandler fastcgi-script .fcgi -AddHandler cgi-script .cgi -Options +FollowSymLinks +ExecCGI - -# If you don't want Rails to look in certain directories, -# use the following rewrite rules so that Apache won't rewrite certain requests -# -# Example: -# RewriteCond %{REQUEST_URI} ^/notrails.* -# RewriteRule .* - [L] - -# Redirect all requests not available on the filesystem to Rails -# By default the cgi dispatcher is used which is very slow -# -# For better performance replace the dispatcher with the fastcgi one -# -# Example: -# RewriteRule ^(.*)$ dispatch.fcgi [QSA,L] -RewriteEngine On - -# If your Rails application is accessed via an Alias directive, -# then you MUST also set the RewriteBase in this htaccess file. -# -# Example: -# Alias /myrailsapp /path/to/myrailsapp/public -# RewriteBase /myrailsapp - -RewriteRule ^$ index.html [QSA] -RewriteRule ^([^.]+)$ $1.html [QSA] -RewriteCond %{REQUEST_FILENAME} !-f -RewriteRule ^(.*)$ dispatch.cgi [QSA,L] - -# In case Rails experiences terminal errors -# Instead of displaying this message you can supply a file here which will be rendered instead -# -# Example: -# ErrorDocument 500 /500.html - -ErrorDocument 500 "

<h2>Application error</h2>
Rails application failed to start properly" \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/public/404.html b/vendor/plugins/shoulda/test/rails_root/public/404.html deleted file mode 100644 index eff660b..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/404.html +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - The page you were looking for doesn't exist (404) - - - - - -
-

-The page you were looking for doesn't exist.
-You may have mistyped the address or the page may have moved.
- - \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/public/500.html b/vendor/plugins/shoulda/test/rails_root/public/500.html deleted file mode 100644 index f0aee0e..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/500.html +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - We're sorry, but something went wrong - - - - - -
-

-We're sorry, but something went wrong.
-We've been notified about this issue and we'll take a look at it shortly.
- - \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/public/dispatch.cgi b/vendor/plugins/shoulda/test/rails_root/public/dispatch.cgi deleted file mode 100755 index a76782a..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/dispatch.cgi +++ /dev/null @@ -1,10 +0,0 @@ -#!/opt/local/bin/ruby - -require File.dirname(__FILE__) + "/../config/environment" unless defined?(RAILS_ROOT) - -# If you're using RubyGems and mod_ruby, this require should be changed to an absolute path one, like: -# "/usr/local/lib/ruby/gems/1.8/gems/rails-0.8.0/lib/dispatcher" -- otherwise performance is severely impaired -require "dispatcher" - -ADDITIONAL_LOAD_PATHS.reverse.each { |dir| $:.unshift(dir) if File.directory?(dir) } if defined?(Apache::RubyRun) -Dispatcher.dispatch \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/public/dispatch.fcgi b/vendor/plugins/shoulda/test/rails_root/public/dispatch.fcgi deleted file mode 100755 index a526766..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/dispatch.fcgi +++ /dev/null @@ -1,24 +0,0 @@ -#!/opt/local/bin/ruby -# -# You may specify the path to the FastCGI crash log (a log of unhandled -# exceptions which forced the FastCGI instance to exit, great for debugging) -# and the number of requests to process before running garbage collection. -# -# By default, the FastCGI crash log is RAILS_ROOT/log/fastcgi.crash.log -# and the GC period is nil (turned off). A reasonable number of requests -# could range from 10-100 depending on the memory footprint of your app. -# -# Example: -# # Default log path, normal GC behavior. -# RailsFCGIHandler.process! -# -# # Default log path, 50 requests between GC. -# RailsFCGIHandler.process! nil, 50 -# -# # Custom log path, normal GC behavior. -# RailsFCGIHandler.process! '/var/log/myapp_fcgi_crash.log' -# -require File.dirname(__FILE__) + "/../config/environment" -require 'fcgi_handler' - -RailsFCGIHandler.process! 
diff --git a/vendor/plugins/shoulda/test/rails_root/public/dispatch.rb b/vendor/plugins/shoulda/test/rails_root/public/dispatch.rb deleted file mode 100755 index a76782a..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/dispatch.rb +++ /dev/null @@ -1,10 +0,0 @@ -#!/opt/local/bin/ruby - -require File.dirname(__FILE__) + "/../config/environment" unless defined?(RAILS_ROOT) - -# If you're using RubyGems and mod_ruby, this require should be changed to an absolute path one, like: -# "/usr/local/lib/ruby/gems/1.8/gems/rails-0.8.0/lib/dispatcher" -- otherwise performance is severely impaired -require "dispatcher" - -ADDITIONAL_LOAD_PATHS.reverse.each { |dir| $:.unshift(dir) if File.directory?(dir) } if defined?(Apache::RubyRun) -Dispatcher.dispatch \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/public/favicon.ico b/vendor/plugins/shoulda/test/rails_root/public/favicon.ico deleted file mode 100644 index e69de29..0000000 diff --git a/vendor/plugins/shoulda/test/rails_root/public/images/rails.png b/vendor/plugins/shoulda/test/rails_root/public/images/rails.png deleted file mode 100644 index b8441f1..0000000 Binary files a/vendor/plugins/shoulda/test/rails_root/public/images/rails.png and /dev/null differ diff --git a/vendor/plugins/shoulda/test/rails_root/public/index.html b/vendor/plugins/shoulda/test/rails_root/public/index.html deleted file mode 100644 index a2daab7..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/index.html +++ /dev/null @@ -1,277 +0,0 @@ - - - - - Ruby on Rails: Welcome aboard - - - - - - -
- - -
- - - - -
-

-Getting started
-Here’s how to get rolling:
-  1. Create your databases and edit config/database.yml (Rails needs to know your login and password.)
-  2. Use script/generate to create your models and controllers (To see all available options, run it without parameters.)
-  3. Set up a default route and remove or rename this file (Routes are setup in config/routes.rb.)
-
-
- - -
- - \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/public/javascripts/application.js b/vendor/plugins/shoulda/test/rails_root/public/javascripts/application.js deleted file mode 100644 index fe45776..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/javascripts/application.js +++ /dev/null @@ -1,2 +0,0 @@ -// Place your application-specific JavaScript functions and classes here -// This file is automatically included by javascript_include_tag :defaults diff --git a/vendor/plugins/shoulda/test/rails_root/public/javascripts/controls.js b/vendor/plugins/shoulda/test/rails_root/public/javascripts/controls.js deleted file mode 100644 index 8c273f8..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/javascripts/controls.js +++ /dev/null @@ -1,833 +0,0 @@ -// Copyright (c) 2005, 2006 Thomas Fuchs (http://script.aculo.us, http://mir.aculo.us) -// (c) 2005, 2006 Ivan Krstic (http://blogs.law.harvard.edu/ivan) -// (c) 2005, 2006 Jon Tirsen (http://www.tirsen.com) -// Contributors: -// Richard Livsey -// Rahul Bhargava -// Rob Wills -// -// script.aculo.us is freely distributable under the terms of an MIT-style license. -// For details, see the script.aculo.us web site: http://script.aculo.us/ - -// Autocompleter.Base handles all the autocompletion functionality -// that's independent of the data source for autocompletion. This -// includes drawing the autocompletion menu, observing keyboard -// and mouse events, and similar. -// -// Specific autocompleters need to provide, at the very least, -// a getUpdatedChoices function that will be invoked every time -// the text inside the monitored textbox changes. This method -// should get the text for which to provide autocompletion by -// invoking this.getToken(), NOT by directly accessing -// this.element.value. This is to allow incremental tokenized -// autocompletion. Specific auto-completion logic (AJAX, etc) -// belongs in getUpdatedChoices. -// -// Tokenized incremental autocompletion is enabled automatically -// when an autocompleter is instantiated with the 'tokens' option -// in the options parameter, e.g.: -// new Ajax.Autocompleter('id','upd', '/url/', { tokens: ',' }); -// will incrementally autocomplete with a comma as the token. -// Additionally, ',' in the above example can be replaced with -// a token array, e.g. { tokens: [',', '\n'] } which -// enables autocompletion on multiple tokens. This is most -// useful when one of the tokens is \n (a newline), as it -// allows smart autocompletion after linebreaks. 
- -if(typeof Effect == 'undefined') - throw("controls.js requires including script.aculo.us' effects.js library"); - -var Autocompleter = {} -Autocompleter.Base = function() {}; -Autocompleter.Base.prototype = { - baseInitialize: function(element, update, options) { - this.element = $(element); - this.update = $(update); - this.hasFocus = false; - this.changed = false; - this.active = false; - this.index = 0; - this.entryCount = 0; - - if(this.setOptions) - this.setOptions(options); - else - this.options = options || {}; - - this.options.paramName = this.options.paramName || this.element.name; - this.options.tokens = this.options.tokens || []; - this.options.frequency = this.options.frequency || 0.4; - this.options.minChars = this.options.minChars || 1; - this.options.onShow = this.options.onShow || - function(element, update){ - if(!update.style.position || update.style.position=='absolute') { - update.style.position = 'absolute'; - Position.clone(element, update, { - setHeight: false, - offsetTop: element.offsetHeight - }); - } - Effect.Appear(update,{duration:0.15}); - }; - this.options.onHide = this.options.onHide || - function(element, update){ new Effect.Fade(update,{duration:0.15}) }; - - if(typeof(this.options.tokens) == 'string') - this.options.tokens = new Array(this.options.tokens); - - this.observer = null; - - this.element.setAttribute('autocomplete','off'); - - Element.hide(this.update); - - Event.observe(this.element, "blur", this.onBlur.bindAsEventListener(this)); - Event.observe(this.element, "keypress", this.onKeyPress.bindAsEventListener(this)); - }, - - show: function() { - if(Element.getStyle(this.update, 'display')=='none') this.options.onShow(this.element, this.update); - if(!this.iefix && - (navigator.appVersion.indexOf('MSIE')>0) && - (navigator.userAgent.indexOf('Opera')<0) && - (Element.getStyle(this.update, 'position')=='absolute')) { - new Insertion.After(this.update, - ''); - this.iefix = $(this.update.id+'_iefix'); - } - if(this.iefix) setTimeout(this.fixIEOverlapping.bind(this), 50); - }, - - fixIEOverlapping: function() { - Position.clone(this.update, this.iefix, {setTop:(!this.update.style.height)}); - this.iefix.style.zIndex = 1; - this.update.style.zIndex = 2; - Element.show(this.iefix); - }, - - hide: function() { - this.stopIndicator(); - if(Element.getStyle(this.update, 'display')!='none') this.options.onHide(this.element, this.update); - if(this.iefix) Element.hide(this.iefix); - }, - - startIndicator: function() { - if(this.options.indicator) Element.show(this.options.indicator); - }, - - stopIndicator: function() { - if(this.options.indicator) Element.hide(this.options.indicator); - }, - - onKeyPress: function(event) { - if(this.active) - switch(event.keyCode) { - case Event.KEY_TAB: - case Event.KEY_RETURN: - this.selectEntry(); - Event.stop(event); - case Event.KEY_ESC: - this.hide(); - this.active = false; - Event.stop(event); - return; - case Event.KEY_LEFT: - case Event.KEY_RIGHT: - return; - case Event.KEY_UP: - this.markPrevious(); - this.render(); - if(navigator.appVersion.indexOf('AppleWebKit')>0) Event.stop(event); - return; - case Event.KEY_DOWN: - this.markNext(); - this.render(); - if(navigator.appVersion.indexOf('AppleWebKit')>0) Event.stop(event); - return; - } - else - if(event.keyCode==Event.KEY_TAB || event.keyCode==Event.KEY_RETURN || - (navigator.appVersion.indexOf('AppleWebKit') > 0 && event.keyCode == 0)) return; - - this.changed = true; - this.hasFocus = true; - - if(this.observer) clearTimeout(this.observer); - this.observer 
= - setTimeout(this.onObserverEvent.bind(this), this.options.frequency*1000); - }, - - activate: function() { - this.changed = false; - this.hasFocus = true; - this.getUpdatedChoices(); - }, - - onHover: function(event) { - var element = Event.findElement(event, 'LI'); - if(this.index != element.autocompleteIndex) - { - this.index = element.autocompleteIndex; - this.render(); - } - Event.stop(event); - }, - - onClick: function(event) { - var element = Event.findElement(event, 'LI'); - this.index = element.autocompleteIndex; - this.selectEntry(); - this.hide(); - }, - - onBlur: function(event) { - // needed to make click events working - setTimeout(this.hide.bind(this), 250); - this.hasFocus = false; - this.active = false; - }, - - render: function() { - if(this.entryCount > 0) { - for (var i = 0; i < this.entryCount; i++) - this.index==i ? - Element.addClassName(this.getEntry(i),"selected") : - Element.removeClassName(this.getEntry(i),"selected"); - - if(this.hasFocus) { - this.show(); - this.active = true; - } - } else { - this.active = false; - this.hide(); - } - }, - - markPrevious: function() { - if(this.index > 0) this.index-- - else this.index = this.entryCount-1; - this.getEntry(this.index).scrollIntoView(true); - }, - - markNext: function() { - if(this.index < this.entryCount-1) this.index++ - else this.index = 0; - this.getEntry(this.index).scrollIntoView(false); - }, - - getEntry: function(index) { - return this.update.firstChild.childNodes[index]; - }, - - getCurrentEntry: function() { - return this.getEntry(this.index); - }, - - selectEntry: function() { - this.active = false; - this.updateElement(this.getCurrentEntry()); - }, - - updateElement: function(selectedElement) { - if (this.options.updateElement) { - this.options.updateElement(selectedElement); - return; - } - var value = ''; - if (this.options.select) { - var nodes = document.getElementsByClassName(this.options.select, selectedElement) || []; - if(nodes.length>0) value = Element.collectTextNodes(nodes[0], this.options.select); - } else - value = Element.collectTextNodesIgnoreClass(selectedElement, 'informal'); - - var lastTokenPos = this.findLastToken(); - if (lastTokenPos != -1) { - var newValue = this.element.value.substr(0, lastTokenPos + 1); - var whitespace = this.element.value.substr(lastTokenPos + 1).match(/^\s+/); - if (whitespace) - newValue += whitespace[0]; - this.element.value = newValue + value; - } else { - this.element.value = value; - } - this.element.focus(); - - if (this.options.afterUpdateElement) - this.options.afterUpdateElement(this.element, selectedElement); - }, - - updateChoices: function(choices) { - if(!this.changed && this.hasFocus) { - this.update.innerHTML = choices; - Element.cleanWhitespace(this.update); - Element.cleanWhitespace(this.update.down()); - - if(this.update.firstChild && this.update.down().childNodes) { - this.entryCount = - this.update.down().childNodes.length; - for (var i = 0; i < this.entryCount; i++) { - var entry = this.getEntry(i); - entry.autocompleteIndex = i; - this.addObservers(entry); - } - } else { - this.entryCount = 0; - } - - this.stopIndicator(); - this.index = 0; - - if(this.entryCount==1 && this.options.autoSelect) { - this.selectEntry(); - this.hide(); - } else { - this.render(); - } - } - }, - - addObservers: function(element) { - Event.observe(element, "mouseover", this.onHover.bindAsEventListener(this)); - Event.observe(element, "click", this.onClick.bindAsEventListener(this)); - }, - - onObserverEvent: function() { - this.changed = false; - 
if(this.getToken().length>=this.options.minChars) { - this.startIndicator(); - this.getUpdatedChoices(); - } else { - this.active = false; - this.hide(); - } - }, - - getToken: function() { - var tokenPos = this.findLastToken(); - if (tokenPos != -1) - var ret = this.element.value.substr(tokenPos + 1).replace(/^\s+/,'').replace(/\s+$/,''); - else - var ret = this.element.value; - - return /\n/.test(ret) ? '' : ret; - }, - - findLastToken: function() { - var lastTokenPos = -1; - - for (var i=0; i lastTokenPos) - lastTokenPos = thisTokenPos; - } - return lastTokenPos; - } -} - -Ajax.Autocompleter = Class.create(); -Object.extend(Object.extend(Ajax.Autocompleter.prototype, Autocompleter.Base.prototype), { - initialize: function(element, update, url, options) { - this.baseInitialize(element, update, options); - this.options.asynchronous = true; - this.options.onComplete = this.onComplete.bind(this); - this.options.defaultParams = this.options.parameters || null; - this.url = url; - }, - - getUpdatedChoices: function() { - entry = encodeURIComponent(this.options.paramName) + '=' + - encodeURIComponent(this.getToken()); - - this.options.parameters = this.options.callback ? - this.options.callback(this.element, entry) : entry; - - if(this.options.defaultParams) - this.options.parameters += '&' + this.options.defaultParams; - - new Ajax.Request(this.url, this.options); - }, - - onComplete: function(request) { - this.updateChoices(request.responseText); - } - -}); - -// The local array autocompleter. Used when you'd prefer to -// inject an array of autocompletion options into the page, rather -// than sending out Ajax queries, which can be quite slow sometimes. -// -// The constructor takes four parameters. The first two are, as usual, -// the id of the monitored textbox, and id of the autocompletion menu. -// The third is the array you want to autocomplete from, and the fourth -// is the options block. -// -// Extra local autocompletion options: -// - choices - How many autocompletion choices to offer -// -// - partialSearch - If false, the autocompleter will match entered -// text only at the beginning of strings in the -// autocomplete array. Defaults to true, which will -// match text at the beginning of any *word* in the -// strings in the autocomplete array. If you want to -// search anywhere in the string, additionally set -// the option fullSearch to true (default: off). -// -// - fullSsearch - Search anywhere in autocomplete array strings. -// -// - partialChars - How many characters to enter before triggering -// a partial match (unlike minChars, which defines -// how many characters are required to do any match -// at all). Defaults to 2. -// -// - ignoreCase - Whether to ignore case when autocompleting. -// Defaults to true. -// -// It's possible to pass in a custom function as the 'selector' -// option, if you prefer to write your own autocompletion logic. -// In that case, the other options above will not apply unless -// you support them. 
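// Editor's note: a minimal usage sketch (not part of the deleted file) of the two
// autocompleter entry points documented above, kept here for reference while this
// vendored copy is removed. The element ids, the URL and the sample array are
// hypothetical, not taken from this application.
new Ajax.Autocompleter('search', 'search_choices', '/search/autocomplete',
  { paramName: 'q', minChars: 2, frequency: 0.25 });
new Autocompleter.Local('city', 'city_choices',
  ['Berlin', 'Boston', 'Budapest'],
  { choices: 5, partialSearch: true, ignoreCase: true });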
- -Autocompleter.Local = Class.create(); -Autocompleter.Local.prototype = Object.extend(new Autocompleter.Base(), { - initialize: function(element, update, array, options) { - this.baseInitialize(element, update, options); - this.options.array = array; - }, - - getUpdatedChoices: function() { - this.updateChoices(this.options.selector(this)); - }, - - setOptions: function(options) { - this.options = Object.extend({ - choices: 10, - partialSearch: true, - partialChars: 2, - ignoreCase: true, - fullSearch: false, - selector: function(instance) { - var ret = []; // Beginning matches - var partial = []; // Inside matches - var entry = instance.getToken(); - var count = 0; - - for (var i = 0; i < instance.options.array.length && - ret.length < instance.options.choices ; i++) { - - var elem = instance.options.array[i]; - var foundPos = instance.options.ignoreCase ? - elem.toLowerCase().indexOf(entry.toLowerCase()) : - elem.indexOf(entry); - - while (foundPos != -1) { - if (foundPos == 0 && elem.length != entry.length) { - ret.push("
  • " + elem.substr(0, entry.length) + "" + - elem.substr(entry.length) + "
  • "); - break; - } else if (entry.length >= instance.options.partialChars && - instance.options.partialSearch && foundPos != -1) { - if (instance.options.fullSearch || /\s/.test(elem.substr(foundPos-1,1))) { - partial.push("
  • " + elem.substr(0, foundPos) + "" + - elem.substr(foundPos, entry.length) + "" + elem.substr( - foundPos + entry.length) + "
  • "); - break; - } - } - - foundPos = instance.options.ignoreCase ? - elem.toLowerCase().indexOf(entry.toLowerCase(), foundPos + 1) : - elem.indexOf(entry, foundPos + 1); - - } - } - if (partial.length) - ret = ret.concat(partial.slice(0, instance.options.choices - ret.length)) - return "
      " + ret.join('') + "
    "; - } - }, options || {}); - } -}); - -// AJAX in-place editor -// -// see documentation on http://wiki.script.aculo.us/scriptaculous/show/Ajax.InPlaceEditor - -// Use this if you notice weird scrolling problems on some browsers, -// the DOM might be a bit confused when this gets called so do this -// waits 1 ms (with setTimeout) until it does the activation -Field.scrollFreeActivate = function(field) { - setTimeout(function() { - Field.activate(field); - }, 1); -} - -Ajax.InPlaceEditor = Class.create(); -Ajax.InPlaceEditor.defaultHighlightColor = "#FFFF99"; -Ajax.InPlaceEditor.prototype = { - initialize: function(element, url, options) { - this.url = url; - this.element = $(element); - - this.options = Object.extend({ - paramName: "value", - okButton: true, - okText: "ok", - cancelLink: true, - cancelText: "cancel", - savingText: "Saving...", - clickToEditText: "Click to edit", - okText: "ok", - rows: 1, - onComplete: function(transport, element) { - new Effect.Highlight(element, {startcolor: this.options.highlightcolor}); - }, - onFailure: function(transport) { - alert("Error communicating with the server: " + transport.responseText.stripTags()); - }, - callback: function(form) { - return Form.serialize(form); - }, - handleLineBreaks: true, - loadingText: 'Loading...', - savingClassName: 'inplaceeditor-saving', - loadingClassName: 'inplaceeditor-loading', - formClassName: 'inplaceeditor-form', - highlightcolor: Ajax.InPlaceEditor.defaultHighlightColor, - highlightendcolor: "#FFFFFF", - externalControl: null, - submitOnBlur: false, - ajaxOptions: {}, - evalScripts: false - }, options || {}); - - if(!this.options.formId && this.element.id) { - this.options.formId = this.element.id + "-inplaceeditor"; - if ($(this.options.formId)) { - // there's already a form with that name, don't specify an id - this.options.formId = null; - } - } - - if (this.options.externalControl) { - this.options.externalControl = $(this.options.externalControl); - } - - this.originalBackground = Element.getStyle(this.element, 'background-color'); - if (!this.originalBackground) { - this.originalBackground = "transparent"; - } - - this.element.title = this.options.clickToEditText; - - this.onclickListener = this.enterEditMode.bindAsEventListener(this); - this.mouseoverListener = this.enterHover.bindAsEventListener(this); - this.mouseoutListener = this.leaveHover.bindAsEventListener(this); - Event.observe(this.element, 'click', this.onclickListener); - Event.observe(this.element, 'mouseover', this.mouseoverListener); - Event.observe(this.element, 'mouseout', this.mouseoutListener); - if (this.options.externalControl) { - Event.observe(this.options.externalControl, 'click', this.onclickListener); - Event.observe(this.options.externalControl, 'mouseover', this.mouseoverListener); - Event.observe(this.options.externalControl, 'mouseout', this.mouseoutListener); - } - }, - enterEditMode: function(evt) { - if (this.saving) return; - if (this.editing) return; - this.editing = true; - this.onEnterEditMode(); - if (this.options.externalControl) { - Element.hide(this.options.externalControl); - } - Element.hide(this.element); - this.createForm(); - this.element.parentNode.insertBefore(this.form, this.element); - if (!this.options.loadTextURL) Field.scrollFreeActivate(this.editField); - // stop the event to avoid a page refresh in Safari - if (evt) { - Event.stop(evt); - } - return false; - }, - createForm: function() { - this.form = document.createElement("form"); - this.form.id = this.options.formId; - 
Element.addClassName(this.form, this.options.formClassName) - this.form.onsubmit = this.onSubmit.bind(this); - - this.createEditField(); - - if (this.options.textarea) { - var br = document.createElement("br"); - this.form.appendChild(br); - } - - if (this.options.okButton) { - okButton = document.createElement("input"); - okButton.type = "submit"; - okButton.value = this.options.okText; - okButton.className = 'editor_ok_button'; - this.form.appendChild(okButton); - } - - if (this.options.cancelLink) { - cancelLink = document.createElement("a"); - cancelLink.href = "#"; - cancelLink.appendChild(document.createTextNode(this.options.cancelText)); - cancelLink.onclick = this.onclickCancel.bind(this); - cancelLink.className = 'editor_cancel'; - this.form.appendChild(cancelLink); - } - }, - hasHTMLLineBreaks: function(string) { - if (!this.options.handleLineBreaks) return false; - return string.match(/
<br/i) || string.match(/<p>/i);
-  },
-  convertHTMLLineBreaks: function(string) {
-    return string.replace(/<br>/gi, "\n").replace(/<br\/>/gi, "\n").replace(/<\/p>/gi, "\n").replace(/<p>
    /gi, ""); - }, - createEditField: function() { - var text; - if(this.options.loadTextURL) { - text = this.options.loadingText; - } else { - text = this.getText(); - } - - var obj = this; - - if (this.options.rows == 1 && !this.hasHTMLLineBreaks(text)) { - this.options.textarea = false; - var textField = document.createElement("input"); - textField.obj = this; - textField.type = "text"; - textField.name = this.options.paramName; - textField.value = text; - textField.style.backgroundColor = this.options.highlightcolor; - textField.className = 'editor_field'; - var size = this.options.size || this.options.cols || 0; - if (size != 0) textField.size = size; - if (this.options.submitOnBlur) - textField.onblur = this.onSubmit.bind(this); - this.editField = textField; - } else { - this.options.textarea = true; - var textArea = document.createElement("textarea"); - textArea.obj = this; - textArea.name = this.options.paramName; - textArea.value = this.convertHTMLLineBreaks(text); - textArea.rows = this.options.rows; - textArea.cols = this.options.cols || 40; - textArea.className = 'editor_field'; - if (this.options.submitOnBlur) - textArea.onblur = this.onSubmit.bind(this); - this.editField = textArea; - } - - if(this.options.loadTextURL) { - this.loadExternalText(); - } - this.form.appendChild(this.editField); - }, - getText: function() { - return this.element.innerHTML; - }, - loadExternalText: function() { - Element.addClassName(this.form, this.options.loadingClassName); - this.editField.disabled = true; - new Ajax.Request( - this.options.loadTextURL, - Object.extend({ - asynchronous: true, - onComplete: this.onLoadedExternalText.bind(this) - }, this.options.ajaxOptions) - ); - }, - onLoadedExternalText: function(transport) { - Element.removeClassName(this.form, this.options.loadingClassName); - this.editField.disabled = false; - this.editField.value = transport.responseText.stripTags(); - Field.scrollFreeActivate(this.editField); - }, - onclickCancel: function() { - this.onComplete(); - this.leaveEditMode(); - return false; - }, - onFailure: function(transport) { - this.options.onFailure(transport); - if (this.oldInnerHTML) { - this.element.innerHTML = this.oldInnerHTML; - this.oldInnerHTML = null; - } - return false; - }, - onSubmit: function() { - // onLoading resets these so we need to save them away for the Ajax call - var form = this.form; - var value = this.editField.value; - - // do this first, sometimes the ajax call returns before we get a chance to switch on Saving... - // which means this will actually switch on Saving... *after* we've left edit mode causing Saving... 
- // to be displayed indefinitely - this.onLoading(); - - if (this.options.evalScripts) { - new Ajax.Request( - this.url, Object.extend({ - parameters: this.options.callback(form, value), - onComplete: this.onComplete.bind(this), - onFailure: this.onFailure.bind(this), - asynchronous:true, - evalScripts:true - }, this.options.ajaxOptions)); - } else { - new Ajax.Updater( - { success: this.element, - // don't update on failure (this could be an option) - failure: null }, - this.url, Object.extend({ - parameters: this.options.callback(form, value), - onComplete: this.onComplete.bind(this), - onFailure: this.onFailure.bind(this) - }, this.options.ajaxOptions)); - } - // stop the event to avoid a page refresh in Safari - if (arguments.length > 1) { - Event.stop(arguments[0]); - } - return false; - }, - onLoading: function() { - this.saving = true; - this.removeForm(); - this.leaveHover(); - this.showSaving(); - }, - showSaving: function() { - this.oldInnerHTML = this.element.innerHTML; - this.element.innerHTML = this.options.savingText; - Element.addClassName(this.element, this.options.savingClassName); - this.element.style.backgroundColor = this.originalBackground; - Element.show(this.element); - }, - removeForm: function() { - if(this.form) { - if (this.form.parentNode) Element.remove(this.form); - this.form = null; - } - }, - enterHover: function() { - if (this.saving) return; - this.element.style.backgroundColor = this.options.highlightcolor; - if (this.effect) { - this.effect.cancel(); - } - Element.addClassName(this.element, this.options.hoverClassName) - }, - leaveHover: function() { - if (this.options.backgroundColor) { - this.element.style.backgroundColor = this.oldBackground; - } - Element.removeClassName(this.element, this.options.hoverClassName) - if (this.saving) return; - this.effect = new Effect.Highlight(this.element, { - startcolor: this.options.highlightcolor, - endcolor: this.options.highlightendcolor, - restorecolor: this.originalBackground - }); - }, - leaveEditMode: function() { - Element.removeClassName(this.element, this.options.savingClassName); - this.removeForm(); - this.leaveHover(); - this.element.style.backgroundColor = this.originalBackground; - Element.show(this.element); - if (this.options.externalControl) { - Element.show(this.options.externalControl); - } - this.editing = false; - this.saving = false; - this.oldInnerHTML = null; - this.onLeaveEditMode(); - }, - onComplete: function(transport) { - this.leaveEditMode(); - this.options.onComplete.bind(this)(transport, this.element); - }, - onEnterEditMode: function() {}, - onLeaveEditMode: function() {}, - dispose: function() { - if (this.oldInnerHTML) { - this.element.innerHTML = this.oldInnerHTML; - } - this.leaveEditMode(); - Event.stopObserving(this.element, 'click', this.onclickListener); - Event.stopObserving(this.element, 'mouseover', this.mouseoverListener); - Event.stopObserving(this.element, 'mouseout', this.mouseoutListener); - if (this.options.externalControl) { - Event.stopObserving(this.options.externalControl, 'click', this.onclickListener); - Event.stopObserving(this.options.externalControl, 'mouseover', this.mouseoverListener); - Event.stopObserving(this.options.externalControl, 'mouseout', this.mouseoutListener); - } - } -}; - -Ajax.InPlaceCollectionEditor = Class.create(); -Object.extend(Ajax.InPlaceCollectionEditor.prototype, Ajax.InPlaceEditor.prototype); -Object.extend(Ajax.InPlaceCollectionEditor.prototype, { - createEditField: function() { - if (!this.cached_selectTag) { - var selectTag = 
document.createElement("select"); - var collection = this.options.collection || []; - var optionTag; - collection.each(function(e,i) { - optionTag = document.createElement("option"); - optionTag.value = (e instanceof Array) ? e[0] : e; - if((typeof this.options.value == 'undefined') && - ((e instanceof Array) ? this.element.innerHTML == e[1] : e == optionTag.value)) optionTag.selected = true; - if(this.options.value==optionTag.value) optionTag.selected = true; - optionTag.appendChild(document.createTextNode((e instanceof Array) ? e[1] : e)); - selectTag.appendChild(optionTag); - }.bind(this)); - this.cached_selectTag = selectTag; - } - - this.editField = this.cached_selectTag; - if(this.options.loadTextURL) this.loadExternalText(); - this.form.appendChild(this.editField); - this.options.callback = function(form, value) { - return "value=" + encodeURIComponent(value); - } - } -}); - -// Delayed observer, like Form.Element.Observer, -// but waits for delay after last key input -// Ideal for live-search fields - -Form.Element.DelayedObserver = Class.create(); -Form.Element.DelayedObserver.prototype = { - initialize: function(element, delay, callback) { - this.delay = delay || 0.5; - this.element = $(element); - this.callback = callback; - this.timer = null; - this.lastValue = $F(this.element); - Event.observe(this.element,'keyup',this.delayedListener.bindAsEventListener(this)); - }, - delayedListener: function(event) { - if(this.lastValue == $F(this.element)) return; - if(this.timer) clearTimeout(this.timer); - this.timer = setTimeout(this.onTimerEvent.bind(this), this.delay * 1000); - this.lastValue = $F(this.element); - }, - onTimerEvent: function() { - this.timer = null; - this.callback(this.element, $F(this.element)); - } -}; diff --git a/vendor/plugins/shoulda/test/rails_root/public/javascripts/dragdrop.js b/vendor/plugins/shoulda/test/rails_root/public/javascripts/dragdrop.js deleted file mode 100644 index c71ddb8..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/javascripts/dragdrop.js +++ /dev/null @@ -1,942 +0,0 @@ -// Copyright (c) 2005, 2006 Thomas Fuchs (http://script.aculo.us, http://mir.aculo.us) -// (c) 2005, 2006 Sammi Williams (http://www.oriontransfer.co.nz, sammi@oriontransfer.co.nz) -// -// script.aculo.us is freely distributable under the terms of an MIT-style license. 
-// For details, see the script.aculo.us web site: http://script.aculo.us/ - -if(typeof Effect == 'undefined') - throw("dragdrop.js requires including script.aculo.us' effects.js library"); - -var Droppables = { - drops: [], - - remove: function(element) { - this.drops = this.drops.reject(function(d) { return d.element==$(element) }); - }, - - add: function(element) { - element = $(element); - var options = Object.extend({ - greedy: true, - hoverclass: null, - tree: false - }, arguments[1] || {}); - - // cache containers - if(options.containment) { - options._containers = []; - var containment = options.containment; - if((typeof containment == 'object') && - (containment.constructor == Array)) { - containment.each( function(c) { options._containers.push($(c)) }); - } else { - options._containers.push($(containment)); - } - } - - if(options.accept) options.accept = [options.accept].flatten(); - - Element.makePositioned(element); // fix IE - options.element = element; - - this.drops.push(options); - }, - - findDeepestChild: function(drops) { - deepest = drops[0]; - - for (i = 1; i < drops.length; ++i) - if (Element.isParent(drops[i].element, deepest.element)) - deepest = drops[i]; - - return deepest; - }, - - isContained: function(element, drop) { - var containmentNode; - if(drop.tree) { - containmentNode = element.treeNode; - } else { - containmentNode = element.parentNode; - } - return drop._containers.detect(function(c) { return containmentNode == c }); - }, - - isAffected: function(point, element, drop) { - return ( - (drop.element!=element) && - ((!drop._containers) || - this.isContained(element, drop)) && - ((!drop.accept) || - (Element.classNames(element).detect( - function(v) { return drop.accept.include(v) } ) )) && - Position.within(drop.element, point[0], point[1]) ); - }, - - deactivate: function(drop) { - if(drop.hoverclass) - Element.removeClassName(drop.element, drop.hoverclass); - this.last_active = null; - }, - - activate: function(drop) { - if(drop.hoverclass) - Element.addClassName(drop.element, drop.hoverclass); - this.last_active = drop; - }, - - show: function(point, element) { - if(!this.drops.length) return; - var affected = []; - - if(this.last_active) this.deactivate(this.last_active); - this.drops.each( function(drop) { - if(Droppables.isAffected(point, element, drop)) - affected.push(drop); - }); - - if(affected.length>0) { - drop = Droppables.findDeepestChild(affected); - Position.within(drop.element, point[0], point[1]); - if(drop.onHover) - drop.onHover(element, drop.element, Position.overlap(drop.overlap, drop.element)); - - Droppables.activate(drop); - } - }, - - fire: function(event, element) { - if(!this.last_active) return; - Position.prepare(); - - if (this.isAffected([Event.pointerX(event), Event.pointerY(event)], element, this.last_active)) - if (this.last_active.onDrop) - this.last_active.onDrop(element, this.last_active.element, event); - }, - - reset: function() { - if(this.last_active) - this.deactivate(this.last_active); - } -} - -var Draggables = { - drags: [], - observers: [], - - register: function(draggable) { - if(this.drags.length == 0) { - this.eventMouseUp = this.endDrag.bindAsEventListener(this); - this.eventMouseMove = this.updateDrag.bindAsEventListener(this); - this.eventKeypress = this.keyPress.bindAsEventListener(this); - - Event.observe(document, "mouseup", this.eventMouseUp); - Event.observe(document, "mousemove", this.eventMouseMove); - Event.observe(document, "keypress", this.eventKeypress); - } - this.drags.push(draggable); - }, 
- - unregister: function(draggable) { - this.drags = this.drags.reject(function(d) { return d==draggable }); - if(this.drags.length == 0) { - Event.stopObserving(document, "mouseup", this.eventMouseUp); - Event.stopObserving(document, "mousemove", this.eventMouseMove); - Event.stopObserving(document, "keypress", this.eventKeypress); - } - }, - - activate: function(draggable) { - if(draggable.options.delay) { - this._timeout = setTimeout(function() { - Draggables._timeout = null; - window.focus(); - Draggables.activeDraggable = draggable; - }.bind(this), draggable.options.delay); - } else { - window.focus(); // allows keypress events if window isn't currently focused, fails for Safari - this.activeDraggable = draggable; - } - }, - - deactivate: function() { - this.activeDraggable = null; - }, - - updateDrag: function(event) { - if(!this.activeDraggable) return; - var pointer = [Event.pointerX(event), Event.pointerY(event)]; - // Mozilla-based browsers fire successive mousemove events with - // the same coordinates, prevent needless redrawing (moz bug?) - if(this._lastPointer && (this._lastPointer.inspect() == pointer.inspect())) return; - this._lastPointer = pointer; - - this.activeDraggable.updateDrag(event, pointer); - }, - - endDrag: function(event) { - if(this._timeout) { - clearTimeout(this._timeout); - this._timeout = null; - } - if(!this.activeDraggable) return; - this._lastPointer = null; - this.activeDraggable.endDrag(event); - this.activeDraggable = null; - }, - - keyPress: function(event) { - if(this.activeDraggable) - this.activeDraggable.keyPress(event); - }, - - addObserver: function(observer) { - this.observers.push(observer); - this._cacheObserverCallbacks(); - }, - - removeObserver: function(element) { // element instead of observer fixes mem leaks - this.observers = this.observers.reject( function(o) { return o.element==element }); - this._cacheObserverCallbacks(); - }, - - notify: function(eventName, draggable, event) { // 'onStart', 'onEnd', 'onDrag' - if(this[eventName+'Count'] > 0) - this.observers.each( function(o) { - if(o[eventName]) o[eventName](eventName, draggable, event); - }); - if(draggable.options[eventName]) draggable.options[eventName](draggable, event); - }, - - _cacheObserverCallbacks: function() { - ['onStart','onEnd','onDrag'].each( function(eventName) { - Draggables[eventName+'Count'] = Draggables.observers.select( - function(o) { return o[eventName]; } - ).length; - }); - } -} - -/*--------------------------------------------------------------------------*/ - -var Draggable = Class.create(); -Draggable._dragging = {}; - -Draggable.prototype = { - initialize: function(element) { - var defaults = { - handle: false, - reverteffect: function(element, top_offset, left_offset) { - var dur = Math.sqrt(Math.abs(top_offset^2)+Math.abs(left_offset^2))*0.02; - new Effect.Move(element, { x: -left_offset, y: -top_offset, duration: dur, - queue: {scope:'_draggable', position:'end'} - }); - }, - endeffect: function(element) { - var toOpacity = typeof element._opacity == 'number' ? 
element._opacity : 1.0; - new Effect.Opacity(element, {duration:0.2, from:0.7, to:toOpacity, - queue: {scope:'_draggable', position:'end'}, - afterFinish: function(){ - Draggable._dragging[element] = false - } - }); - }, - zindex: 1000, - revert: false, - scroll: false, - scrollSensitivity: 20, - scrollSpeed: 15, - snap: false, // false, or xy or [x,y] or function(x,y){ return [x,y] } - delay: 0 - }; - - if(!arguments[1] || typeof arguments[1].endeffect == 'undefined') - Object.extend(defaults, { - starteffect: function(element) { - element._opacity = Element.getOpacity(element); - Draggable._dragging[element] = true; - new Effect.Opacity(element, {duration:0.2, from:element._opacity, to:0.7}); - } - }); - - var options = Object.extend(defaults, arguments[1] || {}); - - this.element = $(element); - - if(options.handle && (typeof options.handle == 'string')) - this.handle = this.element.down('.'+options.handle, 0); - - if(!this.handle) this.handle = $(options.handle); - if(!this.handle) this.handle = this.element; - - if(options.scroll && !options.scroll.scrollTo && !options.scroll.outerHTML) { - options.scroll = $(options.scroll); - this._isScrollChild = Element.childOf(this.element, options.scroll); - } - - Element.makePositioned(this.element); // fix IE - - this.delta = this.currentDelta(); - this.options = options; - this.dragging = false; - - this.eventMouseDown = this.initDrag.bindAsEventListener(this); - Event.observe(this.handle, "mousedown", this.eventMouseDown); - - Draggables.register(this); - }, - - destroy: function() { - Event.stopObserving(this.handle, "mousedown", this.eventMouseDown); - Draggables.unregister(this); - }, - - currentDelta: function() { - return([ - parseInt(Element.getStyle(this.element,'left') || '0'), - parseInt(Element.getStyle(this.element,'top') || '0')]); - }, - - initDrag: function(event) { - if(typeof Draggable._dragging[this.element] != 'undefined' && - Draggable._dragging[this.element]) return; - if(Event.isLeftClick(event)) { - // abort on form elements, fixes a Firefox issue - var src = Event.element(event); - if(src.tagName && ( - src.tagName=='INPUT' || - src.tagName=='SELECT' || - src.tagName=='OPTION' || - src.tagName=='BUTTON' || - src.tagName=='TEXTAREA')) return; - - var pointer = [Event.pointerX(event), Event.pointerY(event)]; - var pos = Position.cumulativeOffset(this.element); - this.offset = [0,1].map( function(i) { return (pointer[i] - pos[i]) }); - - Draggables.activate(this); - Event.stop(event); - } - }, - - startDrag: function(event) { - this.dragging = true; - - if(this.options.zindex) { - this.originalZ = parseInt(Element.getStyle(this.element,'z-index') || 0); - this.element.style.zIndex = this.options.zindex; - } - - if(this.options.ghosting) { - this._clone = this.element.cloneNode(true); - Position.absolutize(this.element); - this.element.parentNode.insertBefore(this._clone, this.element); - } - - if(this.options.scroll) { - if (this.options.scroll == window) { - var where = this._getWindowScroll(this.options.scroll); - this.originalScrollLeft = where.left; - this.originalScrollTop = where.top; - } else { - this.originalScrollLeft = this.options.scroll.scrollLeft; - this.originalScrollTop = this.options.scroll.scrollTop; - } - } - - Draggables.notify('onStart', this, event); - - if(this.options.starteffect) this.options.starteffect(this.element); - }, - - updateDrag: function(event, pointer) { - if(!this.dragging) this.startDrag(event); - Position.prepare(); - Droppables.show(pointer, this.element); - 
Draggables.notify('onDrag', this, event); - - this.draw(pointer); - if(this.options.change) this.options.change(this); - - if(this.options.scroll) { - this.stopScrolling(); - - var p; - if (this.options.scroll == window) { - with(this._getWindowScroll(this.options.scroll)) { p = [ left, top, left+width, top+height ]; } - } else { - p = Position.page(this.options.scroll); - p[0] += this.options.scroll.scrollLeft + Position.deltaX; - p[1] += this.options.scroll.scrollTop + Position.deltaY; - p.push(p[0]+this.options.scroll.offsetWidth); - p.push(p[1]+this.options.scroll.offsetHeight); - } - var speed = [0,0]; - if(pointer[0] < (p[0]+this.options.scrollSensitivity)) speed[0] = pointer[0]-(p[0]+this.options.scrollSensitivity); - if(pointer[1] < (p[1]+this.options.scrollSensitivity)) speed[1] = pointer[1]-(p[1]+this.options.scrollSensitivity); - if(pointer[0] > (p[2]-this.options.scrollSensitivity)) speed[0] = pointer[0]-(p[2]-this.options.scrollSensitivity); - if(pointer[1] > (p[3]-this.options.scrollSensitivity)) speed[1] = pointer[1]-(p[3]-this.options.scrollSensitivity); - this.startScrolling(speed); - } - - // fix AppleWebKit rendering - if(navigator.appVersion.indexOf('AppleWebKit')>0) window.scrollBy(0,0); - - Event.stop(event); - }, - - finishDrag: function(event, success) { - this.dragging = false; - - if(this.options.ghosting) { - Position.relativize(this.element); - Element.remove(this._clone); - this._clone = null; - } - - if(success) Droppables.fire(event, this.element); - Draggables.notify('onEnd', this, event); - - var revert = this.options.revert; - if(revert && typeof revert == 'function') revert = revert(this.element); - - var d = this.currentDelta(); - if(revert && this.options.reverteffect) { - this.options.reverteffect(this.element, - d[1]-this.delta[1], d[0]-this.delta[0]); - } else { - this.delta = d; - } - - if(this.options.zindex) - this.element.style.zIndex = this.originalZ; - - if(this.options.endeffect) - this.options.endeffect(this.element); - - Draggables.deactivate(this); - Droppables.reset(); - }, - - keyPress: function(event) { - if(event.keyCode!=Event.KEY_ESC) return; - this.finishDrag(event, false); - Event.stop(event); - }, - - endDrag: function(event) { - if(!this.dragging) return; - this.stopScrolling(); - this.finishDrag(event, true); - Event.stop(event); - }, - - draw: function(point) { - var pos = Position.cumulativeOffset(this.element); - if(this.options.ghosting) { - var r = Position.realOffset(this.element); - pos[0] += r[0] - Position.deltaX; pos[1] += r[1] - Position.deltaY; - } - - var d = this.currentDelta(); - pos[0] -= d[0]; pos[1] -= d[1]; - - if(this.options.scroll && (this.options.scroll != window && this._isScrollChild)) { - pos[0] -= this.options.scroll.scrollLeft-this.originalScrollLeft; - pos[1] -= this.options.scroll.scrollTop-this.originalScrollTop; - } - - var p = [0,1].map(function(i){ - return (point[i]-pos[i]-this.offset[i]) - }.bind(this)); - - if(this.options.snap) { - if(typeof this.options.snap == 'function') { - p = this.options.snap(p[0],p[1],this); - } else { - if(this.options.snap instanceof Array) { - p = p.map( function(v, i) { - return Math.round(v/this.options.snap[i])*this.options.snap[i] }.bind(this)) - } else { - p = p.map( function(v) { - return Math.round(v/this.options.snap)*this.options.snap }.bind(this)) - } - }} - - var style = this.element.style; - if((!this.options.constraint) || (this.options.constraint=='horizontal')) - style.left = p[0] + "px"; - if((!this.options.constraint) || 
(this.options.constraint=='vertical')) - style.top = p[1] + "px"; - - if(style.visibility=="hidden") style.visibility = ""; // fix gecko rendering - }, - - stopScrolling: function() { - if(this.scrollInterval) { - clearInterval(this.scrollInterval); - this.scrollInterval = null; - Draggables._lastScrollPointer = null; - } - }, - - startScrolling: function(speed) { - if(!(speed[0] || speed[1])) return; - this.scrollSpeed = [speed[0]*this.options.scrollSpeed,speed[1]*this.options.scrollSpeed]; - this.lastScrolled = new Date(); - this.scrollInterval = setInterval(this.scroll.bind(this), 10); - }, - - scroll: function() { - var current = new Date(); - var delta = current - this.lastScrolled; - this.lastScrolled = current; - if(this.options.scroll == window) { - with (this._getWindowScroll(this.options.scroll)) { - if (this.scrollSpeed[0] || this.scrollSpeed[1]) { - var d = delta / 1000; - this.options.scroll.scrollTo( left + d*this.scrollSpeed[0], top + d*this.scrollSpeed[1] ); - } - } - } else { - this.options.scroll.scrollLeft += this.scrollSpeed[0] * delta / 1000; - this.options.scroll.scrollTop += this.scrollSpeed[1] * delta / 1000; - } - - Position.prepare(); - Droppables.show(Draggables._lastPointer, this.element); - Draggables.notify('onDrag', this); - if (this._isScrollChild) { - Draggables._lastScrollPointer = Draggables._lastScrollPointer || $A(Draggables._lastPointer); - Draggables._lastScrollPointer[0] += this.scrollSpeed[0] * delta / 1000; - Draggables._lastScrollPointer[1] += this.scrollSpeed[1] * delta / 1000; - if (Draggables._lastScrollPointer[0] < 0) - Draggables._lastScrollPointer[0] = 0; - if (Draggables._lastScrollPointer[1] < 0) - Draggables._lastScrollPointer[1] = 0; - this.draw(Draggables._lastScrollPointer); - } - - if(this.options.change) this.options.change(this); - }, - - _getWindowScroll: function(w) { - var T, L, W, H; - with (w.document) { - if (w.document.documentElement && documentElement.scrollTop) { - T = documentElement.scrollTop; - L = documentElement.scrollLeft; - } else if (w.document.body) { - T = body.scrollTop; - L = body.scrollLeft; - } - if (w.innerWidth) { - W = w.innerWidth; - H = w.innerHeight; - } else if (w.document.documentElement && documentElement.clientWidth) { - W = documentElement.clientWidth; - H = documentElement.clientHeight; - } else { - W = body.offsetWidth; - H = body.offsetHeight - } - } - return { top: T, left: L, width: W, height: H }; - } -} - -/*--------------------------------------------------------------------------*/ - -var SortableObserver = Class.create(); -SortableObserver.prototype = { - initialize: function(element, observer) { - this.element = $(element); - this.observer = observer; - this.lastValue = Sortable.serialize(this.element); - }, - - onStart: function() { - this.lastValue = Sortable.serialize(this.element); - }, - - onEnd: function() { - Sortable.unmark(); - if(this.lastValue != Sortable.serialize(this.element)) - this.observer(this.element) - } -} - -var Sortable = { - SERIALIZE_RULE: /^[^_\-](?:[A-Za-z0-9\-\_]*)[_](.*)$/, - - sortables: {}, - - _findRootElement: function(element) { - while (element.tagName != "BODY") { - if(element.id && Sortable.sortables[element.id]) return element; - element = element.parentNode; - } - }, - - options: function(element) { - element = Sortable._findRootElement($(element)); - if(!element) return; - return Sortable.sortables[element.id]; - }, - - destroy: function(element){ - var s = Sortable.options(element); - - if(s) { - Draggables.removeObserver(s.element); - 
s.droppables.each(function(d){ Droppables.remove(d) }); - s.draggables.invoke('destroy'); - - delete Sortable.sortables[s.element.id]; - } - }, - - create: function(element) { - element = $(element); - var options = Object.extend({ - element: element, - tag: 'li', // assumes li children, override with tag: 'tagname' - dropOnEmpty: false, - tree: false, - treeTag: 'ul', - overlap: 'vertical', // one of 'vertical', 'horizontal' - constraint: 'vertical', // one of 'vertical', 'horizontal', false - containment: element, // also takes array of elements (or id's); or false - handle: false, // or a CSS class - only: false, - delay: 0, - hoverclass: null, - ghosting: false, - scroll: false, - scrollSensitivity: 20, - scrollSpeed: 15, - format: this.SERIALIZE_RULE, - onChange: Prototype.emptyFunction, - onUpdate: Prototype.emptyFunction - }, arguments[1] || {}); - - // clear any old sortable with same element - this.destroy(element); - - // build options for the draggables - var options_for_draggable = { - revert: true, - scroll: options.scroll, - scrollSpeed: options.scrollSpeed, - scrollSensitivity: options.scrollSensitivity, - delay: options.delay, - ghosting: options.ghosting, - constraint: options.constraint, - handle: options.handle }; - - if(options.starteffect) - options_for_draggable.starteffect = options.starteffect; - - if(options.reverteffect) - options_for_draggable.reverteffect = options.reverteffect; - else - if(options.ghosting) options_for_draggable.reverteffect = function(element) { - element.style.top = 0; - element.style.left = 0; - }; - - if(options.endeffect) - options_for_draggable.endeffect = options.endeffect; - - if(options.zindex) - options_for_draggable.zindex = options.zindex; - - // build options for the droppables - var options_for_droppable = { - overlap: options.overlap, - containment: options.containment, - tree: options.tree, - hoverclass: options.hoverclass, - onHover: Sortable.onHover - } - - var options_for_tree = { - onHover: Sortable.onEmptyHover, - overlap: options.overlap, - containment: options.containment, - hoverclass: options.hoverclass - } - - // fix for gecko engine - Element.cleanWhitespace(element); - - options.draggables = []; - options.droppables = []; - - // drop on empty handling - if(options.dropOnEmpty || options.tree) { - Droppables.add(element, options_for_tree); - options.droppables.push(element); - } - - (this.findElements(element, options) || []).each( function(e) { - // handles are per-draggable - var handle = options.handle ? - $(e).down('.'+options.handle,0) : e; - options.draggables.push( - new Draggable(e, Object.extend(options_for_draggable, { handle: handle }))); - Droppables.add(e, options_for_droppable); - if(options.tree) e.treeNode = element; - options.droppables.push(e); - }); - - if(options.tree) { - (Sortable.findTreeElements(element, options) || []).each( function(e) { - Droppables.add(e, options_for_tree); - e.treeNode = element; - options.droppables.push(e); - }); - } - - // keep reference - this.sortables[element.id] = options; - - // for onupdate - Draggables.addObserver(new SortableObserver(element, options.onUpdate)); - - }, - - // return all suitable-for-sortable elements in a guaranteed order - findElements: function(element, options) { - return Element.findChildren( - element, options.only, options.tree ? true : false, options.tag); - }, - - findTreeElements: function(element, options) { - return Element.findChildren( - element, options.only, options.tree ? 
true : false, options.treeTag); - }, - - onHover: function(element, dropon, overlap) { - if(Element.isParent(dropon, element)) return; - - if(overlap > .33 && overlap < .66 && Sortable.options(dropon).tree) { - return; - } else if(overlap>0.5) { - Sortable.mark(dropon, 'before'); - if(dropon.previousSibling != element) { - var oldParentNode = element.parentNode; - element.style.visibility = "hidden"; // fix gecko rendering - dropon.parentNode.insertBefore(element, dropon); - if(dropon.parentNode!=oldParentNode) - Sortable.options(oldParentNode).onChange(element); - Sortable.options(dropon.parentNode).onChange(element); - } - } else { - Sortable.mark(dropon, 'after'); - var nextElement = dropon.nextSibling || null; - if(nextElement != element) { - var oldParentNode = element.parentNode; - element.style.visibility = "hidden"; // fix gecko rendering - dropon.parentNode.insertBefore(element, nextElement); - if(dropon.parentNode!=oldParentNode) - Sortable.options(oldParentNode).onChange(element); - Sortable.options(dropon.parentNode).onChange(element); - } - } - }, - - onEmptyHover: function(element, dropon, overlap) { - var oldParentNode = element.parentNode; - var droponOptions = Sortable.options(dropon); - - if(!Element.isParent(dropon, element)) { - var index; - - var children = Sortable.findElements(dropon, {tag: droponOptions.tag, only: droponOptions.only}); - var child = null; - - if(children) { - var offset = Element.offsetSize(dropon, droponOptions.overlap) * (1.0 - overlap); - - for (index = 0; index < children.length; index += 1) { - if (offset - Element.offsetSize (children[index], droponOptions.overlap) >= 0) { - offset -= Element.offsetSize (children[index], droponOptions.overlap); - } else if (offset - (Element.offsetSize (children[index], droponOptions.overlap) / 2) >= 0) { - child = index + 1 < children.length ? children[index + 1] : null; - break; - } else { - child = children[index]; - break; - } - } - } - - dropon.insertBefore(element, child); - - Sortable.options(oldParentNode).onChange(element); - droponOptions.onChange(element); - } - }, - - unmark: function() { - if(Sortable._marker) Sortable._marker.hide(); - }, - - mark: function(dropon, position) { - // mark on ghosting only - var sortable = Sortable.options(dropon.parentNode); - if(sortable && !sortable.ghosting) return; - - if(!Sortable._marker) { - Sortable._marker = - ($('dropmarker') || Element.extend(document.createElement('DIV'))). - hide().addClassName('dropmarker').setStyle({position:'absolute'}); - document.getElementsByTagName("body").item(0).appendChild(Sortable._marker); - } - var offsets = Position.cumulativeOffset(dropon); - Sortable._marker.setStyle({left: offsets[0]+'px', top: offsets[1] + 'px'}); - - if(position=='after') - if(sortable.overlap == 'horizontal') - Sortable._marker.setStyle({left: (offsets[0]+dropon.clientWidth) + 'px'}); - else - Sortable._marker.setStyle({top: (offsets[1]+dropon.clientHeight) + 'px'}); - - Sortable._marker.show(); - }, - - _tree: function(element, options, parent) { - var children = Sortable.findElements(element, options) || []; - - for (var i = 0; i < children.length; ++i) { - var match = children[i].id.match(options.format); - - if (!match) continue; - - var child = { - id: encodeURIComponent(match ? 
match[1] : null), - element: element, - parent: parent, - children: [], - position: parent.children.length, - container: $(children[i]).down(options.treeTag) - } - - /* Get the element containing the children and recurse over it */ - if (child.container) - this._tree(child.container, options, child) - - parent.children.push (child); - } - - return parent; - }, - - tree: function(element) { - element = $(element); - var sortableOptions = this.options(element); - var options = Object.extend({ - tag: sortableOptions.tag, - treeTag: sortableOptions.treeTag, - only: sortableOptions.only, - name: element.id, - format: sortableOptions.format - }, arguments[1] || {}); - - var root = { - id: null, - parent: null, - children: [], - container: element, - position: 0 - } - - return Sortable._tree(element, options, root); - }, - - /* Construct a [i] index for a particular node */ - _constructIndex: function(node) { - var index = ''; - do { - if (node.id) index = '[' + node.position + ']' + index; - } while ((node = node.parent) != null); - return index; - }, - - sequence: function(element) { - element = $(element); - var options = Object.extend(this.options(element), arguments[1] || {}); - - return $(this.findElements(element, options) || []).map( function(item) { - return item.id.match(options.format) ? item.id.match(options.format)[1] : ''; - }); - }, - - setSequence: function(element, new_sequence) { - element = $(element); - var options = Object.extend(this.options(element), arguments[2] || {}); - - var nodeMap = {}; - this.findElements(element, options).each( function(n) { - if (n.id.match(options.format)) - nodeMap[n.id.match(options.format)[1]] = [n, n.parentNode]; - n.parentNode.removeChild(n); - }); - - new_sequence.each(function(ident) { - var n = nodeMap[ident]; - if (n) { - n[1].appendChild(n[0]); - delete nodeMap[ident]; - } - }); - }, - - serialize: function(element) { - element = $(element); - var options = Object.extend(Sortable.options(element), arguments[1] || {}); - var name = encodeURIComponent( - (arguments[1] && arguments[1].name) ? arguments[1].name : element.id); - - if (options.tree) { - return Sortable.tree(element, arguments[1]).children.map( function (item) { - return [name + Sortable._constructIndex(item) + "[id]=" + - encodeURIComponent(item.id)].concat(item.children.map(arguments.callee)); - }).flatten().join('&'); - } else { - return Sortable.sequence(element, arguments[1]).map( function(item) { - return name + "[]=" + encodeURIComponent(item); - }).join('&'); - } - } -} - -// Returns true if child is contained within element -Element.isParent = function(child, element) { - if (!child.parentNode || child == element) return false; - if (child.parentNode == element) return true; - return Element.isParent(child.parentNode, element); -} - -Element.findChildren = function(element, only, recursive, tagName) { - if(!element.hasChildNodes()) return null; - tagName = tagName.toUpperCase(); - if(only) only = [only].flatten(); - var elements = []; - $A(element.childNodes).each( function(e) { - if(e.tagName && e.tagName.toUpperCase()==tagName && - (!only || (Element.classNames(e).detect(function(v) { return only.include(v) })))) - elements.push(e); - if(recursive) { - var grandchildren = Element.findChildren(e, only, recursive, tagName); - if(grandchildren) elements.push(grandchildren); - } - }); - - return (elements.length>0 ? elements.flatten() : []); -} - -Element.offsetSize = function (element, type) { - return element['offset' + ((type=='vertical' || type=='height') ? 
'Height' : 'Width')]; -} diff --git a/vendor/plugins/shoulda/test/rails_root/public/javascripts/effects.js b/vendor/plugins/shoulda/test/rails_root/public/javascripts/effects.js deleted file mode 100644 index 3b02eda..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/javascripts/effects.js +++ /dev/null @@ -1,1088 +0,0 @@ -// Copyright (c) 2005, 2006 Thomas Fuchs (http://script.aculo.us, http://mir.aculo.us) -// Contributors: -// Justin Palmer (http://encytemedia.com/) -// Mark Pilgrim (http://diveintomark.org/) -// Martin Bialasinki -// -// script.aculo.us is freely distributable under the terms of an MIT-style license. -// For details, see the script.aculo.us web site: http://script.aculo.us/ - -// converts rgb() and #xxx to #xxxxxx format, -// returns self (or first argument) if not convertable -String.prototype.parseColor = function() { - var color = '#'; - if(this.slice(0,4) == 'rgb(') { - var cols = this.slice(4,this.length-1).split(','); - var i=0; do { color += parseInt(cols[i]).toColorPart() } while (++i<3); - } else { - if(this.slice(0,1) == '#') { - if(this.length==4) for(var i=1;i<4;i++) color += (this.charAt(i) + this.charAt(i)).toLowerCase(); - if(this.length==7) color = this.toLowerCase(); - } - } - return(color.length==7 ? color : (arguments[0] || this)); -} - -/*--------------------------------------------------------------------------*/ - -Element.collectTextNodes = function(element) { - return $A($(element).childNodes).collect( function(node) { - return (node.nodeType==3 ? node.nodeValue : - (node.hasChildNodes() ? Element.collectTextNodes(node) : '')); - }).flatten().join(''); -} - -Element.collectTextNodesIgnoreClass = function(element, className) { - return $A($(element).childNodes).collect( function(node) { - return (node.nodeType==3 ? node.nodeValue : - ((node.hasChildNodes() && !Element.hasClassName(node,className)) ? - Element.collectTextNodesIgnoreClass(node, className) : '')); - }).flatten().join(''); -} - -Element.setContentZoom = function(element, percent) { - element = $(element); - element.setStyle({fontSize: (percent/100) + 'em'}); - if(navigator.appVersion.indexOf('AppleWebKit')>0) window.scrollBy(0,0); - return element; -} - -Element.getOpacity = function(element){ - element = $(element); - var opacity; - if (opacity = element.getStyle('opacity')) - return parseFloat(opacity); - if (opacity = (element.getStyle('filter') || '').match(/alpha\(opacity=(.*)\)/)) - if(opacity[1]) return parseFloat(opacity[1]) / 100; - return 1.0; -} - -Element.setOpacity = function(element, value){ - element= $(element); - if (value == 1){ - element.setStyle({ opacity: - (/Gecko/.test(navigator.userAgent) && !/Konqueror|Safari|KHTML/.test(navigator.userAgent)) ? 
- 0.999999 : 1.0 }); - if(/MSIE/.test(navigator.userAgent) && !window.opera) - element.setStyle({filter: Element.getStyle(element,'filter').replace(/alpha\([^\)]*\)/gi,'')}); - } else { - if(value < 0.00001) value = 0; - element.setStyle({opacity: value}); - if(/MSIE/.test(navigator.userAgent) && !window.opera) - element.setStyle( - { filter: element.getStyle('filter').replace(/alpha\([^\)]*\)/gi,'') + - 'alpha(opacity='+value*100+')' }); - } - return element; -} - -Element.getInlineOpacity = function(element){ - return $(element).style.opacity || ''; -} - -Element.forceRerendering = function(element) { - try { - element = $(element); - var n = document.createTextNode(' '); - element.appendChild(n); - element.removeChild(n); - } catch(e) { } -}; - -/*--------------------------------------------------------------------------*/ - -Array.prototype.call = function() { - var args = arguments; - this.each(function(f){ f.apply(this, args) }); -} - -/*--------------------------------------------------------------------------*/ - -var Effect = { - _elementDoesNotExistError: { - name: 'ElementDoesNotExistError', - message: 'The specified DOM element does not exist, but is required for this effect to operate' - }, - tagifyText: function(element) { - if(typeof Builder == 'undefined') - throw("Effect.tagifyText requires including script.aculo.us' builder.js library"); - - var tagifyStyle = 'position:relative'; - if(/MSIE/.test(navigator.userAgent) && !window.opera) tagifyStyle += ';zoom:1'; - - element = $(element); - $A(element.childNodes).each( function(child) { - if(child.nodeType==3) { - child.nodeValue.toArray().each( function(character) { - element.insertBefore( - Builder.node('span',{style: tagifyStyle}, - character == ' ' ? String.fromCharCode(160) : character), - child); - }); - Element.remove(child); - } - }); - }, - multiple: function(element, effect) { - var elements; - if(((typeof element == 'object') || - (typeof element == 'function')) && - (element.length)) - elements = element; - else - elements = $(element).childNodes; - - var options = Object.extend({ - speed: 0.1, - delay: 0.0 - }, arguments[2] || {}); - var masterDelay = options.delay; - - $A(elements).each( function(element, index) { - new effect(element, Object.extend(options, { delay: index * options.speed + masterDelay })); - }); - }, - PAIRS: { - 'slide': ['SlideDown','SlideUp'], - 'blind': ['BlindDown','BlindUp'], - 'appear': ['Appear','Fade'] - }, - toggle: function(element, effect) { - element = $(element); - effect = (effect || 'appear').toLowerCase(); - var options = Object.extend({ - queue: { position:'end', scope:(element.id || 'global'), limit: 1 } - }, arguments[2] || {}); - Effect[element.visible() ? - Effect.PAIRS[effect][1] : Effect.PAIRS[effect][0]](element, options); - } -}; - -var Effect2 = Effect; // deprecated - -/* ------------- transitions ------------- */ - -Effect.Transitions = { - linear: Prototype.K, - sinoidal: function(pos) { - return (-Math.cos(pos*Math.PI)/2) + 0.5; - }, - reverse: function(pos) { - return 1-pos; - }, - flicker: function(pos) { - return ((-Math.cos(pos*Math.PI)/4) + 0.75) + Math.random()/4; - }, - wobble: function(pos) { - return (-Math.cos(pos*Math.PI*(9*pos))/2) + 0.5; - }, - pulse: function(pos, pulses) { - pulses = pulses || 5; - return ( - Math.round((pos % (1/pulses)) * pulses) == 0 ? 
- ((pos * pulses * 2) - Math.floor(pos * pulses * 2)) : - 1 - ((pos * pulses * 2) - Math.floor(pos * pulses * 2)) - ); - }, - none: function(pos) { - return 0; - }, - full: function(pos) { - return 1; - } -}; - -/* ------------- core effects ------------- */ - -Effect.ScopedQueue = Class.create(); -Object.extend(Object.extend(Effect.ScopedQueue.prototype, Enumerable), { - initialize: function() { - this.effects = []; - this.interval = null; - }, - _each: function(iterator) { - this.effects._each(iterator); - }, - add: function(effect) { - var timestamp = new Date().getTime(); - - var position = (typeof effect.options.queue == 'string') ? - effect.options.queue : effect.options.queue.position; - - switch(position) { - case 'front': - // move unstarted effects after this effect - this.effects.findAll(function(e){ return e.state=='idle' }).each( function(e) { - e.startOn += effect.finishOn; - e.finishOn += effect.finishOn; - }); - break; - case 'with-last': - timestamp = this.effects.pluck('startOn').max() || timestamp; - break; - case 'end': - // start effect after last queued effect has finished - timestamp = this.effects.pluck('finishOn').max() || timestamp; - break; - } - - effect.startOn += timestamp; - effect.finishOn += timestamp; - - if(!effect.options.queue.limit || (this.effects.length < effect.options.queue.limit)) - this.effects.push(effect); - - if(!this.interval) - this.interval = setInterval(this.loop.bind(this), 40); - }, - remove: function(effect) { - this.effects = this.effects.reject(function(e) { return e==effect }); - if(this.effects.length == 0) { - clearInterval(this.interval); - this.interval = null; - } - }, - loop: function() { - var timePos = new Date().getTime(); - this.effects.invoke('loop', timePos); - } -}); - -Effect.Queues = { - instances: $H(), - get: function(queueName) { - if(typeof queueName != 'string') return queueName; - - if(!this.instances[queueName]) - this.instances[queueName] = new Effect.ScopedQueue(); - - return this.instances[queueName]; - } -} -Effect.Queue = Effect.Queues.get('global'); - -Effect.DefaultOptions = { - transition: Effect.Transitions.sinoidal, - duration: 1.0, // seconds - fps: 25.0, // max. 25fps due to Effect.Queue implementation - sync: false, // true for combining - from: 0.0, - to: 1.0, - delay: 0.0, - queue: 'parallel' -} - -Effect.Base = function() {}; -Effect.Base.prototype = { - position: null, - start: function(options) { - this.options = Object.extend(Object.extend({},Effect.DefaultOptions), options || {}); - this.currentFrame = 0; - this.state = 'idle'; - this.startOn = this.options.delay*1000; - this.finishOn = this.startOn + (this.options.duration*1000); - this.event('beforeStart'); - if(!this.options.sync) - Effect.Queues.get(typeof this.options.queue == 'string' ? 
- 'global' : this.options.queue.scope).add(this); - }, - loop: function(timePos) { - if(timePos >= this.startOn) { - if(timePos >= this.finishOn) { - this.render(1.0); - this.cancel(); - this.event('beforeFinish'); - if(this.finish) this.finish(); - this.event('afterFinish'); - return; - } - var pos = (timePos - this.startOn) / (this.finishOn - this.startOn); - var frame = Math.round(pos * this.options.fps * this.options.duration); - if(frame > this.currentFrame) { - this.render(pos); - this.currentFrame = frame; - } - } - }, - render: function(pos) { - if(this.state == 'idle') { - this.state = 'running'; - this.event('beforeSetup'); - if(this.setup) this.setup(); - this.event('afterSetup'); - } - if(this.state == 'running') { - if(this.options.transition) pos = this.options.transition(pos); - pos *= (this.options.to-this.options.from); - pos += this.options.from; - this.position = pos; - this.event('beforeUpdate'); - if(this.update) this.update(pos); - this.event('afterUpdate'); - } - }, - cancel: function() { - if(!this.options.sync) - Effect.Queues.get(typeof this.options.queue == 'string' ? - 'global' : this.options.queue.scope).remove(this); - this.state = 'finished'; - }, - event: function(eventName) { - if(this.options[eventName + 'Internal']) this.options[eventName + 'Internal'](this); - if(this.options[eventName]) this.options[eventName](this); - }, - inspect: function() { - return '#'; - } -} - -Effect.Parallel = Class.create(); -Object.extend(Object.extend(Effect.Parallel.prototype, Effect.Base.prototype), { - initialize: function(effects) { - this.effects = effects || []; - this.start(arguments[1]); - }, - update: function(position) { - this.effects.invoke('render', position); - }, - finish: function(position) { - this.effects.each( function(effect) { - effect.render(1.0); - effect.cancel(); - effect.event('beforeFinish'); - if(effect.finish) effect.finish(position); - effect.event('afterFinish'); - }); - } -}); - -Effect.Event = Class.create(); -Object.extend(Object.extend(Effect.Event.prototype, Effect.Base.prototype), { - initialize: function() { - var options = Object.extend({ - duration: 0 - }, arguments[0] || {}); - this.start(options); - }, - update: Prototype.emptyFunction -}); - -Effect.Opacity = Class.create(); -Object.extend(Object.extend(Effect.Opacity.prototype, Effect.Base.prototype), { - initialize: function(element) { - this.element = $(element); - if(!this.element) throw(Effect._elementDoesNotExistError); - // make this work on IE on elements without 'layout' - if(/MSIE/.test(navigator.userAgent) && !window.opera && (!this.element.currentStyle.hasLayout)) - this.element.setStyle({zoom: 1}); - var options = Object.extend({ - from: this.element.getOpacity() || 0.0, - to: 1.0 - }, arguments[1] || {}); - this.start(options); - }, - update: function(position) { - this.element.setOpacity(position); - } -}); - -Effect.Move = Class.create(); -Object.extend(Object.extend(Effect.Move.prototype, Effect.Base.prototype), { - initialize: function(element) { - this.element = $(element); - if(!this.element) throw(Effect._elementDoesNotExistError); - var options = Object.extend({ - x: 0, - y: 0, - mode: 'relative' - }, arguments[1] || {}); - this.start(options); - }, - setup: function() { - // Bug in Opera: Opera returns the "real" position of a static element or - // relative element that does not have top/left explicitly set. 
- // ==> Always set top and left for position relative elements in your stylesheets - // (to 0 if you do not need them) - this.element.makePositioned(); - this.originalLeft = parseFloat(this.element.getStyle('left') || '0'); - this.originalTop = parseFloat(this.element.getStyle('top') || '0'); - if(this.options.mode == 'absolute') { - // absolute movement, so we need to calc deltaX and deltaY - this.options.x = this.options.x - this.originalLeft; - this.options.y = this.options.y - this.originalTop; - } - }, - update: function(position) { - this.element.setStyle({ - left: Math.round(this.options.x * position + this.originalLeft) + 'px', - top: Math.round(this.options.y * position + this.originalTop) + 'px' - }); - } -}); - -// for backwards compatibility -Effect.MoveBy = function(element, toTop, toLeft) { - return new Effect.Move(element, - Object.extend({ x: toLeft, y: toTop }, arguments[3] || {})); -}; - -Effect.Scale = Class.create(); -Object.extend(Object.extend(Effect.Scale.prototype, Effect.Base.prototype), { - initialize: function(element, percent) { - this.element = $(element); - if(!this.element) throw(Effect._elementDoesNotExistError); - var options = Object.extend({ - scaleX: true, - scaleY: true, - scaleContent: true, - scaleFromCenter: false, - scaleMode: 'box', // 'box' or 'contents' or {} with provided values - scaleFrom: 100.0, - scaleTo: percent - }, arguments[2] || {}); - this.start(options); - }, - setup: function() { - this.restoreAfterFinish = this.options.restoreAfterFinish || false; - this.elementPositioning = this.element.getStyle('position'); - - this.originalStyle = {}; - ['top','left','width','height','fontSize'].each( function(k) { - this.originalStyle[k] = this.element.style[k]; - }.bind(this)); - - this.originalTop = this.element.offsetTop; - this.originalLeft = this.element.offsetLeft; - - var fontSize = this.element.getStyle('font-size') || '100%'; - ['em','px','%','pt'].each( function(fontSizeType) { - if(fontSize.indexOf(fontSizeType)>0) { - this.fontSize = parseFloat(fontSize); - this.fontSizeType = fontSizeType; - } - }.bind(this)); - - this.factor = (this.options.scaleTo - this.options.scaleFrom)/100; - - this.dims = null; - if(this.options.scaleMode=='box') - this.dims = [this.element.offsetHeight, this.element.offsetWidth]; - if(/^content/.test(this.options.scaleMode)) - this.dims = [this.element.scrollHeight, this.element.scrollWidth]; - if(!this.dims) - this.dims = [this.options.scaleMode.originalHeight, - this.options.scaleMode.originalWidth]; - }, - update: function(position) { - var currentScale = (this.options.scaleFrom/100.0) + (this.factor * position); - if(this.options.scaleContent && this.fontSize) - this.element.setStyle({fontSize: this.fontSize * currentScale + this.fontSizeType }); - this.setDimensions(this.dims[0] * currentScale, this.dims[1] * currentScale); - }, - finish: function(position) { - if(this.restoreAfterFinish) this.element.setStyle(this.originalStyle); - }, - setDimensions: function(height, width) { - var d = {}; - if(this.options.scaleX) d.width = Math.round(width) + 'px'; - if(this.options.scaleY) d.height = Math.round(height) + 'px'; - if(this.options.scaleFromCenter) { - var topd = (height - this.dims[0])/2; - var leftd = (width - this.dims[1])/2; - if(this.elementPositioning == 'absolute') { - if(this.options.scaleY) d.top = this.originalTop-topd + 'px'; - if(this.options.scaleX) d.left = this.originalLeft-leftd + 'px'; - } else { - if(this.options.scaleY) d.top = -topd + 'px'; - if(this.options.scaleX) d.left = -leftd 
+ 'px'; - } - } - this.element.setStyle(d); - } -}); - -Effect.Highlight = Class.create(); -Object.extend(Object.extend(Effect.Highlight.prototype, Effect.Base.prototype), { - initialize: function(element) { - this.element = $(element); - if(!this.element) throw(Effect._elementDoesNotExistError); - var options = Object.extend({ startcolor: '#ffff99' }, arguments[1] || {}); - this.start(options); - }, - setup: function() { - // Prevent executing on elements not in the layout flow - if(this.element.getStyle('display')=='none') { this.cancel(); return; } - // Disable background image during the effect - this.oldStyle = { - backgroundImage: this.element.getStyle('background-image') }; - this.element.setStyle({backgroundImage: 'none'}); - if(!this.options.endcolor) - this.options.endcolor = this.element.getStyle('background-color').parseColor('#ffffff'); - if(!this.options.restorecolor) - this.options.restorecolor = this.element.getStyle('background-color'); - // init color calculations - this._base = $R(0,2).map(function(i){ return parseInt(this.options.startcolor.slice(i*2+1,i*2+3),16) }.bind(this)); - this._delta = $R(0,2).map(function(i){ return parseInt(this.options.endcolor.slice(i*2+1,i*2+3),16)-this._base[i] }.bind(this)); - }, - update: function(position) { - this.element.setStyle({backgroundColor: $R(0,2).inject('#',function(m,v,i){ - return m+(Math.round(this._base[i]+(this._delta[i]*position)).toColorPart()); }.bind(this)) }); - }, - finish: function() { - this.element.setStyle(Object.extend(this.oldStyle, { - backgroundColor: this.options.restorecolor - })); - } -}); - -Effect.ScrollTo = Class.create(); -Object.extend(Object.extend(Effect.ScrollTo.prototype, Effect.Base.prototype), { - initialize: function(element) { - this.element = $(element); - this.start(arguments[1] || {}); - }, - setup: function() { - Position.prepare(); - var offsets = Position.cumulativeOffset(this.element); - if(this.options.offset) offsets[1] += this.options.offset; - var max = window.innerHeight ? - window.height - window.innerHeight : - document.body.scrollHeight - - (document.documentElement.clientHeight ? - document.documentElement.clientHeight : document.body.clientHeight); - this.scrollStart = Position.deltaY; - this.delta = (offsets[1] > max ? max : offsets[1]) - this.scrollStart; - }, - update: function(position) { - Position.prepare(); - window.scrollTo(Position.deltaX, - this.scrollStart + (position*this.delta)); - } -}); - -/* ------------- combination effects ------------- */ - -Effect.Fade = function(element) { - element = $(element); - var oldOpacity = element.getInlineOpacity(); - var options = Object.extend({ - from: element.getOpacity() || 1.0, - to: 0.0, - afterFinishInternal: function(effect) { - if(effect.options.to!=0) return; - effect.element.hide().setStyle({opacity: oldOpacity}); - }}, arguments[1] || {}); - return new Effect.Opacity(element,options); -} - -Effect.Appear = function(element) { - element = $(element); - var options = Object.extend({ - from: (element.getStyle('display') == 'none' ? 
0.0 : element.getOpacity() || 0.0), - to: 1.0, - // force Safari to render floated elements properly - afterFinishInternal: function(effect) { - effect.element.forceRerendering(); - }, - beforeSetup: function(effect) { - effect.element.setOpacity(effect.options.from).show(); - }}, arguments[1] || {}); - return new Effect.Opacity(element,options); -} - -Effect.Puff = function(element) { - element = $(element); - var oldStyle = { - opacity: element.getInlineOpacity(), - position: element.getStyle('position'), - top: element.style.top, - left: element.style.left, - width: element.style.width, - height: element.style.height - }; - return new Effect.Parallel( - [ new Effect.Scale(element, 200, - { sync: true, scaleFromCenter: true, scaleContent: true, restoreAfterFinish: true }), - new Effect.Opacity(element, { sync: true, to: 0.0 } ) ], - Object.extend({ duration: 1.0, - beforeSetupInternal: function(effect) { - Position.absolutize(effect.effects[0].element) - }, - afterFinishInternal: function(effect) { - effect.effects[0].element.hide().setStyle(oldStyle); } - }, arguments[1] || {}) - ); -} - -Effect.BlindUp = function(element) { - element = $(element); - element.makeClipping(); - return new Effect.Scale(element, 0, - Object.extend({ scaleContent: false, - scaleX: false, - restoreAfterFinish: true, - afterFinishInternal: function(effect) { - effect.element.hide().undoClipping(); - } - }, arguments[1] || {}) - ); -} - -Effect.BlindDown = function(element) { - element = $(element); - var elementDimensions = element.getDimensions(); - return new Effect.Scale(element, 100, Object.extend({ - scaleContent: false, - scaleX: false, - scaleFrom: 0, - scaleMode: {originalHeight: elementDimensions.height, originalWidth: elementDimensions.width}, - restoreAfterFinish: true, - afterSetup: function(effect) { - effect.element.makeClipping().setStyle({height: '0px'}).show(); - }, - afterFinishInternal: function(effect) { - effect.element.undoClipping(); - } - }, arguments[1] || {})); -} - -Effect.SwitchOff = function(element) { - element = $(element); - var oldOpacity = element.getInlineOpacity(); - return new Effect.Appear(element, Object.extend({ - duration: 0.4, - from: 0, - transition: Effect.Transitions.flicker, - afterFinishInternal: function(effect) { - new Effect.Scale(effect.element, 1, { - duration: 0.3, scaleFromCenter: true, - scaleX: false, scaleContent: false, restoreAfterFinish: true, - beforeSetup: function(effect) { - effect.element.makePositioned().makeClipping(); - }, - afterFinishInternal: function(effect) { - effect.element.hide().undoClipping().undoPositioned().setStyle({opacity: oldOpacity}); - } - }) - } - }, arguments[1] || {})); -} - -Effect.DropOut = function(element) { - element = $(element); - var oldStyle = { - top: element.getStyle('top'), - left: element.getStyle('left'), - opacity: element.getInlineOpacity() }; - return new Effect.Parallel( - [ new Effect.Move(element, {x: 0, y: 100, sync: true }), - new Effect.Opacity(element, { sync: true, to: 0.0 }) ], - Object.extend( - { duration: 0.5, - beforeSetup: function(effect) { - effect.effects[0].element.makePositioned(); - }, - afterFinishInternal: function(effect) { - effect.effects[0].element.hide().undoPositioned().setStyle(oldStyle); - } - }, arguments[1] || {})); -} - -Effect.Shake = function(element) { - element = $(element); - var oldStyle = { - top: element.getStyle('top'), - left: element.getStyle('left') }; - return new Effect.Move(element, - { x: 20, y: 0, duration: 0.05, afterFinishInternal: function(effect) { - 
new Effect.Move(effect.element, - { x: -40, y: 0, duration: 0.1, afterFinishInternal: function(effect) { - new Effect.Move(effect.element, - { x: 40, y: 0, duration: 0.1, afterFinishInternal: function(effect) { - new Effect.Move(effect.element, - { x: -40, y: 0, duration: 0.1, afterFinishInternal: function(effect) { - new Effect.Move(effect.element, - { x: 40, y: 0, duration: 0.1, afterFinishInternal: function(effect) { - new Effect.Move(effect.element, - { x: -20, y: 0, duration: 0.05, afterFinishInternal: function(effect) { - effect.element.undoPositioned().setStyle(oldStyle); - }}) }}) }}) }}) }}) }}); -} - -Effect.SlideDown = function(element) { - element = $(element).cleanWhitespace(); - // SlideDown need to have the content of the element wrapped in a container element with fixed height! - var oldInnerBottom = element.down().getStyle('bottom'); - var elementDimensions = element.getDimensions(); - return new Effect.Scale(element, 100, Object.extend({ - scaleContent: false, - scaleX: false, - scaleFrom: window.opera ? 0 : 1, - scaleMode: {originalHeight: elementDimensions.height, originalWidth: elementDimensions.width}, - restoreAfterFinish: true, - afterSetup: function(effect) { - effect.element.makePositioned(); - effect.element.down().makePositioned(); - if(window.opera) effect.element.setStyle({top: ''}); - effect.element.makeClipping().setStyle({height: '0px'}).show(); - }, - afterUpdateInternal: function(effect) { - effect.element.down().setStyle({bottom: - (effect.dims[0] - effect.element.clientHeight) + 'px' }); - }, - afterFinishInternal: function(effect) { - effect.element.undoClipping().undoPositioned(); - effect.element.down().undoPositioned().setStyle({bottom: oldInnerBottom}); } - }, arguments[1] || {}) - ); -} - -Effect.SlideUp = function(element) { - element = $(element).cleanWhitespace(); - var oldInnerBottom = element.down().getStyle('bottom'); - return new Effect.Scale(element, window.opera ? 0 : 1, - Object.extend({ scaleContent: false, - scaleX: false, - scaleMode: 'box', - scaleFrom: 100, - restoreAfterFinish: true, - beforeStartInternal: function(effect) { - effect.element.makePositioned(); - effect.element.down().makePositioned(); - if(window.opera) effect.element.setStyle({top: ''}); - effect.element.makeClipping().show(); - }, - afterUpdateInternal: function(effect) { - effect.element.down().setStyle({bottom: - (effect.dims[0] - effect.element.clientHeight) + 'px' }); - }, - afterFinishInternal: function(effect) { - effect.element.hide().undoClipping().undoPositioned().setStyle({bottom: oldInnerBottom}); - effect.element.down().undoPositioned(); - } - }, arguments[1] || {}) - ); -} - -// Bug in opera makes the TD containing this element expand for a instance after finish -Effect.Squish = function(element) { - return new Effect.Scale(element, window.opera ? 
1 : 0, { - restoreAfterFinish: true, - beforeSetup: function(effect) { - effect.element.makeClipping(); - }, - afterFinishInternal: function(effect) { - effect.element.hide().undoClipping(); - } - }); -} - -Effect.Grow = function(element) { - element = $(element); - var options = Object.extend({ - direction: 'center', - moveTransition: Effect.Transitions.sinoidal, - scaleTransition: Effect.Transitions.sinoidal, - opacityTransition: Effect.Transitions.full - }, arguments[1] || {}); - var oldStyle = { - top: element.style.top, - left: element.style.left, - height: element.style.height, - width: element.style.width, - opacity: element.getInlineOpacity() }; - - var dims = element.getDimensions(); - var initialMoveX, initialMoveY; - var moveX, moveY; - - switch (options.direction) { - case 'top-left': - initialMoveX = initialMoveY = moveX = moveY = 0; - break; - case 'top-right': - initialMoveX = dims.width; - initialMoveY = moveY = 0; - moveX = -dims.width; - break; - case 'bottom-left': - initialMoveX = moveX = 0; - initialMoveY = dims.height; - moveY = -dims.height; - break; - case 'bottom-right': - initialMoveX = dims.width; - initialMoveY = dims.height; - moveX = -dims.width; - moveY = -dims.height; - break; - case 'center': - initialMoveX = dims.width / 2; - initialMoveY = dims.height / 2; - moveX = -dims.width / 2; - moveY = -dims.height / 2; - break; - } - - return new Effect.Move(element, { - x: initialMoveX, - y: initialMoveY, - duration: 0.01, - beforeSetup: function(effect) { - effect.element.hide().makeClipping().makePositioned(); - }, - afterFinishInternal: function(effect) { - new Effect.Parallel( - [ new Effect.Opacity(effect.element, { sync: true, to: 1.0, from: 0.0, transition: options.opacityTransition }), - new Effect.Move(effect.element, { x: moveX, y: moveY, sync: true, transition: options.moveTransition }), - new Effect.Scale(effect.element, 100, { - scaleMode: { originalHeight: dims.height, originalWidth: dims.width }, - sync: true, scaleFrom: window.opera ? 1 : 0, transition: options.scaleTransition, restoreAfterFinish: true}) - ], Object.extend({ - beforeSetup: function(effect) { - effect.effects[0].element.setStyle({height: '0px'}).show(); - }, - afterFinishInternal: function(effect) { - effect.effects[0].element.undoClipping().undoPositioned().setStyle(oldStyle); - } - }, options) - ) - } - }); -} - -Effect.Shrink = function(element) { - element = $(element); - var options = Object.extend({ - direction: 'center', - moveTransition: Effect.Transitions.sinoidal, - scaleTransition: Effect.Transitions.sinoidal, - opacityTransition: Effect.Transitions.none - }, arguments[1] || {}); - var oldStyle = { - top: element.style.top, - left: element.style.left, - height: element.style.height, - width: element.style.width, - opacity: element.getInlineOpacity() }; - - var dims = element.getDimensions(); - var moveX, moveY; - - switch (options.direction) { - case 'top-left': - moveX = moveY = 0; - break; - case 'top-right': - moveX = dims.width; - moveY = 0; - break; - case 'bottom-left': - moveX = 0; - moveY = dims.height; - break; - case 'bottom-right': - moveX = dims.width; - moveY = dims.height; - break; - case 'center': - moveX = dims.width / 2; - moveY = dims.height / 2; - break; - } - - return new Effect.Parallel( - [ new Effect.Opacity(element, { sync: true, to: 0.0, from: 1.0, transition: options.opacityTransition }), - new Effect.Scale(element, window.opera ? 
1 : 0, { sync: true, transition: options.scaleTransition, restoreAfterFinish: true}), - new Effect.Move(element, { x: moveX, y: moveY, sync: true, transition: options.moveTransition }) - ], Object.extend({ - beforeStartInternal: function(effect) { - effect.effects[0].element.makePositioned().makeClipping(); - }, - afterFinishInternal: function(effect) { - effect.effects[0].element.hide().undoClipping().undoPositioned().setStyle(oldStyle); } - }, options) - ); -} - -Effect.Pulsate = function(element) { - element = $(element); - var options = arguments[1] || {}; - var oldOpacity = element.getInlineOpacity(); - var transition = options.transition || Effect.Transitions.sinoidal; - var reverser = function(pos){ return transition(1-Effect.Transitions.pulse(pos, options.pulses)) }; - reverser.bind(transition); - return new Effect.Opacity(element, - Object.extend(Object.extend({ duration: 2.0, from: 0, - afterFinishInternal: function(effect) { effect.element.setStyle({opacity: oldOpacity}); } - }, options), {transition: reverser})); -} - -Effect.Fold = function(element) { - element = $(element); - var oldStyle = { - top: element.style.top, - left: element.style.left, - width: element.style.width, - height: element.style.height }; - element.makeClipping(); - return new Effect.Scale(element, 5, Object.extend({ - scaleContent: false, - scaleX: false, - afterFinishInternal: function(effect) { - new Effect.Scale(element, 1, { - scaleContent: false, - scaleY: false, - afterFinishInternal: function(effect) { - effect.element.hide().undoClipping().setStyle(oldStyle); - } }); - }}, arguments[1] || {})); -}; - -Effect.Morph = Class.create(); -Object.extend(Object.extend(Effect.Morph.prototype, Effect.Base.prototype), { - initialize: function(element) { - this.element = $(element); - if(!this.element) throw(Effect._elementDoesNotExistError); - var options = Object.extend({ - style: '' - }, arguments[1] || {}); - this.start(options); - }, - setup: function(){ - function parseColor(color){ - if(!color || ['rgba(0, 0, 0, 0)','transparent'].include(color)) color = '#ffffff'; - color = color.parseColor(); - return $R(0,2).map(function(i){ - return parseInt( color.slice(i*2+1,i*2+3), 16 ) - }); - } - this.transforms = this.options.style.parseStyle().map(function(property){ - var originalValue = this.element.getStyle(property[0]); - return $H({ - style: property[0], - originalValue: property[1].unit=='color' ? - parseColor(originalValue) : parseFloat(originalValue || 0), - targetValue: property[1].unit=='color' ? - parseColor(property[1].value) : property[1].value, - unit: property[1].unit - }); - }.bind(this)).reject(function(transform){ - return ( - (transform.originalValue == transform.targetValue) || - ( - transform.unit != 'color' && - (isNaN(transform.originalValue) || isNaN(transform.targetValue)) - ) - ) - }); - }, - update: function(position) { - var style = $H(), value = null; - this.transforms.each(function(transform){ - value = transform.unit=='color' ? 
- $R(0,2).inject('#',function(m,v,i){ - return m+(Math.round(transform.originalValue[i]+ - (transform.targetValue[i] - transform.originalValue[i])*position)).toColorPart() }) : - transform.originalValue + Math.round( - ((transform.targetValue - transform.originalValue) * position) * 1000)/1000 + transform.unit; - style[transform.style] = value; - }); - this.element.setStyle(style); - } -}); - -Effect.Transform = Class.create(); -Object.extend(Effect.Transform.prototype, { - initialize: function(tracks){ - this.tracks = []; - this.options = arguments[1] || {}; - this.addTracks(tracks); - }, - addTracks: function(tracks){ - tracks.each(function(track){ - var data = $H(track).values().first(); - this.tracks.push($H({ - ids: $H(track).keys().first(), - effect: Effect.Morph, - options: { style: data } - })); - }.bind(this)); - return this; - }, - play: function(){ - return new Effect.Parallel( - this.tracks.map(function(track){ - var elements = [$(track.ids) || $$(track.ids)].flatten(); - return elements.map(function(e){ return new track.effect(e, Object.extend({ sync:true }, track.options)) }); - }).flatten(), - this.options - ); - } -}); - -Element.CSS_PROPERTIES = ['azimuth', 'backgroundAttachment', 'backgroundColor', 'backgroundImage', - 'backgroundPosition', 'backgroundRepeat', 'borderBottomColor', 'borderBottomStyle', - 'borderBottomWidth', 'borderCollapse', 'borderLeftColor', 'borderLeftStyle', 'borderLeftWidth', - 'borderRightColor', 'borderRightStyle', 'borderRightWidth', 'borderSpacing', 'borderTopColor', - 'borderTopStyle', 'borderTopWidth', 'bottom', 'captionSide', 'clear', 'clip', 'color', 'content', - 'counterIncrement', 'counterReset', 'cssFloat', 'cueAfter', 'cueBefore', 'cursor', 'direction', - 'display', 'elevation', 'emptyCells', 'fontFamily', 'fontSize', 'fontSizeAdjust', 'fontStretch', - 'fontStyle', 'fontVariant', 'fontWeight', 'height', 'left', 'letterSpacing', 'lineHeight', - 'listStyleImage', 'listStylePosition', 'listStyleType', 'marginBottom', 'marginLeft', 'marginRight', - 'marginTop', 'markerOffset', 'marks', 'maxHeight', 'maxWidth', 'minHeight', 'minWidth', 'opacity', - 'orphans', 'outlineColor', 'outlineOffset', 'outlineStyle', 'outlineWidth', 'overflowX', 'overflowY', - 'paddingBottom', 'paddingLeft', 'paddingRight', 'paddingTop', 'page', 'pageBreakAfter', 'pageBreakBefore', - 'pageBreakInside', 'pauseAfter', 'pauseBefore', 'pitch', 'pitchRange', 'position', 'quotes', - 'richness', 'right', 'size', 'speakHeader', 'speakNumeral', 'speakPunctuation', 'speechRate', 'stress', - 'tableLayout', 'textAlign', 'textDecoration', 'textIndent', 'textShadow', 'textTransform', 'top', - 'unicodeBidi', 'verticalAlign', 'visibility', 'voiceFamily', 'volume', 'whiteSpace', 'widows', - 'width', 'wordSpacing', 'zIndex']; - -Element.CSS_LENGTH = /^(([\+\-]?[0-9\.]+)(em|ex|px|in|cm|mm|pt|pc|\%))|0$/; - -String.prototype.parseStyle = function(){ - var element = Element.extend(document.createElement('div')); - element.innerHTML = '
<div style="' + this + '"></div>
    '; - var style = element.down().style, styleRules = $H(); - - Element.CSS_PROPERTIES.each(function(property){ - if(style[property]) styleRules[property] = style[property]; - }); - - var result = $H(); - - styleRules.each(function(pair){ - var property = pair[0], value = pair[1], unit = null; - - if(value.parseColor('#zzzzzz') != '#zzzzzz') { - value = value.parseColor(); - unit = 'color'; - } else if(Element.CSS_LENGTH.test(value)) - var components = value.match(/^([\+\-]?[0-9\.]+)(.*)$/), - value = parseFloat(components[1]), unit = (components.length == 3) ? components[2] : null; - - result[property.underscore().dasherize()] = $H({ value:value, unit:unit }); - }.bind(this)); - - return result; -}; - -Element.morph = function(element, style) { - new Effect.Morph(element, Object.extend({ style: style }, arguments[2] || {})); - return element; -}; - -['setOpacity','getOpacity','getInlineOpacity','forceRerendering','setContentZoom', - 'collectTextNodes','collectTextNodesIgnoreClass','morph'].each( - function(f) { Element.Methods[f] = Element[f]; } -); - -Element.Methods.visualEffect = function(element, effect, options) { - s = effect.gsub(/_/, '-').camelize(); - effect_class = s.charAt(0).toUpperCase() + s.substring(1); - new Effect[effect_class](element, options); - return $(element); -}; - -Element.addMethods(); \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/public/javascripts/prototype.js b/vendor/plugins/shoulda/test/rails_root/public/javascripts/prototype.js deleted file mode 100644 index 5058221..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/javascripts/prototype.js +++ /dev/null @@ -1,2515 +0,0 @@ -/* Prototype JavaScript framework, version 1.5.0 - * (c) 2005-2007 Sam Stephenson - * - * Prototype is freely distributable under the terms of an MIT-style license. - * For details, see the Prototype web site: http://prototype.conio.net/ - * -/*--------------------------------------------------------------------------*/ - -var Prototype = { - Version: '1.5.0', - BrowserFeatures: { - XPath: !!document.evaluate - }, - - ScriptFragment: '(?:)((\n|\r|.)*?)(?:<\/script>)', - emptyFunction: function() {}, - K: function(x) { return x } -} - -var Class = { - create: function() { - return function() { - this.initialize.apply(this, arguments); - } - } -} - -var Abstract = new Object(); - -Object.extend = function(destination, source) { - for (var property in source) { - destination[property] = source[property]; - } - return destination; -} - -Object.extend(Object, { - inspect: function(object) { - try { - if (object === undefined) return 'undefined'; - if (object === null) return 'null'; - return object.inspect ? 
object.inspect() : object.toString(); - } catch (e) { - if (e instanceof RangeError) return '...'; - throw e; - } - }, - - keys: function(object) { - var keys = []; - for (var property in object) - keys.push(property); - return keys; - }, - - values: function(object) { - var values = []; - for (var property in object) - values.push(object[property]); - return values; - }, - - clone: function(object) { - return Object.extend({}, object); - } -}); - -Function.prototype.bind = function() { - var __method = this, args = $A(arguments), object = args.shift(); - return function() { - return __method.apply(object, args.concat($A(arguments))); - } -} - -Function.prototype.bindAsEventListener = function(object) { - var __method = this, args = $A(arguments), object = args.shift(); - return function(event) { - return __method.apply(object, [( event || window.event)].concat(args).concat($A(arguments))); - } -} - -Object.extend(Number.prototype, { - toColorPart: function() { - var digits = this.toString(16); - if (this < 16) return '0' + digits; - return digits; - }, - - succ: function() { - return this + 1; - }, - - times: function(iterator) { - $R(0, this, true).each(iterator); - return this; - } -}); - -var Try = { - these: function() { - var returnValue; - - for (var i = 0, length = arguments.length; i < length; i++) { - var lambda = arguments[i]; - try { - returnValue = lambda(); - break; - } catch (e) {} - } - - return returnValue; - } -} - -/*--------------------------------------------------------------------------*/ - -var PeriodicalExecuter = Class.create(); -PeriodicalExecuter.prototype = { - initialize: function(callback, frequency) { - this.callback = callback; - this.frequency = frequency; - this.currentlyExecuting = false; - - this.registerCallback(); - }, - - registerCallback: function() { - this.timer = setInterval(this.onTimerEvent.bind(this), this.frequency * 1000); - }, - - stop: function() { - if (!this.timer) return; - clearInterval(this.timer); - this.timer = null; - }, - - onTimerEvent: function() { - if (!this.currentlyExecuting) { - try { - this.currentlyExecuting = true; - this.callback(this); - } finally { - this.currentlyExecuting = false; - } - } - } -} -String.interpret = function(value){ - return value == null ? '' : String(value); -} - -Object.extend(String.prototype, { - gsub: function(pattern, replacement) { - var result = '', source = this, match; - replacement = arguments.callee.prepareReplacement(replacement); - - while (source.length > 0) { - if (match = source.match(pattern)) { - result += source.slice(0, match.index); - result += String.interpret(replacement(match)); - source = source.slice(match.index + match[0].length); - } else { - result += source, source = ''; - } - } - return result; - }, - - sub: function(pattern, replacement, count) { - replacement = this.gsub.prepareReplacement(replacement); - count = count === undefined ? 1 : count; - - return this.gsub(pattern, function(match) { - if (--count < 0) return match[0]; - return replacement(match); - }); - }, - - scan: function(pattern, iterator) { - this.gsub(pattern, iterator); - return this; - }, - - truncate: function(length, truncation) { - length = length || 30; - truncation = truncation === undefined ? '...' : truncation; - return this.length > length ? 
- this.slice(0, length - truncation.length) + truncation : this; - }, - - strip: function() { - return this.replace(/^\s+/, '').replace(/\s+$/, ''); - }, - - stripTags: function() { - return this.replace(/<\/?[^>]+>/gi, ''); - }, - - stripScripts: function() { - return this.replace(new RegExp(Prototype.ScriptFragment, 'img'), ''); - }, - - extractScripts: function() { - var matchAll = new RegExp(Prototype.ScriptFragment, 'img'); - var matchOne = new RegExp(Prototype.ScriptFragment, 'im'); - return (this.match(matchAll) || []).map(function(scriptTag) { - return (scriptTag.match(matchOne) || ['', ''])[1]; - }); - }, - - evalScripts: function() { - return this.extractScripts().map(function(script) { return eval(script) }); - }, - - escapeHTML: function() { - var div = document.createElement('div'); - var text = document.createTextNode(this); - div.appendChild(text); - return div.innerHTML; - }, - - unescapeHTML: function() { - var div = document.createElement('div'); - div.innerHTML = this.stripTags(); - return div.childNodes[0] ? (div.childNodes.length > 1 ? - $A(div.childNodes).inject('',function(memo,node){ return memo+node.nodeValue }) : - div.childNodes[0].nodeValue) : ''; - }, - - toQueryParams: function(separator) { - var match = this.strip().match(/([^?#]*)(#.*)?$/); - if (!match) return {}; - - return match[1].split(separator || '&').inject({}, function(hash, pair) { - if ((pair = pair.split('='))[0]) { - var name = decodeURIComponent(pair[0]); - var value = pair[1] ? decodeURIComponent(pair[1]) : undefined; - - if (hash[name] !== undefined) { - if (hash[name].constructor != Array) - hash[name] = [hash[name]]; - if (value) hash[name].push(value); - } - else hash[name] = value; - } - return hash; - }); - }, - - toArray: function() { - return this.split(''); - }, - - succ: function() { - return this.slice(0, this.length - 1) + - String.fromCharCode(this.charCodeAt(this.length - 1) + 1); - }, - - camelize: function() { - var parts = this.split('-'), len = parts.length; - if (len == 1) return parts[0]; - - var camelized = this.charAt(0) == '-' - ? 
parts[0].charAt(0).toUpperCase() + parts[0].substring(1) - : parts[0]; - - for (var i = 1; i < len; i++) - camelized += parts[i].charAt(0).toUpperCase() + parts[i].substring(1); - - return camelized; - }, - - capitalize: function(){ - return this.charAt(0).toUpperCase() + this.substring(1).toLowerCase(); - }, - - underscore: function() { - return this.gsub(/::/, '/').gsub(/([A-Z]+)([A-Z][a-z])/,'#{1}_#{2}').gsub(/([a-z\d])([A-Z])/,'#{1}_#{2}').gsub(/-/,'_').toLowerCase(); - }, - - dasherize: function() { - return this.gsub(/_/,'-'); - }, - - inspect: function(useDoubleQuotes) { - var escapedString = this.replace(/\\/g, '\\\\'); - if (useDoubleQuotes) - return '"' + escapedString.replace(/"/g, '\\"') + '"'; - else - return "'" + escapedString.replace(/'/g, '\\\'') + "'"; - } -}); - -String.prototype.gsub.prepareReplacement = function(replacement) { - if (typeof replacement == 'function') return replacement; - var template = new Template(replacement); - return function(match) { return template.evaluate(match) }; -} - -String.prototype.parseQuery = String.prototype.toQueryParams; - -var Template = Class.create(); -Template.Pattern = /(^|.|\r|\n)(#\{(.*?)\})/; -Template.prototype = { - initialize: function(template, pattern) { - this.template = template.toString(); - this.pattern = pattern || Template.Pattern; - }, - - evaluate: function(object) { - return this.template.gsub(this.pattern, function(match) { - var before = match[1]; - if (before == '\\') return match[2]; - return before + String.interpret(object[match[3]]); - }); - } -} - -var $break = new Object(); -var $continue = new Object(); - -var Enumerable = { - each: function(iterator) { - var index = 0; - try { - this._each(function(value) { - try { - iterator(value, index++); - } catch (e) { - if (e != $continue) throw e; - } - }); - } catch (e) { - if (e != $break) throw e; - } - return this; - }, - - eachSlice: function(number, iterator) { - var index = -number, slices = [], array = this.toArray(); - while ((index += number) < array.length) - slices.push(array.slice(index, index+number)); - return slices.map(iterator); - }, - - all: function(iterator) { - var result = true; - this.each(function(value, index) { - result = result && !!(iterator || Prototype.K)(value, index); - if (!result) throw $break; - }); - return result; - }, - - any: function(iterator) { - var result = false; - this.each(function(value, index) { - if (result = !!(iterator || Prototype.K)(value, index)) - throw $break; - }); - return result; - }, - - collect: function(iterator) { - var results = []; - this.each(function(value, index) { - results.push((iterator || Prototype.K)(value, index)); - }); - return results; - }, - - detect: function(iterator) { - var result; - this.each(function(value, index) { - if (iterator(value, index)) { - result = value; - throw $break; - } - }); - return result; - }, - - findAll: function(iterator) { - var results = []; - this.each(function(value, index) { - if (iterator(value, index)) - results.push(value); - }); - return results; - }, - - grep: function(pattern, iterator) { - var results = []; - this.each(function(value, index) { - var stringValue = value.toString(); - if (stringValue.match(pattern)) - results.push((iterator || Prototype.K)(value, index)); - }) - return results; - }, - - include: function(object) { - var found = false; - this.each(function(value) { - if (value == object) { - found = true; - throw $break; - } - }); - return found; - }, - - inGroupsOf: function(number, fillWith) { - fillWith = fillWith === 
undefined ? null : fillWith; - return this.eachSlice(number, function(slice) { - while(slice.length < number) slice.push(fillWith); - return slice; - }); - }, - - inject: function(memo, iterator) { - this.each(function(value, index) { - memo = iterator(memo, value, index); - }); - return memo; - }, - - invoke: function(method) { - var args = $A(arguments).slice(1); - return this.map(function(value) { - return value[method].apply(value, args); - }); - }, - - max: function(iterator) { - var result; - this.each(function(value, index) { - value = (iterator || Prototype.K)(value, index); - if (result == undefined || value >= result) - result = value; - }); - return result; - }, - - min: function(iterator) { - var result; - this.each(function(value, index) { - value = (iterator || Prototype.K)(value, index); - if (result == undefined || value < result) - result = value; - }); - return result; - }, - - partition: function(iterator) { - var trues = [], falses = []; - this.each(function(value, index) { - ((iterator || Prototype.K)(value, index) ? - trues : falses).push(value); - }); - return [trues, falses]; - }, - - pluck: function(property) { - var results = []; - this.each(function(value, index) { - results.push(value[property]); - }); - return results; - }, - - reject: function(iterator) { - var results = []; - this.each(function(value, index) { - if (!iterator(value, index)) - results.push(value); - }); - return results; - }, - - sortBy: function(iterator) { - return this.map(function(value, index) { - return {value: value, criteria: iterator(value, index)}; - }).sort(function(left, right) { - var a = left.criteria, b = right.criteria; - return a < b ? -1 : a > b ? 1 : 0; - }).pluck('value'); - }, - - toArray: function() { - return this.map(); - }, - - zip: function() { - var iterator = Prototype.K, args = $A(arguments); - if (typeof args.last() == 'function') - iterator = args.pop(); - - var collections = [this].concat(args).map($A); - return this.map(function(value, index) { - return iterator(collections.pluck(index)); - }); - }, - - size: function() { - return this.toArray().length; - }, - - inspect: function() { - return '#'; - } -} - -Object.extend(Enumerable, { - map: Enumerable.collect, - find: Enumerable.detect, - select: Enumerable.findAll, - member: Enumerable.include, - entries: Enumerable.toArray -}); -var $A = Array.from = function(iterable) { - if (!iterable) return []; - if (iterable.toArray) { - return iterable.toArray(); - } else { - var results = []; - for (var i = 0, length = iterable.length; i < length; i++) - results.push(iterable[i]); - return results; - } -} - -Object.extend(Array.prototype, Enumerable); - -if (!Array.prototype._reverse) - Array.prototype._reverse = Array.prototype.reverse; - -Object.extend(Array.prototype, { - _each: function(iterator) { - for (var i = 0, length = this.length; i < length; i++) - iterator(this[i]); - }, - - clear: function() { - this.length = 0; - return this; - }, - - first: function() { - return this[0]; - }, - - last: function() { - return this[this.length - 1]; - }, - - compact: function() { - return this.select(function(value) { - return value != null; - }); - }, - - flatten: function() { - return this.inject([], function(array, value) { - return array.concat(value && value.constructor == Array ? 
- value.flatten() : [value]); - }); - }, - - without: function() { - var values = $A(arguments); - return this.select(function(value) { - return !values.include(value); - }); - }, - - indexOf: function(object) { - for (var i = 0, length = this.length; i < length; i++) - if (this[i] == object) return i; - return -1; - }, - - reverse: function(inline) { - return (inline !== false ? this : this.toArray())._reverse(); - }, - - reduce: function() { - return this.length > 1 ? this : this[0]; - }, - - uniq: function() { - return this.inject([], function(array, value) { - return array.include(value) ? array : array.concat([value]); - }); - }, - - clone: function() { - return [].concat(this); - }, - - size: function() { - return this.length; - }, - - inspect: function() { - return '[' + this.map(Object.inspect).join(', ') + ']'; - } -}); - -Array.prototype.toArray = Array.prototype.clone; - -function $w(string){ - string = string.strip(); - return string ? string.split(/\s+/) : []; -} - -if(window.opera){ - Array.prototype.concat = function(){ - var array = []; - for(var i = 0, length = this.length; i < length; i++) array.push(this[i]); - for(var i = 0, length = arguments.length; i < length; i++) { - if(arguments[i].constructor == Array) { - for(var j = 0, arrayLength = arguments[i].length; j < arrayLength; j++) - array.push(arguments[i][j]); - } else { - array.push(arguments[i]); - } - } - return array; - } -} -var Hash = function(obj) { - Object.extend(this, obj || {}); -}; - -Object.extend(Hash, { - toQueryString: function(obj) { - var parts = []; - - this.prototype._each.call(obj, function(pair) { - if (!pair.key) return; - - if (pair.value && pair.value.constructor == Array) { - var values = pair.value.compact(); - if (values.length < 2) pair.value = values.reduce(); - else { - key = encodeURIComponent(pair.key); - values.each(function(value) { - value = value != undefined ? 
encodeURIComponent(value) : ''; - parts.push(key + '=' + encodeURIComponent(value)); - }); - return; - } - } - if (pair.value == undefined) pair[1] = ''; - parts.push(pair.map(encodeURIComponent).join('=')); - }); - - return parts.join('&'); - } -}); - -Object.extend(Hash.prototype, Enumerable); -Object.extend(Hash.prototype, { - _each: function(iterator) { - for (var key in this) { - var value = this[key]; - if (value && value == Hash.prototype[key]) continue; - - var pair = [key, value]; - pair.key = key; - pair.value = value; - iterator(pair); - } - }, - - keys: function() { - return this.pluck('key'); - }, - - values: function() { - return this.pluck('value'); - }, - - merge: function(hash) { - return $H(hash).inject(this, function(mergedHash, pair) { - mergedHash[pair.key] = pair.value; - return mergedHash; - }); - }, - - remove: function() { - var result; - for(var i = 0, length = arguments.length; i < length; i++) { - var value = this[arguments[i]]; - if (value !== undefined){ - if (result === undefined) result = value; - else { - if (result.constructor != Array) result = [result]; - result.push(value) - } - } - delete this[arguments[i]]; - } - return result; - }, - - toQueryString: function() { - return Hash.toQueryString(this); - }, - - inspect: function() { - return '#'; - } -}); - -function $H(object) { - if (object && object.constructor == Hash) return object; - return new Hash(object); -}; -ObjectRange = Class.create(); -Object.extend(ObjectRange.prototype, Enumerable); -Object.extend(ObjectRange.prototype, { - initialize: function(start, end, exclusive) { - this.start = start; - this.end = end; - this.exclusive = exclusive; - }, - - _each: function(iterator) { - var value = this.start; - while (this.include(value)) { - iterator(value); - value = value.succ(); - } - }, - - include: function(value) { - if (value < this.start) - return false; - if (this.exclusive) - return value < this.end; - return value <= this.end; - } -}); - -var $R = function(start, end, exclusive) { - return new ObjectRange(start, end, exclusive); -} - -var Ajax = { - getTransport: function() { - return Try.these( - function() {return new XMLHttpRequest()}, - function() {return new ActiveXObject('Msxml2.XMLHTTP')}, - function() {return new ActiveXObject('Microsoft.XMLHTTP')} - ) || false; - }, - - activeRequestCount: 0 -} - -Ajax.Responders = { - responders: [], - - _each: function(iterator) { - this.responders._each(iterator); - }, - - register: function(responder) { - if (!this.include(responder)) - this.responders.push(responder); - }, - - unregister: function(responder) { - this.responders = this.responders.without(responder); - }, - - dispatch: function(callback, request, transport, json) { - this.each(function(responder) { - if (typeof responder[callback] == 'function') { - try { - responder[callback].apply(responder, [request, transport, json]); - } catch (e) {} - } - }); - } -}; - -Object.extend(Ajax.Responders, Enumerable); - -Ajax.Responders.register({ - onCreate: function() { - Ajax.activeRequestCount++; - }, - onComplete: function() { - Ajax.activeRequestCount--; - } -}); - -Ajax.Base = function() {}; -Ajax.Base.prototype = { - setOptions: function(options) { - this.options = { - method: 'post', - asynchronous: true, - contentType: 'application/x-www-form-urlencoded', - encoding: 'UTF-8', - parameters: '' - } - Object.extend(this.options, options || {}); - - this.options.method = this.options.method.toLowerCase(); - if (typeof this.options.parameters == 'string') - this.options.parameters = 
this.options.parameters.toQueryParams(); - } -} - -Ajax.Request = Class.create(); -Ajax.Request.Events = - ['Uninitialized', 'Loading', 'Loaded', 'Interactive', 'Complete']; - -Ajax.Request.prototype = Object.extend(new Ajax.Base(), { - _complete: false, - - initialize: function(url, options) { - this.transport = Ajax.getTransport(); - this.setOptions(options); - this.request(url); - }, - - request: function(url) { - this.url = url; - this.method = this.options.method; - var params = this.options.parameters; - - if (!['get', 'post'].include(this.method)) { - // simulate other verbs over post - params['_method'] = this.method; - this.method = 'post'; - } - - params = Hash.toQueryString(params); - if (params && /Konqueror|Safari|KHTML/.test(navigator.userAgent)) params += '&_=' - - // when GET, append parameters to URL - if (this.method == 'get' && params) - this.url += (this.url.indexOf('?') > -1 ? '&' : '?') + params; - - try { - Ajax.Responders.dispatch('onCreate', this, this.transport); - - this.transport.open(this.method.toUpperCase(), this.url, - this.options.asynchronous); - - if (this.options.asynchronous) - setTimeout(function() { this.respondToReadyState(1) }.bind(this), 10); - - this.transport.onreadystatechange = this.onStateChange.bind(this); - this.setRequestHeaders(); - - var body = this.method == 'post' ? (this.options.postBody || params) : null; - - this.transport.send(body); - - /* Force Firefox to handle ready state 4 for synchronous requests */ - if (!this.options.asynchronous && this.transport.overrideMimeType) - this.onStateChange(); - - } - catch (e) { - this.dispatchException(e); - } - }, - - onStateChange: function() { - var readyState = this.transport.readyState; - if (readyState > 1 && !((readyState == 4) && this._complete)) - this.respondToReadyState(this.transport.readyState); - }, - - setRequestHeaders: function() { - var headers = { - 'X-Requested-With': 'XMLHttpRequest', - 'X-Prototype-Version': Prototype.Version, - 'Accept': 'text/javascript, text/html, application/xml, text/xml, */*' - }; - - if (this.method == 'post') { - headers['Content-type'] = this.options.contentType + - (this.options.encoding ? '; charset=' + this.options.encoding : ''); - - /* Force "Connection: close" for older Mozilla browsers to work - * around a bug where XMLHttpRequest sends an incorrect - * Content-length header. See Mozilla Bugzilla #246651. - */ - if (this.transport.overrideMimeType && - (navigator.userAgent.match(/Gecko\/(\d{4})/) || [0,2005])[1] < 2005) - headers['Connection'] = 'close'; - } - - // user-defined headers - if (typeof this.options.requestHeaders == 'object') { - var extras = this.options.requestHeaders; - - if (typeof extras.push == 'function') - for (var i = 0, length = extras.length; i < length; i += 2) - headers[extras[i]] = extras[i+1]; - else - $H(extras).each(function(pair) { headers[pair.key] = pair.value }); - } - - for (var name in headers) - this.transport.setRequestHeader(name, headers[name]); - }, - - success: function() { - return !this.transport.status - || (this.transport.status >= 200 && this.transport.status < 300); - }, - - respondToReadyState: function(readyState) { - var state = Ajax.Request.Events[readyState]; - var transport = this.transport, json = this.evalJSON(); - - if (state == 'Complete') { - try { - this._complete = true; - (this.options['on' + this.transport.status] - || this.options['on' + (this.success() ? 
'Success' : 'Failure')] - || Prototype.emptyFunction)(transport, json); - } catch (e) { - this.dispatchException(e); - } - - if ((this.getHeader('Content-type') || 'text/javascript').strip(). - match(/^(text|application)\/(x-)?(java|ecma)script(;.*)?$/i)) - this.evalResponse(); - } - - try { - (this.options['on' + state] || Prototype.emptyFunction)(transport, json); - Ajax.Responders.dispatch('on' + state, this, transport, json); - } catch (e) { - this.dispatchException(e); - } - - if (state == 'Complete') { - // avoid memory leak in MSIE: clean up - this.transport.onreadystatechange = Prototype.emptyFunction; - } - }, - - getHeader: function(name) { - try { - return this.transport.getResponseHeader(name); - } catch (e) { return null } - }, - - evalJSON: function() { - try { - var json = this.getHeader('X-JSON'); - return json ? eval('(' + json + ')') : null; - } catch (e) { return null } - }, - - evalResponse: function() { - try { - return eval(this.transport.responseText); - } catch (e) { - this.dispatchException(e); - } - }, - - dispatchException: function(exception) { - (this.options.onException || Prototype.emptyFunction)(this, exception); - Ajax.Responders.dispatch('onException', this, exception); - } -}); - -Ajax.Updater = Class.create(); - -Object.extend(Object.extend(Ajax.Updater.prototype, Ajax.Request.prototype), { - initialize: function(container, url, options) { - this.container = { - success: (container.success || container), - failure: (container.failure || (container.success ? null : container)) - } - - this.transport = Ajax.getTransport(); - this.setOptions(options); - - var onComplete = this.options.onComplete || Prototype.emptyFunction; - this.options.onComplete = (function(transport, param) { - this.updateContent(); - onComplete(transport, param); - }).bind(this); - - this.request(url); - }, - - updateContent: function() { - var receiver = this.container[this.success() ? 'success' : 'failure']; - var response = this.transport.responseText; - - if (!this.options.evalScripts) response = response.stripScripts(); - - if (receiver = $(receiver)) { - if (this.options.insertion) - new this.options.insertion(receiver, response); - else - receiver.update(response); - } - - if (this.success()) { - if (this.onComplete) - setTimeout(this.onComplete.bind(this), 10); - } - } -}); - -Ajax.PeriodicalUpdater = Class.create(); -Ajax.PeriodicalUpdater.prototype = Object.extend(new Ajax.Base(), { - initialize: function(container, url, options) { - this.setOptions(options); - this.onComplete = this.options.onComplete; - - this.frequency = (this.options.frequency || 2); - this.decay = (this.options.decay || 1); - - this.updater = {}; - this.container = container; - this.url = url; - - this.start(); - }, - - start: function() { - this.options.onComplete = this.updateComplete.bind(this); - this.onTimerEvent(); - }, - - stop: function() { - this.updater.options.onComplete = undefined; - clearTimeout(this.timer); - (this.onComplete || Prototype.emptyFunction).apply(this, arguments); - }, - - updateComplete: function(request) { - if (this.options.decay) { - this.decay = (request.responseText == this.lastText ? 
- this.decay * this.options.decay : 1); - - this.lastText = request.responseText; - } - this.timer = setTimeout(this.onTimerEvent.bind(this), - this.decay * this.frequency * 1000); - }, - - onTimerEvent: function() { - this.updater = new Ajax.Updater(this.container, this.url, this.options); - } -}); -function $(element) { - if (arguments.length > 1) { - for (var i = 0, elements = [], length = arguments.length; i < length; i++) - elements.push($(arguments[i])); - return elements; - } - if (typeof element == 'string') - element = document.getElementById(element); - return Element.extend(element); -} - -if (Prototype.BrowserFeatures.XPath) { - document._getElementsByXPath = function(expression, parentElement) { - var results = []; - var query = document.evaluate(expression, $(parentElement) || document, - null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null); - for (var i = 0, length = query.snapshotLength; i < length; i++) - results.push(query.snapshotItem(i)); - return results; - }; -} - -document.getElementsByClassName = function(className, parentElement) { - if (Prototype.BrowserFeatures.XPath) { - var q = ".//*[contains(concat(' ', @class, ' '), ' " + className + " ')]"; - return document._getElementsByXPath(q, parentElement); - } else { - var children = ($(parentElement) || document.body).getElementsByTagName('*'); - var elements = [], child; - for (var i = 0, length = children.length; i < length; i++) { - child = children[i]; - if (Element.hasClassName(child, className)) - elements.push(Element.extend(child)); - } - return elements; - } -}; - -/*--------------------------------------------------------------------------*/ - -if (!window.Element) - var Element = new Object(); - -Element.extend = function(element) { - if (!element || _nativeExtensions || element.nodeType == 3) return element; - - if (!element._extended && element.tagName && element != window) { - var methods = Object.clone(Element.Methods), cache = Element.extend.cache; - - if (element.tagName == 'FORM') - Object.extend(methods, Form.Methods); - if (['INPUT', 'TEXTAREA', 'SELECT'].include(element.tagName)) - Object.extend(methods, Form.Element.Methods); - - Object.extend(methods, Element.Methods.Simulated); - - for (var property in methods) { - var value = methods[property]; - if (typeof value == 'function' && !(property in element)) - element[property] = cache.findOrStore(value); - } - } - - element._extended = true; - return element; -}; - -Element.extend.cache = { - findOrStore: function(value) { - return this[value] = this[value] || function() { - return value.apply(null, [this].concat($A(arguments))); - } - } -}; - -Element.Methods = { - visible: function(element) { - return $(element).style.display != 'none'; - }, - - toggle: function(element) { - element = $(element); - Element[Element.visible(element) ? 'hide' : 'show'](element); - return element; - }, - - hide: function(element) { - $(element).style.display = 'none'; - return element; - }, - - show: function(element) { - $(element).style.display = ''; - return element; - }, - - remove: function(element) { - element = $(element); - element.parentNode.removeChild(element); - return element; - }, - - update: function(element, html) { - html = typeof html == 'undefined' ? '' : html.toString(); - $(element).innerHTML = html.stripScripts(); - setTimeout(function() {html.evalScripts()}, 10); - return element; - }, - - replace: function(element, html) { - element = $(element); - html = typeof html == 'undefined' ? 
'' : html.toString(); - if (element.outerHTML) { - element.outerHTML = html.stripScripts(); - } else { - var range = element.ownerDocument.createRange(); - range.selectNodeContents(element); - element.parentNode.replaceChild( - range.createContextualFragment(html.stripScripts()), element); - } - setTimeout(function() {html.evalScripts()}, 10); - return element; - }, - - inspect: function(element) { - element = $(element); - var result = '<' + element.tagName.toLowerCase(); - $H({'id': 'id', 'className': 'class'}).each(function(pair) { - var property = pair.first(), attribute = pair.last(); - var value = (element[property] || '').toString(); - if (value) result += ' ' + attribute + '=' + value.inspect(true); - }); - return result + '>'; - }, - - recursivelyCollect: function(element, property) { - element = $(element); - var elements = []; - while (element = element[property]) - if (element.nodeType == 1) - elements.push(Element.extend(element)); - return elements; - }, - - ancestors: function(element) { - return $(element).recursivelyCollect('parentNode'); - }, - - descendants: function(element) { - return $A($(element).getElementsByTagName('*')); - }, - - immediateDescendants: function(element) { - if (!(element = $(element).firstChild)) return []; - while (element && element.nodeType != 1) element = element.nextSibling; - if (element) return [element].concat($(element).nextSiblings()); - return []; - }, - - previousSiblings: function(element) { - return $(element).recursivelyCollect('previousSibling'); - }, - - nextSiblings: function(element) { - return $(element).recursivelyCollect('nextSibling'); - }, - - siblings: function(element) { - element = $(element); - return element.previousSiblings().reverse().concat(element.nextSiblings()); - }, - - match: function(element, selector) { - if (typeof selector == 'string') - selector = new Selector(selector); - return selector.match($(element)); - }, - - up: function(element, expression, index) { - return Selector.findElement($(element).ancestors(), expression, index); - }, - - down: function(element, expression, index) { - return Selector.findElement($(element).descendants(), expression, index); - }, - - previous: function(element, expression, index) { - return Selector.findElement($(element).previousSiblings(), expression, index); - }, - - next: function(element, expression, index) { - return Selector.findElement($(element).nextSiblings(), expression, index); - }, - - getElementsBySelector: function() { - var args = $A(arguments), element = $(args.shift()); - return Selector.findChildElements(element, args); - }, - - getElementsByClassName: function(element, className) { - return document.getElementsByClassName(className, element); - }, - - readAttribute: function(element, name) { - element = $(element); - if (document.all && !window.opera) { - var t = Element._attributeTranslations; - if (t.values[name]) return t.values[name](element, name); - if (t.names[name]) name = t.names[name]; - var attribute = element.attributes[name]; - if(attribute) return attribute.nodeValue; - } - return element.getAttribute(name); - }, - - getHeight: function(element) { - return $(element).getDimensions().height; - }, - - getWidth: function(element) { - return $(element).getDimensions().width; - }, - - classNames: function(element) { - return new Element.ClassNames(element); - }, - - hasClassName: function(element, className) { - if (!(element = $(element))) return; - var elementClassName = element.className; - if (elementClassName.length == 0) return false; - if 
(elementClassName == className || - elementClassName.match(new RegExp("(^|\\s)" + className + "(\\s|$)"))) - return true; - return false; - }, - - addClassName: function(element, className) { - if (!(element = $(element))) return; - Element.classNames(element).add(className); - return element; - }, - - removeClassName: function(element, className) { - if (!(element = $(element))) return; - Element.classNames(element).remove(className); - return element; - }, - - toggleClassName: function(element, className) { - if (!(element = $(element))) return; - Element.classNames(element)[element.hasClassName(className) ? 'remove' : 'add'](className); - return element; - }, - - observe: function() { - Event.observe.apply(Event, arguments); - return $A(arguments).first(); - }, - - stopObserving: function() { - Event.stopObserving.apply(Event, arguments); - return $A(arguments).first(); - }, - - // removes whitespace-only text node children - cleanWhitespace: function(element) { - element = $(element); - var node = element.firstChild; - while (node) { - var nextNode = node.nextSibling; - if (node.nodeType == 3 && !/\S/.test(node.nodeValue)) - element.removeChild(node); - node = nextNode; - } - return element; - }, - - empty: function(element) { - return $(element).innerHTML.match(/^\s*$/); - }, - - descendantOf: function(element, ancestor) { - element = $(element), ancestor = $(ancestor); - while (element = element.parentNode) - if (element == ancestor) return true; - return false; - }, - - scrollTo: function(element) { - element = $(element); - var pos = Position.cumulativeOffset(element); - window.scrollTo(pos[0], pos[1]); - return element; - }, - - getStyle: function(element, style) { - element = $(element); - if (['float','cssFloat'].include(style)) - style = (typeof element.style.styleFloat != 'undefined' ? 'styleFloat' : 'cssFloat'); - style = style.camelize(); - var value = element.style[style]; - if (!value) { - if (document.defaultView && document.defaultView.getComputedStyle) { - var css = document.defaultView.getComputedStyle(element, null); - value = css ? css[style] : null; - } else if (element.currentStyle) { - value = element.currentStyle[style]; - } - } - - if((value == 'auto') && ['width','height'].include(style) && (element.getStyle('display') != 'none')) - value = element['offset'+style.capitalize()] + 'px'; - - if (window.opera && ['left', 'top', 'right', 'bottom'].include(style)) - if (Element.getStyle(element, 'position') == 'static') value = 'auto'; - if(style == 'opacity') { - if(value) return parseFloat(value); - if(value = (element.getStyle('filter') || '').match(/alpha\(opacity=(.*)\)/)) - if(value[1]) return parseFloat(value[1]) / 100; - return 1.0; - } - return value == 'auto' ? null : value; - }, - - setStyle: function(element, style) { - element = $(element); - for (var name in style) { - var value = style[name]; - if(name == 'opacity') { - if (value == 1) { - value = (/Gecko/.test(navigator.userAgent) && - !/Konqueror|Safari|KHTML/.test(navigator.userAgent)) ? 
0.999999 : 1.0; - if(/MSIE/.test(navigator.userAgent) && !window.opera) - element.style.filter = element.getStyle('filter').replace(/alpha\([^\)]*\)/gi,''); - } else if(value == '') { - if(/MSIE/.test(navigator.userAgent) && !window.opera) - element.style.filter = element.getStyle('filter').replace(/alpha\([^\)]*\)/gi,''); - } else { - if(value < 0.00001) value = 0; - if(/MSIE/.test(navigator.userAgent) && !window.opera) - element.style.filter = element.getStyle('filter').replace(/alpha\([^\)]*\)/gi,'') + - 'alpha(opacity='+value*100+')'; - } - } else if(['float','cssFloat'].include(name)) name = (typeof element.style.styleFloat != 'undefined') ? 'styleFloat' : 'cssFloat'; - element.style[name.camelize()] = value; - } - return element; - }, - - getDimensions: function(element) { - element = $(element); - var display = $(element).getStyle('display'); - if (display != 'none' && display != null) // Safari bug - return {width: element.offsetWidth, height: element.offsetHeight}; - - // All *Width and *Height properties give 0 on elements with display none, - // so enable the element temporarily - var els = element.style; - var originalVisibility = els.visibility; - var originalPosition = els.position; - var originalDisplay = els.display; - els.visibility = 'hidden'; - els.position = 'absolute'; - els.display = 'block'; - var originalWidth = element.clientWidth; - var originalHeight = element.clientHeight; - els.display = originalDisplay; - els.position = originalPosition; - els.visibility = originalVisibility; - return {width: originalWidth, height: originalHeight}; - }, - - makePositioned: function(element) { - element = $(element); - var pos = Element.getStyle(element, 'position'); - if (pos == 'static' || !pos) { - element._madePositioned = true; - element.style.position = 'relative'; - // Opera returns the offset relative to the positioning context, when an - // element is position relative but top and left have not been defined - if (window.opera) { - element.style.top = 0; - element.style.left = 0; - } - } - return element; - }, - - undoPositioned: function(element) { - element = $(element); - if (element._madePositioned) { - element._madePositioned = undefined; - element.style.position = - element.style.top = - element.style.left = - element.style.bottom = - element.style.right = ''; - } - return element; - }, - - makeClipping: function(element) { - element = $(element); - if (element._overflow) return element; - element._overflow = element.style.overflow || 'auto'; - if ((Element.getStyle(element, 'overflow') || 'visible') != 'hidden') - element.style.overflow = 'hidden'; - return element; - }, - - undoClipping: function(element) { - element = $(element); - if (!element._overflow) return element; - element.style.overflow = element._overflow == 'auto' ? '' : element._overflow; - element._overflow = null; - return element; - } -}; - -Object.extend(Element.Methods, {childOf: Element.Methods.descendantOf}); - -Element._attributeTranslations = {}; - -Element._attributeTranslations.names = { - colspan: "colSpan", - rowspan: "rowSpan", - valign: "vAlign", - datetime: "dateTime", - accesskey: "accessKey", - tabindex: "tabIndex", - enctype: "encType", - maxlength: "maxLength", - readonly: "readOnly", - longdesc: "longDesc" -}; - -Element._attributeTranslations.values = { - _getAttr: function(element, attribute) { - return element.getAttribute(attribute, 2); - }, - - _flag: function(element, attribute) { - return $(element).hasAttribute(attribute) ? 
attribute : null; - }, - - style: function(element) { - return element.style.cssText.toLowerCase(); - }, - - title: function(element) { - var node = element.getAttributeNode('title'); - return node.specified ? node.nodeValue : null; - } -}; - -Object.extend(Element._attributeTranslations.values, { - href: Element._attributeTranslations.values._getAttr, - src: Element._attributeTranslations.values._getAttr, - disabled: Element._attributeTranslations.values._flag, - checked: Element._attributeTranslations.values._flag, - readonly: Element._attributeTranslations.values._flag, - multiple: Element._attributeTranslations.values._flag -}); - -Element.Methods.Simulated = { - hasAttribute: function(element, attribute) { - var t = Element._attributeTranslations; - attribute = t.names[attribute] || attribute; - return $(element).getAttributeNode(attribute).specified; - } -}; - -// IE is missing .innerHTML support for TABLE-related elements -if (document.all && !window.opera){ - Element.Methods.update = function(element, html) { - element = $(element); - html = typeof html == 'undefined' ? '' : html.toString(); - var tagName = element.tagName.toUpperCase(); - if (['THEAD','TBODY','TR','TD'].include(tagName)) { - var div = document.createElement('div'); - switch (tagName) { - case 'THEAD': - case 'TBODY': - div.innerHTML = '' + html.stripScripts() + '
</tbody></table>'; - depth = 2; - break; - case 'TR': - div.innerHTML = '<table><tbody><tr>' + html.stripScripts() + '</tr></tbody></table>'; - depth = 3; - break; - case 'TD': - div.innerHTML = '<table><tbody><tr><td>' + html.stripScripts() + '</td></tr></tbody></table>
    '; - depth = 4; - } - $A(element.childNodes).each(function(node){ - element.removeChild(node) - }); - depth.times(function(){ div = div.firstChild }); - - $A(div.childNodes).each( - function(node){ element.appendChild(node) }); - } else { - element.innerHTML = html.stripScripts(); - } - setTimeout(function() {html.evalScripts()}, 10); - return element; - } -}; - -Object.extend(Element, Element.Methods); - -var _nativeExtensions = false; - -if(/Konqueror|Safari|KHTML/.test(navigator.userAgent)) - ['', 'Form', 'Input', 'TextArea', 'Select'].each(function(tag) { - var className = 'HTML' + tag + 'Element'; - if(window[className]) return; - var klass = window[className] = {}; - klass.prototype = document.createElement(tag ? tag.toLowerCase() : 'div').__proto__; - }); - -Element.addMethods = function(methods) { - Object.extend(Element.Methods, methods || {}); - - function copy(methods, destination, onlyIfAbsent) { - onlyIfAbsent = onlyIfAbsent || false; - var cache = Element.extend.cache; - for (var property in methods) { - var value = methods[property]; - if (!onlyIfAbsent || !(property in destination)) - destination[property] = cache.findOrStore(value); - } - } - - if (typeof HTMLElement != 'undefined') { - copy(Element.Methods, HTMLElement.prototype); - copy(Element.Methods.Simulated, HTMLElement.prototype, true); - copy(Form.Methods, HTMLFormElement.prototype); - [HTMLInputElement, HTMLTextAreaElement, HTMLSelectElement].each(function(klass) { - copy(Form.Element.Methods, klass.prototype); - }); - _nativeExtensions = true; - } -} - -var Toggle = new Object(); -Toggle.display = Element.toggle; - -/*--------------------------------------------------------------------------*/ - -Abstract.Insertion = function(adjacency) { - this.adjacency = adjacency; -} - -Abstract.Insertion.prototype = { - initialize: function(element, content) { - this.element = $(element); - this.content = content.stripScripts(); - - if (this.adjacency && this.element.insertAdjacentHTML) { - try { - this.element.insertAdjacentHTML(this.adjacency, this.content); - } catch (e) { - var tagName = this.element.tagName.toUpperCase(); - if (['TBODY', 'TR'].include(tagName)) { - this.insertContent(this.contentFromAnonymousTable()); - } else { - throw e; - } - } - } else { - this.range = this.element.ownerDocument.createRange(); - if (this.initializeRange) this.initializeRange(); - this.insertContent([this.range.createContextualFragment(this.content)]); - } - - setTimeout(function() {content.evalScripts()}, 10); - }, - - contentFromAnonymousTable: function() { - var div = document.createElement('div'); - div.innerHTML = '' + this.content + '
    '; - return $A(div.childNodes[0].childNodes[0].childNodes); - } -} - -var Insertion = new Object(); - -Insertion.Before = Class.create(); -Insertion.Before.prototype = Object.extend(new Abstract.Insertion('beforeBegin'), { - initializeRange: function() { - this.range.setStartBefore(this.element); - }, - - insertContent: function(fragments) { - fragments.each((function(fragment) { - this.element.parentNode.insertBefore(fragment, this.element); - }).bind(this)); - } -}); - -Insertion.Top = Class.create(); -Insertion.Top.prototype = Object.extend(new Abstract.Insertion('afterBegin'), { - initializeRange: function() { - this.range.selectNodeContents(this.element); - this.range.collapse(true); - }, - - insertContent: function(fragments) { - fragments.reverse(false).each((function(fragment) { - this.element.insertBefore(fragment, this.element.firstChild); - }).bind(this)); - } -}); - -Insertion.Bottom = Class.create(); -Insertion.Bottom.prototype = Object.extend(new Abstract.Insertion('beforeEnd'), { - initializeRange: function() { - this.range.selectNodeContents(this.element); - this.range.collapse(this.element); - }, - - insertContent: function(fragments) { - fragments.each((function(fragment) { - this.element.appendChild(fragment); - }).bind(this)); - } -}); - -Insertion.After = Class.create(); -Insertion.After.prototype = Object.extend(new Abstract.Insertion('afterEnd'), { - initializeRange: function() { - this.range.setStartAfter(this.element); - }, - - insertContent: function(fragments) { - fragments.each((function(fragment) { - this.element.parentNode.insertBefore(fragment, - this.element.nextSibling); - }).bind(this)); - } -}); - -/*--------------------------------------------------------------------------*/ - -Element.ClassNames = Class.create(); -Element.ClassNames.prototype = { - initialize: function(element) { - this.element = $(element); - }, - - _each: function(iterator) { - this.element.className.split(/\s+/).select(function(name) { - return name.length > 0; - })._each(iterator); - }, - - set: function(className) { - this.element.className = className; - }, - - add: function(classNameToAdd) { - if (this.include(classNameToAdd)) return; - this.set($A(this).concat(classNameToAdd).join(' ')); - }, - - remove: function(classNameToRemove) { - if (!this.include(classNameToRemove)) return; - this.set($A(this).without(classNameToRemove).join(' ')); - }, - - toString: function() { - return $A(this).join(' '); - } -}; - -Object.extend(Element.ClassNames.prototype, Enumerable); -var Selector = Class.create(); -Selector.prototype = { - initialize: function(expression) { - this.params = {classNames: []}; - this.expression = expression.toString().strip(); - this.parseExpression(); - this.compileMatcher(); - }, - - parseExpression: function() { - function abort(message) { throw 'Parse error in selector: ' + message; } - - if (this.expression == '') abort('empty expression'); - - var params = this.params, expr = this.expression, match, modifier, clause, rest; - while (match = expr.match(/^(.*)\[([a-z0-9_:-]+?)(?:([~\|!]?=)(?:"([^"]*)"|([^\]\s]*)))?\]$/i)) { - params.attributes = params.attributes || []; - params.attributes.push({name: match[2], operator: match[3], value: match[4] || match[5] || ''}); - expr = match[1]; - } - - if (expr == '*') return this.params.wildcard = true; - - while (match = expr.match(/^([^a-z0-9_-])?([a-z0-9_-]+)(.*)/i)) { - modifier = match[1], clause = match[2], rest = match[3]; - switch (modifier) { - case '#': params.id = clause; break; - case '.': 
params.classNames.push(clause); break; - case '': - case undefined: params.tagName = clause.toUpperCase(); break; - default: abort(expr.inspect()); - } - expr = rest; - } - - if (expr.length > 0) abort(expr.inspect()); - }, - - buildMatchExpression: function() { - var params = this.params, conditions = [], clause; - - if (params.wildcard) - conditions.push('true'); - if (clause = params.id) - conditions.push('element.readAttribute("id") == ' + clause.inspect()); - if (clause = params.tagName) - conditions.push('element.tagName.toUpperCase() == ' + clause.inspect()); - if ((clause = params.classNames).length > 0) - for (var i = 0, length = clause.length; i < length; i++) - conditions.push('element.hasClassName(' + clause[i].inspect() + ')'); - if (clause = params.attributes) { - clause.each(function(attribute) { - var value = 'element.readAttribute(' + attribute.name.inspect() + ')'; - var splitValueBy = function(delimiter) { - return value + ' && ' + value + '.split(' + delimiter.inspect() + ')'; - } - - switch (attribute.operator) { - case '=': conditions.push(value + ' == ' + attribute.value.inspect()); break; - case '~=': conditions.push(splitValueBy(' ') + '.include(' + attribute.value.inspect() + ')'); break; - case '|=': conditions.push( - splitValueBy('-') + '.first().toUpperCase() == ' + attribute.value.toUpperCase().inspect() - ); break; - case '!=': conditions.push(value + ' != ' + attribute.value.inspect()); break; - case '': - case undefined: conditions.push('element.hasAttribute(' + attribute.name.inspect() + ')'); break; - default: throw 'Unknown operator ' + attribute.operator + ' in selector'; - } - }); - } - - return conditions.join(' && '); - }, - - compileMatcher: function() { - this.match = new Function('element', 'if (!element.tagName) return false; \ - element = $(element); \ - return ' + this.buildMatchExpression()); - }, - - findElements: function(scope) { - var element; - - if (element = $(this.params.id)) - if (this.match(element)) - if (!scope || Element.childOf(element, scope)) - return [element]; - - scope = (scope || document).getElementsByTagName(this.params.tagName || '*'); - - var results = []; - for (var i = 0, length = scope.length; i < length; i++) - if (this.match(element = scope[i])) - results.push(Element.extend(element)); - - return results; - }, - - toString: function() { - return this.expression; - } -} - -Object.extend(Selector, { - matchElements: function(elements, expression) { - var selector = new Selector(expression); - return elements.select(selector.match.bind(selector)).map(Element.extend); - }, - - findElement: function(elements, expression, index) { - if (typeof expression == 'number') index = expression, expression = false; - return Selector.matchElements(elements, expression || '*')[index || 0]; - }, - - findChildElements: function(element, expressions) { - return expressions.map(function(expression) { - return expression.match(/[^\s"]+(?:"[^"]*"[^\s"]+)*/g).inject([null], function(results, expr) { - var selector = new Selector(expr); - return results.inject([], function(elements, result) { - return elements.concat(selector.findElements(result || element)); - }); - }); - }).flatten(); - } -}); - -function $$() { - return Selector.findChildElements(document, $A(arguments)); -} -var Form = { - reset: function(form) { - $(form).reset(); - return form; - }, - - serializeElements: function(elements, getHash) { - var data = elements.inject({}, function(result, element) { - if (!element.disabled && element.name) { - var key = element.name, 
value = $(element).getValue(); - if (value != undefined) { - if (result[key]) { - if (result[key].constructor != Array) result[key] = [result[key]]; - result[key].push(value); - } - else result[key] = value; - } - } - return result; - }); - - return getHash ? data : Hash.toQueryString(data); - } -}; - -Form.Methods = { - serialize: function(form, getHash) { - return Form.serializeElements(Form.getElements(form), getHash); - }, - - getElements: function(form) { - return $A($(form).getElementsByTagName('*')).inject([], - function(elements, child) { - if (Form.Element.Serializers[child.tagName.toLowerCase()]) - elements.push(Element.extend(child)); - return elements; - } - ); - }, - - getInputs: function(form, typeName, name) { - form = $(form); - var inputs = form.getElementsByTagName('input'); - - if (!typeName && !name) return $A(inputs).map(Element.extend); - - for (var i = 0, matchingInputs = [], length = inputs.length; i < length; i++) { - var input = inputs[i]; - if ((typeName && input.type != typeName) || (name && input.name != name)) - continue; - matchingInputs.push(Element.extend(input)); - } - - return matchingInputs; - }, - - disable: function(form) { - form = $(form); - form.getElements().each(function(element) { - element.blur(); - element.disabled = 'true'; - }); - return form; - }, - - enable: function(form) { - form = $(form); - form.getElements().each(function(element) { - element.disabled = ''; - }); - return form; - }, - - findFirstElement: function(form) { - return $(form).getElements().find(function(element) { - return element.type != 'hidden' && !element.disabled && - ['input', 'select', 'textarea'].include(element.tagName.toLowerCase()); - }); - }, - - focusFirstElement: function(form) { - form = $(form); - form.findFirstElement().activate(); - return form; - } -} - -Object.extend(Form, Form.Methods); - -/*--------------------------------------------------------------------------*/ - -Form.Element = { - focus: function(element) { - $(element).focus(); - return element; - }, - - select: function(element) { - $(element).select(); - return element; - } -} - -Form.Element.Methods = { - serialize: function(element) { - element = $(element); - if (!element.disabled && element.name) { - var value = element.getValue(); - if (value != undefined) { - var pair = {}; - pair[element.name] = value; - return Hash.toQueryString(pair); - } - } - return ''; - }, - - getValue: function(element) { - element = $(element); - var method = element.tagName.toLowerCase(); - return Form.Element.Serializers[method](element); - }, - - clear: function(element) { - $(element).value = ''; - return element; - }, - - present: function(element) { - return $(element).value != ''; - }, - - activate: function(element) { - element = $(element); - element.focus(); - if (element.select && ( element.tagName.toLowerCase() != 'input' || - !['button', 'reset', 'submit'].include(element.type) ) ) - element.select(); - return element; - }, - - disable: function(element) { - element = $(element); - element.disabled = true; - return element; - }, - - enable: function(element) { - element = $(element); - element.blur(); - element.disabled = false; - return element; - } -} - -Object.extend(Form.Element, Form.Element.Methods); -var Field = Form.Element; -var $F = Form.Element.getValue; - -/*--------------------------------------------------------------------------*/ - -Form.Element.Serializers = { - input: function(element) { - switch (element.type.toLowerCase()) { - case 'checkbox': - case 'radio': - return 
Form.Element.Serializers.inputSelector(element); - default: - return Form.Element.Serializers.textarea(element); - } - }, - - inputSelector: function(element) { - return element.checked ? element.value : null; - }, - - textarea: function(element) { - return element.value; - }, - - select: function(element) { - return this[element.type == 'select-one' ? - 'selectOne' : 'selectMany'](element); - }, - - selectOne: function(element) { - var index = element.selectedIndex; - return index >= 0 ? this.optionValue(element.options[index]) : null; - }, - - selectMany: function(element) { - var values, length = element.length; - if (!length) return null; - - for (var i = 0, values = []; i < length; i++) { - var opt = element.options[i]; - if (opt.selected) values.push(this.optionValue(opt)); - } - return values; - }, - - optionValue: function(opt) { - // extend element because hasAttribute may not be native - return Element.extend(opt).hasAttribute('value') ? opt.value : opt.text; - } -} - -/*--------------------------------------------------------------------------*/ - -Abstract.TimedObserver = function() {} -Abstract.TimedObserver.prototype = { - initialize: function(element, frequency, callback) { - this.frequency = frequency; - this.element = $(element); - this.callback = callback; - - this.lastValue = this.getValue(); - this.registerCallback(); - }, - - registerCallback: function() { - setInterval(this.onTimerEvent.bind(this), this.frequency * 1000); - }, - - onTimerEvent: function() { - var value = this.getValue(); - var changed = ('string' == typeof this.lastValue && 'string' == typeof value - ? this.lastValue != value : String(this.lastValue) != String(value)); - if (changed) { - this.callback(this.element, value); - this.lastValue = value; - } - } -} - -Form.Element.Observer = Class.create(); -Form.Element.Observer.prototype = Object.extend(new Abstract.TimedObserver(), { - getValue: function() { - return Form.Element.getValue(this.element); - } -}); - -Form.Observer = Class.create(); -Form.Observer.prototype = Object.extend(new Abstract.TimedObserver(), { - getValue: function() { - return Form.serialize(this.element); - } -}); - -/*--------------------------------------------------------------------------*/ - -Abstract.EventObserver = function() {} -Abstract.EventObserver.prototype = { - initialize: function(element, callback) { - this.element = $(element); - this.callback = callback; - - this.lastValue = this.getValue(); - if (this.element.tagName.toLowerCase() == 'form') - this.registerFormCallbacks(); - else - this.registerCallback(this.element); - }, - - onElementEvent: function() { - var value = this.getValue(); - if (this.lastValue != value) { - this.callback(this.element, value); - this.lastValue = value; - } - }, - - registerFormCallbacks: function() { - Form.getElements(this.element).each(this.registerCallback.bind(this)); - }, - - registerCallback: function(element) { - if (element.type) { - switch (element.type.toLowerCase()) { - case 'checkbox': - case 'radio': - Event.observe(element, 'click', this.onElementEvent.bind(this)); - break; - default: - Event.observe(element, 'change', this.onElementEvent.bind(this)); - break; - } - } - } -} - -Form.Element.EventObserver = Class.create(); -Form.Element.EventObserver.prototype = Object.extend(new Abstract.EventObserver(), { - getValue: function() { - return Form.Element.getValue(this.element); - } -}); - -Form.EventObserver = Class.create(); -Form.EventObserver.prototype = Object.extend(new Abstract.EventObserver(), { - getValue: 
function() { - return Form.serialize(this.element); - } -}); -if (!window.Event) { - var Event = new Object(); -} - -Object.extend(Event, { - KEY_BACKSPACE: 8, - KEY_TAB: 9, - KEY_RETURN: 13, - KEY_ESC: 27, - KEY_LEFT: 37, - KEY_UP: 38, - KEY_RIGHT: 39, - KEY_DOWN: 40, - KEY_DELETE: 46, - KEY_HOME: 36, - KEY_END: 35, - KEY_PAGEUP: 33, - KEY_PAGEDOWN: 34, - - element: function(event) { - return event.target || event.srcElement; - }, - - isLeftClick: function(event) { - return (((event.which) && (event.which == 1)) || - ((event.button) && (event.button == 1))); - }, - - pointerX: function(event) { - return event.pageX || (event.clientX + - (document.documentElement.scrollLeft || document.body.scrollLeft)); - }, - - pointerY: function(event) { - return event.pageY || (event.clientY + - (document.documentElement.scrollTop || document.body.scrollTop)); - }, - - stop: function(event) { - if (event.preventDefault) { - event.preventDefault(); - event.stopPropagation(); - } else { - event.returnValue = false; - event.cancelBubble = true; - } - }, - - // find the first node with the given tagName, starting from the - // node the event was triggered on; traverses the DOM upwards - findElement: function(event, tagName) { - var element = Event.element(event); - while (element.parentNode && (!element.tagName || - (element.tagName.toUpperCase() != tagName.toUpperCase()))) - element = element.parentNode; - return element; - }, - - observers: false, - - _observeAndCache: function(element, name, observer, useCapture) { - if (!this.observers) this.observers = []; - if (element.addEventListener) { - this.observers.push([element, name, observer, useCapture]); - element.addEventListener(name, observer, useCapture); - } else if (element.attachEvent) { - this.observers.push([element, name, observer, useCapture]); - element.attachEvent('on' + name, observer); - } - }, - - unloadCache: function() { - if (!Event.observers) return; - for (var i = 0, length = Event.observers.length; i < length; i++) { - Event.stopObserving.apply(this, Event.observers[i]); - Event.observers[i][0] = null; - } - Event.observers = false; - }, - - observe: function(element, name, observer, useCapture) { - element = $(element); - useCapture = useCapture || false; - - if (name == 'keypress' && - (navigator.appVersion.match(/Konqueror|Safari|KHTML/) - || element.attachEvent)) - name = 'keydown'; - - Event._observeAndCache(element, name, observer, useCapture); - }, - - stopObserving: function(element, name, observer, useCapture) { - element = $(element); - useCapture = useCapture || false; - - if (name == 'keypress' && - (navigator.appVersion.match(/Konqueror|Safari|KHTML/) - || element.detachEvent)) - name = 'keydown'; - - if (element.removeEventListener) { - element.removeEventListener(name, observer, useCapture); - } else if (element.detachEvent) { - try { - element.detachEvent('on' + name, observer); - } catch (e) {} - } - } -}); - -/* prevent memory leaks in IE */ -if (navigator.appVersion.match(/\bMSIE\b/)) - Event.observe(window, 'unload', Event.unloadCache, false); -var Position = { - // set to true if needed, warning: firefox performance problems - // NOT neeeded for page scrolling, only if draggable contained in - // scrollable elements - includeScrollOffsets: false, - - // must be called before calling withinIncludingScrolloffset, every time the - // page is scrolled - prepare: function() { - this.deltaX = window.pageXOffset - || document.documentElement.scrollLeft - || document.body.scrollLeft - || 0; - this.deltaY = 
window.pageYOffset - || document.documentElement.scrollTop - || document.body.scrollTop - || 0; - }, - - realOffset: function(element) { - var valueT = 0, valueL = 0; - do { - valueT += element.scrollTop || 0; - valueL += element.scrollLeft || 0; - element = element.parentNode; - } while (element); - return [valueL, valueT]; - }, - - cumulativeOffset: function(element) { - var valueT = 0, valueL = 0; - do { - valueT += element.offsetTop || 0; - valueL += element.offsetLeft || 0; - element = element.offsetParent; - } while (element); - return [valueL, valueT]; - }, - - positionedOffset: function(element) { - var valueT = 0, valueL = 0; - do { - valueT += element.offsetTop || 0; - valueL += element.offsetLeft || 0; - element = element.offsetParent; - if (element) { - if(element.tagName=='BODY') break; - var p = Element.getStyle(element, 'position'); - if (p == 'relative' || p == 'absolute') break; - } - } while (element); - return [valueL, valueT]; - }, - - offsetParent: function(element) { - if (element.offsetParent) return element.offsetParent; - if (element == document.body) return element; - - while ((element = element.parentNode) && element != document.body) - if (Element.getStyle(element, 'position') != 'static') - return element; - - return document.body; - }, - - // caches x/y coordinate pair to use with overlap - within: function(element, x, y) { - if (this.includeScrollOffsets) - return this.withinIncludingScrolloffsets(element, x, y); - this.xcomp = x; - this.ycomp = y; - this.offset = this.cumulativeOffset(element); - - return (y >= this.offset[1] && - y < this.offset[1] + element.offsetHeight && - x >= this.offset[0] && - x < this.offset[0] + element.offsetWidth); - }, - - withinIncludingScrolloffsets: function(element, x, y) { - var offsetcache = this.realOffset(element); - - this.xcomp = x + offsetcache[0] - this.deltaX; - this.ycomp = y + offsetcache[1] - this.deltaY; - this.offset = this.cumulativeOffset(element); - - return (this.ycomp >= this.offset[1] && - this.ycomp < this.offset[1] + element.offsetHeight && - this.xcomp >= this.offset[0] && - this.xcomp < this.offset[0] + element.offsetWidth); - }, - - // within must be called directly before - overlap: function(mode, element) { - if (!mode) return 0; - if (mode == 'vertical') - return ((this.offset[1] + element.offsetHeight) - this.ycomp) / - element.offsetHeight; - if (mode == 'horizontal') - return ((this.offset[0] + element.offsetWidth) - this.xcomp) / - element.offsetWidth; - }, - - page: function(forElement) { - var valueT = 0, valueL = 0; - - var element = forElement; - do { - valueT += element.offsetTop || 0; - valueL += element.offsetLeft || 0; - - // Safari fix - if (element.offsetParent==document.body) - if (Element.getStyle(element,'position')=='absolute') break; - - } while (element = element.offsetParent); - - element = forElement; - do { - if (!window.opera || element.tagName=='BODY') { - valueT -= element.scrollTop || 0; - valueL -= element.scrollLeft || 0; - } - } while (element = element.parentNode); - - return [valueL, valueT]; - }, - - clone: function(source, target) { - var options = Object.extend({ - setLeft: true, - setTop: true, - setWidth: true, - setHeight: true, - offsetTop: 0, - offsetLeft: 0 - }, arguments[2] || {}) - - // find page position of source - source = $(source); - var p = Position.page(source); - - // find coordinate system to use - target = $(target); - var delta = [0, 0]; - var parent = null; - // delta [0,0] will do fine with position: fixed elements, - // position:absolute 
needs offsetParent deltas - if (Element.getStyle(target,'position') == 'absolute') { - parent = Position.offsetParent(target); - delta = Position.page(parent); - } - - // correct by body offsets (fixes Safari) - if (parent == document.body) { - delta[0] -= document.body.offsetLeft; - delta[1] -= document.body.offsetTop; - } - - // set position - if(options.setLeft) target.style.left = (p[0] - delta[0] + options.offsetLeft) + 'px'; - if(options.setTop) target.style.top = (p[1] - delta[1] + options.offsetTop) + 'px'; - if(options.setWidth) target.style.width = source.offsetWidth + 'px'; - if(options.setHeight) target.style.height = source.offsetHeight + 'px'; - }, - - absolutize: function(element) { - element = $(element); - if (element.style.position == 'absolute') return; - Position.prepare(); - - var offsets = Position.positionedOffset(element); - var top = offsets[1]; - var left = offsets[0]; - var width = element.clientWidth; - var height = element.clientHeight; - - element._originalLeft = left - parseFloat(element.style.left || 0); - element._originalTop = top - parseFloat(element.style.top || 0); - element._originalWidth = element.style.width; - element._originalHeight = element.style.height; - - element.style.position = 'absolute'; - element.style.top = top + 'px'; - element.style.left = left + 'px'; - element.style.width = width + 'px'; - element.style.height = height + 'px'; - }, - - relativize: function(element) { - element = $(element); - if (element.style.position == 'relative') return; - Position.prepare(); - - element.style.position = 'relative'; - var top = parseFloat(element.style.top || 0) - (element._originalTop || 0); - var left = parseFloat(element.style.left || 0) - (element._originalLeft || 0); - - element.style.top = top + 'px'; - element.style.left = left + 'px'; - element.style.height = element._originalHeight; - element.style.width = element._originalWidth; - } -} - -// Safari returns margins on body which is incorrect if the child is absolutely -// positioned. For performance reasons, redefine Position.cumulativeOffset for -// KHTML/WebKit only. 
-if (/Konqueror|Safari|KHTML/.test(navigator.userAgent)) { - Position.cumulativeOffset = function(element) { - var valueT = 0, valueL = 0; - do { - valueT += element.offsetTop || 0; - valueL += element.offsetLeft || 0; - if (element.offsetParent == document.body) - if (Element.getStyle(element, 'position') == 'absolute') break; - - element = element.offsetParent; - } while (element); - - return [valueL, valueT]; - } -} - -Element.addMethods(); \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/public/robots.txt b/vendor/plugins/shoulda/test/rails_root/public/robots.txt deleted file mode 100644 index 4ab9e89..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/robots.txt +++ /dev/null @@ -1 +0,0 @@ -# See http://www.robotstxt.org/wc/norobots.html for documentation on how to use the robots.txt file \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/public/stylesheets/scaffold.css b/vendor/plugins/shoulda/test/rails_root/public/stylesheets/scaffold.css deleted file mode 100644 index 8f239a3..0000000 --- a/vendor/plugins/shoulda/test/rails_root/public/stylesheets/scaffold.css +++ /dev/null @@ -1,74 +0,0 @@ -body { background-color: #fff; color: #333; } - -body, p, ol, ul, td { - font-family: verdana, arial, helvetica, sans-serif; - font-size: 13px; - line-height: 18px; -} - -pre { - background-color: #eee; - padding: 10px; - font-size: 11px; -} - -a { color: #000; } -a:visited { color: #666; } -a:hover { color: #fff; background-color:#000; } - -.fieldWithErrors { - padding: 2px; - background-color: red; - display: table; -} - -#errorExplanation { - width: 400px; - border: 2px solid red; - padding: 7px; - padding-bottom: 12px; - margin-bottom: 20px; - background-color: #f0f0f0; -} - -#errorExplanation h2 { - text-align: left; - font-weight: bold; - padding: 5px 5px 5px 15px; - font-size: 12px; - margin: -7px; - background-color: #c00; - color: #fff; -} - -#errorExplanation p { - color: #333; - margin-bottom: 0; - padding: 5px; -} - -#errorExplanation ul li { - font-size: 12px; - list-style: square; -} - -div.uploadStatus { - margin: 5px; -} - -div.progressBar { - margin: 5px; -} - -div.progressBar div.border { - background-color: #fff; - border: 1px solid grey; - width: 100%; -} - -div.progressBar div.background { - background-color: #333; - height: 18px; - width: 0%; -} - diff --git a/vendor/plugins/shoulda/test/rails_root/script/about b/vendor/plugins/shoulda/test/rails_root/script/about deleted file mode 100755 index 7b07d46..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/about +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/about' \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/script/breakpointer b/vendor/plugins/shoulda/test/rails_root/script/breakpointer deleted file mode 100755 index 64af76e..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/breakpointer +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/breakpointer' \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/script/console b/vendor/plugins/shoulda/test/rails_root/script/console deleted file mode 100755 index 42f28f7..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/console +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/console' \ No newline at end of 
file diff --git a/vendor/plugins/shoulda/test/rails_root/script/destroy b/vendor/plugins/shoulda/test/rails_root/script/destroy deleted file mode 100755 index fa0e6fc..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/destroy +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/destroy' \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/script/generate b/vendor/plugins/shoulda/test/rails_root/script/generate deleted file mode 100755 index ef976e0..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/generate +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/generate' \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/script/performance/benchmarker b/vendor/plugins/shoulda/test/rails_root/script/performance/benchmarker deleted file mode 100755 index c842d35..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/performance/benchmarker +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/performance/benchmarker' diff --git a/vendor/plugins/shoulda/test/rails_root/script/performance/profiler b/vendor/plugins/shoulda/test/rails_root/script/performance/profiler deleted file mode 100755 index d855ac8..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/performance/profiler +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/performance/profiler' diff --git a/vendor/plugins/shoulda/test/rails_root/script/plugin b/vendor/plugins/shoulda/test/rails_root/script/plugin deleted file mode 100755 index 26ca64c..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/plugin +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/plugin' \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/script/process/inspector b/vendor/plugins/shoulda/test/rails_root/script/process/inspector deleted file mode 100755 index bf25ad8..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/process/inspector +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/process/inspector' diff --git a/vendor/plugins/shoulda/test/rails_root/script/process/reaper b/vendor/plugins/shoulda/test/rails_root/script/process/reaper deleted file mode 100755 index c77f045..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/process/reaper +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/process/reaper' diff --git a/vendor/plugins/shoulda/test/rails_root/script/process/spawner b/vendor/plugins/shoulda/test/rails_root/script/process/spawner deleted file mode 100755 index 7118f39..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/process/spawner +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../../config/boot' -require 'commands/process/spawner' diff --git a/vendor/plugins/shoulda/test/rails_root/script/runner b/vendor/plugins/shoulda/test/rails_root/script/runner deleted file mode 100755 index ccc30f9..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/runner +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' 
-require 'commands/runner' \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/rails_root/script/server b/vendor/plugins/shoulda/test/rails_root/script/server deleted file mode 100755 index dfabcb8..0000000 --- a/vendor/plugins/shoulda/test/rails_root/script/server +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env ruby -require File.dirname(__FILE__) + '/../config/boot' -require 'commands/server' \ No newline at end of file diff --git a/vendor/plugins/shoulda/test/test_helper.rb b/vendor/plugins/shoulda/test/test_helper.rb deleted file mode 100644 index 1f24e47..0000000 --- a/vendor/plugins/shoulda/test/test_helper.rb +++ /dev/null @@ -1,35 +0,0 @@ -require 'fileutils' -# Load the environment -ENV['RAILS_ENV'] = 'sqlite3' - -# ln rails_root/vendor/plugins/shoulda => ../../../../ -rails_root = File.dirname(__FILE__) + '/rails_root' - -FileUtils.ln_s('../../../../', "#{rails_root}/vendor/plugins/shoulda") unless File.exists?("#{rails_root}/vendor/plugins/shoulda") - -require "#{rails_root}/config/environment.rb" - -# Load the testing framework -require 'test_help' -silence_warnings { RAILS_ENV = ENV['RAILS_ENV'] } - -# Run the migrations -ActiveRecord::Migration.verbose = false -ActiveRecord::Migrator.migrate("#{RAILS_ROOT}/db/migrate") - -# Setup the fixtures path -Test::Unit::TestCase.fixture_path = File.join(File.dirname(__FILE__), "fixtures") -# $LOAD_PATH.unshift(Test::Unit::TestCase.fixture_path) - -class Test::Unit::TestCase #:nodoc: - def create_fixtures(*table_names) - if block_given? - Fixtures.create_fixtures(Test::Unit::TestCase.fixture_path, table_names) { yield } - else - Fixtures.create_fixtures(Test::Unit::TestCase.fixture_path, table_names) - end - end - - self.use_transactional_fixtures = false - self.use_instantiated_fixtures = false -end diff --git a/vendor/plugins/shoulda/test/unit/dog_test.rb b/vendor/plugins/shoulda/test/unit/dog_test.rb deleted file mode 100644 index 390d142..0000000 --- a/vendor/plugins/shoulda/test/unit/dog_test.rb +++ /dev/null @@ -1,6 +0,0 @@ -require File.dirname(__FILE__) + '/../test_helper' - -class DogTest < Test::Unit::TestCase - load_all_fixtures - should_belong_to :user -end diff --git a/vendor/plugins/shoulda/test/unit/post_test.rb b/vendor/plugins/shoulda/test/unit/post_test.rb deleted file mode 100644 index 9905147..0000000 --- a/vendor/plugins/shoulda/test/unit/post_test.rb +++ /dev/null @@ -1,14 +0,0 @@ -require File.dirname(__FILE__) + '/../test_helper' - -class PostTest < Test::Unit::TestCase - load_all_fixtures - - should_belong_to :user - should_belong_to :owner - should_have_many :tags, :through => :taggings - - should_require_unique_attributes :title - should_require_attributes :body, :message => /wtf/ - should_require_attributes :title - should_only_allow_numeric_values_for :user_id -end diff --git a/vendor/plugins/shoulda/test/unit/tag_test.rb b/vendor/plugins/shoulda/test/unit/tag_test.rb deleted file mode 100644 index 4b0f8a2..0000000 --- a/vendor/plugins/shoulda/test/unit/tag_test.rb +++ /dev/null @@ -1,8 +0,0 @@ -require File.dirname(__FILE__) + '/../test_helper' - -class TagTest < Test::Unit::TestCase - load_all_fixtures - - should_have_many :taggings - should_have_many :posts -end diff --git a/vendor/plugins/shoulda/test/unit/tagging_test.rb b/vendor/plugins/shoulda/test/unit/tagging_test.rb deleted file mode 100644 index dc0de10..0000000 --- a/vendor/plugins/shoulda/test/unit/tagging_test.rb +++ /dev/null @@ -1,8 +0,0 @@ -require File.dirname(__FILE__) + '/../test_helper' - -class TaggingTest < 
Test::Unit::TestCase - load_all_fixtures - - should_belong_to :post - should_belong_to :tag -end diff --git a/vendor/plugins/shoulda/test/unit/user_test.rb b/vendor/plugins/shoulda/test/unit/user_test.rb deleted file mode 100644 index 8474c2d..0000000 --- a/vendor/plugins/shoulda/test/unit/user_test.rb +++ /dev/null @@ -1,21 +0,0 @@ -require File.dirname(__FILE__) + '/../test_helper' - -class UserTest < Test::Unit::TestCase - load_all_fixtures - - should_have_many :posts - should_have_many :dogs - - should_not_allow_values_for :email, "blah", "b lah" - should_allow_values_for :email, "a@b.com", "asdf@asdf.com" - should_ensure_length_in_range :email, 1..100 - should_ensure_value_in_range :age, 1..100 - should_protect_attributes :password - should_have_class_methods :find, :destroy - should_have_instance_methods :email, :age, :email=, :valid? - should_have_db_columns :name, :email, :age - should_have_db_column :id, :type => "integer", :primary => true - should_have_db_column :email, :type => "string", :default => nil, :precision => nil, :limit => 255, - :null => true, :primary => false, :scale => nil, :sql_type => 'varchar(255)' - should_require_acceptance_of :eula -end diff --git a/vendor/plugins/thinking-sphinx/.gitignore b/vendor/plugins/thinking-sphinx/.gitignore deleted file mode 100644 index 5a7edd3..0000000 --- a/vendor/plugins/thinking-sphinx/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -gem_deploy.rake -pkg -coverage -*.tmproj -rdoc -spec/fixtures/database.yml -tmp diff --git a/vendor/plugins/thinking-sphinx/LICENCE b/vendor/plugins/thinking-sphinx/LICENCE deleted file mode 100644 index e031e3c..0000000 --- a/vendor/plugins/thinking-sphinx/LICENCE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2008 Pat Allan - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/plugins/thinking-sphinx/README b/vendor/plugins/thinking-sphinx/README deleted file mode 100644 index 91e8593..0000000 --- a/vendor/plugins/thinking-sphinx/README +++ /dev/null @@ -1,107 +0,0 @@ -= Thinking Sphinx - -== Usage - -First, if you haven't done so already, check out the main usage[http://ts.freelancing-gods.com/usage.html] page. Once you've done that, the next place to look for information is the specific method docs - ThinkingSphinx::Search and ThinkingSphinx::Index::Builder in particular. - -Keep in mind that while Thinking Sphinx works for ActiveRecord with Merb, it doesn't yet support DataMapper (although that is planned). - -== Contributing - -Fork on GitHub and after you've committed tested patches, send a pull request. 
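The Usage note above points at ThinkingSphinx::Index::Builder and ThinkingSphinx::Search without showing either in action. As a rough orientation only, here is a minimal sketch of how the two are typically wired together; the Article model, its columns, and the filter values are illustrative assumptions and do not come from this application:

  # Index definition (the block is handled by ThinkingSphinx::Index::Builder)
  class Article < ActiveRecord::Base
    define_index do
      indexes title, :sortable => true   # full-text field with a sortable copy
      indexes body                       # full-text field
      has author_id, created_at          # attributes for filtering and sorting
    end
  end

  # Queries run through ThinkingSphinx::Search via Model.search
  Article.search 'pancakes',
                 :with  => {:author_id => 12},
                 :order => :created_at

With the index built and the daemon running (rake ts:index, then rake ts:start), the call above returns matching Article records filtered on the author_id attribute and ordered by created_at.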
- -To get the spec suite running, you will need to install the not-a-mock gem if you don't already have it: - - git clone git://github.com/freelancing-god/not-a-mock.git - cd not-a-mock - rake gem - gem install pkg/not_a_mock-1.1.0.gem - -Then install the ginger gem. The steps are the same, except that you might need to sudo the gem install: - - git clone git://github.com/freelancing-god/ginger.git - cd ginger - rake gem - sudo gem install pkg/ginger-1.1.0.gem - -Then set up your database: - - cp spec/fixtures/database.yml.default spec/fixtures/database.yml - mysqladmin -u root create thinking_sphinx - -Make sure you don't have another Sphinx daemon (searchd) running. If you do, quit it with "rake ts:stop" -in the app root. - -You should now have a passing test suite from which to build your patch on. - - rake spec - -If you get the message "Failed to start searchd daemon", run the spec with sudo: - - sudo rake spec - -If you quit the spec suite before it's completed, you may be left with data in the test -database, causing the next run to have failures. Let that run complete and then try again. - -== Contributors - -Since I first released this library, there's been quite a few people who have submitted patches, to my immense gratitude. Others have suggested syntax changes and general improvements. So my thanks to the following people: - -- Joost Hietbrink -- Jonathan Conway -- Gregory Mirzayantz -- Tung Nguyen -- Sean Cribbs -- Benoit Caccinolo -- John Barton -- Oliver Beddows -- Arthur Zapparoli -- Dusty Doris -- Marcus Crafter -- Patrick Lenz -- Björn Andreasson -- James Healy -- Jae-Jun Hwang -- Xavier Shay -- Jason Rust -- Gopal Patel -- Chris Heald -- Peter Vandenberk -- Josh French -- Andrew Bennett -- Jordan Fowler -- Seth Walker -- Joe Noon -- Wolfgang Postler -- Rick Olson -- Killian Murphy -- Morten Primdahl -- Ryan Bates -- David Eisinger -- Shay Arnett -- Minh Tran -- Jeremy Durham -- Piotr Sarnacki -- Matt Johnson -- Nicolas Blanco -- Max Lapshin -- Josh Natanson -- Philip Hallstrom -- Christian Rishøj -- Mike Flester -- Jim Remsik -- Kennon Ballou -- Henrik Nyh -- Emil Tin -- Doug Cole -- Ed Hickey -- Evan Weaver -- Thibaut Barrere -- Kristopher Chambers -- Dmitrij Smalko -- Aleksey Yeschenko -- Lachie Cox -- Lourens Naude -- Tom Davies -- Dan Pickett -- Alex Caudill diff --git a/vendor/plugins/thinking-sphinx/README.textile b/vendor/plugins/thinking-sphinx/README.textile deleted file mode 100644 index 6ae212d..0000000 --- a/vendor/plugins/thinking-sphinx/README.textile +++ /dev/null @@ -1,107 +0,0 @@ -h1. Thinking Sphinx - -h2. Usage - -First, if you haven't done so already, check out the main "usage":http://ts.freelancing-gods.com/usage.html page. Once you've done that, the next place to look for information is the specific method docs - ThinkingSphinx::Search and ThinkingSphinx::Index::Builder in particular. - -Keep in mind that while Thinking Sphinx works for ActiveRecord with Merb, it doesn't yet support DataMapper (although that is planned). - -h2. Contributing - -Fork on GitHub and after you've committed tested patches, send a pull request. - -To get the spec suite running, you will need to install the not-a-mock gem if you don't already have it: - - git clone git://github.com/freelancing-god/not-a-mock.git - cd not-a-mock - rake gem - gem install pkg/not_a_mock-1.1.0.gem - -Then install the ginger gem. 
The steps are the same, except that you might need to sudo the gem install: - - git clone git://github.com/freelancing-god/ginger.git - cd ginger - rake gem - sudo gem install pkg/ginger-1.1.0.gem - -Then set up your database: - - cp spec/fixtures/database.yml.default spec/fixtures/database.yml - mysqladmin -u root create thinking_sphinx - -Make sure you don't have another Sphinx daemon (searchd) running. If you do, quit it with "rake ts:stop" -in the app root. - -You should now have a passing test suite from which to build your patch on. - - rake spec - -If you get the message "Failed to start searchd daemon", run the spec with sudo: - - sudo rake spec - -If you quit the spec suite before it's completed, you may be left with data in the test -database, causing the next run to have failures. Let that run complete and then try again. - -h2. Contributors - -Since I first released this library, there's been quite a few people who have submitted patches, to my immense gratitude. Others have suggested syntax changes and general improvements. So my thanks to the following people: - -* Joost Hietbrink -* Jonathan Conway -* Gregory Mirzayantz -* Tung Nguyen -* Sean Cribbs -* Benoit Caccinolo -* John Barton -* Oliver Beddows -* Arthur Zapparoli -* Dusty Doris -* Marcus Crafter -* Patrick Lenz -* Björn Andreasson -* James Healy -* Jae-Jun Hwang -* Xavier Shay -* Jason Rust -* Gopal Patel -* Chris Heald -* Peter Vandenberk -* Josh French -* Andrew Bennett -* Jordan Fowler -* Seth Walker -* Joe Noon -* Wolfgang Postler -* Rick Olson -* Killian Murphy -* Morten Primdahl -* Ryan Bates -* David Eisinger -* Shay Arnett -* Minh Tran -* Jeremy Durham -* Piotr Sarnacki -* Matt Johnson -* Nicolas Blanco -* Max Lapshin -* Josh Natanson -* Philip Hallstrom -* Christian Rishøj -* Mike Flester -* Jim Remsik -* Kennon Ballou -* Henrik Nyh -* Emil Tin -* Doug Cole -* Ed Hickey -* Evan Weaver -* Thibaut Barrere -* Kristopher Chambers -* Dmitrij Smalko -* Aleksey Yeschenko -* Lachie Cox -* Lourens Naude -* Tom Davies -* Dan Pickett -* Alex Caudill diff --git a/vendor/plugins/thinking-sphinx/Rakefile b/vendor/plugins/thinking-sphinx/Rakefile deleted file mode 100644 index 1609b80..0000000 --- a/vendor/plugins/thinking-sphinx/Rakefile +++ /dev/null @@ -1,4 +0,0 @@ -require 'rubygems' - -require 'tasks/distribution' -require 'tasks/testing' \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/contribute.rb b/vendor/plugins/thinking-sphinx/contribute.rb deleted file mode 100755 index e0816c2..0000000 --- a/vendor/plugins/thinking-sphinx/contribute.rb +++ /dev/null @@ -1,328 +0,0 @@ -#!/usr/bin/env ruby - -require 'rubygems' -require 'yaml' -require 'pp' - -module ContributeHelper; end - -class Contribute - include ContributeHelper - - def dependencies - [ - Dependencies::Sphinx, - Dependencies::Mysql, - Dependencies::AR, - Dependencies::Ginger - ] - end - - def show - show_welcome_screen - - ( - check_for_dependencies && - create_database_yaml && - check_mysql_is_working && - create_test_database - ) || exit(1) - - show_done_screen - end - -private -WELCOME_SCREEN = <<-EO_WELCOME -Thinking Sphinx Contribution - -Thanks for contributing to Thinking Sphinx. - -In this script we'll help you get setup to hack: - - 1. We'll check that you have the right software installed and running. - 2. We'll set up the test database for specs to run against. - -EO_WELCOME - -DONE_SCREEN = <<-EO_DONE -Setup done! - -All done! 
Now you can start hacking by running - - rake spec - -EO_DONE - -REVIEW_YAML = <<-EO_REVIEW_YAML - -Please review the database details in the yaml file details before continuing. - -This file is used by the specs to connect to the database. - -Current details: -EO_REVIEW_YAML - - - -MYSQL_FAILED = <<-EO_MYSQL_FAILED - -Looks like we couldn't successfully talk to the mysql database. - -Don't worry though... - -EO_MYSQL_FAILED - -CREATE_DATABASE_FAILED = <<-EO_CREATE_DATABASE_FAILED - -Looks like we couldn't create a test database to work against. - -Don't worry though... - -EO_CREATE_DATABASE_FAILED - - def show_welcome_screen - colour_puts WELCOME_SCREEN - wait! - end - - def show_done_screen - colour_puts DONE_SCREEN - end - - # create database.yml - def create_database_yaml - colour_puts "creating database yaml" - puts - - - config = { - 'username' => 'root', - 'password' => nil, - 'host' => 'localhost' - } - - - colour_print " * #{db_yml}... " - unless File.exist?(db_yml) - open(db_yml,'w') {|f| f << config.to_yaml} - colour_puts "created" - else - config = YAML.load_file(db_yml) - colour_puts "already exists" - end - - colour_puts REVIEW_YAML - - config.each do |(k,v)| - colour_puts " * #{k}: #{v}" - end - - puts - - wait! - true - end - - def check_mysql_is_working - require 'activerecord' - colour_puts "check mysql is working" - puts - - connect_to_db - - print " * connecting to mysql... " - - begin - ActiveRecord::Base.connection.select_value('select sysdate() from dual') - - colour_puts "successful" - puts - - return true - rescue Mysql::Error - colour_puts "failed" - - puts MYSQL_FAILED - end - - false - end - - # create test db - def create_test_database - colour_puts "create test database" - puts - - connect_to_db - - colour_print " * creating thinking_sphinx database... " - begin - ActiveRecord::Base.connection.create_database('thinking_sphinx') - colour_puts "successful" - rescue ActiveRecord::StatementInvalid - if $!.message[/database exists/] - colour_puts "successful (database already existed)" - puts - return true - else - colour_puts "failed" - end - end - - colour_puts CREATE_DATABASE_FAILED - - false - end - - # project - def ts_root - File.expand_path(File.dirname(__FILE__)) - end - - def specs - ts_root / 'spec' - end - - def db_yml - specs / 'fixtures' / 'database.yml' - end - - def connect_to_db - config = YAML.load_file(db_yml) - config.update(:adapter => 'mysql', :database => 'test') - config.symbolize_keys! - - ActiveRecord::Base.establish_connection(config) - end -end - - - - - - - -class String - def /(other) - "#{self}/#{other}" - end -end - -module ContributeHelper - class Dependency - def self.name(name=nil) - if name then @name = name else @name end - end - - attr_reader :location - - def initialize - @found = false - @location = nil - end - - def name; self.class.name end - - def check; false end - def check! - @found = check - end - - def found? - @found - end - end - - class Gem < Dependency - def gem_name; self.class.name end - def name; "#{super} gem" end - - def check - ::Gem.available? self.gem_name - end - end - - - def check_for_dependencies - colour_puts "Checking for required software" - puts - - all_found = true - - dependencies.each do |klass| - dep = klass.new - print " * #{dep.name}... " - dep.check! - - if dep.found? 
- if dep.location - colour_puts "found at #{dep.location}" - else - colour_puts "found" - end - else - all_found &= false - colour_puts "not found" - end - end - - puts - - all_found - end - - - - DEFAULT_TERMINAL_COLORS = "\e[0m\e[37m\e[40m" - def subs_colour(data) - data = data.gsub(%r{<b>(.*?)</b>}m, "\e[1m\\1#{DEFAULT_TERMINAL_COLORS}") - data.gsub!(%r{<red>(.*?)</red>}m, "\e[1m\e[31m\\1#{DEFAULT_TERMINAL_COLORS}") - data.gsub!(%r{<green>(.*?)</green>}m, "\e[1m\e[32m\\1#{DEFAULT_TERMINAL_COLORS}") - data.gsub!(%r{<yellow>(.*?)</yellow>}m, "\e[1m\e[33m\\1#{DEFAULT_TERMINAL_COLORS}") - data.gsub!(%r{<banner>(.*?)</banner>}m, "\e[33m\e[44m\e[1m\\1#{DEFAULT_TERMINAL_COLORS}") - - return data - end - - def colour_puts(text) - puts subs_colour(text) - end - - def colour_print(text) - print subs_colour(text) - end - - - def wait! - colour_puts "Hit Enter to continue, or Ctrl-C to quit." - STDIN.readline - rescue Interrupt - exit! - end -end - -module Dependencies - class Mysql < ContributeHelper::Gem - name 'mysql' - end - - class AR < ContributeHelper::Gem - name 'activerecord' - end - - class Ginger < ContributeHelper::Gem - name 'ginger' - end - - class Sphinx < ContributeHelper::Dependency - name 'sphinx' - - def check - output = `which searchd` - @location = output.chomp if $? == 0 - $? == 0 - end - end -end - -Contribute.new.show \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/cucumber.yml b/vendor/plugins/thinking-sphinx/cucumber.yml deleted file mode 100644 index d057b5b..0000000 --- a/vendor/plugins/thinking-sphinx/cucumber.yml +++ /dev/null @@ -1 +0,0 @@ -default: "--require features/support/env.rb --require features/support/db/mysql.rb --require features/support/db/active_record.rb --require features/support/post_database.rb --require features/step_definitions/alpha_steps.rb --require features/step_definitions/beta_steps.rb --require features/step_definitions/cat_steps.rb --require features/step_definitions/common_steps.rb --require features/step_definitions/datetime_delta_steps.rb --require features/step_definitions/delayed_delta_indexing_steps.rb --require features/step_definitions/facet_steps.rb --require features/step_definitions/find_arguments_steps.rb --require features/step_definitions/gamma_steps.rb --require features/step_definitions/search_steps.rb --require features/step_definitions/sphinx_steps.rb" \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/a.rb b/vendor/plugins/thinking-sphinx/features/a.rb deleted file mode 100644 index 9f5a14e..0000000 --- a/vendor/plugins/thinking-sphinx/features/a.rb +++ /dev/null @@ -1,17 +0,0 @@ -# This file exists because Cucumber likes to auto-load all ruby files -puts <<-MESSAGE -Cucumber 0.1.12 defaults to loading all ruby files within the features folder -alphabetically. This is annoying, because some files need to be loaded before -others (and others perhaps not at all, given missing dependencies). Hence this -place-holder imaginatively named 'a.rb', to force this message. - -A work-around is to use cucumber profiles. You will find the default profile in -cucumber.yml should serve your needs fine, unless you add new step definitions. -When you do that, you can regenerate the YAML file by running: -rake cucumber_defaults - -And then running specific features as follows is slightly more verbose, but it -works, whereas this doesn't.
-cucumber -p default features/something.feature -MESSAGE -exit 0 \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/attribute_transformation.feature b/vendor/plugins/thinking-sphinx/features/attribute_transformation.feature deleted file mode 100644 index d987bbd..0000000 --- a/vendor/plugins/thinking-sphinx/features/attribute_transformation.feature +++ /dev/null @@ -1,22 +0,0 @@ -Feature: Handle not-quite-supported column types as attributes - In order for Thinking Sphinx to be more understanding with model structures - The plugin - Should be able to use translatable columns as attributes - - Scenario: Decimals as floats - Given Sphinx is running - And I am searching on alphas - When I filter between 1.0 and 3.0 on cost - Then I should get 2 results - - Scenario: Dates as Datetimes - Given Sphinx is running - And I am searching on alphas - When I filter between 1 and 3 days ago on created_on - Then I should get 2 results - - Scenario: Timestamps as Datetimes - Given Sphinx is running - And I am searching on alphas - When I filter between 1 and 3 days ago on created_at - Then I should get 2 results \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/datetime_deltas.feature b/vendor/plugins/thinking-sphinx/features/datetime_deltas.feature deleted file mode 100644 index ebca4dc..0000000 --- a/vendor/plugins/thinking-sphinx/features/datetime_deltas.feature +++ /dev/null @@ -1,55 +0,0 @@ -Feature: Datetime Delta Indexing - In order to have delta indexing on frequently-updated sites - Developers - Should be able to use an existing datetime column to track changes - - Scenario: Delta Index should not fire automatically - Given Sphinx is running - And I am searching on thetas - When I search for one - Then I should get 1 result - - When I change the name of theta one to eleven - And I wait for Sphinx to catch up - And I search for one - Then I should get 1 result - - When I search for eleven - Then I should get 0 results - - Scenario: Delta Index should fire when jobs are run - Given Sphinx is running - And I am searching on thetas - When I search for two - Then I should get 1 result - - When I change the name of theta two to twelve - And I wait for Sphinx to catch up - And I search for twelve - Then I should get 0 results - - When I index the theta datetime delta - And I wait for Sphinx to catch up - And I search for twelve - Then I should get 1 result - - When I search for two - Then I should get 0 results - - Scenario: New records should be merged into the core index - Given Sphinx is running - And I am searching on thetas - When I search for thirteen - Then I should get 0 results - - When I create a new theta named thirteen - And I search for thirteen - Then I should get 0 results - - When I index the theta datetime delta - And I wait for Sphinx to catch up - And I search for thirteen - Then I should get 1 result - - When I search for the document id of theta thirteen in the theta_core index - Then it should exist \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/delayed_delta_indexing.feature b/vendor/plugins/thinking-sphinx/features/delayed_delta_indexing.feature deleted file mode 100644 index 5be4c4e..0000000 --- a/vendor/plugins/thinking-sphinx/features/delayed_delta_indexing.feature +++ /dev/null @@ -1,37 +0,0 @@ -Feature: Delayed Delta Indexing - In order to have delta indexing on frequently-updated sites - Developers - Should be able to use delayed_job to handle delta indexes to lower system load - 
- Scenario: Delta Index should not fire automatically - Given Sphinx is running - And I am searching on delayed betas - When I search for one - Then I should get 1 result - - When I change the name of delayed beta one to eleven - And I wait for Sphinx to catch up - And I search for one - Then I should get 1 result - - When I search for eleven - Then I should get 0 results - - Scenario: Delta Index should fire when jobs are run - Given Sphinx is running - And I am searching on delayed betas - When I search for one - Then I should get 1 result - - When I change the name of delayed beta two to twelve - And I wait for Sphinx to catch up - And I search for twelve - Then I should get 0 results - - When I run the delayed jobs - And I wait for Sphinx to catch up - And I search for twelve - Then I should get 1 result - - When I search for two - Then I should get 0 results \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/deleting_instances.feature b/vendor/plugins/thinking-sphinx/features/deleting_instances.feature deleted file mode 100644 index 7080a0b..0000000 --- a/vendor/plugins/thinking-sphinx/features/deleting_instances.feature +++ /dev/null @@ -1,52 +0,0 @@ -Feature: Keeping Sphinx in line with deleted model instances - In order to avoid deleted items being returned by Sphinx - Thinking Sphinx - Should keep deleted items out of search results - - Scenario: Deleting instances from the core index - Given Sphinx is running - And I am searching on betas - When I search for three - Then I should get 1 result - - When I destroy beta three - And I wait for Sphinx to catch up - And I search for three - Then I should get 0 results - - Scenario: Deleting subclasses when the parent class is indexed - Given Sphinx is running - And I am searching on cats - When I search for moggy - Then I should get 1 result - - When I destroy cat moggy - And I wait for Sphinx to catch up - And I search for moggy - Then I should get 0 results - - Scenario: Deleting created instances from the delta index - Given Sphinx is running - And I am searching on betas - When I create a new beta named eleven - And I wait for Sphinx to catch up - And I search for eleven - Then I should get 1 result - - When I destroy beta eleven - And I wait for Sphinx to catch up - And I search for eleven - Then I should get 0 results - - Scenario: Deleting edited instances from the delta index - Given Sphinx is running - And I am searching on betas - When I change the name of beta four to fourteen - And I wait for Sphinx to catch up - And I search for fourteen - Then I should get 1 result - - When I destroy beta fourteen - And I wait for Sphinx to catch up - And I search for fourteen - Then I should get 0 results diff --git a/vendor/plugins/thinking-sphinx/features/facets.feature b/vendor/plugins/thinking-sphinx/features/facets.feature deleted file mode 100644 index a05a8b9..0000000 --- a/vendor/plugins/thinking-sphinx/features/facets.feature +++ /dev/null @@ -1,26 +0,0 @@ -Feature: Search and browse models by their defined facets - - Scenario: Requesting facets - Given Sphinx is running - And I am searching on developers - When I am requesting facet results - Then I should have valid facet results - And I should have 4 facets - And I should have the facet State - And I should have the facet Country - And I should have the facet Age - And I should have the facet City - - Scenario: Requesting facet results - Given Sphinx is running - And I am searching on developers - When I am requesting facet results - And I drill 
down where Country is Australia - Then I should get 11 results - - Scenario: Requesting facet results by multiple facets - Given Sphinx is running - And I am searching on developers - When I am requesting facet results - And I drill down where Country is Australia and Age is 30 - Then I should get 4 results diff --git a/vendor/plugins/thinking-sphinx/features/handling_edits.feature b/vendor/plugins/thinking-sphinx/features/handling_edits.feature deleted file mode 100644 index 35fc059..0000000 --- a/vendor/plugins/thinking-sphinx/features/handling_edits.feature +++ /dev/null @@ -1,67 +0,0 @@ -Feature: Keeping Sphinx in line with model changes when requested - In order to keep indexes as up to date as possible - Thinking Sphinx - Should return the expected results depending on whether delta indexes are used - - Scenario: Returning instance from old data if there is no delta - Given Sphinx is running - And I am searching on alphas - When I search for two - Then I should get 1 result - - When I change the name of alpha two to twelve - And I wait for Sphinx to catch up - And I search for two - Then I should get 1 result - - Scenario: Not returning an instance from old data if there is a delta - Given Sphinx is running - And I am searching on betas - When I search for two - Then I should get 1 result - - When I change the name of beta two to twelve - And I wait for Sphinx to catch up - And I search for two - Then I should get 0 results - - Scenario: Returning instance from new data if there is a delta - Given Sphinx is running - And I am searching on betas - When I search for one - Then I should get 1 result - - When I change the name of beta one to eleven - And I wait for Sphinx to catch up - And I search for one - Then I should get 0 results - - When I search for eleven - Then I should get 1 result - - Scenario: Returning new records if there's a delta - Given Sphinx is running - And I am searching on betas - When I search for fifteen - Then I should get 0 results - - When I create a new beta named fifteen - And I wait for Sphinx to catch up - And I search for fifteen - Then I should get 1 result - - Scenario: Avoiding delta updates if there haven't been changes - Given Sphinx is running - And I am searching on betas - When I search for five - Then I should get 1 result - - When I change the name of beta five to five - And I wait for Sphinx to catch up - And I search for five - Then I should get 1 result - - When I search for the document id of beta five in the beta_core index - Then it should exist if using Rails 2.1 or newer - When I search for the document id of beta five in the beta_delta index - Then it should not exist if using Rails 2.1 or newer diff --git a/vendor/plugins/thinking-sphinx/features/retry_stale_indexes.feature b/vendor/plugins/thinking-sphinx/features/retry_stale_indexes.feature deleted file mode 100644 index a14f43a..0000000 --- a/vendor/plugins/thinking-sphinx/features/retry_stale_indexes.feature +++ /dev/null @@ -1,24 +0,0 @@ -Feature: Manually updating Sphinx indexes to handle uncaught deletions - In order to keep indexes as up to date as possible - Thinking Sphinx - Should automatically update the indexes and retry the search if it gets a nil result - - Scenario: Changing retry_stale settings - Given Sphinx is running - And I am searching on gammas - Then I should not get 0 results - - When I set retry stale to false - And I set per page to 1 - And I order by "sphinx_internal_id ASC" - And I destroy gamma one without callbacks - Then I should get a single result of nil
- - When I set retry stale to 1 - Then I should get a single gamma result with a name of two - - When I destroy gamma two without callbacks - Then I should get a single result of nil - - When I set retry stale to true - Then I should get a single gamma result with a name of three diff --git a/vendor/plugins/thinking-sphinx/features/searching_across_models.feature b/vendor/plugins/thinking-sphinx/features/searching_across_models.feature deleted file mode 100644 index 8ca10f8..0000000 --- a/vendor/plugins/thinking-sphinx/features/searching_across_models.feature +++ /dev/null @@ -1,20 +0,0 @@ -Feature: Searching across multiple models - In order to use Thinking Sphinx's core functionality - A developer - Should be able to search on multiple models - - Scenario: Retrieving total result count - Given Sphinx is running - When I search for James - And I am retrieving the result count - Then I should get a value of 3 - - Scenario: Confirming existence of a document id in a given index - Given Sphinx is running - When I search for the document id of alpha one in the alpha_core index - Then it should exist - - Scenario: Retrieving results from multiple models - Given Sphinx is running - When I search for ten - Then I should get 5 results diff --git a/vendor/plugins/thinking-sphinx/features/searching_by_model.feature b/vendor/plugins/thinking-sphinx/features/searching_by_model.feature deleted file mode 100644 index d88aed2..0000000 --- a/vendor/plugins/thinking-sphinx/features/searching_by_model.feature +++ /dev/null @@ -1,109 +0,0 @@ -Feature: Searching on a single model - In order to use Thinking Sphinx's core functionality - A developer - Should be able to search on a single model - - Scenario: Searching using a basic query - Given Sphinx is running - And I am searching on people - When I search for James - Then I should get 3 results - - Scenario: Searching on a specific field - Given Sphinx is running - And I am searching on people - When I search for James on first_name - Then I should get 2 results - - Scenario: Searching on multiple fields - Given Sphinx is running - And I am searching on people - When I search for James on first_name - And I search for Chamberlain on last_name - Then I should get 1 result - - Scenario: Searching with a filter - Given Sphinx is running - And I am searching on alphas - When I filter by 1 on value - Then I should get 1 result - - Scenario: Searching with multiple filters - Given Sphinx is running - And I am searching on boxes - When I filter by 2 on width - And I filter by 2 on length - Then I should get 1 result - - Scenario: Searching with ordering by attribute - Given Sphinx is running - And I am searching on alphas - When I order by value - Then I should get 10 results - And the value of each result should indicate order - - Scenario: Searching with ordering on a sortable field - Given Sphinx is running - And I am searching on people - And I order by first_name - Then I should get 20 results - And the first_name of each result should indicate order - - Scenario: Interpreting Sphinx Internal Identifiers - Given Sphinx is running - And I am searching on people - Then I should get 20 results - And each result id should match the corresponding sphinx internal id - - Scenario: Retrieving weightings - Given Sphinx is running - And I am searching on people - When I search for "Ellie Ford" - And I set match mode to any - Then I can iterate by result and weighting - - Scenario: Retrieving group counts - Given Sphinx is running - And I am searching on people - When I
group results by the birthday attribute - Then I can iterate by result and count - - Scenario: Retrieving group values - Given Sphinx is running - And I am searching on people - When I group results by the birthday attribute - Then I can iterate by result and group - - Scenario: Retrieving both group values and counts - Given Sphinx is running - And I am searching on people - When I group results by the birthday attribute - Then I can iterate by result and group and count - - Scenario: Searching for ids - Given Sphinx is running - And I am searching on people - When I search for Ellie - And I am searching for ids - Then I should have an array of integers - - Scenario: Search results should match Sphinx's order - Given Sphinx is running - And I am searching on people - When I search for Ellie - And I order by "sphinx_internal_id DESC" - Then searching for ids should match the record ids of the normal search results - - Scenario: Retrieving total result count when total is less than a page - Given Sphinx is running - And I am searching on people - When I search for James - And I am retrieving the result count - Then I should get a value of 3 - - Scenario: Retrieving total result count for more than a page - Given Sphinx is running - And I am searching on people - When I am retrieving the result count - Then I should get a value of 1000 - \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/searching_with_find_arguments.feature b/vendor/plugins/thinking-sphinx/features/searching_with_find_arguments.feature deleted file mode 100644 index 68a5bb9..0000000 --- a/vendor/plugins/thinking-sphinx/features/searching_with_find_arguments.feature +++ /dev/null @@ -1,56 +0,0 @@ -Feature: Keeping AR::Base.find arguments in search calls - To keep things as streamlined as possible - Thinking Sphinx - Should respect particular arguments to AR::Base.find calls - - Scenario: Respecting the include option - Given Sphinx is running - And I am searching on posts - Then I should get 1 result - - When I get the first comment - And I track queries - And I compare comments - Then I should have 1 query - - When I include comments - Then I should get 1 result - When I track queries - And I compare comments - Then I should have 0 queries - - Scenario: Respecting the include option without using a specific model - Given Sphinx is running - And I search for "Hello World" - Then I should get 1 result - - When I get the first comment - And I track queries - And I compare comments - Then I should have 1 query - - When I include comments - Then I should get 1 result - When I track queries - And I compare comments - Then I should have 0 queries - - Scenario: Respecting the select option - Given Sphinx is running - And I am searching on posts - Then I should get 1 result - And I should not get an error accessing the subject - - When I select only content - Then I should get 1 result - And I should get an error accessing the subject - - Scenario: Respecting the select option without using a specific model - Given Sphinx is running - When I search for "Hello World" - Then I should get 1 result - And I should not get an error accessing the subject - - When I select only content - Then I should get 1 result - And I should get an error accessing the subject \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/sphinx_detection.feature b/vendor/plugins/thinking-sphinx/features/sphinx_detection.feature deleted file mode 100644 index 6383010..0000000 --- 
a/vendor/plugins/thinking-sphinx/features/sphinx_detection.feature +++ /dev/null @@ -1,16 +0,0 @@ -Feature: Checking whether Sphinx is running or not - In order to avoid unnecessary errors - Thinking Sphinx - Should be able to determine whether Sphinx is running or not - - Scenario: Checking Sphinx's status - Given Sphinx is running - Then Sphinx should be running - - When I stop Sphinx - And I wait for Sphinx to catch up - Then Sphinx should not be running - - When I start Sphinx - And I wait for Sphinx to catch up - Then Sphinx should be running \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/step_definitions/alpha_steps.rb b/vendor/plugins/thinking-sphinx/features/step_definitions/alpha_steps.rb deleted file mode 100644 index 987b10b..0000000 --- a/vendor/plugins/thinking-sphinx/features/step_definitions/alpha_steps.rb +++ /dev/null @@ -1,3 +0,0 @@ -When /^I change the name of alpha (\w+) to (\w+)$/ do |current, replacement| - Alpha.find_by_name(current).update_attributes(:name => replacement) -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/step_definitions/beta_steps.rb b/vendor/plugins/thinking-sphinx/features/step_definitions/beta_steps.rb deleted file mode 100644 index 2d566a1..0000000 --- a/vendor/plugins/thinking-sphinx/features/step_definitions/beta_steps.rb +++ /dev/null @@ -1,11 +0,0 @@ -When /^I destroy beta (\w+)$/ do |name| - Beta.find_by_name(name).destroy -end - -When /^I create a new beta named (\w+)$/ do |name| - Beta.create(:name => name) -end - -When /^I change the name of beta (\w+) to (\w+)$/ do |current, replacement| - Beta.find_by_name(current).update_attributes(:name => replacement) -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/step_definitions/cat_steps.rb b/vendor/plugins/thinking-sphinx/features/step_definitions/cat_steps.rb deleted file mode 100644 index 82b8b7f..0000000 --- a/vendor/plugins/thinking-sphinx/features/step_definitions/cat_steps.rb +++ /dev/null @@ -1,3 +0,0 @@ -When /^I destroy cat (\w+)$/ do |name| - Cat.find_by_name(name).destroy -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/step_definitions/common_steps.rb b/vendor/plugins/thinking-sphinx/features/step_definitions/common_steps.rb deleted file mode 100644 index 97f5478..0000000 --- a/vendor/plugins/thinking-sphinx/features/step_definitions/common_steps.rb +++ /dev/null @@ -1,136 +0,0 @@ -Before do - $queries_executed = [] - ThinkingSphinx::Deltas::Job.cancel_thinking_sphinx_jobs - - @model = nil - @method = :search - @query = "" - @conditions = {} - @with = {} - @without = {} - @options = {} -end - -Given /^I am searching on (.+)$/ do |model| - @model = model.gsub(/\s/, '_').singularize.camelize.constantize -end - -When /^I am searching for ids$/ do - @results = nil - @method = :search_for_ids -end - -When /^I am retrieving the result count$/ do - @result = nil - @method = @model ? :search_count : :count -end - -When /^I search for (\w+)$/ do |query| - @results = nil - @query = query -end - -When /^I search for "([^\"]+)"$/ do |query| - @results = nil - @query = query -end - -When /^I search for (\w+) on (\w+)$/ do |query, field| - @results = nil - @conditions[field.to_sym] = query -end - -When /^I filter by (\w+) on (\w+)$/ do |filter, attribute| - @results = nil - @with[attribute.to_sym] = filter.to_i -end - -When /^I filter between ([\d\.]+) and ([\d\.]+) on (\w+)$/ do |first, last, attribute| - @results = nil - if first[/\./].nil? 
&& last[/\./].nil? - @with[attribute.to_sym] = first.to_i..last.to_i - else - @with[attribute.to_sym] = first.to_f..last.to_f - end -end - -When /^I filter between (\d+) and (\d+) days ago on (\w+)$/ do |last, first, attribute| - @results = nil - @with[attribute.to_sym] = first.to_i.days.ago..last.to_i.days.ago -end - -When /^I order by (\w+)$/ do |attribute| - @results = nil - @options[:order] = attribute.to_sym -end - -When /^I order by "([^\"]+)"$/ do |str| - @results = nil - @options[:order] = str -end - -When /^I group results by the (\w+) attribute$/ do |attribute| - @results = nil - @options[:group_function] = :attr - @options[:group_by] = attribute -end - -When /^I set match mode to (\w+)$/ do |match_mode| - @results = nil - @options[:match_mode] = match_mode.to_sym -end - -When /^I set per page to (\d+)$/ do |per_page| - @results = nil - @options[:per_page] = per_page.to_i -end - -When /^I set retry stale to (\w+)$/ do |retry_stale| - @results = nil - @options[:retry_stale] = case retry_stale - when "true" then true - when "false" then false - else retry_stale.to_i - end -end - -Then /^the (\w+) of each result should indicate order$/ do |attribute| - results.inject(nil) do |prev, current| - unless prev.nil? - current.send(attribute.to_sym).should >= prev.send(attribute.to_sym) - end - - current - end -end - -Then /^I can iterate by result and (\w+)$/ do |attribute| - iteration = lambda { |result, attr_value| - result.should be_kind_of(@model) - unless attribute == "group" && attr_value.nil? - attr_value.should be_kind_of(Integer) - end - } - - results.send("each_with_#{attribute}", &iteration) -end - -Then /^I should get (\d+) results?$/ do |count| - results.length.should == count.to_i -end - -Then /^I should not get (\d+) results?$/ do |count| - results.length.should_not == count.to_i -end - -def results - @results ||= (@model || ThinkingSphinx::Search).send( - @method, - @query, - @options.merge( - :conditions => @conditions, - :with => @with, - :without => @without - ) - ) -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/step_definitions/datetime_delta_steps.rb b/vendor/plugins/thinking-sphinx/features/step_definitions/datetime_delta_steps.rb deleted file mode 100644 index 8f07947..0000000 --- a/vendor/plugins/thinking-sphinx/features/step_definitions/datetime_delta_steps.rb +++ /dev/null @@ -1,11 +0,0 @@ -When /^I index the theta datetime delta$/ do - Theta.sphinx_indexes.first.delta_object.delayed_index(Theta) -end - -When /^I change the name of theta (\w+) to (\w+)$/ do |current, replacement| - Theta.find_by_name(current).update_attributes(:name => replacement) -end - -When /^I create a new theta named (\w+)$/ do |name| - Theta.create(:name => name) -end diff --git a/vendor/plugins/thinking-sphinx/features/step_definitions/delayed_delta_indexing_steps.rb b/vendor/plugins/thinking-sphinx/features/step_definitions/delayed_delta_indexing_steps.rb deleted file mode 100644 index dc30ee5..0000000 --- a/vendor/plugins/thinking-sphinx/features/step_definitions/delayed_delta_indexing_steps.rb +++ /dev/null @@ -1,7 +0,0 @@ -When /^I run the delayed jobs$/ do - Delayed::Job.work_off.inspect -end - -When /^I change the name of delayed beta (\w+) to (\w+)$/ do |current, replacement| - DelayedBeta.find_by_name(current).update_attributes(:name => replacement) -end diff --git a/vendor/plugins/thinking-sphinx/features/step_definitions/facet_steps.rb b/vendor/plugins/thinking-sphinx/features/step_definitions/facet_steps.rb deleted file mode 100644 index 
613da2d..0000000 --- a/vendor/plugins/thinking-sphinx/features/step_definitions/facet_steps.rb +++ /dev/null @@ -1,30 +0,0 @@ -When "I am requesting facet results" do - @method = :facets -end - -When /^I drill down where (\w+) is (\w+)$/ do |facet, value| - @results = results.for(facet.downcase.to_sym => value) -end - -When /^I drill down where (\w+) is (\w+) and (\w+) is (\w+)$/ do |facet_one, value_one, facet_two, value_two| - value_one = value_one.to_i unless value_one[/^\d+$/].nil? - value_two = value_two.to_i unless value_two[/^\d+$/].nil? - - @results = results.for( - facet_one.downcase.to_sym => value_one, - facet_two.downcase.to_sym => value_two - ) -end - -Then "I should have valid facet results" do - results.should be_kind_of(Hash) - results.values.each { |value| value.should be_kind_of(Hash) } -end - -Then /^I should have (\d+) facets?$/ do |count| - results.keys.length.should == count.to_i -end - -Then /^I should have the facet (\w+)$/ do |name| - results[name.downcase.to_sym].should be_kind_of(Hash) -end diff --git a/vendor/plugins/thinking-sphinx/features/step_definitions/find_arguments_steps.rb b/vendor/plugins/thinking-sphinx/features/step_definitions/find_arguments_steps.rb deleted file mode 100644 index 65f889c..0000000 --- a/vendor/plugins/thinking-sphinx/features/step_definitions/find_arguments_steps.rb +++ /dev/null @@ -1,36 +0,0 @@ -When "I include comments" do - @results = nil - @options[:include] = :comments -end - -When /^I get the first comment$/ do - @comment = Comment.find(:first) -end - -When /^I track queries$/ do - $queries_executed = [] -end - -When /^I compare comments$/ do - results.first.comments.first.should == @comment -end - -When /^I select only content$/ do - @results = nil - @options[:select] = "id, content" -end - -Then /^I should have (\d+) quer[yies]+$/ do |count| - $queries_executed.length.should == count.to_i -end - -Then /^I should not get an error accessing the subject$/ do - lambda { results.first.subject }.should_not raise_error -end - -Then /^I should get an error accessing the subject$/ do - error_class = NoMethodError - error_class = ActiveRecord::MissingAttributeError if ActiveRecord.constants.include?("MissingAttributeError") - - lambda { results.first.subject }.should raise_error(error_class) -end diff --git a/vendor/plugins/thinking-sphinx/features/step_definitions/gamma_steps.rb b/vendor/plugins/thinking-sphinx/features/step_definitions/gamma_steps.rb deleted file mode 100644 index c4fbe20..0000000 --- a/vendor/plugins/thinking-sphinx/features/step_definitions/gamma_steps.rb +++ /dev/null @@ -1,15 +0,0 @@ -When /^I destroy gamma (\w+) without callbacks$/ do |name| - @results = nil - gamma = Gamma.find_by_name(name) - Gamma.delete(gamma.id) if gamma -end - -Then "I should get a single result of nil" do - results.should == [nil] -end - -Then /^I should get a single gamma result with a name of (\w+)$/ do |name| - results.length.should == 1 - results.first.should be_kind_of(Gamma) - results.first.name.should == name -end diff --git a/vendor/plugins/thinking-sphinx/features/step_definitions/search_steps.rb b/vendor/plugins/thinking-sphinx/features/step_definitions/search_steps.rb deleted file mode 100644 index 8c64dd7..0000000 --- a/vendor/plugins/thinking-sphinx/features/step_definitions/search_steps.rb +++ /dev/null @@ -1,66 +0,0 @@ -When /^I search for the specific id of (\d+) in the (\w+) index$/ do |id, index| - @id = id.to_i - @index = index -end - -When /^I search for the document id of (\w+) (\w+) in the (\w+) index$/ do |model, 
name, index| - model = model.gsub(/\s/, '_').camelize.constantize - @id = model.find_by_name(name).sphinx_document_id - @index = index -end - -Then "it should exist" do - ThinkingSphinx::Search.search_for_id(@id, @index).should == true -end - -Then "it should not exist" do - ThinkingSphinx::Search.search_for_id(@id, @index).should == false -end - -Then "it should exist if using Rails 2.1 or newer" do - require 'active_record/version' - unless ActiveRecord::VERSION::STRING.to_f < 2.1 - ThinkingSphinx::Search.search_for_id(@id, @index).should == true - end -end - -Then "it should not exist if using Rails 2.1 or newer" do - require 'active_record/version' - unless ActiveRecord::VERSION::STRING.to_f < 2.1 - ThinkingSphinx::Search.search_for_id(@id, @index).should == false - end -end - -Then /^I can iterate by result and group and count$/ do - results.each_with_groupby_and_count do |result, group, count| - result.should be_kind_of(@model) - count.should be_kind_of(Integer) - group.should be_kind_of(Integer) - end -end - -Then "each result id should match the corresponding sphinx internal id" do - results.each_with_sphinx_internal_id do |result, id| - result.id.should == id - end -end - -Then "I should have an array of integers" do - results.each do |result| - result.should be_kind_of(Integer) - end -end - -Then "searching for ids should match the record ids of the normal search results" do - normal_results = results - - # reset search, switch method - @results = nil - @method = :search_for_ids - - results.should == normal_results.collect(&:id) -end - -Then /^I should get a value of (\d+)$/ do |count| - results.should == count.to_i -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/step_definitions/sphinx_steps.rb b/vendor/plugins/thinking-sphinx/features/step_definitions/sphinx_steps.rb deleted file mode 100644 index 8d2b965..0000000 --- a/vendor/plugins/thinking-sphinx/features/step_definitions/sphinx_steps.rb +++ /dev/null @@ -1,23 +0,0 @@ -Given "Sphinx is running" do - ThinkingSphinx::Configuration.instance.controller.should be_running -end - -When "I wait for Sphinx to catch up" do - sleep(0.25) -end - -When "I start Sphinx" do - ThinkingSphinx::Configuration.instance.controller.start -end - -When "I stop Sphinx" do - ThinkingSphinx::Configuration.instance.controller.stop -end - -Then "Sphinx should be running" do - ThinkingSphinx.sphinx_running?.should be_true -end - -Then "Sphinx should not be running" do - ThinkingSphinx.sphinx_running?.should be_false -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/support/db/.gitignore b/vendor/plugins/thinking-sphinx/features/support/db/.gitignore deleted file mode 100644 index 01bb46d..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/.gitignore +++ /dev/null @@ -1 +0,0 @@ -database.yml \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/support/db/active_record.rb b/vendor/plugins/thinking-sphinx/features/support/db/active_record.rb deleted file mode 100644 index fcb622f..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/active_record.rb +++ /dev/null @@ -1,40 +0,0 @@ -require 'yaml' -require 'active_record' - -# Database Defaults -host = "localhost" -username = "thinking_sphinx" -password = nil - -# Read in YAML file -if File.exist?("features/support/db/database.yml") - config = YAML.load open("features/support/db/database.yml") - host = config["host"] || host - username = config["username"] || username - password = 
config["password"] || password -end - -# Set up Connection -ActiveRecord::Base.establish_connection( - :adapter => Database, - :database => 'thinking_sphinx', - :username => username, - :password => password, - :host => host -) - -# Copied from ActiveRecord's test suite -ActiveRecord::Base.connection.class.class_eval do - IGNORED_SQL = [ - /^PRAGMA/, /^SELECT currval/, /^SELECT CAST/, /^SELECT @@IDENTITY/, - /^SELECT @@ROWCOUNT/, /^SHOW FIELDS/ - ] - - def execute_with_query_record(sql, name = nil, &block) - $queries_executed ||= [] - $queries_executed << sql unless IGNORED_SQL.any? { |r| sql =~ r } - execute_without_query_record(sql, name, &block) - end - - alias_method_chain :execute, :query_record -end diff --git a/vendor/plugins/thinking-sphinx/features/support/db/database.example.yml b/vendor/plugins/thinking-sphinx/features/support/db/database.example.yml deleted file mode 100644 index a70370f..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/database.example.yml +++ /dev/null @@ -1,4 +0,0 @@ -adapter: mysql -username: root -host: localhost -password: \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_alphas.rb b/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_alphas.rb deleted file mode 100644 index c9efab9..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_alphas.rb +++ /dev/null @@ -1,18 +0,0 @@ -ActiveRecord::Base.connection.create_table :alphas, :force => true do |t| - t.column :name, :string, :null => false - t.column :value, :integer, :null => false - t.column :cost, :decimal, :precision => 10, :scale => 6 - t.column :created_on, :date - t.column :created_at, :timestamp -end - -Alpha.create :name => "one", :value => 1, :cost => 1.51, :created_on => 1.day.ago.to_date, :created_at => 1.day.ago -Alpha.create :name => "two", :value => 2, :cost => 2.52, :created_on => 2.day.ago.to_date, :created_at => 2.day.ago -Alpha.create :name => "three", :value => 3, :cost => 3.53, :created_on => 3.day.ago.to_date, :created_at => 3.day.ago -Alpha.create :name => "four", :value => 4, :cost => 4.54, :created_on => 4.day.ago.to_date, :created_at => 4.day.ago -Alpha.create :name => "five", :value => 5, :cost => 5.55, :created_on => 5.day.ago.to_date, :created_at => 5.day.ago -Alpha.create :name => "six", :value => 6, :cost => 6.56, :created_on => 6.day.ago.to_date, :created_at => 6.day.ago -Alpha.create :name => "seven", :value => 7, :cost => 7.57, :created_on => 7.day.ago.to_date, :created_at => 7.day.ago -Alpha.create :name => "eight", :value => 8, :cost => 8.58, :created_on => 8.day.ago.to_date, :created_at => 8.day.ago -Alpha.create :name => "nine", :value => 9, :cost => 9.59, :created_on => 9.day.ago.to_date, :created_at => 9.day.ago -Alpha.create :name => "ten", :value => 10, :cost => 10.50, :created_on => 10.day.ago.to_date, :created_at => 10.day.ago diff --git a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_animals.rb b/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_animals.rb deleted file mode 100644 index 7619776..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_animals.rb +++ /dev/null @@ -1,9 +0,0 @@ -ActiveRecord::Base.connection.create_table :animals, :force => true do |t| - t.column :name, :string, :null => false - t.column :type, :string - t.column :delta, :boolean, :null => false, :default => false -end - -%w( rogue nat molly jasper moggy ).each do |name| - Cat.create :name => 
name -end diff --git a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_betas.rb b/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_betas.rb deleted file mode 100644 index 1d6cde5..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_betas.rb +++ /dev/null @@ -1,15 +0,0 @@ -ActiveRecord::Base.connection.create_table :betas, :force => true do |t| - t.column :name, :string, :null => false - t.column :delta, :boolean, :null => false, :default => false -end - -Beta.create :name => "one" -Beta.create :name => "two" -Beta.create :name => "three" -Beta.create :name => "four" -Beta.create :name => "five" -Beta.create :name => "six" -Beta.create :name => "seven" -Beta.create :name => "eight" -Beta.create :name => "nine" -Beta.create :name => "ten" diff --git a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_boxes.rb b/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_boxes.rb deleted file mode 100644 index ae83401..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_boxes.rb +++ /dev/null @@ -1,9 +0,0 @@ -ActiveRecord::Base.connection.create_table :boxes, :force => true do |t| - t.column :width, :integer, :null => false - t.column :length, :integer, :null => false - t.column :depth, :integer, :null => false -end - -(1..10).each do |i| - Box.create :width => i, :length => i, :depth => i -end diff --git a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_comments.rb b/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_comments.rb deleted file mode 100644 index 4c998fe..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_comments.rb +++ /dev/null @@ -1,13 +0,0 @@ -ActiveRecord::Base.connection.create_table :comments, :force => true do |t| - t.column :name, :string, :null => false - t.column :email, :string - t.column :url, :string - t.column :content, :text - t.column :post_id, :integer, :null => false -end - -Comment.create( - :name => "Pat", - :content => "+1", - :post_id => 1 -) diff --git a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_delayed_betas.rb b/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_delayed_betas.rb deleted file mode 100644 index af0a374..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_delayed_betas.rb +++ /dev/null @@ -1,28 +0,0 @@ -ActiveRecord::Base.connection.create_table :delayed_betas, :force => true do |t| - t.column :name, :string, :null => false - t.column :delta, :boolean, :null => false, :default => false -end - -ActiveRecord::Base.connection.create_table :delayed_jobs, :force => true do |t| - t.column :priority, :integer, :default => 0 - t.column :attempts, :integer, :default => 0 - t.column :handler, :text - t.column :last_error, :string - t.column :run_at, :datetime - t.column :locked_at, :datetime - t.column :failed_at, :datetime - t.column :locked_by, :string - t.column :created_at, :datetime - t.column :updated_at, :datetime -end - -DelayedBeta.create :name => "one" -DelayedBeta.create :name => "two" -DelayedBeta.create :name => "three" -DelayedBeta.create :name => "four" -DelayedBeta.create :name => "five" -DelayedBeta.create :name => "six" -DelayedBeta.create :name => "seven" -DelayedBeta.create :name => "eight" -DelayedBeta.create :name => "nine" -DelayedBeta.create :name => "ten" diff --git a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_developers.rb 
b/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_developers.rb deleted file mode 100644 index 32788f0..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_developers.rb +++ /dev/null @@ -1,39 +0,0 @@ -require 'faker' - -ActiveRecord::Base.connection.create_table :developers, :force => true do |t| - t.column :name, :string, :null => false - t.column :city, :string - t.column :state, :string - t.column :country, :string - t.column :age, :integer -end - -Developer.create :name => "Pat Allan", :city => "Melbourne", :state => "Victoria", :country => "Australia", :age => 26 - -2.times do - Developer.create :name => Faker::Name.name, :city => "Melbourne", :state => "Victoria", :country => "Australia", :age => 30 -end - -2.times do - Developer.create :name => Faker::Name.name, :city => "Sydney", :state => "New South Wales", :country => "Australia", :age => 28 -end - -2.times do - Developer.create :name => Faker::Name.name, :city => "Adelaide", :state => "South Australia", :country => "Australia", :age => 32 -end - -2.times do - Developer.create :name => Faker::Name.name, :city => "Bendigo", :state => "Victoria", :country => "Australia", :age => 30 -end - -2.times do - Developer.create :name => Faker::Name.name, :city => "Goulburn", :state => "New South Wales", :country => "Australia", :age => 28 -end - -2.times do - Developer.create :name => Faker::Name.name, :city => "Auckland", :state => "North Island", :country => "New Zealand", :age => 32 -end - -2.times do - Developer.create :name => Faker::Name.name, :city => "Christchurch", :state => "South Island", :country => "New Zealand", :age => 30 -end diff --git a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_gammas.rb b/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_gammas.rb deleted file mode 100644 index 5c85b8d..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_gammas.rb +++ /dev/null @@ -1,14 +0,0 @@ -ActiveRecord::Base.connection.create_table :gammas, :force => true do |t| - t.column :name, :string, :null => false -end - -Gamma.create :name => "one" -Gamma.create :name => "two" -Gamma.create :name => "three" -Gamma.create :name => "four" -Gamma.create :name => "five" -Gamma.create :name => "six" -Gamma.create :name => "seven" -Gamma.create :name => "eight" -Gamma.create :name => "nine" -Gamma.create :name => "ten" diff --git a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_people.rb b/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_people.rb deleted file mode 100644 index 51b4d31..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_people.rb +++ /dev/null @@ -1,1014 +0,0 @@ -ActiveRecord::Base.connection.create_table :people, :force => true do |t| - t.column :first_name, :string - t.column :middle_initial, :string - t.column :last_name, :string - t.column :gender, :string - t.column :street_address, :string - t.column :city, :string - t.column :state, :string - t.column :postcode, :string - t.column :email, :string - t.column :birthday, :datetime - t.column :delta, :boolean, :null => false, :default => false -end - -Person.create :gender => "female", :first_name => "Ellie", :middle_initial => "K", :last_name => "Ford", :street_address => "38 Mills Street", :city => "Eagle Farm Bc", :state => "QLD", :postcode => "4009", :email => "Ellie.K.Ford@mailinator.com", :birthday => "1970/1/23 00:00:00" -Person.create :gender => "female", :first_name => 
"Aaliyah", :middle_initial => "E", :last_name => "Allen", :street_address => "71 Murphy Street", :city => "Wyola West", :state => "WA", :postcode => "6407", :email => "Aaliyah.E.Allen@dodgit.com", :birthday => "1980/3/23 00:00:00" -Person.create :gender => "male", :first_name => "Callum", :middle_initial => "C", :last_name => "Miah", :street_address => "89 Dalgarno Street", :city => "Bullawa Creek", :state => "NSW", :postcode => "2390", :email => "Callum.C.Miah@trashymail.com", :birthday => "1973/3/25 00:00:00" -Person.create :gender => "male", :first_name => "Finley", :middle_initial => "L", :last_name => "Buckley", :street_address => "18 Queen Street", :city => "Manly Vale", :state => "NSW", :postcode => "2093", :email => "Finley.L.Buckley@spambob.com", :birthday => "1962/11/20 00:00:00" -Person.create :gender => "female", :first_name => "Poppy", :middle_initial => "A", :last_name => "Hilton", :street_address => "36 Nerrigundah Drive", :city => "Nyora", :state => "VIC", :postcode => "3987", :email => "Poppy.A.Hilton@dodgit.com", :birthday => "1972/10/30 00:00:00" -Person.create :gender => "female", :first_name => "Eloise", :middle_initial => "Z", :last_name => "Kennedy", :street_address => "18 Mt Berryman Road", :city => "Lilydale", :state => "QLD", :postcode => "4344", :email => "Eloise.Z.Kennedy@spambob.com", :birthday => "1973/9/28 00:00:00" -Person.create :gender => "female", :first_name => "Shannon", :middle_initial => "L", :last_name => "Manning", :street_address => "60 Ocean Pde", :city => "Greenvale", :state => "QLD", :postcode => "4816", :email => "Shannon.L.Manning@dodgit.com", :birthday => "1956/6/13 00:00:00" -Person.create :gender => "male", :first_name => "Oscar", :middle_initial => "C", :last_name => "Lawson", :street_address => "43 Feather Street", :city => "Battery Hill", :state => "QLD", :postcode => "4551", :email => "Oscar.C.Lawson@spambob.com", :birthday => "1979/10/17 00:00:00" -Person.create :gender => "female", :first_name => "Sofia", :middle_initial => "K", :last_name => "Bray", :street_address => "26 Clifton Street", :city => "Pental Island", :state => "VIC", :postcode => "3586", :email => "Sofia.K.Bray@mailinator.com", :birthday => "1970/5/10 00:00:00" -Person.create :gender => "male", :first_name => "Andrew", :middle_initial => "N", :last_name => "Byrne", :street_address => "35 Cecil Street", :city => "Monash Park", :state => "NSW", :postcode => "2111", :email => "Andrew.N.Byrne@spambob.com", :birthday => "1983/2/16 00:00:00" -Person.create :gender => "female", :first_name => "Jasmine", :middle_initial => "D", :last_name => "Whittaker", :street_address => "91 Creegans Road", :city => "Upper Eden Creek", :state => "NSW", :postcode => "2474", :email => "Jasmine.D.Whittaker@dodgit.com", :birthday => "1954/11/25 00:00:00" -Person.create :gender => "male", :first_name => "Charlie", :middle_initial => "L", :last_name => "Savage", :street_address => "48 Cecil Street", :city => "Denistone East", :state => "NSW", :postcode => "2112", :email => "Charlie.L.Savage@mailinator.com", :birthday => "1962/6/2 00:00:00" -Person.create :gender => "male", :first_name => "Ethan", :middle_initial => "M", :last_name => "Edwards", :street_address => "75 Point Walter Road", :city => "Wattleup", :state => "WA", :postcode => "6166", :email => "Ethan.M.Edwards@dodgit.com", :birthday => "1985/6/4 00:00:00" -Person.create :gender => "female", :first_name => "Imogen", :middle_initial => "L", :last_name => "Macdonald", :street_address => "2 Bass Street", :city => "Dungarubba", :state => 
"NSW", :postcode => "2480", :email => "Imogen.L.Macdonald@mailinator.com", :birthday => "1963/7/1 00:00:00" -Person.create :gender => "female", :first_name => "Isabel", :middle_initial => "H", :last_name => "Nolan", :street_address => "58 Saggers Road", :city => "Lake King", :state => "WA", :postcode => "6356", :email => "Isabel.H.Nolan@spambob.com", :birthday => "1951/1/4 00:00:00" -Person.create :gender => "male", :first_name => "Peter", :middle_initial => "C", :last_name => "Andrews", :street_address => "53 Wilson Street", :city => "Sea Lake", :state => "VIC", :postcode => "3533", :email => "Peter.C.Andrews@dodgit.com", :birthday => "1965/7/25 00:00:00" -Person.create :gender => "female", :first_name => "Hollie", :middle_initial => "C", :last_name => "Hunter", :street_address => "34 Cornish Street", :city => "Kensington", :state => "VIC", :postcode => "3031", :email => "Hollie.C.Hunter@mailinator.com", :birthday => "1954/2/16 00:00:00" -Person.create :gender => "male", :first_name => "Jonathan", :middle_initial => "C", :last_name => "Turner", :street_address => "2 Kopkes Road", :city => "Carngham", :state => "VIC", :postcode => "3351", :email => "Jonathan.C.Turner@trashymail.com", :birthday => "1963/8/26 00:00:00" -Person.create :gender => "female", :first_name => "Kate", :middle_initial => "S", :last_name => "Doyle", :street_address => "42 Gregory Way", :city => "Mungalup", :state => "WA", :postcode => "6225", :email => "Kate.S.Doyle@mailinator.com", :birthday => "1974/1/5 00:00:00" -Person.create :gender => "male", :first_name => "Harley", :middle_initial => "M", :last_name => "Abbott", :street_address => "39 Faulkner Street", :city => "Tilbuster", :state => "NSW", :postcode => "2350", :email => "Harley.M.Abbott@trashymail.com", :birthday => "1953/10/4 00:00:00" -Person.create :gender => "male", :first_name => "Morgan", :middle_initial => "E", :last_name => "Iqbal", :street_address => "64 Carlisle Street", :city => "Dysart", :state => "VIC", :postcode => "3660", :email => "Morgan.E.Iqbal@spambob.com", :birthday => "1954/7/6 00:00:00" -Person.create :gender => "female", :first_name => "Phoebe", :middle_initial => "T", :last_name => "Wells", :street_address => "10 Mnimbah Road", :city => "Eccleston", :state => "NSW", :postcode => "2311", :email => "Phoebe.T.Wells@trashymail.com", :birthday => "1949/5/27 00:00:00" -Person.create :gender => "male", :first_name => "Finley", :middle_initial => "I", :last_name => "Martin", :street_address => "15 Thomas Lane", :city => "Epping", :state => "VIC", :postcode => "3076", :email => "Finley.I.Martin@dodgit.com", :birthday => "1983/3/12 00:00:00" -Person.create :gender => "female", :first_name => "Eva", :middle_initial => "Z", :last_name => "Graham", :street_address => "70 Ranworth Road", :city => "Canning Vale South", :state => "WA", :postcode => "6155", :email => "Eva.Z.Graham@spambob.com", :birthday => "1963/2/16 00:00:00" -Person.create :gender => "female", :first_name => "Rachel", :middle_initial => "M", :last_name => "Ball", :street_address => "37 Bellion Drive", :city => "Carlotta", :state => "WA", :postcode => "6275", :email => "Rachel.M.Ball@mailinator.com", :birthday => "1959/5/28 00:00:00" -Person.create :gender => "female", :first_name => "Alicia", :middle_initial => "H", :last_name => "Hancock", :street_address => "32 Atkinson Way", :city => "Gnoorea", :state => "WA", :postcode => "6714", :email => "Alicia.H.Hancock@mailinator.com", :birthday => "1946/6/14 00:00:00" -Person.create :gender => "female", :first_name => "Jennifer", 
:middle_initial => "K", :last_name => "Farrell", :street_address => "93 Frouds Road", :city => "Johnsonville", :state => "VIC", :postcode => "3902", :email => "Jennifer.K.Farrell@mailinator.com", :birthday => "1956/9/3 00:00:00" -Person.create :gender => "female", :first_name => "Chloe", :middle_initial => "T", :last_name => "Harvey", :street_address => "77 Endeavour Drive", :city => "Duck Ponds", :state => "SA", :postcode => "5607", :email => "Chloe.T.Harvey@dodgit.com", :birthday => "1944/11/20 00:00:00" -Person.create :gender => "female", :first_name => "Hollie", :middle_initial => "T", :last_name => "Waters", :street_address => "24 Avondale Drive", :city => "Corrimal East", :state => "NSW", :postcode => "2518", :email => "Hollie.T.Waters@dodgit.com", :birthday => "1968/10/2 00:00:00" -Person.create :gender => "male", :first_name => "Ben", :middle_initial => "L", :last_name => "Steele", :street_address => "43 Cubbine Road", :city => "North Bodallin", :state => "WA", :postcode => "6424", :email => "Ben.L.Steele@dodgit.com", :birthday => "1952/10/30 00:00:00" -Person.create :gender => "female", :first_name => "Isobel", :middle_initial => "S", :last_name => "Stewart", :street_address => "59 Hummocky Road", :city => "De Mole River", :state => "SA", :postcode => "5223", :email => "Isobel.S.Stewart@spambob.com", :birthday => "1940/5/6 00:00:00" -Person.create :gender => "female", :first_name => "Hollie", :middle_initial => "K", :last_name => "Byrne", :street_address => "56 Ranworth Road", :city => "Willagee Central", :state => "WA", :postcode => "6156", :email => "Hollie.K.Byrne@pookmail.com", :birthday => "1964/10/3 00:00:00" -Person.create :gender => "female", :first_name => "Victoria", :middle_initial => "H", :last_name => "Dobson", :street_address => "6 Creedon Street", :city => "Westmeadows", :state => "VIC", :postcode => "3049", :email => "Victoria.H.Dobson@trashymail.com", :birthday => "1968/10/22 00:00:00" -Person.create :gender => "female", :first_name => "Summer", :middle_initial => "J", :last_name => "Harding", :street_address => "19 Cornish Street", :city => "Quandong", :state => "VIC", :postcode => "3030", :email => "Summer.J.Harding@mailinator.com", :birthday => "1953/11/15 00:00:00" -Person.create :gender => "female", :first_name => "Aimee", :middle_initial => "E", :last_name => "Kirk", :street_address => "43 Main Street", :city => "Qualco", :state => "SA", :postcode => "5322", :email => "Aimee.E.Kirk@pookmail.com", :birthday => "1947/7/3 00:00:00" -Person.create :gender => "female", :first_name => "Amelia", :middle_initial => "A", :last_name => "Hyde", :street_address => "31 South Molle Boulevard", :city => "Mission Beach", :state => "QLD", :postcode => "4852", :email => "Amelia.A.Hyde@mailinator.com", :birthday => "1941/7/15 00:00:00" -Person.create :gender => "male", :first_name => "Leo", :middle_initial => "K", :last_name => "Storey", :street_address => "34 McGregor Street", :city => "Trida", :state => "NSW", :postcode => "2878", :email => "Leo.K.Storey@trashymail.com", :birthday => "1960/7/10 00:00:00" -Person.create :gender => "female", :first_name => "Hannah", :middle_initial => "J", :last_name => "Young", :street_address => "39 Acheron Road", :city => "Willung South", :state => "VIC", :postcode => "3844", :email => "Hannah.J.Young@spambob.com", :birthday => "1965/8/3 00:00:00" -Person.create :gender => "female", :first_name => "Chelsea", :middle_initial => "H", :last_name => "Nelson", :street_address => "17 Crofts Road", :city => "Tamboon", :state => "VIC", :postcode => 
"3890", :email => "Chelsea.H.Nelson@dodgit.com", :birthday => "1969/9/26 00:00:00" -Person.create :gender => "male", :first_name => "Isaac", :middle_initial => "A", :last_name => "West", :street_address => "57 Webb Road", :city => "Fern Bay", :state => "NSW", :postcode => "2295", :email => "Isaac.A.West@spambob.com", :birthday => "1981/8/20 00:00:00" -Person.create :gender => "male", :first_name => "Charlie", :middle_initial => "G", :last_name => "Hancock", :street_address => "17 Burnley Street", :city => "Kyeema", :state => "SA", :postcode => "5172", :email => "Charlie.G.Hancock@trashymail.com", :birthday => "1965/12/21 00:00:00" -Person.create :gender => "male", :first_name => "Oliver", :middle_initial => "M", :last_name => "Potter", :street_address => "63 Adavale Road", :city => "Tarago", :state => "NSW", :postcode => "2580", :email => "Oliver.M.Potter@pookmail.com", :birthday => "1977/6/7 00:00:00" -Person.create :gender => "female", :first_name => "Harriet", :middle_initial => "J", :last_name => "Cameron", :street_address => "72 Manchester Road", :city => "Bundook", :state => "NSW", :postcode => "2422", :email => "Harriet.J.Cameron@dodgit.com", :birthday => "1942/2/23 00:00:00" -Person.create :gender => "male", :first_name => "Owen", :middle_initial => "E", :last_name => "Tomlinson", :street_address => "68 Roseda-Tinamba Road", :city => "Hazel Park", :state => "VIC", :postcode => "3966", :email => "Owen.E.Tomlinson@mailinator.com", :birthday => "1949/5/2 00:00:00" -Person.create :gender => "female", :first_name => "Zoe", :middle_initial => "J", :last_name => "Flynn", :street_address => "67 Fairview Street", :city => "Curdies River", :state => "VIC", :postcode => "3268", :email => "Zoe.J.Flynn@spambob.com", :birthday => "1969/2/2 00:00:00" -Person.create :gender => "female", :first_name => "Niamh", :middle_initial => "J", :last_name => "Marsh", :street_address => "35 McLaughlin Road", :city => "Taromeo", :state => "QLD", :postcode => "4306", :email => "Niamh.J.Marsh@trashymail.com", :birthday => "1970/4/30 00:00:00" -Person.create :gender => "female", :first_name => "Charlotte", :middle_initial => "Z", :last_name => "Wilson", :street_address => "76 Glenpark Road", :city => "Ulmarra", :state => "NSW", :postcode => "2462", :email => "Charlotte.Z.Wilson@dodgit.com", :birthday => "1944/1/9 00:00:00" -Person.create :gender => "male", :first_name => "Ben", :middle_initial => "J", :last_name => "Miah", :street_address => "25 Bayfield Street", :city => "Kellevie", :state => "TAS", :postcode => "7176", :email => "Ben.J.Miah@pookmail.com", :birthday => "1960/5/1 00:00:00" -Person.create :gender => "female", :first_name => "Lara", :middle_initial => "D", :last_name => "Rice", :street_address => "19 Bayfield Street", :city => "Rheban", :state => "TAS", :postcode => "7190", :email => "Lara.D.Rice@trashymail.com", :birthday => "1942/5/14 00:00:00" -Person.create :gender => "male", :first_name => "Jordan", :middle_initial => "M", :last_name => "Short", :street_address => "67 Feather Street", :city => "Mount Nebo", :state => "QLD", :postcode => "4520", :email => "Jordan.M.Short@pookmail.com", :birthday => "1977/8/17 00:00:00" -Person.create :gender => "female", :first_name => "Gracie", :middle_initial => "K", :last_name => "Davey", :street_address => "38 Marx Hill Road", :city => "Upper Thora", :state => "NSW", :postcode => "2454", :email => "Gracie.K.Davey@pookmail.com", :birthday => "1950/11/27 00:00:00" -Person.create :gender => "male", :first_name => "Harrison", :middle_initial => "D", :last_name 
=> "Allen", :street_address => "68 Brown Street", :city => "Pymble", :state => "NSW", :postcode => "2073", :email => "Harrison.D.Allen@dodgit.com", :birthday => "1976/6/14 00:00:00" -Person.create :gender => "male", :first_name => "Ben", :middle_initial => "M", :last_name => "Charlton", :street_address => "30 Hummocky Road", :city => "Wisanger", :state => "SA", :postcode => "5223", :email => "Ben.M.Charlton@spambob.com", :birthday => "1955/4/30 00:00:00" -Person.create :gender => "female", :first_name => "Kayleigh", :middle_initial => "Z", :last_name => "Winter", :street_address => "55 Magnolia Drive", :city => "South Hurstville", :state => "NSW", :postcode => "2221", :email => "Kayleigh.Z.Winter@pookmail.com", :birthday => "1971/6/27 00:00:00" -Person.create :gender => "female", :first_name => "Rosie", :middle_initial => "J", :last_name => "Carey", :street_address => "13 Ugoa Street", :city => "Crowdy Head", :state => "NSW", :postcode => "2427", :email => "Rosie.J.Carey@spambob.com", :birthday => "1946/8/27 00:00:00" -Person.create :gender => "male", :first_name => "Spencer", :middle_initial => "S", :last_name => "Jackson", :street_address => "17 Bapaume Road", :city => "Narko", :state => "QLD", :postcode => "4352", :email => "Spencer.S.Jackson@mailinator.com", :birthday => "1946/4/11 00:00:00" -Person.create :gender => "male", :first_name => "Kyle", :middle_initial => "E", :last_name => "Bond", :street_address => "6 Village Drive", :city => "Lansvale", :state => "NSW", :postcode => "2166", :email => "Kyle.E.Bond@trashymail.com", :birthday => "1953/5/15 00:00:00" -Person.create :gender => "male", :first_name => "Toby", :middle_initial => "C", :last_name => "Burgess", :street_address => "94 Redesdale Rd", :city => "Myola", :state => "VIC", :postcode => "3551", :email => "Toby.C.Burgess@trashymail.com", :birthday => "1979/2/3 00:00:00" -Person.create :gender => "female", :first_name => "Megan", :middle_initial => "F", :last_name => "Heath", :street_address => "96 Dossiter Street", :city => "Antill Ponds", :state => "TAS", :postcode => "7120", :email => "Megan.F.Heath@trashymail.com", :birthday => "1942/6/13 00:00:00" -Person.create :gender => "male", :first_name => "Billy", :middle_initial => "M", :last_name => "Bishop", :street_address => "21 Mills Street", :city => "Nundah", :state => "QLD", :postcode => "4012", :email => "Billy.M.Bishop@dodgit.com", :birthday => "1968/11/15 00:00:00" -Person.create :gender => "male", :first_name => "Reece", :middle_initial => "G", :last_name => "Pearce", :street_address => "76 Treasure Island Avenue", :city => "Tallebudgera Valley", :state => "QLD", :postcode => "4228", :email => "Reece.G.Pearce@dodgit.com", :birthday => "1940/6/26 00:00:00" -Person.create :gender => "female", :first_name => "Skye", :middle_initial => "F", :last_name => "Bailey", :street_address => "66 Goebels Road", :city => "Hatton Vale", :state => "QLD", :postcode => "4341", :email => "Skye.F.Bailey@dodgit.com", :birthday => "1956/5/23 00:00:00" -Person.create :gender => "female", :first_name => "Emma", :middle_initial => "F", :last_name => "Parkinson", :street_address => "27 Arthur Street", :city => "Bogan", :state => "NSW", :postcode => "2825", :email => "Emma.F.Parkinson@mailinator.com", :birthday => "1948/12/11 00:00:00" -Person.create :gender => "male", :first_name => "Evan", :middle_initial => "M", :last_name => "Chambers", :street_address => "28 Pelican Road", :city => "Bathurst Street Po", :state => "TAS", :postcode => "7000", :email => "Evan.M.Chambers@dodgit.com", :birthday 
=> "1956/7/30 00:00:00" -Person.create :gender => "male", :first_name => "Scott", :middle_initial => "S", :last_name => "Rees", :street_address => "76 Foreshore Road", :city => "Nollamara", :state => "WA", :postcode => "6061", :email => "Scott.S.Rees@mailinator.com", :birthday => "1965/4/13 00:00:00" -Person.create :gender => "female", :first_name => "Keira", :middle_initial => "M", :last_name => "Hurst", :street_address => "82 Commercial Street", :city => "Hesket", :state => "VIC", :postcode => "3442", :email => "Keira.M.Hurst@trashymail.com", :birthday => "1958/11/6 00:00:00" -Person.create :gender => "male", :first_name => "Finlay", :middle_initial => "L", :last_name => "Parkinson", :street_address => "25 Black Range Road", :city => "Quaama", :state => "NSW", :postcode => "2550", :email => "Finlay.L.Parkinson@dodgit.com", :birthday => "1952/10/18 00:00:00" -Person.create :gender => "male", :first_name => "Owen", :middle_initial => "I", :last_name => "Hartley", :street_address => "8 South Molle Boulevard", :city => "Tam O'hanter", :state => "QLD", :postcode => "4852", :email => "Owen.I.Hartley@mailinator.com", :birthday => "1974/11/9 00:00:00" -Person.create :gender => "female", :first_name => "Erin", :middle_initial => "J", :last_name => "Daly", :street_address => "88 Magnolia Drive", :city => "Beverley Park", :state => "NSW", :postcode => "2217", :email => "Erin.J.Daly@dodgit.com", :birthday => "1975/4/12 00:00:00" -Person.create :gender => "female", :first_name => "Tilly", :middle_initial => "P", :last_name => "North", :street_address => "26 Glenpark Road", :city => "Urunga", :state => "NSW", :postcode => "2455", :email => "Tilly.P.North@spambob.com", :birthday => "1968/5/4 00:00:00" -Person.create :gender => "male", :first_name => "Morgan", :middle_initial => "Z", :last_name => "Owens", :street_address => "89 Walter Crescent", :city => "Brogers Creek", :state => "NSW", :postcode => "2535", :email => "Morgan.Z.Owens@spambob.com", :birthday => "1956/10/17 00:00:00" -Person.create :gender => "female", :first_name => "Poppy", :middle_initial => "E", :last_name => "Kemp", :street_address => "63 Elizabeth Street", :city => "Munna Creek", :state => "QLD", :postcode => "4570", :email => "Poppy.E.Kemp@pookmail.com", :birthday => "1942/9/2 00:00:00" -Person.create :gender => "female", :first_name => "Katie", :middle_initial => "S", :last_name => "Davies", :street_address => "89 Mnimbah Road", :city => "Glen William", :state => "NSW", :postcode => "2321", :email => "Katie.S.Davies@pookmail.com", :birthday => "1947/6/22 00:00:00" -Person.create :gender => "female", :first_name => "Sophia", :middle_initial => "J", :last_name => "Finch", :street_address => "18 Bellion Drive", :city => "Cundinup", :state => "WA", :postcode => "6275", :email => "Sophia.J.Finch@pookmail.com", :birthday => "1940/2/3 00:00:00" -Person.create :gender => "female", :first_name => "Madison", :middle_initial => "L", :last_name => "Wells", :street_address => "28 Austin Road", :city => "Dunmarra", :state => "NT", :postcode => "0852", :email => "Madison.L.Wells@dodgit.com", :birthday => "1954/10/20 00:00:00" -Person.create :gender => "female", :first_name => "Jasmine", :middle_initial => "L", :last_name => "Blackburn", :street_address => "27 Southwell Crescent", :city => "Argyle", :state => "WA", :postcode => "6239", :email => "Jasmine.L.Blackburn@pookmail.com", :birthday => "1956/8/1 00:00:00" -Person.create :gender => "female", :first_name => "Zoe", :middle_initial => "H", :last_name => "Slater", :street_address => "42 
Panorama Road", :city => "North Tamworth", :state => "NSW", :postcode => "2340", :email => "Zoe.H.Slater@dodgit.com", :birthday => "1968/11/5 00:00:00" -Person.create :gender => "male", :first_name => "Michael", :middle_initial => "N", :last_name => "Bibi", :street_address => "91 Moores Drive", :city => "Nedlands Dc", :state => "WA", :postcode => "6009", :email => "Michael.N.Bibi@trashymail.com", :birthday => "1957/10/18 00:00:00" -Person.create :gender => "male", :first_name => "Mason", :middle_initial => "A", :last_name => "Coles", :street_address => "55 Sunnyside Road", :city => "Wappilka", :state => "SA", :postcode => "5332", :email => "Mason.A.Coles@spambob.com", :birthday => "1940/8/9 00:00:00" -Person.create :gender => "male", :first_name => "Joe", :middle_initial => "R", :last_name => "Hardy", :street_address => "38 Marloo Street", :city => "Royston Park", :state => "SA", :postcode => "5070", :email => "Joe.R.Hardy@dodgit.com", :birthday => "1942/5/9 00:00:00" -Person.create :gender => "male", :first_name => "Jamie", :middle_initial => "J", :last_name => "Akhtar", :street_address => "52 Flinstone Drive", :city => "Bagdad", :state => "TAS", :postcode => "7030", :email => "Jamie.J.Akhtar@trashymail.com", :birthday => "1947/10/24 00:00:00" -Person.create :gender => "female", :first_name => "Ava", :middle_initial => "E", :last_name => "Smart", :street_address => "32 Kintyre Street", :city => "Wakerley", :state => "QLD", :postcode => "4154", :email => "Ava.E.Smart@pookmail.com", :birthday => "1975/9/6 00:00:00" -Person.create :gender => "male", :first_name => "Ethan", :middle_initial => "C", :last_name => "Bond", :street_address => "60 Treasure Island Avenue", :city => "Ferny Glen", :state => "QLD", :postcode => "4275", :email => "Ethan.C.Bond@trashymail.com", :birthday => "1954/3/1 00:00:00" -Person.create :gender => "male", :first_name => "Elliot", :middle_initial => "M", :last_name => "Perry", :street_address => "52 Grayson Street", :city => "Wagga Wagga Raaf", :state => "NSW", :postcode => "2651", :email => "Elliot.M.Perry@mailinator.com", :birthday => "1960/11/9 00:00:00" -Person.create :gender => "male", :first_name => "Louie", :middle_initial => "C", :last_name => "Owen", :street_address => "69 West Street", :city => "Phillip Dc", :state => "ACT", :postcode => "2606", :email => "Louie.C.Owen@dodgit.com", :birthday => "1959/12/27 00:00:00" -Person.create :gender => "female", :first_name => "Mollie", :middle_initial => "J", :last_name => "McDonald", :street_address => "28 Inglewood Court", :city => "Fern Hill", :state => "VIC", :postcode => "3458", :email => "Mollie.J.McDonald@pookmail.com", :birthday => "1954/11/30 00:00:00" -Person.create :gender => "female", :first_name => "Maddison", :middle_initial => "J", :last_name => "Perkins", :street_address => "97 Duff Street", :city => "Wyalkatchem", :state => "WA", :postcode => "6485", :email => "Maddison.J.Perkins@dodgit.com", :birthday => "1971/7/10 00:00:00" -Person.create :gender => "male", :first_name => "Ryan", :middle_initial => "J", :last_name => "Carpenter", :street_address => "79 McGregor Street", :city => "Theodore", :state => "ACT", :postcode => "2905", :email => "Ryan.J.Carpenter@spambob.com", :birthday => "1985/4/16 00:00:00" -Person.create :gender => "male", :first_name => "Bradley", :middle_initial => "M", :last_name => "Connolly", :street_address => "56 Cherry Grove", :city => "Nelson Bay", :state => "TAS", :postcode => "7330", :email => "Bradley.M.Connolly@mailinator.com", :birthday => "1983/3/13 00:00:00" 
-Person.create :gender => "female", :first_name => "Louise", :middle_initial => "C", :last_name => "Lee", :street_address => "63 Flinstone Drive", :city => "Bridgewater", :state => "TAS", :postcode => "7030", :email => "Louise.C.Lee@mailinator.com", :birthday => "1967/12/13 00:00:00" -Person.create :gender => "male", :first_name => "Riley", :middle_initial => "S", :last_name => "Skinner", :street_address => "93 Ranworth Road", :city => "Mount Pleasant", :state => "WA", :postcode => "6153", :email => "Riley.S.Skinner@mailinator.com", :birthday => "1980/6/16 00:00:00" -Person.create :gender => "female", :first_name => "Lara", :middle_initial => "N", :last_name => "George", :street_address => "54 McGregor Street", :city => "Wanniassa", :state => "ACT", :postcode => "2903", :email => "Lara.N.George@pookmail.com", :birthday => "1949/8/13 00:00:00" -Person.create :gender => "female", :first_name => "Isabelle", :middle_initial => "H", :last_name => "Wall", :street_address => "96 Quoin Road", :city => "Loira", :state => "TAS", :postcode => "7275", :email => "Isabelle.H.Wall@pookmail.com", :birthday => "1970/1/19 00:00:00" -Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "O", :last_name => "Francis", :street_address => "12 Oak Street", :city => "Acacia Plateau", :state => "NSW", :postcode => "2476", :email => "Tegan.O.Francis@spambob.com", :birthday => "1941/4/3 00:00:00" -Person.create :gender => "male", :first_name => "Bradley", :middle_initial => "A", :last_name => "Watson", :street_address => "11 Church Street", :city => "Coonawarra", :state => "SA", :postcode => "5263", :email => "Bradley.A.Watson@trashymail.com", :birthday => "1977/12/11 00:00:00" -Person.create :gender => "male", :first_name => "Alexander", :middle_initial => "E", :last_name => "Webster", :street_address => "83 Carolina Park Road", :city => "Hardys Bay", :state => "NSW", :postcode => "2257", :email => "Alexander.E.Webster@dodgit.com", :birthday => "1967/3/1 00:00:00" -Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "T", :last_name => "Fowler", :street_address => "37 Marloo Street", :city => "Hackney", :state => "SA", :postcode => "5069", :email => "Tegan.T.Fowler@pookmail.com", :birthday => "1979/3/3 00:00:00" -Person.create :gender => "male", :first_name => "Christopher", :middle_initial => "K", :last_name => "Bibi", :street_address => "24 Purcell Place", :city => "Chaelundi", :state => "NSW", :postcode => "2460", :email => "Christopher.K.Bibi@dodgit.com", :birthday => "1949/9/11 00:00:00" -Person.create :gender => "male", :first_name => "Andrew", :middle_initial => "A", :last_name => "Gough", :street_address => "28 Cassinia Street", :city => "Black Creek", :state => "NSW", :postcode => "2729", :email => "Andrew.A.Gough@mailinator.com", :birthday => "1947/11/13 00:00:00" -Person.create :gender => "female", :first_name => "Naomi", :middle_initial => "J", :last_name => "Fitzgerald", :street_address => "23 Peninsula Drive", :city => "Gymea Bay", :state => "NSW", :postcode => "2227", :email => "Naomi.J.Fitzgerald@dodgit.com", :birthday => "1968/8/20 00:00:00" -Person.create :gender => "female", :first_name => "Alicia", :middle_initial => "S", :last_name => "Berry", :street_address => "5 Lewin Street", :city => "Mirrool", :state => "NSW", :postcode => "2665", :email => "Alicia.S.Berry@dodgit.com", :birthday => "1946/3/23 00:00:00" -Person.create :gender => "male", :first_name => "Harvey", :middle_initial => "C", :last_name => "Stanley", :street_address => "81 Bayview 
Close", :city => "Tarramba", :state => "QLD", :postcode => "4715", :email => "Harvey.C.Stanley@dodgit.com", :birthday => "1980/9/28 00:00:00" -Person.create :gender => "female", :first_name => "Lucy", :middle_initial => "J", :last_name => "Daniels", :street_address => "82 West Street", :city => "Isaacs", :state => "ACT", :postcode => "2607", :email => "Lucy.J.Daniels@spambob.com", :birthday => "1968/8/9 00:00:00" -Person.create :gender => "female", :first_name => "Cerys", :middle_initial => "J", :last_name => "Tyler", :street_address => "75 Derry Street", :city => "Petrie", :state => "QLD", :postcode => "4502", :email => "Cerys.J.Tyler@mailinator.com", :birthday => "1941/12/21 00:00:00" -Person.create :gender => "male", :first_name => "Reece", :middle_initial => "Y", :last_name => "Bell", :street_address => "98 Grandis Road", :city => "Lower Creek", :state => "NSW", :postcode => "2440", :email => "Reece.Y.Bell@spambob.com", :birthday => "1973/11/28 00:00:00" -Person.create :gender => "female", :first_name => "Shannon", :middle_initial => "C", :last_name => "Hill", :street_address => "79 Walpole Avenue", :city => "Nirranda", :state => "VIC", :postcode => "3268", :email => "Shannon.C.Hill@spambob.com", :birthday => "1955/4/5 00:00:00" -Person.create :gender => "female", :first_name => "Maisie", :middle_initial => "S", :last_name => "Lee", :street_address => "43 Learmouth Street", :city => "Heathmere", :state => "VIC", :postcode => "3305", :email => "Maisie.S.Lee@spambob.com", :birthday => "1969/6/19 00:00:00" -Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "H", :last_name => "Connor", :street_address => "85 Sullivan Court", :city => "Whirily", :state => "VIC", :postcode => "3483", :email => "Tegan.H.Connor@trashymail.com", :birthday => "1942/12/28 00:00:00" -Person.create :gender => "female", :first_name => "Cerys", :middle_initial => "A", :last_name => "Gardner", :street_address => "96 Kaesler Road", :city => "Parilla", :state => "SA", :postcode => "5303", :email => "Cerys.A.Gardner@pookmail.com", :birthday => "1950/1/20 00:00:00" -Person.create :gender => "female", :first_name => "Hollie", :middle_initial => "L", :last_name => "Robinson", :street_address => "72 Burnley Street", :city => "Morphett Vale", :state => "SA", :postcode => "5162", :email => "Hollie.L.Robinson@dodgit.com", :birthday => "1960/5/29 00:00:00" -Person.create :gender => "male", :first_name => "Robert", :middle_initial => "C", :last_name => "Sanders", :street_address => "81 Hay Point Road", :city => "West Mackay", :state => "QLD", :postcode => "4740", :email => "Robert.C.Sanders@trashymail.com", :birthday => "1957/2/22 00:00:00" -Person.create :gender => "female", :first_name => "Jodie", :middle_initial => "M", :last_name => "Leach", :street_address => "75 Thomas Lane", :city => "Lalor Plaza", :state => "VIC", :postcode => "3075", :email => "Jodie.M.Leach@dodgit.com", :birthday => "1979/3/7 00:00:00" -Person.create :gender => "male", :first_name => "Luca", :middle_initial => "K", :last_name => "Barnes", :street_address => "91 Zipfs Road", :city => "Goodna Dc", :state => "QLD", :postcode => "4300", :email => "Luca.K.Barnes@mailinator.com", :birthday => "1981/8/12 00:00:00" -Person.create :gender => "male", :first_name => "Andrew", :middle_initial => "E", :last_name => "Dyer", :street_address => "9 Cornish Street", :city => "Deer Park", :state => "VIC", :postcode => "3023", :email => "Andrew.E.Dyer@trashymail.com", :birthday => "1963/10/1 00:00:00" -Person.create :gender => "male", 
:first_name => "Ben", :middle_initial => "M", :last_name => "Steele", :street_address => "22 Quintin Street", :city => "Caboolture South", :state => "QLD", :postcode => "4510", :email => "Ben.M.Steele@spambob.com", :birthday => "1973/9/25 00:00:00" -Person.create :gender => "male", :first_name => "Rhys", :middle_initial => "O", :last_name => "Morgan", :street_address => "68 Reynolds Road", :city => "Glenwood", :state => "QLD", :postcode => "4570", :email => "Rhys.O.Morgan@spambob.com", :birthday => "1945/5/25 00:00:00" -Person.create :gender => "female", :first_name => "Niamh", :middle_initial => "J", :last_name => "Ross", :street_address => "92 Panorama Road", :city => "Piallamore", :state => "NSW", :postcode => "2340", :email => "Niamh.J.Ross@mailinator.com", :birthday => "1961/9/22 00:00:00" -Person.create :gender => "male", :first_name => "Tom", :middle_initial => "A", :last_name => "Bolton", :street_address => "95 Treasure Island Avenue", :city => "Biddaddaba", :state => "QLD", :postcode => "4275", :email => "Tom.A.Bolton@spambob.com", :birthday => "1977/8/4 00:00:00" -Person.create :gender => "male", :first_name => "Alfie", :middle_initial => "L", :last_name => "Gray", :street_address => "96 Meyer Road", :city => "Nuriootpa", :state => "SA", :postcode => "5355", :email => "Alfie.L.Gray@mailinator.com", :birthday => "1941/5/19 00:00:00" -Person.create :gender => "female", :first_name => "Imogen", :middle_initial => "K", :last_name => "Hayes", :street_address => "92 Marley Point Road", :city => "Buchan South", :state => "VIC", :postcode => "3885", :email => "Imogen.K.Hayes@spambob.com", :birthday => "1979/11/13 00:00:00" -Person.create :gender => "male", :first_name => "Daniel", :middle_initial => "N", :last_name => "Savage", :street_address => "65 Thone Street", :city => "Kimbriki", :state => "NSW", :postcode => "2429", :email => "Daniel.N.Savage@dodgit.com", :birthday => "1960/1/15 00:00:00" -Person.create :gender => "male", :first_name => "Jonathan", :middle_initial => "F", :last_name => "Schofield", :street_address => "39 Mnimbah Road", :city => "Nelson Bay", :state => "NSW", :postcode => "2315", :email => "Jonathan.F.Schofield@mailinator.com", :birthday => "1960/1/18 00:00:00" -Person.create :gender => "female", :first_name => "Rachel", :middle_initial => "L", :last_name => "Sanderson", :street_address => "96 Devon Street", :city => "Marleston Dc", :state => "SA", :postcode => "5033", :email => "Rachel.L.Sanderson@pookmail.com", :birthday => "1968/1/17 00:00:00" -Person.create :gender => "female", :first_name => "Grace", :middle_initial => "B", :last_name => "Goodwin", :street_address => "30 Noalimba Avenue", :city => "Camerons Creek", :state => "NSW", :postcode => "2359", :email => "Grace.B.Goodwin@pookmail.com", :birthday => "1957/5/5 00:00:00" -Person.create :gender => "female", :first_name => "Shannon", :middle_initial => "R", :last_name => "Clements", :street_address => "39 Cunningham Street", :city => "Beilba", :state => "QLD", :postcode => "4454", :email => "Shannon.R.Clements@dodgit.com", :birthday => "1967/5/27 00:00:00" -Person.create :gender => "female", :first_name => "Jade", :middle_initial => "B", :last_name => "Archer", :street_address => "87 McKillop Street", :city => "Waterloo", :state => "VIC", :postcode => "3373", :email => "Jade.B.Archer@pookmail.com", :birthday => "1944/7/31 00:00:00" -Person.create :gender => "female", :first_name => "Imogen", :middle_initial => "B", :last_name => "Mitchell", :street_address => "1 Farrar Parade", :city => "Grey", :state => 
"WA", :postcode => "6521", :email => "Imogen.B.Mitchell@pookmail.com", :birthday => "1965/1/15 00:00:00" -Person.create :gender => "male", :first_name => "Joseph", :middle_initial => "N", :last_name => "Iqbal", :street_address => "44 Dabinett Road", :city => "Teal Flat", :state => "SA", :postcode => "5238", :email => "Joseph.N.Iqbal@trashymail.com", :birthday => "1947/3/28 00:00:00" -Person.create :gender => "male", :first_name => "Harry", :middle_initial => "B", :last_name => "Mills", :street_address => "86 Faunce Crescent", :city => "Tharbogang", :state => "NSW", :postcode => "2680", :email => "Harry.B.Mills@pookmail.com", :birthday => "1964/1/29 00:00:00" -Person.create :gender => "female", :first_name => "Mia", :middle_initial => "T", :last_name => "Gould", :street_address => "90 Argyle Street", :city => "Tugrabakh", :state => "NSW", :postcode => "2422", :email => "Mia.T.Gould@dodgit.com", :birthday => "1965/1/10 00:00:00" -Person.create :gender => "female", :first_name => "Grace", :middle_initial => "E", :last_name => "Thompson", :street_address => "68 Thomas Lane", :city => "Roxburgh Park", :state => "VIC", :postcode => "3064", :email => "Grace.E.Thompson@pookmail.com", :birthday => "1940/2/1 00:00:00" -Person.create :gender => "male", :first_name => "Dylan", :middle_initial => "L", :last_name => "Franklin", :street_address => "3 Elizabeth Street", :city => "Widgee Crossing North", :state => "QLD", :postcode => "4570", :email => "Dylan.L.Franklin@trashymail.com", :birthday => "1949/11/22 00:00:00" -Person.create :gender => "male", :first_name => "Kieran", :middle_initial => "A", :last_name => "Atkins", :street_address => "88 Peninsula Drive", :city => "Illawong", :state => "NSW", :postcode => "2234", :email => "Kieran.A.Atkins@spambob.com", :birthday => "1962/2/7 00:00:00" -Person.create :gender => "male", :first_name => "Aidan", :middle_initial => "E", :last_name => "Carroll", :street_address => "95 Wallis Street", :city => "Darling Point", :state => "NSW", :postcode => "2027", :email => "Aidan.E.Carroll@dodgit.com", :birthday => "1964/10/4 00:00:00" -Person.create :gender => "female", :first_name => "Shannon", :middle_initial => "L", :last_name => "Carey", :street_address => "40 Cherry Grove", :city => "Scotchtown", :state => "TAS", :postcode => "7330", :email => "Shannon.L.Carey@mailinator.com", :birthday => "1964/2/24 00:00:00" -Person.create :gender => "female", :first_name => "Laura", :middle_initial => "A", :last_name => "Taylor", :street_address => "4 Sunnyside Road", :city => "Moorook", :state => "SA", :postcode => "5332", :email => "Laura.A.Taylor@trashymail.com", :birthday => "1961/7/12 00:00:00" -Person.create :gender => "male", :first_name => "Gabriel", :middle_initial => "J", :last_name => "Walton", :street_address => "15 Woolnough Road", :city => "Parkside", :state => "SA", :postcode => "5063", :email => "Gabriel.J.Walton@mailinator.com", :birthday => "1947/2/16 00:00:00" -Person.create :gender => "female", :first_name => "Abbie", :middle_initial => "T", :last_name => "Heath", :street_address => "42 Creek Street", :city => "Weranga", :state => "QLD", :postcode => "4405", :email => "Abbie.T.Heath@dodgit.com", :birthday => "1956/2/4 00:00:00" -Person.create :gender => "female", :first_name => "Scarlett", :middle_initial => "S", :last_name => "Baxter", :street_address => "27 Bellion Drive", :city => "Dunsborough", :state => "WA", :postcode => "6281", :email => "Scarlett.S.Baxter@dodgit.com", :birthday => "1951/1/29 00:00:00" -Person.create :gender => "female", :first_name 
=> "Demi", :middle_initial => "B", :last_name => "Harvey", :street_address => "68 Trelawney Street", :city => "Paddington", :state => "NSW", :postcode => "2021", :email => "Demi.B.Harvey@dodgit.com", :birthday => "1957/1/26 00:00:00" -Person.create :gender => "male", :first_name => "Oliver", :middle_initial => "H", :last_name => "Lowe", :street_address => "52 Brentwood Drive", :city => "Karron", :state => "QLD", :postcode => "4871", :email => "Oliver.H.Lowe@pookmail.com", :birthday => "1971/3/28 00:00:00" -Person.create :gender => "male", :first_name => "Jake", :middle_initial => "L", :last_name => "Dobson", :street_address => "63 McLeans Road", :city => "Rawbelle", :state => "QLD", :postcode => "4630", :email => "Jake.L.Dobson@dodgit.com", :birthday => "1977/5/22 00:00:00" -Person.create :gender => "female", :first_name => "Elise", :middle_initial => "L", :last_name => "Wells", :street_address => "20 Ocean Street", :city => "Warrego", :state => "NT", :postcode => "0862", :email => "Elise.L.Wells@trashymail.com", :birthday => "1958/6/21 00:00:00" -Person.create :gender => "female", :first_name => "Jade", :middle_initial => "F", :last_name => "Knowles", :street_address => "99 Auricht Road", :city => "Greenways", :state => "SA", :postcode => "5272", :email => "Jade.F.Knowles@trashymail.com", :birthday => "1973/1/11 00:00:00" -Person.create :gender => "male", :first_name => "Thomas", :middle_initial => "M", :last_name => "Iqbal", :street_address => "3 Commercial Street", :city => "Ashbourne", :state => "VIC", :postcode => "3442", :email => "Thomas.M.Iqbal@spambob.com", :birthday => "1949/11/24 00:00:00" -Person.create :gender => "female", :first_name => "Isobel", :middle_initial => "A", :last_name => "Edwards", :street_address => "42 Sale Street", :city => "Orange East", :state => "NSW", :postcode => "2800", :email => "Isobel.A.Edwards@dodgit.com", :birthday => "1967/3/1 00:00:00" -Person.create :gender => "male", :first_name => "Ryan", :middle_initial => "P", :last_name => "Brookes", :street_address => "48 Grey Street", :city => "Gascoyne River", :state => "WA", :postcode => "6705", :email => "Ryan.P.Brookes@trashymail.com", :birthday => "1948/1/16 00:00:00" -Person.create :gender => "female", :first_name => "Natasha", :middle_initial => "N", :last_name => "Adams", :street_address => "91 Bette McNee Street", :city => "Kunama", :state => "NSW", :postcode => "2730", :email => "Natasha.N.Adams@spambob.com", :birthday => "1973/2/3 00:00:00" -Person.create :gender => "female", :first_name => "Demi", :middle_initial => "A", :last_name => "Tyler", :street_address => "68 Kooljak Rd", :city => "Reinscourt", :state => "WA", :postcode => "6280", :email => "Demi.A.Tyler@spambob.com", :birthday => "1944/2/6 00:00:00" -Person.create :gender => "male", :first_name => "Nicholas", :middle_initial => "E", :last_name => "Bishop", :street_address => "23 Glenpark Road", :city => "Corindi Beach", :state => "NSW", :postcode => "2456", :email => "Nicholas.E.Bishop@mailinator.com", :birthday => "1958/10/26 00:00:00" -Person.create :gender => "male", :first_name => "Sean", :middle_initial => "D", :last_name => "Dickinson", :street_address => "37 McKillop Street", :city => "Great Western", :state => "VIC", :postcode => "3377", :email => "Sean.D.Dickinson@spambob.com", :birthday => "1984/2/17 00:00:00" -Person.create :gender => "female", :first_name => "Kayleigh", :middle_initial => "D", :last_name => "Marsden", :street_address => "28 Jacabina Court", :city => "Thirroul", :state => "NSW", :postcode => "2515", :email => 
"Kayleigh.D.Marsden@trashymail.com", :birthday => "1972/12/17 00:00:00" -Person.create :gender => "female", :first_name => "Keira", :middle_initial => "B", :last_name => "Nicholls", :street_address => "79 Kerma Crescent", :city => "Rydal", :state => "NSW", :postcode => "2790", :email => "Keira.B.Nicholls@dodgit.com", :birthday => "1960/5/8 00:00:00" -Person.create :gender => "female", :first_name => "Imogen", :middle_initial => "R", :last_name => "Ellis", :street_address => "49 Nerrigundah Drive", :city => "Bayles", :state => "VIC", :postcode => "3981", :email => "Imogen.R.Ellis@pookmail.com", :birthday => "1959/10/12 00:00:00" -Person.create :gender => "male", :first_name => "Zachary", :middle_initial => "S", :last_name => "O'Connor", :street_address => "62 Kerma Crescent", :city => "Blackmans Flat", :state => "NSW", :postcode => "2790", :email => "Zachary.S.OConnor@pookmail.com", :birthday => "1957/8/3 00:00:00" -Person.create :gender => "male", :first_name => "Peter", :middle_initial => "J", :last_name => "Leach", :street_address => "22 Farrar Parade", :city => "Kadathinni", :state => "WA", :postcode => "6519", :email => "Peter.J.Leach@spambob.com", :birthday => "1981/4/22 00:00:00" -Person.create :gender => "male", :first_name => "Joshua", :middle_initial => "M", :last_name => "Brennan", :street_address => "22 Bourke Crescent", :city => "Antwerp", :state => "VIC", :postcode => "3414", :email => "Joshua.M.Brennan@trashymail.com", :birthday => "1982/10/13 00:00:00" -Person.create :gender => "female", :first_name => "Jade", :middle_initial => "C", :last_name => "Martin", :street_address => "89 Nandewar Street", :city => "Riverside", :state => "NSW", :postcode => "2444", :email => "Jade.C.Martin@spambob.com", :birthday => "1985/8/5 00:00:00" -Person.create :gender => "male", :first_name => "Isaac", :middle_initial => "D", :last_name => "Wyatt", :street_address => "14 Ross Street", :city => "Tallai", :state => "QLD", :postcode => "4213", :email => "Isaac.D.Wyatt@pookmail.com", :birthday => "1976/10/18 00:00:00" -Person.create :gender => "male", :first_name => "Liam", :middle_initial => "V", :last_name => "Knight", :street_address => "53 Farrar Parade", :city => "Badgingarra", :state => "WA", :postcode => "6521", :email => "Liam.V.Knight@spambob.com", :birthday => "1952/6/3 00:00:00" -Person.create :gender => "male", :first_name => "Corey", :middle_initial => "A", :last_name => "Davey", :street_address => "33 Marley Point Road", :city => "Buchan South", :state => "VIC", :postcode => "3885", :email => "Corey.A.Davey@dodgit.com", :birthday => "1952/2/6 00:00:00" -Person.create :gender => "female", :first_name => "Nicole", :middle_initial => "E", :last_name => "Morgan", :street_address => "63 Foreshore Road", :city => "Boya", :state => "WA", :postcode => "6056", :email => "Nicole.E.Morgan@trashymail.com", :birthday => "1941/6/9 00:00:00" -Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "J", :last_name => "Griffin", :street_address => "30 Normans Road", :city => "Wando Bridge", :state => "VIC", :postcode => "3312", :email => "Tegan.J.Griffin@spambob.com", :birthday => "1978/11/27 00:00:00" -Person.create :gender => "male", :first_name => "George", :middle_initial => "M", :last_name => "Watts", :street_address => "73 Buoro Street", :city => "Hampden", :state => "QLD", :postcode => "4741", :email => "George.M.Watts@spambob.com", :birthday => "1950/4/17 00:00:00" -Person.create :gender => "male", :first_name => "Gabriel", :middle_initial => "I", :last_name => 
"Clements", :street_address => "35 Boonah Qld", :city => "Gregors Creek", :state => "QLD", :postcode => "4313", :email => "Gabriel.I.Clements@trashymail.com", :birthday => "1964/2/8 00:00:00" -Person.create :gender => "male", :first_name => "Sam", :middle_initial => "Z", :last_name => "Randall", :street_address => "90 Tanner Street", :city => "Avoca", :state => "QLD", :postcode => "4670", :email => "Sam.Z.Randall@spambob.com", :birthday => "1975/9/25 00:00:00" -Person.create :gender => "male", :first_name => "Joshua", :middle_initial => "M", :last_name => "Cooper", :street_address => "13 Mendooran Road", :city => "Coolbaggie", :state => "NSW", :postcode => "2830", :email => "Joshua.M.Cooper@pookmail.com", :birthday => "1978/12/24 00:00:00" -Person.create :gender => "female", :first_name => "Sophie", :middle_initial => "P", :last_name => "Barton", :street_address => "56 Harris Street", :city => "Homewood", :state => "VIC", :postcode => "3717", :email => "Sophie.P.Barton@spambob.com", :birthday => "1977/8/5 00:00:00" -Person.create :gender => "female", :first_name => "Poppy", :middle_initial => "B", :last_name => "Dennis", :street_address => "76 Point Walter Road", :city => "Fremantle", :state => "WA", :postcode => "6160", :email => "Poppy.B.Dennis@spambob.com", :birthday => "1944/9/2 00:00:00" -Person.create :gender => "female", :first_name => "Maddison", :middle_initial => "E", :last_name => "Weston", :street_address => "76 Friar John Way", :city => "Coodanup", :state => "WA", :postcode => "6210", :email => "Maddison.E.Weston@pookmail.com", :birthday => "1952/8/30 00:00:00" -Person.create :gender => "male", :first_name => "Mohammed", :middle_initial => "H", :last_name => "Rice", :street_address => "39 Sinclair Street", :city => "Dowling", :state => "SA", :postcode => "5555", :email => "Mohammed.H.Rice@trashymail.com", :birthday => "1963/5/12 00:00:00" -Person.create :gender => "female", :first_name => "Millie", :middle_initial => "R", :last_name => "Mills", :street_address => "52 Horsington Street", :city => "Laburnum", :state => "VIC", :postcode => "3130", :email => "Millie.R.Mills@mailinator.com", :birthday => "1957/5/12 00:00:00" -Person.create :gender => "male", :first_name => "Jayden", :middle_initial => "G", :last_name => "Bull", :street_address => "14 Wollombi Street", :city => "Greenlands", :state => "NSW", :postcode => "2330", :email => "Jayden.G.Bull@trashymail.com", :birthday => "1957/9/25 00:00:00" -Person.create :gender => "female", :first_name => "Danielle", :middle_initial => "L", :last_name => "Fisher", :street_address => "47 Commercial Street", :city => "Monegeetta", :state => "VIC", :postcode => "3433", :email => "Danielle.L.Fisher@dodgit.com", :birthday => "1984/3/29 00:00:00" -Person.create :gender => "male", :first_name => "Mason", :middle_initial => "S", :last_name => "Rowe", :street_address => "23 Correa Place", :city => "Archer", :state => "NT", :postcode => "0830", :email => "Mason.S.Rowe@trashymail.com", :birthday => "1983/3/3 00:00:00" -Person.create :gender => "female", :first_name => "Amber", :middle_initial => "B", :last_name => "Morris", :street_address => "22 Maintongoon Road", :city => "Yallourn North", :state => "VIC", :postcode => "3825", :email => "Amber.B.Morris@spambob.com", :birthday => "1982/10/17 00:00:00" -Person.create :gender => "male", :first_name => "Mohammed", :middle_initial => "P", :last_name => "Fry", :street_address => "70 Carlisle Street", :city => "Katamatite", :state => "VIC", :postcode => "3649", :email => "Mohammed.P.Fry@dodgit.com", 
:birthday => "1944/5/10 00:00:00" -Person.create :gender => "male", :first_name => "Elliot", :middle_initial => "C", :last_name => "Burton", :street_address => "34 Magnolia Drive", :city => "Hurstville", :state => "NSW", :postcode => "2220", :email => "Elliot.C.Burton@dodgit.com", :birthday => "1941/8/5 00:00:00" -Person.create :gender => "male", :first_name => "Liam", :middle_initial => "K", :last_name => "Turnbull", :street_address => "54 Creek Street", :city => "Silverleigh", :state => "QLD", :postcode => "4401", :email => "Liam.K.Turnbull@trashymail.com", :birthday => "1944/11/8 00:00:00" -Person.create :gender => "male", :first_name => "Elliot", :middle_initial => "D", :last_name => "Cartwright", :street_address => "44 Chatsworth Drive", :city => "Kelmscott Dc", :state => "WA", :postcode => "6111", :email => "Elliot.D.Cartwright@dodgit.com", :birthday => "1979/9/7 00:00:00" -Person.create :gender => "female", :first_name => "Olivia", :middle_initial => "E", :last_name => "Potts", :street_address => "53 Thomas Lane", :city => "Epping Dc", :state => "VIC", :postcode => "3076", :email => "Olivia.E.Potts@dodgit.com", :birthday => "1947/7/25 00:00:00" -Person.create :gender => "male", :first_name => "Jonathan", :middle_initial => "L", :last_name => "Barnett", :street_address => "71 Gilbert Street", :city => "Park Grove", :state => "TAS", :postcode => "7320", :email => "Jonathan.L.Barnett@trashymail.com", :birthday => "1956/1/23 00:00:00" -Person.create :gender => "female", :first_name => "Eloise", :middle_initial => "T", :last_name => "Barton", :street_address => "9 Shell Road", :city => "Glenaire", :state => "VIC", :postcode => "3238", :email => "Eloise.T.Barton@trashymail.com", :birthday => "1946/5/17 00:00:00" -Person.create :gender => "female", :first_name => "Leah", :middle_initial => "A", :last_name => "Miah", :street_address => "85 Cecil Street", :city => "Blenheim Road", :state => "NSW", :postcode => "2113", :email => "Leah.A.Miah@mailinator.com", :birthday => "1977/9/7 00:00:00" -Person.create :gender => "female", :first_name => "Libby", :middle_initial => "J", :last_name => "Wallis", :street_address => "36 Frouds Road", :city => "Mallacoota", :state => "VIC", :postcode => "3892", :email => "Libby.J.Wallis@mailinator.com", :birthday => "1979/9/10 00:00:00" -Person.create :gender => "male", :first_name => "Hayden", :middle_initial => "A", :last_name => "Rogers", :street_address => "81 Magnolia Drive", :city => "Hurstville Westfield", :state => "NSW", :postcode => "2220", :email => "Hayden.A.Rogers@pookmail.com", :birthday => "1956/1/24 00:00:00" -Person.create :gender => "female", :first_name => "Abby", :middle_initial => "M", :last_name => "Harper", :street_address => "43 Lane Street", :city => "Cotham", :state => "VIC", :postcode => "3101", :email => "Abby.M.Harper@spambob.com", :birthday => "1950/9/16 00:00:00" -Person.create :gender => "female", :first_name => "Mollie", :middle_initial => "N", :last_name => "Potter", :street_address => "28 Spring Creek Road", :city => "Labertouche", :state => "VIC", :postcode => "3816", :email => "Mollie.N.Potter@trashymail.com", :birthday => "1952/2/11 00:00:00" -Person.create :gender => "female", :first_name => "Rachel", :middle_initial => "A", :last_name => "Pickering", :street_address => "74 McPherson Road", :city => "Thowgla Valley", :state => "VIC", :postcode => "3707", :email => "Rachel.A.Pickering@mailinator.com", :birthday => "1958/2/2 00:00:00" -Person.create :gender => "male", :first_name => "Ewan", :middle_initial => "I", :last_name 
=> "Holmes", :street_address => "18 Walpole Avenue", :city => "Nirranda East", :state => "VIC", :postcode => "3268", :email => "Ewan.I.Holmes@mailinator.com", :birthday => "1968/7/23 00:00:00" -Person.create :gender => "male", :first_name => "Archie", :middle_initial => "S", :last_name => "Thompson", :street_address => "68 Farrar Parade", :city => "Cataby", :state => "WA", :postcode => "6507", :email => "Archie.S.Thompson@dodgit.com", :birthday => "1984/1/18 00:00:00" -Person.create :gender => "female", :first_name => "Tilly", :middle_initial => "R", :last_name => "Osborne", :street_address => "86 Butler Crescent", :city => "Mount View", :state => "NSW", :postcode => "2325", :email => "Tilly.R.Osborne@trashymail.com", :birthday => "1960/11/15 00:00:00" -Person.create :gender => "female", :first_name => "Kate", :middle_initial => "K", :last_name => "Nelson", :street_address => "91 Sale Street", :city => "Borenore", :state => "NSW", :postcode => "2800", :email => "Kate.K.Nelson@mailinator.com", :birthday => "1959/6/2 00:00:00" -Person.create :gender => "male", :first_name => "Spencer", :middle_initial => "S", :last_name => "Brooks", :street_address => "71 Walder Crescent", :city => "Marmor", :state => "QLD", :postcode => "4702", :email => "Spencer.S.Brooks@spambob.com", :birthday => "1966/5/9 00:00:00" -Person.create :gender => "female", :first_name => "Kiera", :middle_initial => "J", :last_name => "Wyatt", :street_address => "52 Yulara Drive", :city => "Nyirripi", :state => "NT", :postcode => "0872", :email => "Kiera.J.Wyatt@spambob.com", :birthday => "1952/5/5 00:00:00" -Person.create :gender => "male", :first_name => "Hayden", :middle_initial => "A", :last_name => "Hart", :street_address => "94 Devon Street", :city => "Outer Harbor", :state => "SA", :postcode => "5018", :email => "Hayden.A.Hart@mailinator.com", :birthday => "1970/7/4 00:00:00" -Person.create :gender => "male", :first_name => "Kian", :middle_initial => "A", :last_name => "Chadwick", :street_address => "95 Ashton Road", :city => "Yoting", :state => "WA", :postcode => "6383", :email => "Kian.A.Chadwick@spambob.com", :birthday => "1961/1/12 00:00:00" -Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "L", :last_name => "Burrows", :street_address => "63 Ross Street", :city => "Nerang Dc", :state => "QLD", :postcode => "4211", :email => "Tegan.L.Burrows@dodgit.com", :birthday => "1959/9/8 00:00:00" -Person.create :gender => "female", :first_name => "Chloe", :middle_initial => "O", :last_name => "Norris", :street_address => "30 Jones Road", :city => "Forest Lake", :state => "QLD", :postcode => "4078", :email => "Chloe.O.Norris@trashymail.com", :birthday => "1970/4/15 00:00:00" -Person.create :gender => "female", :first_name => "Tilly", :middle_initial => "M", :last_name => "Cross", :street_address => "41 Harris Street", :city => "Towong Upper", :state => "VIC", :postcode => "3707", :email => "Tilly.M.Cross@dodgit.com", :birthday => "1952/9/2 00:00:00" -Person.create :gender => "male", :first_name => "Jacob", :middle_initial => "I", :last_name => "Sinclair", :street_address => "73 Jacolite Street", :city => "Darlington", :state => "WA", :postcode => "6070", :email => "Jacob.I.Sinclair@spambob.com", :birthday => "1971/4/15 00:00:00" -Person.create :gender => "male", :first_name => "John", :middle_initial => "S", :last_name => "Lucas", :street_address => "61 Dossiter Street", :city => "Andover", :state => "TAS", :postcode => "7120", :email => "John.S.Lucas@dodgit.com", :birthday => "1971/4/12 00:00:00" 
-Person.create :gender => "female", :first_name => "Morgan", :middle_initial => "A", :last_name => "Richardson", :street_address => "29 Taylor Street", :city => "Barmah", :state => "VIC", :postcode => "3639", :email => "Morgan.A.Richardson@dodgit.com", :birthday => "1983/12/10 00:00:00" -Person.create :gender => "female", :first_name => "Niamh", :middle_initial => "J", :last_name => "Holmes", :street_address => "33 Acheron Road", :city => "Morwell", :state => "VIC", :postcode => "3840", :email => "Niamh.J.Holmes@trashymail.com", :birthday => "1950/4/29 00:00:00" -Person.create :gender => "female", :first_name => "Summer", :middle_initial => "J", :last_name => "Grant", :street_address => "51 Milbrodale Road", :city => "Roughit", :state => "NSW", :postcode => "2330", :email => "Summer.J.Grant@spambob.com", :birthday => "1951/3/1 00:00:00" -Person.create :gender => "female", :first_name => "Rosie", :middle_initial => "B", :last_name => "Holloway", :street_address => "7 Ross Smith Avenue", :city => "Darwin", :state => "NT", :postcode => "0800", :email => "Rosie.B.Holloway@pookmail.com", :birthday => "1951/11/27 00:00:00" -Person.create :gender => "male", :first_name => "Harvey", :middle_initial => "S", :last_name => "Boyle", :street_address => "62 Ridge Road", :city => "Lowmead", :state => "QLD", :postcode => "4676", :email => "Harvey.S.Boyle@spambob.com", :birthday => "1984/9/3 00:00:00" -Person.create :gender => "female", :first_name => "Aimee", :middle_initial => "H", :last_name => "May", :street_address => "14 Goebels Road", :city => "Merryvale", :state => "QLD", :postcode => "4340", :email => "Aimee.H.May@pookmail.com", :birthday => "1949/4/12 00:00:00" -Person.create :gender => "male", :first_name => "Ryan", :middle_initial => "E", :last_name => "Connor", :street_address => "72 Boobialla Street", :city => "Sandigo", :state => "NSW", :postcode => "2700", :email => "Ryan.E.Connor@spambob.com", :birthday => "1974/11/29 00:00:00" -Person.create :gender => "female", :first_name => "Shannon", :middle_initial => "C", :last_name => "Bevan", :street_address => "8 Patton Street", :city => "Ashwood", :state => "VIC", :postcode => "3147", :email => "Shannon.C.Bevan@mailinator.com", :birthday => "1977/5/23 00:00:00" -Person.create :gender => "male", :first_name => "Freddie", :middle_initial => "B", :last_name => "Fisher", :street_address => "46 Nerrigundah Drive", :city => "Cranbourne West", :state => "VIC", :postcode => "3977", :email => "Freddie.B.Fisher@trashymail.com", :birthday => "1959/12/7 00:00:00" -Person.create :gender => "male", :first_name => "Joel", :middle_initial => "C", :last_name => "Manning", :street_address => "16 Alfred Street", :city => "Bandy Creek", :state => "WA", :postcode => "6450", :email => "Joel.C.Manning@spambob.com", :birthday => "1945/12/2 00:00:00" -Person.create :gender => "male", :first_name => "Adam", :middle_initial => "A", :last_name => "Fraser", :street_address => "77 Lapko Road", :city => "Cowalellup", :state => "WA", :postcode => "6336", :email => "Adam.A.Fraser@trashymail.com", :birthday => "1952/4/14 00:00:00" -Person.create :gender => "male", :first_name => "Ben", :middle_initial => "N", :last_name => "McCarthy", :street_address => "86 Warren Avenue", :city => "Catherine Hill Bay", :state => "NSW", :postcode => "2281", :email => "Ben.N.McCarthy@trashymail.com", :birthday => "1983/5/9 00:00:00" -Person.create :gender => "male", :first_name => "Zachary", :middle_initial => "F", :last_name => "Johnson", :street_address => "81 Weigall Avenue", :city => "Alma", 
:state => "SA", :postcode => "5401", :email => "Zachary.F.Johnson@mailinator.com", :birthday => "1947/11/23 00:00:00" -Person.create :gender => "female", :first_name => "Morgan", :middle_initial => "M", :last_name => "Coleman", :street_address => "80 Glen William Road", :city => "Croydon", :state => "QLD", :postcode => "4871", :email => "Morgan.M.Coleman@trashymail.com", :birthday => "1973/10/26 00:00:00" -Person.create :gender => "male", :first_name => "Jay", :middle_initial => "E", :last_name => "Craig", :street_address => "43 Loris Way", :city => "Hillside", :state => "WA", :postcode => "6312", :email => "Jay.E.Craig@mailinator.com", :birthday => "1959/9/17 00:00:00" -Person.create :gender => "female", :first_name => "Ava", :middle_initial => "J", :last_name => "Elliott", :street_address => "51 Boobialla Street", :city => "Widgiewa", :state => "NSW", :postcode => "2700", :email => "Ava.J.Elliott@spambob.com", :birthday => "1949/5/20 00:00:00" -Person.create :gender => "male", :first_name => "William", :middle_initial => "P", :last_name => "Nelson", :street_address => "36 Stanley Drive", :city => "Laguna Quays", :state => "QLD", :postcode => "4800", :email => "William.P.Nelson@dodgit.com", :birthday => "1962/6/15 00:00:00" -Person.create :gender => "male", :first_name => "Ryan", :middle_initial => "K", :last_name => "Palmer", :street_address => "47 Anderson Street", :city => "Bulwer", :state => "QLD", :postcode => "4025", :email => "Ryan.K.Palmer@dodgit.com", :birthday => "1940/7/11 00:00:00" -Person.create :gender => "female", :first_name => "Isabella", :middle_initial => "C", :last_name => "Harding", :street_address => "38 Argyle Street", :city => "Mernot", :state => "NSW", :postcode => "2422", :email => "Isabella.C.Harding@trashymail.com", :birthday => "1985/7/23 00:00:00" -Person.create :gender => "female", :first_name => "Samantha", :middle_initial => "C", :last_name => "Price", :street_address => "53 Inglewood Court", :city => "North Blackwood", :state => "VIC", :postcode => "3458", :email => "Samantha.C.Price@mailinator.com", :birthday => "1968/7/30 00:00:00" -Person.create :gender => "male", :first_name => "Samuel", :middle_initial => "R", :last_name => "Hayes", :street_address => "25 Wilson Street", :city => "Nyarrin", :state => "VIC", :postcode => "3533", :email => "Samuel.R.Hayes@pookmail.com", :birthday => "1968/11/8 00:00:00" -Person.create :gender => "male", :first_name => "Harry", :middle_initial => "H", :last_name => "Hurst", :street_address => "55 Bapaume Road", :city => "Pechey", :state => "QLD", :postcode => "4352", :email => "Harry.H.Hurst@dodgit.com", :birthday => "1983/3/1 00:00:00" -Person.create :gender => "male", :first_name => "Tyler", :middle_initial => "R", :last_name => "Hewitt", :street_address => "2 Ronald Crescent", :city => "River Ranch", :state => "QLD", :postcode => "4680", :email => "Tyler.R.Hewitt@spambob.com", :birthday => "1957/5/9 00:00:00" -Person.create :gender => "female", :first_name => "Olivia", :middle_initial => "A", :last_name => "McCarthy", :street_address => "4 Ross Street", :city => "Southern Lamington", :state => "QLD", :postcode => "4211", :email => "Olivia.A.McCarthy@dodgit.com", :birthday => "1981/9/2 00:00:00" -Person.create :gender => "female", :first_name => "Eve", :middle_initial => "M", :last_name => "Price", :street_address => "69 Normans Road", :city => "Brit Brit", :state => "VIC", :postcode => "3315", :email => "Eve.M.Price@trashymail.com", :birthday => "1940/1/10 00:00:00" -Person.create :gender => "female", :first_name => 
"Niamh", :middle_initial => "M", :last_name => "Douglas", :street_address => "39 Crofts Road", :city => "Simpsons Creek", :state => "VIC", :postcode => "3888", :email => "Niamh.M.Douglas@spambob.com", :birthday => "1947/4/7 00:00:00" -Person.create :gender => "male", :first_name => "Muhammad", :middle_initial => "G", :last_name => "Dobson", :street_address => "12 Glenpark Road", :city => "Bayldon", :state => "NSW", :postcode => "2452", :email => "Muhammad.G.Dobson@pookmail.com", :birthday => "1960/8/30 00:00:00" -Person.create :gender => "female", :first_name => "Jessica", :middle_initial => "J", :last_name => "Webb", :street_address => "3 Myrtle Street", :city => "Almonds", :state => "VIC", :postcode => "3727", :email => "Jessica.J.Webb@trashymail.com", :birthday => "1952/6/10 00:00:00" -Person.create :gender => "male", :first_name => "Zachary", :middle_initial => "L", :last_name => "Holland", :street_address => "48 Porana Place", :city => "Nugadong", :state => "WA", :postcode => "6609", :email => "Zachary.L.Holland@mailinator.com", :birthday => "1984/3/13 00:00:00" -Person.create :gender => "male", :first_name => "Scott", :middle_initial => "A", :last_name => "Hyde", :street_address => "95 Spencer Street", :city => "Tin Can Bay", :state => "QLD", :postcode => "4580", :email => "Scott.A.Hyde@mailinator.com", :birthday => "1942/5/4 00:00:00" -Person.create :gender => "female", :first_name => "Shannon", :middle_initial => "P", :last_name => "Myers", :street_address => "9 Rose Street", :city => "Narre Warren", :state => "VIC", :postcode => "3805", :email => "Shannon.P.Myers@mailinator.com", :birthday => "1955/3/2 00:00:00" -Person.create :gender => "female", :first_name => "Lauren", :middle_initial => "H", :last_name => "Whitehouse", :street_address => "47 Begley Street", :city => "Herberton", :state => "QLD", :postcode => "4887", :email => "Lauren.H.Whitehouse@trashymail.com", :birthday => "1975/3/10 00:00:00" -Person.create :gender => "female", :first_name => "Ella", :middle_initial => "L", :last_name => "George", :street_address => "42 Chatsworth Road", :city => "Kippenduff", :state => "NSW", :postcode => "2469", :email => "Ella.L.George@spambob.com", :birthday => "1945/12/8 00:00:00" -Person.create :gender => "female", :first_name => "Lilly", :middle_initial => "L", :last_name => "Morton", :street_address => "92 Railway Street", :city => "Greenmount", :state => "QLD", :postcode => "4359", :email => "Lilly.L.Morton@spambob.com", :birthday => "1980/12/9 00:00:00" -Person.create :gender => "male", :first_name => "Jamie", :middle_initial => "I", :last_name => "Rose", :street_address => "35 Benny Street", :city => "Miandetta", :state => "TAS", :postcode => "7310", :email => "Jamie.I.Rose@mailinator.com", :birthday => "1952/7/24 00:00:00" -Person.create :gender => "female", :first_name => "Millie", :middle_initial => "D", :last_name => "Daniels", :street_address => "52 Point Walter Road", :city => "Naval Base", :state => "WA", :postcode => "6165", :email => "Millie.D.Daniels@trashymail.com", :birthday => "1962/5/3 00:00:00" -Person.create :gender => "female", :first_name => "Eva", :middle_initial => "G", :last_name => "Banks", :street_address => "21 Ugoa Street", :city => "Boomerang Beach", :state => "NSW", :postcode => "2428", :email => "Eva.G.Banks@spambob.com", :birthday => "1945/4/3 00:00:00" -Person.create :gender => "male", :first_name => "Nathan", :middle_initial => "L", :last_name => "Collins", :street_address => "63 Tapleys Hill Road", :city => "Paralowie", :state => "SA", :postcode 
=> "5108", :email => "Nathan.L.Collins@spambob.com", :birthday => "1959/12/1 00:00:00" -Person.create :gender => "female", :first_name => "Imogen", :middle_initial => "H", :last_name => "Rees", :street_address => "85 Forrest Road", :city => "Bourke", :state => "NSW", :postcode => "2840", :email => "Imogen.H.Rees@spambob.com", :birthday => "1949/7/20 00:00:00" -Person.create :gender => "male", :first_name => "Adam", :middle_initial => "L", :last_name => "Lord", :street_address => "60 Trelawney Street", :city => "Mascot", :state => "NSW", :postcode => "2020", :email => "Adam.L.Lord@mailinator.com", :birthday => "1973/12/31 00:00:00" -Person.create :gender => "male", :first_name => "Alexander", :middle_initial => "I", :last_name => "Stone", :street_address => "71 Buoro Street", :city => "Pinnacle", :state => "QLD", :postcode => "4741", :email => "Alexander.I.Stone@mailinator.com", :birthday => "1949/4/26 00:00:00" -Person.create :gender => "female", :first_name => "Bethany", :middle_initial => "L", :last_name => "Dodd", :street_address => "98 Arthur Street", :city => "Mount Aquila", :state => "NSW", :postcode => "2820", :email => "Bethany.L.Dodd@pookmail.com", :birthday => "1963/11/25 00:00:00" -Person.create :gender => "female", :first_name => "Jessica", :middle_initial => "R", :last_name => "Marsden", :street_address => "43 Railway Avenue", :city => "Taripta", :state => "VIC", :postcode => "3620", :email => "Jessica.R.Marsden@trashymail.com", :birthday => "1955/11/17 00:00:00" -Person.create :gender => "male", :first_name => "Owen", :middle_initial => "R", :last_name => "Pope", :street_address => "65 Faunce Crescent", :city => "Warburn", :state => "NSW", :postcode => "2680", :email => "Owen.R.Pope@dodgit.com", :birthday => "1976/7/12 00:00:00" -Person.create :gender => "female", :first_name => "Aaliyah", :middle_initial => "J", :last_name => "Howard", :street_address => "21 Armstrong Street", :city => "Murray Downs", :state => "NSW", :postcode => "3585", :email => "Aaliyah.J.Howard@pookmail.com", :birthday => "1943/7/9 00:00:00" -Person.create :gender => "female", :first_name => "Lucy", :middle_initial => "D", :last_name => "Turnbull", :street_address => "31 Gaffney Street", :city => "Middle Park", :state => "VIC", :postcode => "3206", :email => "Lucy.D.Turnbull@dodgit.com", :birthday => "1959/10/29 00:00:00" -Person.create :gender => "female", :first_name => "Ella", :middle_initial => "G", :last_name => "Potts", :street_address => "12 George Street", :city => "Lumeah", :state => "QLD", :postcode => "4478", :email => "Ella.G.Potts@trashymail.com", :birthday => "1974/1/31 00:00:00" -Person.create :gender => "male", :first_name => "Edward", :middle_initial => "H", :last_name => "Ali", :street_address => "17 Sale Street", :city => "Lower Lewis Ponds", :state => "NSW", :postcode => "2800", :email => "Edward.H.Ali@dodgit.com", :birthday => "1951/9/11 00:00:00" -Person.create :gender => "male", :first_name => "Tyler", :middle_initial => "K", :last_name => "Shaw", :street_address => "54 Jones Road", :city => "Richlands Dc", :state => "QLD", :postcode => "4077", :email => "Tyler.K.Shaw@pookmail.com", :birthday => "1974/4/1 00:00:00" -Person.create :gender => "female", :first_name => "Summer", :middle_initial => "K", :last_name => "Hopkins", :street_address => "90 Woodlands Avenue", :city => "Berrilee", :state => "NSW", :postcode => "2159", :email => "Summer.K.Hopkins@pookmail.com", :birthday => "1984/10/29 00:00:00" -Person.create :gender => "male", :first_name => "Callum", :middle_initial => "Y", 
:last_name => "Carr", :street_address => "77 Jacabina Court", :city => "Mangerton", :state => "NSW", :postcode => "2500", :email => "Callum.Y.Carr@pookmail.com", :birthday => "1948/1/7 00:00:00" -Person.create :gender => "female", :first_name => "Freya", :middle_initial => "O", :last_name => "O'onnor", :street_address => "75 Capper Street", :city => "The Limits", :state => "QLD", :postcode => "4625", :email => "Freya.O.O'onnor@pookmail.com", :birthday => "1973/4/12 00:00:00" -Person.create :gender => "male", :first_name => "Sean", :middle_initial => "A", :last_name => "Burton", :street_address => "91 Kogil Street", :city => "Narrabri West", :state => "NSW", :postcode => "2390", :email => "Sean.A.Burton@dodgit.com", :birthday => "1943/5/4 00:00:00" -Person.create :gender => "male", :first_name => "David", :middle_initial => "A", :last_name => "Nicholls", :street_address => "3 Wright Street", :city => "Kulangoor", :state => "QLD", :postcode => "4560", :email => "David.A.Nicholls@pookmail.com", :birthday => "1943/2/26 00:00:00" -Person.create :gender => "male", :first_name => "David", :middle_initial => "M", :last_name => "Brady", :street_address => "39 Woerdens Road", :city => "Manobalai", :state => "NSW", :postcode => "2333", :email => "David.M.Brady@pookmail.com", :birthday => "1962/1/19 00:00:00" -Person.create :gender => "female", :first_name => "Isabelle", :middle_initial => "G", :last_name => "Pearson", :street_address => "87 Carolina Park Road", :city => "Umina Beach", :state => "NSW", :postcode => "2257", :email => "Isabelle.G.Pearson@dodgit.com", :birthday => "1981/5/31 00:00:00" -Person.create :gender => "male", :first_name => "Mason", :middle_initial => "L", :last_name => "Kent", :street_address => "22 Bailey Street", :city => "Tower Hill", :state => "VIC", :postcode => "3283", :email => "Mason.L.Kent@pookmail.com", :birthday => "1964/2/18 00:00:00" -Person.create :gender => "male", :first_name => "Sebastian", :middle_initial => "Z", :last_name => "Stephens", :street_address => "47 Black Point Drive", :city => "Whyalla Playford", :state => "SA", :postcode => "5600", :email => "Sebastian.Z.Stephens@dodgit.com", :birthday => "1960/1/19 00:00:00" -Person.create :gender => "female", :first_name => "Chelsea", :middle_initial => "C", :last_name => "Shaw", :street_address => "10 Todd Street", :city => "East Bowes", :state => "WA", :postcode => "6535", :email => "Chelsea.C.Shaw@mailinator.com", :birthday => "1964/2/22 00:00:00" -Person.create :gender => "male", :first_name => "Reece", :middle_initial => "C", :last_name => "Webster", :street_address => "33 Banksia Court", :city => "Cambridge", :state => "QLD", :postcode => "4822", :email => "Reece.C.Webster@trashymail.com", :birthday => "1947/6/2 00:00:00" -Person.create :gender => "male", :first_name => "Jonathan", :middle_initial => "J", :last_name => "Morris", :street_address => "83 Carolina Park Road", :city => "Koolewong", :state => "NSW", :postcode => "2256", :email => "Jonathan.J.Morris@dodgit.com", :birthday => "1955/7/28 00:00:00" -Person.create :gender => "female", :first_name => "Elise", :middle_initial => "L", :last_name => "Scott", :street_address => "88 Chatsworth Drive", :city => "Ascot", :state => "WA", :postcode => "6104", :email => "Elise.L.Scott@pookmail.com", :birthday => "1976/5/11 00:00:00" -Person.create :gender => "male", :first_name => "Christopher", :middle_initial => "R", :last_name => "Simmons", :street_address => "63 Mandible Street", :city => "Mount Isa East", :state => "QLD", :postcode => "4825", :email => 
"Christopher.R.Simmons@trashymail.com", :birthday => "1955/11/25 00:00:00" -Person.create :gender => "male", :first_name => "Tom", :middle_initial => "L", :last_name => "Peacock", :street_address => "84 Weemala Avenue", :city => "Canowindra", :state => "NSW", :postcode => "2804", :email => "Tom.L.Peacock@pookmail.com", :birthday => "1954/2/16 00:00:00" -Person.create :gender => "female", :first_name => "Ruby", :middle_initial => "S", :last_name => "Atkinson", :street_address => "55 Derry Street", :city => "North Bungunya", :state => "QLD", :postcode => "4494", :email => "Ruby.S.Atkinson@spambob.com", :birthday => "1982/2/4 00:00:00" -Person.create :gender => "male", :first_name => "Joel", :middle_initial => "E", :last_name => "Summers", :street_address => "95 Alfred Street", :city => "Leonora", :state => "WA", :postcode => "6438", :email => "Joel.E.Summers@pookmail.com", :birthday => "1965/4/23 00:00:00" -Person.create :gender => "female", :first_name => "Rebecca", :middle_initial => "L", :last_name => "Burgess", :street_address => "14 Gloucester Avenue", :city => "West Hindmarsh", :state => "SA", :postcode => "5007", :email => "Rebecca.L.Burgess@mailinator.com", :birthday => "1964/6/19 00:00:00" -Person.create :gender => "female", :first_name => "Lola", :middle_initial => "G", :last_name => "Holden", :street_address => "40 Buoro Street", :city => "Yalboroo", :state => "QLD", :postcode => "4741", :email => "Lola.G.Holden@pookmail.com", :birthday => "1952/1/4 00:00:00" -Person.create :gender => "male", :first_name => "Louis", :middle_initial => "R", :last_name => "Lees", :street_address => "3 Peninsula Drive", :city => "Engadine", :state => "NSW", :postcode => "2233", :email => "Louis.R.Lees@mailinator.com", :birthday => "1984/3/13 00:00:00" -Person.create :gender => "female", :first_name => "Maya", :middle_initial => "H", :last_name => "Moss", :street_address => "27 Buoro Street", :city => "Owens Creek", :state => "QLD", :postcode => "4741", :email => "Maya.H.Moss@dodgit.com", :birthday => "1966/8/27 00:00:00" -Person.create :gender => "male", :first_name => "Jude", :middle_initial => "M", :last_name => "Cunningham", :street_address => "18 McKillop Street", :city => "Mena Park", :state => "VIC", :postcode => "3373", :email => "Jude.M.Cunningham@trashymail.com", :birthday => "1954/9/16 00:00:00" -Person.create :gender => "male", :first_name => "Cameron", :middle_initial => "L", :last_name => "Johnson", :street_address => "12 Carnegie Avenue", :city => "Buller", :state => "WA", :postcode => "6532", :email => "Cameron.L.Johnson@spambob.com", :birthday => "1958/1/5 00:00:00" -Person.create :gender => "female", :first_name => "Freya", :middle_initial => "C", :last_name => "Reed", :street_address => "31 Boughtman Street", :city => "Carnegie", :state => "VIC", :postcode => "3163", :email => "Freya.C.Reed@pookmail.com", :birthday => "1962/3/25 00:00:00" -Person.create :gender => "female", :first_name => "Charlotte", :middle_initial => "B", :last_name => "Moore", :street_address => "68 Larissa Court", :city => "Iraak", :state => "VIC", :postcode => "3494", :email => "Charlotte.B.Moore@mailinator.com", :birthday => "1985/2/23 00:00:00" -Person.create :gender => "male", :first_name => "Jude", :middle_initial => "S", :last_name => "Bennett", :street_address => "96 Acheron Road", :city => "Wurruk", :state => "VIC", :postcode => "3850", :email => "Jude.S.Bennett@dodgit.com", :birthday => "1974/8/8 00:00:00" -Person.create :gender => "female", :first_name => "Isabel", :middle_initial => "C", :last_name 
=> "Hanson", :street_address => "7 McLaughlin Road", :city => "Nukku", :state => "QLD", :postcode => "4306", :email => "Isabel.C.Hanson@trashymail.com", :birthday => "1968/12/1 00:00:00" -Person.create :gender => "male", :first_name => "Leo", :middle_initial => "N", :last_name => "West", :street_address => "96 Holthouse Road", :city => "Muston", :state => "SA", :postcode => "5221", :email => "Leo.N.West@trashymail.com", :birthday => "1973/12/18 00:00:00" -Person.create :gender => "female", :first_name => "Summer", :middle_initial => "M", :last_name => "Hammond", :street_address => "45 Cambridge Street", :city => "Freemans Reach", :state => "NSW", :postcode => "2756", :email => "Summer.M.Hammond@dodgit.com", :birthday => "1971/2/12 00:00:00" -Person.create :gender => "female", :first_name => "Eve", :middle_initial => "H", :last_name => "Jenkins", :street_address => "85 Sydney Road", :city => "Wilpinjong", :state => "NSW", :postcode => "2850", :email => "Eve.H.Jenkins@spambob.com", :birthday => "1957/9/7 00:00:00" -Person.create :gender => "male", :first_name => "Luke", :middle_initial => "E", :last_name => "Connor", :street_address => "76 Jacabina Court", :city => "Tanglewood", :state => "NSW", :postcode => "2488", :email => "Luke.E.Connor@dodgit.com", :birthday => "1962/10/2 00:00:00" -Person.create :gender => "female", :first_name => "Amy", :middle_initial => "A", :last_name => "Stanley", :street_address => "84 Ghost Hill Road", :city => "East Kurrajong", :state => "NSW", :postcode => "2758", :email => "Amy.A.Stanley@trashymail.com", :birthday => "1974/1/4 00:00:00" -Person.create :gender => "female", :first_name => "Jodie", :middle_initial => "R", :last_name => "Johnston", :street_address => "79 South Molle Boulevard", :city => "Nicholson", :state => "QLD", :postcode => "4830", :email => "Jodie.R.Johnston@spambob.com", :birthday => "1974/1/22 00:00:00" -Person.create :gender => "male", :first_name => "Spencer", :middle_initial => "N", :last_name => "Dickinson", :street_address => "84 Spring Creek Road", :city => "Brandy Creek", :state => "VIC", :postcode => "3821", :email => "Spencer.N.Dickinson@dodgit.com", :birthday => "1974/10/13 00:00:00" -Person.create :gender => "female", :first_name => "Tia", :middle_initial => "F", :last_name => "Nolan", :street_address => "43 Tennyson Road", :city => "Marrickville South", :state => "NSW", :postcode => "2204", :email => "Tia.F.Nolan@dodgit.com", :birthday => "1970/10/8 00:00:00" -Person.create :gender => "female", :first_name => "Molly", :middle_initial => "T", :last_name => "Byrne", :street_address => "32 Sale-Heyfield Road", :city => "Berrys Creek", :state => "VIC", :postcode => "3953", :email => "Molly.T.Byrne@trashymail.com", :birthday => "1956/2/27 00:00:00" -Person.create :gender => "male", :first_name => "Jonathan", :middle_initial => "L", :last_name => "Allen", :street_address => "73 Round Drive", :city => "Rathmines", :state => "NSW", :postcode => "2283", :email => "Jonathan.L.Allen@dodgit.com", :birthday => "1951/5/17 00:00:00" -Person.create :gender => "male", :first_name => "Andrew", :middle_initial => "Y", :last_name => "Vincent", :street_address => "69 Glendonbrook Road", :city => "Camboon", :state => "NSW", :postcode => "2849", :email => "Andrew.Y.Vincent@trashymail.com", :birthday => "1950/11/30 00:00:00" -Person.create :gender => "male", :first_name => "Sean", :middle_initial => "H", :last_name => "Perkins", :street_address => "20 Roseda-Tinamba Road", :city => "Bennison", :state => "VIC", :postcode => "3960", :email => 
"Sean.H.Perkins@spambob.com", :birthday => "1961/11/18 00:00:00" -Person.create :gender => "female", :first_name => "Caitlin", :middle_initial => "A", :last_name => "Burrows", :street_address => "75 Souttar Terrace", :city => "Mount Claremont", :state => "WA", :postcode => "6010", :email => "Caitlin.A.Burrows@pookmail.com", :birthday => "1976/12/7 00:00:00" -Person.create :gender => "female", :first_name => "Amber", :middle_initial => "J", :last_name => "Booth", :street_address => "17 Milbrodale Road", :city => "Jerrys Plains", :state => "NSW", :postcode => "2330", :email => "Amber.J.Booth@spambob.com", :birthday => "1966/3/27 00:00:00" -Person.create :gender => "female", :first_name => "Amelie", :middle_initial => "C", :last_name => "Skinner", :street_address => "26 Adavale Road", :city => "Wowagin", :state => "NSW", :postcode => "2580", :email => "Amelie.C.Skinner@pookmail.com", :birthday => "1967/3/21 00:00:00" -Person.create :gender => "female", :first_name => "Maya", :middle_initial => "T", :last_name => "Wall", :street_address => "34 Atkinson Way", :city => "Millstream", :state => "WA", :postcode => "6716", :email => "Maya.T.Wall@trashymail.com", :birthday => "1961/7/12 00:00:00" -Person.create :gender => "male", :first_name => "Daniel", :middle_initial => "M", :last_name => "Bird", :street_address => "73 Wallis Street", :city => "Rose Bay North", :state => "NSW", :postcode => "2030", :email => "Daniel.M.Bird@mailinator.com", :birthday => "1976/11/30 00:00:00" -Person.create :gender => "male", :first_name => "Evan", :middle_initial => "T", :last_name => "Blackburn", :street_address => "22 Glendonbrook Road", :city => "Bogee", :state => "NSW", :postcode => "2849", :email => "Evan.T.Blackburn@dodgit.com", :birthday => "1955/8/24 00:00:00" -Person.create :gender => "female", :first_name => "Imogen", :middle_initial => "J", :last_name => "Burns", :street_address => "37 Goldfields Road", :city => "Cement Mills", :state => "QLD", :postcode => "4352", :email => "Imogen.J.Burns@mailinator.com", :birthday => "1980/1/22 00:00:00" -Person.create :gender => "male", :first_name => "Owen", :middle_initial => "K", :last_name => "Craig", :street_address => "86 Darwinia Loop", :city => "Tom Price", :state => "WA", :postcode => "6751", :email => "Owen.K.Craig@pookmail.com", :birthday => "1944/7/19 00:00:00" -Person.create :gender => "female", :first_name => "Emily", :middle_initial => "T", :last_name => "Elliott", :street_address => "37 Goebels Road", :city => "Blenheim", :state => "QLD", :postcode => "4341", :email => "Emily.T.Elliott@trashymail.com", :birthday => "1965/1/3 00:00:00" -Person.create :gender => "female", :first_name => "Maisie", :middle_initial => "T", :last_name => "Johnson", :street_address => "84 Bette McNee Street", :city => "Cunninyeuk", :state => "NSW", :postcode => "2734", :email => "Maisie.T.Johnson@spambob.com", :birthday => "1984/8/16 00:00:00" -Person.create :gender => "female", :first_name => "Daisy", :middle_initial => "A", :last_name => "Bryan", :street_address => "65 Frouds Road", :city => "Glen Wills", :state => "VIC", :postcode => "3898", :email => "Daisy.A.Bryan@trashymail.com", :birthday => "1944/7/28 00:00:00" -Person.create :gender => "female", :first_name => "Aimee", :middle_initial => "N", :last_name => "Holden", :street_address => "32 Mills Street", :city => "Glen Forbes", :state => "VIC", :postcode => "3990", :email => "Aimee.N.Holden@trashymail.com", :birthday => "1942/3/26 00:00:00" -Person.create :gender => "male", :first_name => "Nicholas", :middle_initial 
=> "A", :last_name => "Parkinson", :street_address => "25 Anderson Street", :city => "Zillmere", :state => "QLD", :postcode => "4034", :email => "Nicholas.A.Parkinson@dodgit.com", :birthday => "1952/5/10 00:00:00" -Person.create :gender => "male", :first_name => "David", :middle_initial => "K", :last_name => "Hill", :street_address => "56 Jones Road", :city => "Sumner", :state => "QLD", :postcode => "4074", :email => "David.K.Hill@dodgit.com", :birthday => "1947/8/5 00:00:00" -Person.create :gender => "female", :first_name => "Molly", :middle_initial => "J", :last_name => "Gough", :street_address => "82 Milbrodale Road", :city => "Reedy Creek", :state => "NSW", :postcode => "2330", :email => "Molly.J.Gough@trashymail.com", :birthday => "1965/6/2 00:00:00" -Person.create :gender => "male", :first_name => "Patrick", :middle_initial => "E", :last_name => "Blake", :street_address => "91 Stanley Drive", :city => "Glen Isla", :state => "QLD", :postcode => "4800", :email => "Patrick.E.Blake@pookmail.com", :birthday => "1969/12/20 00:00:00" -Person.create :gender => "male", :first_name => "Nathan", :middle_initial => "E", :last_name => "Miles", :street_address => "58 Cherokee Road", :city => "Hepburn Springs", :state => "VIC", :postcode => "3461", :email => "Nathan.E.Miles@trashymail.com", :birthday => "1967/7/5 00:00:00" -Person.create :gender => "female", :first_name => "Alisha", :middle_initial => "A", :last_name => "Manning", :street_address => "28 Begley Street", :city => "Bloomfield", :state => "QLD", :postcode => "4895", :email => "Alisha.A.Manning@trashymail.com", :birthday => "1944/4/13 00:00:00" -Person.create :gender => "male", :first_name => "Connor", :middle_initial => "J", :last_name => "Price", :street_address => "75 Round Drive", :city => "Macquarie Hills", :state => "NSW", :postcode => "2285", :email => "Connor.J.Price@dodgit.com", :birthday => "1942/6/21 00:00:00" -Person.create :gender => "female", :first_name => "Nicole", :middle_initial => "J", :last_name => "Patterson", :street_address => "43 Webb Road", :city => "Hamilton South", :state => "NSW", :postcode => "2303", :email => "Nicole.J.Patterson@trashymail.com", :birthday => "1980/2/29 00:00:00" -Person.create :gender => "female", :first_name => "Sofia", :middle_initial => "M", :last_name => "Parsons", :street_address => "52 Plug Street", :city => "Oakwood", :state => "NSW", :postcode => "2360", :email => "Sofia.M.Parsons@mailinator.com", :birthday => "1985/8/27 00:00:00" -Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "E", :last_name => "Holloway", :street_address => "12 Ulomogo Street", :city => "Newbury Park", :state => "NSW", :postcode => "2830", :email => "Tegan.E.Holloway@dodgit.com", :birthday => "1981/5/26 00:00:00" -Person.create :gender => "female", :first_name => "Amelia", :middle_initial => "S", :last_name => "Dawson", :street_address => "88 Timms Drive", :city => "Kentlyn", :state => "NSW", :postcode => "2560", :email => "Amelia.S.Dawson@pookmail.com", :birthday => "1967/4/9 00:00:00" -Person.create :gender => "male", :first_name => "Luke", :middle_initial => "M", :last_name => "Martin", :street_address => "59 Mt Berryman Road", :city => "Grantham", :state => "QLD", :postcode => "4347", :email => "Luke.M.Martin@pookmail.com", :birthday => "1956/4/10 00:00:00" -Person.create :gender => "male", :first_name => "Jacob", :middle_initial => "L", :last_name => "Doyle", :street_address => "49 Mackie Street", :city => "Nerrigundah", :state => "NSW", :postcode => "2545", :email => 
"Jacob.L.Doyle@dodgit.com", :birthday => "1968/3/1 00:00:00" -Person.create :gender => "female", :first_name => "Charlotte", :middle_initial => "J", :last_name => "Lee", :street_address => "23 Clifton Street", :city => "Murchison North", :state => "VIC", :postcode => "3610", :email => "Charlotte.J.Lee@pookmail.com", :birthday => "1966/2/26 00:00:00" -Person.create :gender => "female", :first_name => "Caitlin", :middle_initial => "J", :last_name => "Whittaker", :street_address => "67 Eurack Court", :city => "Third Creek", :state => "NSW", :postcode => "2583", :email => "Caitlin.J.Whittaker@pookmail.com", :birthday => "1978/5/23 00:00:00" -Person.create :gender => "male", :first_name => "Corey", :middle_initial => "G", :last_name => "Houghton", :street_address => "6 Mildura Street", :city => "Gray", :state => "TAS", :postcode => "7215", :email => "Corey.G.Houghton@trashymail.com", :birthday => "1974/12/15 00:00:00" -Person.create :gender => "male", :first_name => "Samuel", :middle_initial => "A", :last_name => "Akhtar", :street_address => "59 Bourke Crescent", :city => "Broughton", :state => "VIC", :postcode => "3418", :email => "Samuel.A.Akhtar@dodgit.com", :birthday => "1944/3/11 00:00:00" -Person.create :gender => "male", :first_name => "Alex", :middle_initial => "L", :last_name => "Hamilton", :street_address => "46 Derry Street", :city => "Cashmere", :state => "QLD", :postcode => "4500", :email => "Alex.L.Hamilton@trashymail.com", :birthday => "1941/10/21 00:00:00" -Person.create :gender => "female", :first_name => "Natasha", :middle_initial => "H", :last_name => "Lees", :street_address => "46 Barker Street", :city => "Coblinine", :state => "WA", :postcode => "6317", :email => "Natasha.H.Lees@pookmail.com", :birthday => "1953/6/2 00:00:00" -Person.create :gender => "female", :first_name => "Lauren", :middle_initial => "S", :last_name => "Sharpe", :street_address => "47 Manchester Road", :city => "Berrico", :state => "NSW", :postcode => "2422", :email => "Lauren.S.Sharpe@spambob.com", :birthday => "1985/8/7 00:00:00" -Person.create :gender => "male", :first_name => "Mason", :middle_initial => "A", :last_name => "Ellis", :street_address => "14 Halsey Road", :city => "Encounter Bay", :state => "SA", :postcode => "5211", :email => "Mason.A.Ellis@trashymail.com", :birthday => "1952/2/11 00:00:00" -Person.create :gender => "female", :first_name => "Elise", :middle_initial => "L", :last_name => "Buckley", :street_address => "97 Redesdale Rd", :city => "Mandurang", :state => "VIC", :postcode => "3551", :email => "Elise.L.Buckley@mailinator.com", :birthday => "1940/6/18 00:00:00" -Person.create :gender => "female", :first_name => "Louise", :middle_initial => "A", :last_name => "Garner", :street_address => "41 Stanley Drive", :city => "Mount Julian", :state => "QLD", :postcode => "4800", :email => "Louise.A.Garner@spambob.com", :birthday => "1944/8/25 00:00:00" -Person.create :gender => "female", :first_name => "Lara", :middle_initial => "J", :last_name => "White", :street_address => "39 Shamrock Avenue", :city => "Lilli Pilli", :state => "NSW", :postcode => "2536", :email => "Lara.J.White@spambob.com", :birthday => "1968/8/31 00:00:00" -Person.create :gender => "male", :first_name => "Dylan", :middle_initial => "C", :last_name => "Goddard", :street_address => "75 Alfred Street", :city => "Widgiemooltha", :state => "WA", :postcode => "6443", :email => "Dylan.C.Goddard@dodgit.com", :birthday => "1960/10/7 00:00:00" -Person.create :gender => "male", :first_name => "Harrison", :middle_initial => "J", 
:last_name => "Bryan", :street_address => "76 Capper Street", :city => "The Limits", :state => "QLD", :postcode => "4625", :email => "Harrison.J.Bryan@spambob.com", :birthday => "1942/12/13 00:00:00" -Person.create :gender => "female", :first_name => "Chelsea", :middle_initial => "L", :last_name => "Duffy", :street_address => "35 South Street", :city => "Sandford", :state => "TAS", :postcode => "7020", :email => "Chelsea.L.Duffy@mailinator.com", :birthday => "1961/7/25 00:00:00" -Person.create :gender => "male", :first_name => "David", :middle_initial => "G", :last_name => "Noble", :street_address => "91 Nerrigundah Drive", :city => "Pioneer Bay", :state => "VIC", :postcode => "3984", :email => "David.G.Noble@dodgit.com", :birthday => "1971/6/7 00:00:00" -Person.create :gender => "female", :first_name => "Ava", :middle_initial => "P", :last_name => "Lucas", :street_address => "28 Halsey Road", :city => "Silverton", :state => "SA", :postcode => "5204", :email => "Ava.P.Lucas@pookmail.com", :birthday => "1981/9/2 00:00:00" -Person.create :gender => "female", :first_name => "Sofia", :middle_initial => "J", :last_name => "Mann", :street_address => "76 Boulter Close", :city => "Wooroonooran", :state => "QLD", :postcode => "4860", :email => "Sofia.J.Mann@mailinator.com", :birthday => "1952/9/26 00:00:00" -Person.create :gender => "male", :first_name => "Nicholas", :middle_initial => "B", :last_name => "Wall", :street_address => "51 South Street", :city => "Ridgeway", :state => "TAS", :postcode => "7054", :email => "Nicholas.B.Wall@spambob.com", :birthday => "1967/11/10 00:00:00" -Person.create :gender => "male", :first_name => "Aaron", :middle_initial => "A", :last_name => "Nelson", :street_address => "53 Alfred Street", :city => "Monjingup", :state => "WA", :postcode => "6450", :email => "Aaron.A.Nelson@trashymail.com", :birthday => "1947/6/5 00:00:00" -Person.create :gender => "female", :first_name => "Harriet", :middle_initial => "R", :last_name => "Armstrong", :street_address => "70 Frencham Street", :city => "Geehi", :state => "NSW", :postcode => "2642", :email => "Harriet.R.Armstrong@trashymail.com", :birthday => "1947/1/9 00:00:00" -Person.create :gender => "female", :first_name => "Aimee", :middle_initial => "S", :last_name => "Hayward", :street_address => "35 Lane Street", :city => "Diamond Creek", :state => "VIC", :postcode => "3089", :email => "Aimee.S.Hayward@spambob.com", :birthday => "1968/11/21 00:00:00" -Person.create :gender => "male", :first_name => "Adam", :middle_initial => "S", :last_name => "Watkins", :street_address => "3 Magnolia Drive", :city => "Revesby Heights", :state => "NSW", :postcode => "2212", :email => "Adam.S.Watkins@spambob.com", :birthday => "1942/10/25 00:00:00" -Person.create :gender => "female", :first_name => "Ella", :middle_initial => "M", :last_name => "Clarke", :street_address => "92 Frouds Road", :city => "Cobberas", :state => "VIC", :postcode => "3900", :email => "Ella.M.Clarke@pookmail.com", :birthday => "1965/4/12 00:00:00" -Person.create :gender => "male", :first_name => "Reece", :middle_initial => "F", :last_name => "Mellor", :street_address => "97 English Street", :city => "Greenbanks", :state => "SA", :postcode => "5253", :email => "Reece.F.Mellor@trashymail.com", :birthday => "1973/9/30 00:00:00" -Person.create :gender => "female", :first_name => "Mollie", :middle_initial => "J", :last_name => "Lucas", :street_address => "44 Corio Street", :city => "Barongarook West", :state => "VIC", :postcode => "3249", :email => 
"Mollie.J.Lucas@trashymail.com", :birthday => "1948/11/28 00:00:00" -Person.create :gender => "male", :first_name => "Jordan", :middle_initial => "D", :last_name => "Butcher", :street_address => "7 Kintyre Street", :city => "Rochedale South", :state => "QLD", :postcode => "4123", :email => "Jordan.D.Butcher@spambob.com", :birthday => "1955/6/9 00:00:00" -Person.create :gender => "female", :first_name => "Mia", :middle_initial => "R", :last_name => "Alexander", :street_address => "74 Dora Creek", :city => "North Lismore", :state => "NSW", :postcode => "2480", :email => "Mia.R.Alexander@dodgit.com", :birthday => "1984/2/10 00:00:00" -Person.create :gender => "male", :first_name => "Reece", :middle_initial => "S", :last_name => "Griffin", :street_address => "34 Bette McNee Street", :city => "Nacurrie", :state => "NSW", :postcode => "2734", :email => "Reece.S.Griffin@dodgit.com", :birthday => "1963/4/1 00:00:00" -Person.create :gender => "female", :first_name => "Evie", :middle_initial => "J", :last_name => "Pugh", :street_address => "60 Adavale Road", :city => "Currawang", :state => "NSW", :postcode => "2580", :email => "Evie.J.Pugh@pookmail.com", :birthday => "1978/1/27 00:00:00" -Person.create :gender => "female", :first_name => "Paige", :middle_initial => "J", :last_name => "Todd", :street_address => "95 Southwell Crescent", :city => "Elgin", :state => "WA", :postcode => "6237", :email => "Paige.J.Todd@mailinator.com", :birthday => "1976/5/27 00:00:00" -Person.create :gender => "female", :first_name => "Katie", :middle_initial => "D", :last_name => "Baxter", :street_address => "70 Friar John Way", :city => "Leda", :state => "WA", :postcode => "6170", :email => "Katie.D.Baxter@pookmail.com", :birthday => "1985/12/18 00:00:00" -Person.create :gender => "male", :first_name => "Joe", :middle_initial => "P", :last_name => "Flynn", :street_address => "26 Lapko Road", :city => "Jacup", :state => "WA", :postcode => "6337", :email => "Joe.P.Flynn@dodgit.com", :birthday => "1981/7/28 00:00:00" -Person.create :gender => "male", :first_name => "Nathan", :middle_initial => "J", :last_name => "Barton", :street_address => "23 Oriana Street", :city => "Kiar", :state => "NSW", :postcode => "2259", :email => "Nathan.J.Barton@spambob.com", :birthday => "1958/5/26 00:00:00" -Person.create :gender => "female", :first_name => "Sienna", :middle_initial => "D", :last_name => "Barker", :street_address => "98 Watson Street", :city => "Roslynmead", :state => "VIC", :postcode => "3564", :email => "Sienna.D.Barker@mailinator.com", :birthday => "1980/1/27 00:00:00" -Person.create :gender => "male", :first_name => "Robert", :middle_initial => "E", :last_name => "Dyer", :street_address => "20 Marlin Avenue", :city => "Gunning", :state => "NSW", :postcode => "2581", :email => "Robert.E.Dyer@spambob.com", :birthday => "1971/5/1 00:00:00" -Person.create :gender => "female", :first_name => "Alisha", :middle_initial => "F", :last_name => "Lyons", :street_address => "10 Noalimba Avenue", :city => "Arding", :state => "NSW", :postcode => "2358", :email => "Alisha.F.Lyons@spambob.com", :birthday => "1943/2/25 00:00:00" -Person.create :gender => "female", :first_name => "Lucy", :middle_initial => "C", :last_name => "Norman", :street_address => "47 Pelican Road", :city => "Hobart", :state => "TAS", :postcode => "7001", :email => "Lucy.C.Norman@mailinator.com", :birthday => "1942/5/24 00:00:00" -Person.create :gender => "female", :first_name => "Lilly", :middle_initial => "M", :last_name => "Freeman", :street_address => "96 Ugoa 
Street", :city => "Mount George", :state => "NSW", :postcode => "2424", :email => "Lilly.M.Freeman@pookmail.com", :birthday => "1978/5/18 00:00:00" -Person.create :gender => "male", :first_name => "Riley", :middle_initial => "C", :last_name => "Henderson", :street_address => "55 Prince Street", :city => "Kungala", :state => "NSW", :postcode => "2460", :email => "Riley.C.Henderson@mailinator.com", :birthday => "1977/7/27 00:00:00" -Person.create :gender => "female", :first_name => "Maddison", :middle_initial => "L", :last_name => "Lane", :street_address => "3 Masthead Drive", :city => "Kawana", :state => "QLD", :postcode => "4701", :email => "Maddison.L.Lane@trashymail.com", :birthday => "1983/3/31 00:00:00" -Person.create :gender => "female", :first_name => "Alexandra", :middle_initial => "L", :last_name => "Chadwick", :street_address => "33 Ocean Pde", :city => "Buchanan", :state => "QLD", :postcode => "4816", :email => "Alexandra.L.Chadwick@mailinator.com", :birthday => "1957/1/31 00:00:00" -Person.create :gender => "male", :first_name => "Harvey", :middle_initial => "P", :last_name => "Russell", :street_address => "91 Villeneuve Street", :city => "Smoko", :state => "VIC", :postcode => "3741", :email => "Harvey.P.Russell@trashymail.com", :birthday => "1954/2/3 00:00:00" -Person.create :gender => "female", :first_name => "Courtney", :middle_initial => "Z", :last_name => "Jenkins", :street_address => "54 Dora Creek", :city => "Mountain Top", :state => "NSW", :postcode => "2480", :email => "Courtney.Z.Jenkins@trashymail.com", :birthday => "1957/10/30 00:00:00" -Person.create :gender => "female", :first_name => "Niamh", :middle_initial => "O", :last_name => "Armstrong", :street_address => "95 Cambridge Street", :city => "Emu Plains", :state => "NSW", :postcode => "2750", :email => "Niamh.O.Armstrong@dodgit.com", :birthday => "1985/10/30 00:00:00" -Person.create :gender => "male", :first_name => "Toby", :middle_initial => "C", :last_name => "Sutton", :street_address => "53 Bass Street", :city => "Booyong", :state => "NSW", :postcode => "2480", :email => "Toby.C.Sutton@mailinator.com", :birthday => "1941/8/13 00:00:00" -Person.create :gender => "male", :first_name => "Taylor", :middle_initial => "K", :last_name => "Lowe", :street_address => "74 Raglan Street", :city => "Ballogie", :state => "QLD", :postcode => "4610", :email => "Taylor.K.Lowe@mailinator.com", :birthday => "1975/3/24 00:00:00" -Person.create :gender => "male", :first_name => "Bradley", :middle_initial => "R", :last_name => "Cook", :street_address => "95 Peterho Boulevard", :city => "Hillier", :state => "SA", :postcode => "5116", :email => "Bradley.R.Cook@dodgit.com", :birthday => "1962/2/26 00:00:00" -Person.create :gender => "female", :first_name => "Danielle", :middle_initial => "L", :last_name => "Graham", :street_address => "25 Maintongoon Road", :city => "Walhalla", :state => "VIC", :postcode => "3825", :email => "Danielle.L.Graham@pookmail.com", :birthday => "1948/10/6 00:00:00" -Person.create :gender => "female", :first_name => "Louise", :middle_initial => "L", :last_name => "Collier", :street_address => "37 Thule Drive", :city => "Bethel", :state => "SA", :postcode => "5373", :email => "Louise.L.Collier@mailinator.com", :birthday => "1947/2/12 00:00:00" -Person.create :gender => "male", :first_name => "Dylan", :middle_initial => "G", :last_name => "Grant", :street_address => "11 Arthur Street", :city => "Burroway", :state => "NSW", :postcode => "2821", :email => "Dylan.G.Grant@dodgit.com", :birthday => "1954/11/14 
00:00:00" -Person.create :gender => "male", :first_name => "Elliot", :middle_initial => "M", :last_name => "Evans", :street_address => "39 Bresnahans Lane", :city => "Paddys River", :state => "NSW", :postcode => "2577", :email => "Elliot.M.Evans@spambob.com", :birthday => "1981/10/1 00:00:00" -Person.create :gender => "female", :first_name => "Kate", :middle_initial => "T", :last_name => "Phillips", :street_address => "93 Taltarni Road", :city => "Tottington", :state => "VIC", :postcode => "3478", :email => "Kate.T.Phillips@trashymail.com", :birthday => "1947/5/4 00:00:00" -Person.create :gender => "female", :first_name => "Isabelle", :middle_initial => "N", :last_name => "Simmons", :street_address => "80 Derry Street", :city => "Brendale Dc", :state => "QLD", :postcode => "4500", :email => "Isabelle.N.Simmons@dodgit.com", :birthday => "1963/4/14 00:00:00" -Person.create :gender => "male", :first_name => "Tyler", :middle_initial => "M", :last_name => "Green", :street_address => "61 Kintyre Street", :city => "Lyons", :state => "QLD", :postcode => "4124", :email => "Tyler.M.Green@trashymail.com", :birthday => "1947/12/11 00:00:00" -Person.create :gender => "male", :first_name => "Kian", :middle_initial => "S", :last_name => "Parry", :street_address => "44 Mt Berryman Road", :city => "Crows Nest", :state => "QLD", :postcode => "4355", :email => "Kian.S.Parry@mailinator.com", :birthday => "1952/6/15 00:00:00" -Person.create :gender => "male", :first_name => "Henry", :middle_initial => "D", :last_name => "Daly", :street_address => "11 Grandis Road", :city => "Temagog", :state => "NSW", :postcode => "2440", :email => "Henry.D.Daly@trashymail.com", :birthday => "1972/11/22 00:00:00" -Person.create :gender => "male", :first_name => "Riley", :middle_initial => "K", :last_name => "Gallagher", :street_address => "36 Ghost Hill Road", :city => "Vineyard", :state => "NSW", :postcode => "2765", :email => "Riley.K.Gallagher@dodgit.com", :birthday => "1940/6/10 00:00:00" -Person.create :gender => "male", :first_name => "Finley", :middle_initial => "L", :last_name => "Collins", :street_address => "8 Walpole Avenue", :city => "Paaratte", :state => "VIC", :postcode => "3268", :email => "Finley.L.Collins@dodgit.com", :birthday => "1975/7/17 00:00:00" -Person.create :gender => "female", :first_name => "Yasmin", :middle_initial => "M", :last_name => "Fraser", :street_address => "19 Forrest Road", :city => "Cobbora", :state => "NSW", :postcode => "2844", :email => "Yasmin.M.Fraser@spambob.com", :birthday => "1948/3/6 00:00:00" -Person.create :gender => "male", :first_name => "Kian", :middle_initial => "J", :last_name => "Wilson", :street_address => "29 Queen Street", :city => "Bayview", :state => "NSW", :postcode => "2104", :email => "Kian.J.Wilson@pookmail.com", :birthday => "1967/8/11 00:00:00" -Person.create :gender => "female", :first_name => "Hannah", :middle_initial => "L", :last_name => "Bird", :street_address => "34 Hill Street", :city => "Tiberias", :state => "TAS", :postcode => "7120", :email => "Hannah.L.Bird@trashymail.com", :birthday => "1967/1/7 00:00:00" -Person.create :gender => "female", :first_name => "Francesca", :middle_initial => "L", :last_name => "Hope", :street_address => "13 Hodgson St", :city => "Womina", :state => "QLD", :postcode => "4370", :email => "Francesca.L.Hope@trashymail.com", :birthday => "1984/12/20 00:00:00" -Person.create :gender => "female", :first_name => "Natasha", :middle_initial => "L", :last_name => "Blake", :street_address => "60 Masthead Drive", :city => "Limestone 
Creek", :state => "QLD", :postcode => "4701", :email => "Natasha.L.Blake@dodgit.com", :birthday => "1960/2/3 00:00:00" -Person.create :gender => "male", :first_name => "Evan", :middle_initial => "D", :last_name => "Warren", :street_address => "57 Ferny Avenue", :city => "Givelda", :state => "QLD", :postcode => "4670", :email => "Evan.D.Warren@pookmail.com", :birthday => "1943/7/17 00:00:00" -Person.create :gender => "female", :first_name => "Francesca", :middle_initial => "T", :last_name => "Stevenson", :street_address => "84 Wynyard Street", :city => "Gilmore", :state => "NSW", :postcode => "2720", :email => "Francesca.T.Stevenson@spambob.com", :birthday => "1981/8/10 00:00:00" -Person.create :gender => "female", :first_name => "Scarlett", :middle_initial => "M", :last_name => "Parkin", :street_address => "58 Lane Street", :city => "Kew", :state => "VIC", :postcode => "3101", :email => "Scarlett.M.Parkin@mailinator.com", :birthday => "1954/2/20 00:00:00" -Person.create :gender => "female", :first_name => "Kayleigh", :middle_initial => "S", :last_name => "Moss", :street_address => "87 Atkinson Way", :city => "Mount Anketell", :state => "WA", :postcode => "6714", :email => "Kayleigh.S.Moss@pookmail.com", :birthday => "1963/8/29 00:00:00" -Person.create :gender => "male", :first_name => "Evan", :middle_initial => "V", :last_name => "Mellor", :street_address => "75 George Street", :city => "Eromanga", :state => "QLD", :postcode => "4480", :email => "Evan.V.Mellor@pookmail.com", :birthday => "1948/10/1 00:00:00" -Person.create :gender => "female", :first_name => "Tilly", :middle_initial => "L", :last_name => "Waters", :street_address => "47 Fernleigh Ave", :city => "Ponto", :state => "NSW", :postcode => "2831", :email => "Tilly.L.Waters@mailinator.com", :birthday => "1976/5/31 00:00:00" -Person.create :gender => "male", :first_name => "Billy", :middle_initial => "G", :last_name => "Woods", :street_address => "16 Redesdale Rd", :city => "Toolleen", :state => "VIC", :postcode => "3551", :email => "Billy.G.Woods@dodgit.com", :birthday => "1942/3/17 00:00:00" -Person.create :gender => "male", :first_name => "Aaron", :middle_initial => "E", :last_name => "Newman", :street_address => "84 Rupara Street", :city => "Nailsworth", :state => "SA", :postcode => "5083", :email => "Aaron.E.Newman@spambob.com", :birthday => "1983/5/20 00:00:00" -Person.create :gender => "female", :first_name => "Jade", :middle_initial => "D", :last_name => "Randall", :street_address => "8 Learmouth Street", :city => "Yatchaw", :state => "VIC", :postcode => "3301", :email => "Jade.D.Randall@pookmail.com", :birthday => "1947/6/7 00:00:00" -Person.create :gender => "male", :first_name => "Robert", :middle_initial => "A", :last_name => "Cunningham", :street_address => "57 Shell Road", :city => "Hordern Vale", :state => "VIC", :postcode => "3238", :email => "Robert.A.Cunningham@mailinator.com", :birthday => "1956/9/10 00:00:00" -Person.create :gender => "female", :first_name => "Isabelle", :middle_initial => "A", :last_name => "Simmons", :street_address => "72 Endeavour Drive", :city => "Tooligie", :state => "SA", :postcode => "5607", :email => "Isabelle.A.Simmons@trashymail.com", :birthday => "1955/11/15 00:00:00" -Person.create :gender => "male", :first_name => "Jack", :middle_initial => "T", :last_name => "Wade", :street_address => "66 Walter Crescent", :city => "Broughton", :state => "NSW", :postcode => "2535", :email => "Jack.T.Wade@trashymail.com", :birthday => "1956/10/10 00:00:00" -Person.create :gender => "female", 
:first_name => "Lucy", :middle_initial => "E", :last_name => "Ali", :street_address => "59 Seaview Court", :city => "Victoria Plains", :state => "QLD", :postcode => "4751", :email => "Lucy.E.Ali@spambob.com", :birthday => "1954/10/29 00:00:00" -Person.create :gender => "male", :first_name => "Zachary", :middle_initial => "R", :last_name => "O'onnor", :street_address => "5 Wharf St", :city => "Holgate", :state => "NSW", :postcode => "2250", :email => "Zachary.R.O'onnor@trashymail.com", :birthday => "1969/7/26 00:00:00" -Person.create :gender => "male", :first_name => "Zachary", :middle_initial => "K", :last_name => "Lucas", :street_address => "71 Gregory Way", :city => "Mandurah North", :state => "WA", :postcode => "6210", :email => "Zachary.K.Lucas@trashymail.com", :birthday => "1963/3/26 00:00:00" -Person.create :gender => "female", :first_name => "Phoebe", :middle_initial => "O", :last_name => "Duncan", :street_address => "2 Berambing Crescent", :city => "Doonside", :state => "NSW", :postcode => "2767", :email => "Phoebe.O.Duncan@spambob.com", :birthday => "1950/4/15 00:00:00" -Person.create :gender => "female", :first_name => "Molly", :middle_initial => "G", :last_name => "Marsden", :street_address => "19 Panorama Road", :city => "Weabonga", :state => "NSW", :postcode => "2340", :email => "Molly.G.Marsden@mailinator.com", :birthday => "1940/8/30 00:00:00" -Person.create :gender => "male", :first_name => "Scott", :middle_initial => "L", :last_name => "Banks", :street_address => "27 Dalgarno Street", :city => "Jews Lagoon", :state => "NSW", :postcode => "2388", :email => "Scott.L.Banks@pookmail.com", :birthday => "1982/9/14 00:00:00" -Person.create :gender => "female", :first_name => "Courtney", :middle_initial => "K", :last_name => "Cooper", :street_address => "61 Railway Street", :city => "Greenmount East", :state => "QLD", :postcode => "4359", :email => "Courtney.K.Cooper@pookmail.com", :birthday => "1949/7/29 00:00:00" -Person.create :gender => "male", :first_name => "Michael", :middle_initial => "R", :last_name => "Marsden", :street_address => "46 Glenpark Road", :city => "Tucabia", :state => "NSW", :postcode => "2462", :email => "Michael.R.Marsden@trashymail.com", :birthday => "1972/8/25 00:00:00" -Person.create :gender => "male", :first_name => "Owen", :middle_initial => "R", :last_name => "Howe", :street_address => "45 Pipeclay Road", :city => "Wauchope", :state => "NSW", :postcode => "2446", :email => "Owen.R.Howe@dodgit.com", :birthday => "1964/6/26 00:00:00" -Person.create :gender => "female", :first_name => "Francesca", :middle_initial => "J", :last_name => "Moran", :street_address => "7 Masthead Drive", :city => "Bororen", :state => "QLD", :postcode => "4678", :email => "Francesca.J.Moran@spambob.com", :birthday => "1960/2/17 00:00:00" -Person.create :gender => "female", :first_name => "Courtney", :middle_initial => "E", :last_name => "Phillips", :street_address => "33 Atkinson Way", :city => "Bilingurr", :state => "WA", :postcode => "6725", :email => "Courtney.E.Phillips@trashymail.com", :birthday => "1951/6/18 00:00:00" -Person.create :gender => "female", :first_name => "Paige", :middle_initial => "O", :last_name => "Nash", :street_address => "73 Hillsdale Road", :city => "Teebar", :state => "QLD", :postcode => "4620", :email => "Paige.O.Nash@spambob.com", :birthday => "1984/3/3 00:00:00" -Person.create :gender => "female", :first_name => "Georgia", :middle_initial => "T", :last_name => "Charlton", :street_address => "14 Quayside Vista", :city => "Tomerong", :state => 
"NSW", :postcode => "2540", :email => "Georgia.T.Charlton@spambob.com", :birthday => "1978/6/28 00:00:00" -Person.create :gender => "male", :first_name => "Rhys", :middle_initial => "N", :last_name => "Rees", :street_address => "50 South Street", :city => "Glaziers Bay", :state => "TAS", :postcode => "7109", :email => "Rhys.N.Rees@spambob.com", :birthday => "1941/8/6 00:00:00" -Person.create :gender => "male", :first_name => "Jack", :middle_initial => "L", :last_name => "Bryant", :street_address => "3 Edgewater Close", :city => "Cambewarra", :state => "NSW", :postcode => "2540", :email => "Jack.L.Bryant@trashymail.com", :birthday => "1957/3/28 00:00:00" -Person.create :gender => "male", :first_name => "Isaac", :middle_initial => "J", :last_name => "Hughes", :street_address => "62 Bowden Street", :city => "La Perouse", :state => "NSW", :postcode => "2036", :email => "Isaac.J.Hughes@pookmail.com", :birthday => "1967/9/23 00:00:00" -Person.create :gender => "female", :first_name => "Ava", :middle_initial => "P", :last_name => "Schofield", :street_address => "19 Spencer Street", :city => "Goomeribong", :state => "QLD", :postcode => "4601", :email => "Ava.P.Schofield@spambob.com", :birthday => "1968/7/30 00:00:00" -Person.create :gender => "female", :first_name => "Chloe", :middle_initial => "H", :last_name => "Wilkinson", :street_address => "20 Treasure Island Avenue", :city => "Sarabah", :state => "QLD", :postcode => "4275", :email => "Chloe.H.Wilkinson@pookmail.com", :birthday => "1967/2/3 00:00:00" -Person.create :gender => "female", :first_name => "Samantha", :middle_initial => "E", :last_name => "Berry", :street_address => "31 Woodwark Crescent", :city => "Badu Island", :state => "QLD", :postcode => "4875", :email => "Samantha.E.Berry@dodgit.com", :birthday => "1973/2/18 00:00:00" -Person.create :gender => "female", :first_name => "Cerys", :middle_initial => "L", :last_name => "Ferguson", :street_address => "49 Bathurst Road", :city => "Wiagdon", :state => "NSW", :postcode => "2795", :email => "Cerys.L.Ferguson@pookmail.com", :birthday => "1958/7/24 00:00:00" -Person.create :gender => "female", :first_name => "Isabelle", :middle_initial => "G", :last_name => "Sharpe", :street_address => "96 Ocean Street", :city => "Wollogorang Station", :state => "NT", :postcode => "0862", :email => "Isabelle.G.Sharpe@spambob.com", :birthday => "1983/9/15 00:00:00" -Person.create :gender => "female", :first_name => "Rachel", :middle_initial => "M", :last_name => "Wells", :street_address => "14 Ronald Crescent", :city => "Boyne Island", :state => "QLD", :postcode => "4680", :email => "Rachel.M.Wells@trashymail.com", :birthday => "1968/5/29 00:00:00" -Person.create :gender => "male", :first_name => "Dylan", :middle_initial => "C", :last_name => "Parkinson", :street_address => "12 Davis Street", :city => "Kelvin Grove Dc", :state => "QLD", :postcode => "4059", :email => "Dylan.C.Parkinson@trashymail.com", :birthday => "1966/10/21 00:00:00" -Person.create :gender => "female", :first_name => "Matilda", :middle_initial => "B", :last_name => "Parsons", :street_address => "74 Commercial Street", :city => "Woodend North", :state => "VIC", :postcode => "3442", :email => "Matilda.B.Parsons@trashymail.com", :birthday => "1942/8/2 00:00:00" -Person.create :gender => "female", :first_name => "Scarlett", :middle_initial => "M", :last_name => "May", :street_address => "74 Wigley Street", :city => "Bedford Park", :state => "SA", :postcode => "5042", :email => "Scarlett.M.May@spambob.com", :birthday => "1977/10/12 
00:00:00" -Person.create :gender => "male", :first_name => "Aidan", :middle_initial => "A", :last_name => "Chamberlain", :street_address => "51 Roseda-Tinamba Road", :city => "Turtons Creek", :state => "VIC", :postcode => "3960", :email => "Aidan.A.Chamberlain@trashymail.com", :birthday => "1955/4/9 00:00:00" -Person.create :gender => "female", :first_name => "Ellie", :middle_initial => "J", :last_name => "Greenwood", :street_address => "37 Webb Road", :city => "Hillsborough", :state => "NSW", :postcode => "2290", :email => "Ellie.J.Greenwood@trashymail.com", :birthday => "1970/12/17 00:00:00" -Person.create :gender => "male", :first_name => "Christopher", :middle_initial => "Z", :last_name => "McDonald", :street_address => "73 Kooljak Rd", :city => "Sabina River", :state => "WA", :postcode => "6280", :email => "Christopher.Z.McDonald@trashymail.com", :birthday => "1961/6/15 00:00:00" -Person.create :gender => "female", :first_name => "Amber", :middle_initial => "T", :last_name => "Arnold", :street_address => "28 Grandis Road", :city => "Lower Creek", :state => "NSW", :postcode => "2440", :email => "Amber.T.Arnold@dodgit.com", :birthday => "1961/3/1 00:00:00" -Person.create :gender => "male", :first_name => "Josh", :middle_initial => "N", :last_name => "Hardy", :street_address => "74 Thone Street", :city => "Killawarra", :state => "NSW", :postcode => "2429", :email => "Josh.N.Hardy@spambob.com", :birthday => "1944/9/13 00:00:00" -Person.create :gender => "female", :first_name => "Maddison", :middle_initial => "M", :last_name => "Pollard", :street_address => "44 Adavale Road", :city => "Six Mile Flat", :state => "NSW", :postcode => "2580", :email => "Maddison.M.Pollard@spambob.com", :birthday => "1945/6/27 00:00:00" -Person.create :gender => "female", :first_name => "Holly", :middle_initial => "J", :last_name => "Hughes", :street_address => "93 Yangan Drive", :city => "Chilcotts Creek", :state => "NSW", :postcode => "2339", :email => "Holly.J.Hughes@trashymail.com", :birthday => "1951/8/6 00:00:00" -Person.create :gender => "male", :first_name => "Bailey", :middle_initial => "N", :last_name => "Martin", :street_address => "87 Moruya Road", :city => "Farringdon", :state => "NSW", :postcode => "2622", :email => "Bailey.N.Martin@trashymail.com", :birthday => "1980/7/3 00:00:00" -Person.create :gender => "male", :first_name => "Ryan", :middle_initial => "R", :last_name => "O'ullivan", :street_address => "55 Taylor Street", :city => "Waaia", :state => "VIC", :postcode => "3637", :email => "Ryan.R.O'ullivan@spambob.com", :birthday => "1981/4/26 00:00:00" -Person.create :gender => "female", :first_name => "Georgia", :middle_initial => "B", :last_name => "Gough", :street_address => "29 Bungana Drive", :city => "Brinkworth", :state => "SA", :postcode => "5464", :email => "Georgia.B.Gough@pookmail.com", :birthday => "1984/2/7 00:00:00" -Person.create :gender => "male", :first_name => "Edward", :middle_initial => "N", :last_name => "Harper", :street_address => "38 Taylor Street", :city => "Katandra", :state => "VIC", :postcode => "3634", :email => "Edward.N.Harper@trashymail.com", :birthday => "1980/7/31 00:00:00" -Person.create :gender => "male", :first_name => "Christopher", :middle_initial => "A", :last_name => "Cross", :street_address => "11 Rimbanda Road", :city => "Tungsten", :state => "NSW", :postcode => "2371", :email => "Christopher.A.Cross@spambob.com", :birthday => "1963/12/13 00:00:00" -Person.create :gender => "male", :first_name => "Ethan", :middle_initial => "P", :last_name => "Archer", 
:street_address => "15 Spencer Street", :city => "Yaroomba", :state => "QLD", :postcode => "4573", :email => "Ethan.P.Archer@spambob.com", :birthday => "1963/10/12 00:00:00" -Person.create :gender => "female", :first_name => "Sarah", :middle_initial => "B", :last_name => "Dean", :street_address => "86 Bellion Drive", :city => "Jalbarragup", :state => "WA", :postcode => "6275", :email => "Sarah.B.Dean@mailinator.com", :birthday => "1948/4/29 00:00:00" -Person.create :gender => "male", :first_name => "Henry", :middle_initial => "T", :last_name => "Randall", :street_address => "22 Amiens Road", :city => "Long Creek", :state => "NSW", :postcode => "2850", :email => "Henry.T.Randall@dodgit.com", :birthday => "1946/8/10 00:00:00" -Person.create :gender => "male", :first_name => "Rhys", :middle_initial => "C", :last_name => "Pearson", :street_address => "30 Paradise Falls Road", :city => "Bowser", :state => "VIC", :postcode => "3678", :email => "Rhys.C.Pearson@spambob.com", :birthday => "1973/5/12 00:00:00" -Person.create :gender => "female", :first_name => "Natasha", :middle_initial => "C", :last_name => "Kennedy", :street_address => "79 McKillop Street", :city => "Berrambool", :state => "VIC", :postcode => "3379", :email => "Natasha.C.Kennedy@trashymail.com", :birthday => "1953/6/19 00:00:00" -Person.create :gender => "female", :first_name => "Lily", :middle_initial => "D", :last_name => "Gray", :street_address => "75 Shaw Drive", :city => "Eddington", :state => "VIC", :postcode => "3472", :email => "Lily.D.Gray@dodgit.com", :birthday => "1968/3/1 00:00:00" -Person.create :gender => "female", :first_name => "Ella", :middle_initial => "H", :last_name => "Woodward", :street_address => "1 Sullivan Court", :city => "Boolite", :state => "VIC", :postcode => "3480", :email => "Ella.H.Woodward@mailinator.com", :birthday => "1979/10/19 00:00:00" -Person.create :gender => "male", :first_name => "Andrew", :middle_initial => "E", :last_name => "Sanderson", :street_address => "59 Myrtle Street", :city => "Yarrawonga", :state => "VIC", :postcode => "3730", :email => "Andrew.E.Sanderson@mailinator.com", :birthday => "1954/12/13 00:00:00" -Person.create :gender => "female", :first_name => "Niamh", :middle_initial => "T", :last_name => "Gordon", :street_address => "54 Eshelby Drive", :city => "Rangewood", :state => "QLD", :postcode => "4817", :email => "Niamh.T.Gordon@pookmail.com", :birthday => "1977/4/17 00:00:00" -Person.create :gender => "female", :first_name => "Lola", :middle_initial => "O", :last_name => "Cole", :street_address => "93 Armstrong Street", :city => "Mincha", :state => "VIC", :postcode => "3575", :email => "Lola.O.Cole@pookmail.com", :birthday => "1985/5/10 00:00:00" -Person.create :gender => "female", :first_name => "Kayleigh", :middle_initial => "A", :last_name => "Kaur", :street_address => "97 Bayview Close", :city => "Orange Creek", :state => "QLD", :postcode => "4715", :email => "Kayleigh.A.Kaur@trashymail.com", :birthday => "1953/3/9 00:00:00" -Person.create :gender => "male", :first_name => "Ewan", :middle_initial => "M", :last_name => "Bradley", :street_address => "56 Butler Crescent", :city => "Sweetmans Creek", :state => "NSW", :postcode => "2325", :email => "Ewan.M.Bradley@spambob.com", :birthday => "1966/4/13 00:00:00" -Person.create :gender => "female", :first_name => "Daisy", :middle_initial => "S", :last_name => "Peters", :street_address => "6 Cassinia Street", :city => "Darbalara", :state => "NSW", :postcode => "2722", :email => "Daisy.S.Peters@spambob.com", :birthday => 
"1969/6/27 00:00:00" -Person.create :gender => "male", :first_name => "Jordan", :middle_initial => "B", :last_name => "Stephens", :street_address => "63 Shaw Drive", :city => "Elmhurst", :state => "VIC", :postcode => "3469", :email => "Jordan.B.Stephens@trashymail.com", :birthday => "1977/8/9 00:00:00" -Person.create :gender => "female", :first_name => "Charlie", :middle_initial => "E", :last_name => "Davey", :street_address => "27 Creek Street", :city => "Southwood", :state => "QLD", :postcode => "4406", :email => "Charlie.E.Davey@trashymail.com", :birthday => "1981/11/13 00:00:00" -Person.create :gender => "male", :first_name => "Robert", :middle_initial => "D", :last_name => "Fraser", :street_address => "46 Myrtle Street", :city => "Yarrawonga South", :state => "VIC", :postcode => "3730", :email => "Robert.D.Fraser@mailinator.com", :birthday => "1948/12/23 00:00:00" -Person.create :gender => "male", :first_name => "Jake", :middle_initial => "M", :last_name => "Willis", :street_address => "14 Faulkner Street", :city => "Thalgarrah", :state => "NSW", :postcode => "2350", :email => "Jake.M.Willis@spambob.com", :birthday => "1964/3/24 00:00:00" -Person.create :gender => "male", :first_name => "Oliver", :middle_initial => "E", :last_name => "Middleton", :street_address => "64 Meyer Road", :city => "Krondorf", :state => "SA", :postcode => "5352", :email => "Oliver.E.Middleton@spambob.com", :birthday => "1951/11/21 00:00:00" -Person.create :gender => "female", :first_name => "Katie", :middle_initial => "H", :last_name => "Archer", :street_address => "20 Gregory Way", :city => "Wagerup", :state => "WA", :postcode => "6215", :email => "Katie.H.Archer@mailinator.com", :birthday => "1971/5/9 00:00:00" -Person.create :gender => "male", :first_name => "Jacob", :middle_initial => "G", :last_name => "Nicholls", :street_address => "54 Healy Road", :city => "Wilga West", :state => "WA", :postcode => "6243", :email => "Jacob.G.Nicholls@pookmail.com", :birthday => "1965/9/17 00:00:00" -Person.create :gender => "male", :first_name => "Harrison", :middle_initial => "K", :last_name => "Russell", :street_address => "12 Banksia Street", :city => "Nunile", :state => "WA", :postcode => "6566", :email => "Harrison.K.Russell@trashymail.com", :birthday => "1961/8/6 00:00:00" -Person.create :gender => "female", :first_name => "Poppy", :middle_initial => "C", :last_name => "Miller", :street_address => "87 Jacolite Street", :city => "Wangara Dc", :state => "WA", :postcode => "6065", :email => "Poppy.C.Miller@trashymail.com", :birthday => "1975/6/22 00:00:00" -Person.create :gender => "female", :first_name => "Mollie", :middle_initial => "M", :last_name => "Stewart", :street_address => "69 Henry Moss Court", :city => "Germein Bay", :state => "SA", :postcode => "5495", :email => "Mollie.M.Stewart@trashymail.com", :birthday => "1968/6/19 00:00:00" -Person.create :gender => "male", :first_name => "Kian", :middle_initial => "M", :last_name => "Barker", :street_address => "75 Girvan Grove", :city => "Robinvale", :state => "VIC", :postcode => "3549", :email => "Kian.M.Barker@pookmail.com", :birthday => "1980/3/25 00:00:00" -Person.create :gender => "female", :first_name => "Skye", :middle_initial => "G", :last_name => "Hart", :street_address => "17 Banksia Street", :city => "Wooroloo", :state => "WA", :postcode => "6558", :email => "Skye.G.Hart@mailinator.com", :birthday => "1974/10/19 00:00:00" -Person.create :gender => "male", :first_name => "Charles", :middle_initial => "R", :last_name => "Kemp", :street_address => "54 
Seiferts Rd", :city => "Byfield", :state => "QLD", :postcode => "4703", :email => "Charles.R.Kemp@mailinator.com", :birthday => "1943/10/28 00:00:00" -Person.create :gender => "female", :first_name => "Evie", :middle_initial => "H", :last_name => "Atkinson", :street_address => "72 Peninsula Drive", :city => "Engadine", :state => "NSW", :postcode => "2233", :email => "Evie.H.Atkinson@trashymail.com", :birthday => "1975/8/27 00:00:00" -Person.create :gender => "female", :first_name => "Hollie", :middle_initial => "S", :last_name => "John", :street_address => "60 Seaview Court", :city => "Sarina", :state => "QLD", :postcode => "4737", :email => "Hollie.S.John@dodgit.com", :birthday => "1979/6/2 00:00:00" -Person.create :gender => "male", :first_name => "Nathan", :middle_initial => "B", :last_name => "Kemp", :street_address => "21 Jones Road", :city => "Wacol", :state => "QLD", :postcode => "4076", :email => "Nathan.B.Kemp@dodgit.com", :birthday => "1940/9/30 00:00:00" -Person.create :gender => "female", :first_name => "Demi", :middle_initial => "J", :last_name => "Whitehead", :street_address => "35 Wallis Street", :city => "Randwick", :state => "NSW", :postcode => "2031", :email => "Demi.J.Whitehead@mailinator.com", :birthday => "1962/11/11 00:00:00" -Person.create :gender => "male", :first_name => "Jacob", :middle_initial => "L", :last_name => "Knight", :street_address => "28 Cubbine Road", :city => "Turkey Hill", :state => "WA", :postcode => "6426", :email => "Jacob.L.Knight@mailinator.com", :birthday => "1959/10/29 00:00:00" -Person.create :gender => "female", :first_name => "Maisie", :middle_initial => "J", :last_name => "Pritchard", :street_address => "7 Nerrigundah Drive", :city => "Cardinia", :state => "VIC", :postcode => "3978", :email => "Maisie.J.Pritchard@mailinator.com", :birthday => "1970/9/15 00:00:00" -Person.create :gender => "female", :first_name => "Rosie", :middle_initial => "K", :last_name => "Adams", :street_address => "72 Webb Road", :city => "New Lambton Heights", :state => "NSW", :postcode => "2305", :email => "Rosie.K.Adams@pookmail.com", :birthday => "1948/8/13 00:00:00" -Person.create :gender => "female", :first_name => "Nicole", :middle_initial => "S", :last_name => "Shaw", :street_address => "4 High Street", :city => "Warooka", :state => "SA", :postcode => "5577", :email => "Nicole.S.Shaw@trashymail.com", :birthday => "1983/12/20 00:00:00" -Person.create :gender => "female", :first_name => "Matilda", :middle_initial => "N", :last_name => "Gardiner", :street_address => "12 Church Street", :city => "Bordertown", :state => "SA", :postcode => "5268", :email => "Matilda.N.Gardiner@mailinator.com", :birthday => "1968/11/14 00:00:00" -Person.create :gender => "male", :first_name => "George", :middle_initial => "N", :last_name => "Patterson", :street_address => "86 Hill Street", :city => "Oyster Cove", :state => "TAS", :postcode => "7150", :email => "George.N.Patterson@mailinator.com", :birthday => "1956/9/7 00:00:00" -Person.create :gender => "female", :first_name => "Isobel", :middle_initial => "J", :last_name => "West", :street_address => "85 Quoin Road", :city => "Rushy Lagoon", :state => "TAS", :postcode => "7264", :email => "Isobel.J.West@dodgit.com", :birthday => "1968/7/29 00:00:00" -Person.create :gender => "female", :first_name => "Freya", :middle_initial => "B", :last_name => "Gregory", :street_address => "16 Gregory Way", :city => "Coolup", :state => "WA", :postcode => "6214", :email => "Freya.B.Gregory@pookmail.com", :birthday => "1974/7/26 00:00:00" 
-Person.create :gender => "female", :first_name => "Madeleine", :middle_initial => "J", :last_name => "Chandler", :street_address => "75 Clifton Street", :city => "Narrung", :state => "VIC", :postcode => "3597", :email => "Madeleine.J.Chandler@dodgit.com", :birthday => "1940/1/4 00:00:00"
-Person.create :gender => "female", :first_name => "Nicole", :middle_initial => "B", :last_name => "Reynolds", :street_address => "34 Point Walter Road", :city => "South Lake", :state => "WA", :postcode => "6164", :email => "Nicole.B.Reynolds@pookmail.com", :birthday => "1969/3/22 00:00:00"
-Person.create :gender => "male", :first_name => "Jude", :middle_initial => "I", :last_name => "Baldwin", :street_address => "63 George Street", :city => "Cooladdi", :state => "QLD", :postcode => "4479", :email => "Jude.I.Baldwin@mailinator.com", :birthday => "1964/8/5 00:00:00"
-Person.create :gender => "female", :first_name => "Lilly", :middle_initial => "B", :last_name => "Brady", :street_address => "38 Lapko Road", :city => "Magitup", :state => "WA", :postcode => "6338", :email => "Lilly.B.Brady@mailinator.com", :birthday => "1972/2/23 00:00:00"
-Person.create :gender => "female", :first_name => "Francesca", :middle_initial => "T", :last_name => "Harvey", :street_address => "85 Corio Street", :city => "Wensleydale", :state => "VIC", :postcode => "3241", :email => "Francesca.T.Harvey@mailinator.com", :birthday => "1985/9/9 00:00:00"
-Person.create :gender => "male", :first_name => "Oliver", :middle_initial => "L", :last_name => "Davey", :street_address => "3 Monteagle Road", :city => "Braddon", :state => "ACT", :postcode => "2612", :email => "Oliver.L.Davey@dodgit.com", :birthday => "1979/6/18 00:00:00"
-Person.create :gender => "male", :first_name => "Taylor", :middle_initial => "L", :last_name => "Ferguson", :street_address => "67 Norton Street", :city => "Spit Junction", :state => "NSW", :postcode => "2088", :email => "Taylor.L.Ferguson@trashymail.com", :birthday => "1974/12/27 00:00:00"
-Person.create :gender => "female", :first_name => "Sarah", :middle_initial => "F", :last_name => "Slater", :street_address => "39 Faunce Crescent", :city => "Burgooney", :state => "NSW", :postcode => "2672", :email => "Sarah.F.Slater@spambob.com", :birthday => "1944/2/25 00:00:00"
-Person.create :gender => "female", :first_name => "Ava", :middle_initial => "M", :last_name => "Connolly", :street_address => "85 Chapman Avenue", :city => "Duckmaloi", :state => "NSW", :postcode => "2787", :email => "Ava.M.Connolly@dodgit.com", :birthday => "1984/9/13 00:00:00"
-Person.create :gender => "female", :first_name => "Summer", :middle_initial => "J", :last_name => "Higgins", :street_address => "45 Farnell Street", :city => "Cumnock", :state => "NSW", :postcode => "2867", :email => "Summer.J.Higgins@pookmail.com", :birthday => "1964/8/23 00:00:00"
-Person.create :gender => "male", :first_name => "Alfie", :middle_initial => "A", :last_name => "Quinn", :street_address => "81 Pipeclay Road", :city => "Debenham", :state => "NSW", :postcode => "2446", :email => "Alfie.A.Quinn@dodgit.com", :birthday => "1956/9/26 00:00:00"
-Person.create :gender => "female", :first_name => "Isabelle", :middle_initial => "R", :last_name => "White", :street_address => "40 Benny Street", :city => "Paloona", :state => "TAS", :postcode => "7310", :email => "Isabelle.R.White@pookmail.com", :birthday => "1980/7/12 00:00:00"
-Person.create :gender => "female", :first_name => "Madison", :middle_initial => "L", :last_name => "Turnbull", :street_address => "14 Shell Road", :city => "Big Hill", :state => "VIC", :postcode => "3231", :email => "Madison.L.Turnbull@dodgit.com", :birthday => "1962/8/21 00:00:00"
-Person.create :gender => "male", :first_name => "Callum", :middle_initial => "J", :last_name => "Dunn", :street_address => "24 Cornish Street", :city => "Chartwell", :state => "VIC", :postcode => "3030", :email => "Callum.J.Dunn@spambob.com", :birthday => "1956/6/20 00:00:00"
-Person.create :gender => "male", :first_name => "Jamie", :middle_initial => "R", :last_name => "Ford", :street_address => "67 Eshelby Drive", :city => "Saunders Beach", :state => "QLD", :postcode => "4818", :email => "Jamie.R.Ford@pookmail.com", :birthday => "1981/11/21 00:00:00"
-Person.create :gender => "male", :first_name => "James", :middle_initial => "S", :last_name => "Chamberlain", :street_address => "44 Boonah Qld", :city => "Fulham", :state => "QLD", :postcode => "4313", :email => "James.S.Chamberlain@pookmail.com", :birthday => "1951/2/4 00:00:00"
-Person.create :gender => "male", :first_name => "Adam", :middle_initial => "S", :last_name => "Fuller", :street_address => "19 Devon Street", :city => "Marleston Dc", :state => "SA", :postcode => "5033", :email => "Adam.S.Fuller@mailinator.com", :birthday => "1976/11/11 00:00:00"
-Person.create :gender => "male", :first_name => "Zachary", :middle_initial => "J", :last_name => "Farmer", :street_address => "1 Walters Street", :city => "Benalla West", :state => "VIC", :postcode => "3672", :email => "Zachary.J.Farmer@trashymail.com", :birthday => "1980/12/16 00:00:00"
-Person.create :gender => "male", :first_name => "Alexander", :middle_initial => "F", :last_name => "Kent", :street_address => "28 Maritime Avenue", :city => "Leeuwin", :state => "WA", :postcode => "6290", :email => "Alexander.F.Kent@trashymail.com", :birthday => "1947/6/19 00:00:00"
-Person.create :gender => "female", :first_name => "Kayleigh", :middle_initial => "J", :last_name => "Sharp", :street_address => "63 Loris Way", :city => "Cuballing", :state => "WA", :postcode => "6311", :email => "Kayleigh.J.Sharp@pookmail.com", :birthday => "1954/7/31 00:00:00"
-Person.create :gender => "male", :first_name => "Henry", :middle_initial => "M", :last_name => "Howe", :street_address => "23 Fergusson Street", :city => "Backmede", :state => "NSW", :postcode => "2470", :email => "Henry.M.Howe@dodgit.com", :birthday => "1966/3/7 00:00:00"
-Person.create :gender => "female", :first_name => "Ruby", :middle_initial => "J", :last_name => "Barton", :street_address => "37 Glenpark Road", :city => "Lower Bucca", :state => "NSW", :postcode => "2450", :email => "Ruby.J.Barton@dodgit.com", :birthday => "1968/11/18 00:00:00"
-Person.create :gender => "female", :first_name => "Jade", :middle_initial => "H", :last_name => "Sullivan", :street_address => "87 Thyme Avenue", :city => "Pilton", :state => "QLD", :postcode => "4361", :email => "Jade.H.Sullivan@mailinator.com", :birthday => "1979/7/14 00:00:00"
-Person.create :gender => "male", :first_name => "Harvey", :middle_initial => "E", :last_name => "Law", :street_address => "13 Panorama Road", :city => "Woolomin", :state => "NSW", :postcode => "2340", :email => "Harvey.E.Law@pookmail.com", :birthday => "1959/4/30 00:00:00"
-Person.create :gender => "male", :first_name => "Logan", :middle_initial => "A", :last_name => "Riley", :street_address => "46 Darwinia Loop", :city => "Nanutarra", :state => "WA", :postcode => "6751", :email => "Logan.A.Riley@pookmail.com", :birthday => "1951/10/14 00:00:00"
-Person.create :gender => "male", :first_name => "Patrick", :middle_initial => "P", :last_name => "Johnson", :street_address => "92 Daly Terrace", :city => "Kinross", :state => "WA", :postcode => "6028", :email => "Patrick.P.Johnson@pookmail.com", :birthday => "1950/2/13 00:00:00"
-Person.create :gender => "female", :first_name => "Sarah", :middle_initial => "M", :last_name => "Joyce", :street_address => "59 High Street", :city => "Bluff Beach", :state => "SA", :postcode => "5575", :email => "Sarah.M.Joyce@mailinator.com", :birthday => "1985/11/20 00:00:00"
-Person.create :gender => "male", :first_name => "Tom", :middle_initial => "N", :last_name => "Morley", :street_address => "85 Saggers Road", :city => "Walyurin", :state => "WA", :postcode => "6363", :email => "Tom.N.Morley@trashymail.com", :birthday => "1959/12/19 00:00:00"
-Person.create :gender => "female", :first_name => "Emily", :middle_initial => "C", :last_name => "Andrews", :street_address => "68 Souttar Terrace", :city => "Herdsman", :state => "WA", :postcode => "6017", :email => "Emily.C.Andrews@trashymail.com", :birthday => "1971/4/1 00:00:00"
-Person.create :gender => "male", :first_name => "Finlay", :middle_initial => "K", :last_name => "James", :street_address => "22 Blairgowrie Avenue", :city => "Middle Flat", :state => "NSW", :postcode => "2630", :email => "Finlay.K.James@spambob.com", :birthday => "1955/12/25 00:00:00"
-Person.create :gender => "female", :first_name => "Victoria", :middle_initial => "S", :last_name => "Lee", :street_address => "96 Yangan Drive", :city => "Colly Blue", :state => "NSW", :postcode => "2343", :email => "Victoria.S.Lee@mailinator.com", :birthday => "1968/3/1 00:00:00"
-Person.create :gender => "male", :first_name => "Harry", :middle_initial => "E", :last_name => "Dunn", :street_address => "51 Myrtle Street", :city => "Bungeet", :state => "VIC", :postcode => "3726", :email => "Harry.E.Dunn@pookmail.com", :birthday => "1981/1/7 00:00:00"
-Person.create :gender => "male", :first_name => "Joseph", :middle_initial => "H", :last_name => "Banks", :street_address => "35 Plug Street", :city => "New Valley", :state => "NSW", :postcode => "2365", :email => "Joseph.H.Banks@dodgit.com", :birthday => "1941/11/17 00:00:00"
-Person.create :gender => "male", :first_name => "Logan", :middle_initial => "M", :last_name => "Gardiner", :street_address => "86 Mnimbah Road", :city => "Callaghan", :state => "NSW", :postcode => "2308", :email => "Logan.M.Gardiner@trashymail.com", :birthday => "1970/4/15 00:00:00"
-Person.create :gender => "male", :first_name => "Max", :middle_initial => "E", :last_name => "Mills", :street_address => "45 Hart Street", :city => "Upper Rouchel", :state => "NSW", :postcode => "2336", :email => "Max.E.Mills@dodgit.com", :birthday => "1956/11/28 00:00:00"
-Person.create :gender => "female", :first_name => "Sarah", :middle_initial => "H", :last_name => "Grant", :street_address => "22 Cambridge Street", :city => "Grose Wold", :state => "NSW", :postcode => "2753", :email => "Sarah.H.Grant@spambob.com", :birthday => "1985/12/29 00:00:00"
-Person.create :gender => "male", :first_name => "Harley", :middle_initial => "L", :last_name => "Fraser", :street_address => "70 Border Drive", :city => "Euberta", :state => "NSW", :postcode => "2650", :email => "Harley.L.Fraser@mailinator.com", :birthday => "1959/10/1 00:00:00"
-Person.create :gender => "female", :first_name => "Brooke", :middle_initial => "L", :last_name => "Reed", :street_address => "39 Burnley Street", :city => "Woodcroft", :state => "SA", :postcode => "5162", :email => "Brooke.L.Reed@spambob.com", :birthday => "1961/9/17 00:00:00"
-Person.create :gender => "female", :first_name => "Rebecca", :middle_initial => "R", :last_name => "Parkinson", :street_address => "79 Bourke Crescent", :city => "Antwerp", :state => "VIC", :postcode => "3414", :email => "Rebecca.R.Parkinson@trashymail.com", :birthday => "1985/2/6 00:00:00"
-Person.create :gender => "male", :first_name => "Gabriel", :middle_initial => "F", :last_name => "Davison", :street_address => "10 Creek Street", :city => "Marmadua", :state => "QLD", :postcode => "4405", :email => "Gabriel.F.Davison@trashymail.com", :birthday => "1980/12/20 00:00:00"
-Person.create :gender => "female", :first_name => "Chloe", :middle_initial => "S", :last_name => "Farrell", :street_address => "1 Nandewar Street", :city => "Coffs Harbour", :state => "NSW", :postcode => "2450", :email => "Chloe.S.Farrell@pookmail.com", :birthday => "1947/8/12 00:00:00"
-Person.create :gender => "female", :first_name => "Tilly", :middle_initial => "M", :last_name => "Cartwright", :street_address => "23 Dossiter Street", :city => "Lemont", :state => "TAS", :postcode => "7120", :email => "Tilly.M.Cartwright@mailinator.com", :birthday => "1982/2/24 00:00:00"
-Person.create :gender => "female", :first_name => "Naomi", :middle_initial => "D", :last_name => "Sullivan", :street_address => "54 Dabinett Road", :city => "Lake Carlet", :state => "SA", :postcode => "5238", :email => "Naomi.D.Sullivan@trashymail.com", :birthday => "1963/11/19 00:00:00"
-Person.create :gender => "female", :first_name => "Ava", :middle_initial => "O", :last_name => "Bolton", :street_address => "8 Grey Street", :city => "Gascoyne Junction", :state => "WA", :postcode => "6705", :email => "Ava.O.Bolton@spambob.com", :birthday => "1978/11/5 00:00:00"
-Person.create :gender => "female", :first_name => "Ruby", :middle_initial => "J", :last_name => "Parkin", :street_address => "27 Patton Street", :city => "Hawksburn", :state => "VIC", :postcode => "3142", :email => "Ruby.J.Parkin@pookmail.com", :birthday => "1977/9/4 00:00:00"
-Person.create :gender => "female", :first_name => "Megan", :middle_initial => "J", :last_name => "Potts", :street_address => "2 Boughtman Street", :city => "Mulgrave", :state => "VIC", :postcode => "3170", :email => "Megan.J.Potts@pookmail.com", :birthday => "1978/8/7 00:00:00"
-Person.create :gender => "male", :first_name => "Adam", :middle_initial => "M", :last_name => "Stephenson", :street_address => "5 Marlin Avenue", :city => "Jerrawa", :state => "NSW", :postcode => "2582", :email => "Adam.M.Stephenson@dodgit.com", :birthday => "1970/11/19 00:00:00"
-Person.create :gender => "female", :first_name => "Rosie", :middle_initial => "M", :last_name => "Barlow", :street_address => "15 Paradise Falls Road", :city => "Waldara", :state => "VIC", :postcode => "3678", :email => "Rosie.M.Barlow@trashymail.com", :birthday => "1952/11/29 00:00:00"
-Person.create :gender => "male", :first_name => "Robert", :middle_initial => "P", :last_name => "Stephens", :street_address => "58 Bayley Street", :city => "Pheasant Creek", :state => "VIC", :postcode => "3757", :email => "Robert.P.Stephens@spambob.com", :birthday => "1971/3/13 00:00:00"
-Person.create :gender => "male", :first_name => "Kyle", :middle_initial => "T", :last_name => "Ferguson", :street_address => "54 Mnimbah Road", :city => "Cliftleigh", :state => "NSW", :postcode => "2321", :email => "Kyle.T.Ferguson@dodgit.com", :birthday => "1982/11/29 00:00:00"
-Person.create :gender => "female", :first_name => "Anna", :middle_initial => "D", :last_name => "Hyde", :street_address => "20 Todd Street", :city => "Horrocks", :state => "WA", :postcode => "6535", :email => "Anna.D.Hyde@trashymail.com", :birthday => "1982/3/20 00:00:00"
-Person.create :gender => "male", :first_name => "Luke", :middle_initial => "M", :last_name => "Stone", :street_address => "27 Avondale Drive", :city => "Tongarra", :state => "NSW", :postcode => "2527", :email => "Luke.M.Stone@trashymail.com", :birthday => "1974/8/13 00:00:00"
-Person.create :gender => "male", :first_name => "Taylor", :middle_initial => "L", :last_name => "Miah", :street_address => "38 Feather Street", :city => "Mount Samson", :state => "QLD", :postcode => "4520", :email => "Taylor.L.Miah@dodgit.com", :birthday => "1983/11/24 00:00:00"
-Person.create :gender => "male", :first_name => "Joseph", :middle_initial => "G", :last_name => "Kaur", :street_address => "73 Hill Street", :city => "Mount Rumney", :state => "TAS", :postcode => "7170", :email => "Joseph.G.Kaur@spambob.com", :birthday => "1984/12/14 00:00:00"
-Person.create :gender => "male", :first_name => "Bailey", :middle_initial => "T", :last_name => "Wheeler", :street_address => "72 Chatsworth Drive", :city => "Welshpool Dc", :state => "WA", :postcode => "6106", :email => "Bailey.T.Wheeler@mailinator.com", :birthday => "1954/11/16 00:00:00"
-Person.create :gender => "male", :first_name => "Jude", :middle_initial => "D", :last_name => "Browne", :street_address => "51 Davidson Street", :city => "Yarrabah", :state => "QLD", :postcode => "4871", :email => "Jude.D.Browne@spambob.com", :birthday => "1955/4/23 00:00:00"
-Person.create :gender => "male", :first_name => "Patrick", :middle_initial => "E", :last_name => "Hale", :street_address => "36 Henry Moss Court", :city => "Jamestown", :state => "SA", :postcode => "5491", :email => "Patrick.E.Hale@trashymail.com", :birthday => "1951/3/23 00:00:00"
-Person.create :gender => "male", :first_name => "Harvey", :middle_initial => "E", :last_name => "Barton", :street_address => "84 Round Drive", :city => "Buttaba", :state => "NSW", :postcode => "2283", :email => "Harvey.E.Barton@pookmail.com", :birthday => "1952/3/2 00:00:00"
-Person.create :gender => "female", :first_name => "Isobel", :middle_initial => "J", :last_name => "Daly", :street_address => "65 Muscat Street", :city => "Bonnie Rock", :state => "WA", :postcode => "6479", :email => "Isobel.J.Daly@spambob.com", :birthday => "1968/12/12 00:00:00"
-Person.create :gender => "male", :first_name => "Brandon", :middle_initial => "S", :last_name => "Perry", :street_address => "70 Watson Street", :city => "Nanneella", :state => "VIC", :postcode => "3561", :email => "Brandon.S.Perry@trashymail.com", :birthday => "1959/1/19 00:00:00"
-Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "L", :last_name => "Gray", :street_address => "85 Old Tenterfield Road", :city => "Warregah Island", :state => "NSW", :postcode => "2469", :email => "Tegan.L.Gray@spambob.com", :birthday => "1954/4/11 00:00:00"
-Person.create :gender => "female", :first_name => "Nicole", :middle_initial => "A", :last_name => "Watkins", :street_address => "14 Cherry Grove", :city => "Brittons Swamp", :state => "TAS", :postcode => "7330", :email => "Nicole.A.Watkins@mailinator.com", :birthday => "1950/7/7 00:00:00"
-Person.create :gender => "male", :first_name => "Spencer", :middle_initial => "C", :last_name => "Black", :street_address => "24 Purcell Place", :city => "Dalmorton", :state => "NSW", :postcode => "2460", :email => "Spencer.C.Black@spambob.com", :birthday => "1979/5/7 00:00:00"
-Person.create :gender => "female", :first_name => "Courtney", :middle_initial => "M", :last_name => "Hamilton", :street_address => "9 Hargrave Road", :city => "Blackbutt North", :state => "QLD", :postcode => "4306", :email => "Courtney.M.Hamilton@mailinator.com", :birthday => "1940/9/15 00:00:00"
-Person.create :gender => "male", :first_name => "David", :middle_initial => "N", :last_name => "Davison", :street_address => "70 Railway Street", :city => "Greenmount East", :state => "QLD", :postcode => "4359", :email => "David.N.Davison@mailinator.com", :birthday => "1967/1/28 00:00:00"
-Person.create :gender => "male", :first_name => "Leon", :middle_initial => "O", :last_name => "Jarvis", :street_address => "62 Shell Road", :city => "Carlisle River", :state => "VIC", :postcode => "3239", :email => "Leon.O.Jarvis@trashymail.com", :birthday => "1949/7/4 00:00:00"
-Person.create :gender => "female", :first_name => "Bethany", :middle_initial => "L", :last_name => "Henderson", :street_address => "30 Meyer Road", :city => "Gomersal", :state => "SA", :postcode => "5352", :email => "Bethany.L.Henderson@mailinator.com", :birthday => "1955/8/19 00:00:00"
-Person.create :gender => "male", :first_name => "Louie", :middle_initial => "M", :last_name => "Stokes", :street_address => "83 Adavale Road", :city => "Mummel", :state => "NSW", :postcode => "2580", :email => "Louie.M.Stokes@mailinator.com", :birthday => "1948/11/7 00:00:00"
-Person.create :gender => "female", :first_name => "Eve", :middle_initial => "J", :last_name => "Richards", :street_address => "86 Hill Street", :city => "Tunbridge", :state => "TAS", :postcode => "7120", :email => "Eve.J.Richards@dodgit.com", :birthday => "1965/5/22 00:00:00"
-Person.create :gender => "female", :first_name => "Bethany", :middle_initial => "S", :last_name => "Perkins", :street_address => "56 Peterho Boulevard", :city => "Ward Belt", :state => "SA", :postcode => "5118", :email => "Bethany.S.Perkins@mailinator.com", :birthday => "1967/10/7 00:00:00"
-Person.create :gender => "male", :first_name => "Cameron", :middle_initial => "L", :last_name => "Lambert", :street_address => "33 Farnell Street", :city => "Obley", :state => "NSW", :postcode => "2868", :email => "Cameron.L.Lambert@trashymail.com", :birthday => "1955/1/20 00:00:00"
-Person.create :gender => "female", :first_name => "Courtney", :middle_initial => "C", :last_name => "Bishop", :street_address => "11 Burnley Street", :city => "Seaford Heights", :state => "SA", :postcode => "5169", :email => "Courtney.C.Bishop@spambob.com", :birthday => "1947/12/16 00:00:00"
-Person.create :gender => "male", :first_name => "Thomas", :middle_initial => "S", :last_name => "Harper", :street_address => "77 Bathurst Road", :city => "Perthville", :state => "NSW", :postcode => "2795", :email => "Thomas.S.Harper@trashymail.com", :birthday => "1970/10/30 00:00:00"
-Person.create :gender => "male", :first_name => "Ryan", :middle_initial => "E", :last_name => "Barnes", :street_address => "25 McLachlan Street", :city => "Quantong", :state => "VIC", :postcode => "3401", :email => "Ryan.E.Barnes@dodgit.com", :birthday => "1961/4/29 00:00:00"
-Person.create :gender => "male", :first_name => "Tom", :middle_initial => "A", :last_name => "Coates", :street_address => "89 Sunnyside Road", :city => "Stockyard Plain", :state => "SA", :postcode => "5330", :email => "Tom.A.Coates@mailinator.com", :birthday => "1944/2/19 00:00:00"
-Person.create :gender => "female", :first_name => "Isabella", :middle_initial => "L", :last_name => "Clarke", :street_address => "6 Hummocky Road", :city => "Gosse", :state => "SA", :postcode => "5223", :email => "Isabella.L.Clarke@pookmail.com", :birthday => "1953/5/26 00:00:00"
-Person.create :gender => "female", :first_name => "Georgina", :middle_initial => "B", :last_name => "Rowley", :street_address => "66 Shadforth Street", :city => "Teal Point", :state => "VIC", :postcode => "3579", :email => "Georgina.B.Rowley@trashymail.com", :birthday => "1983/12/14 00:00:00"
-Person.create :gender => "female", :first_name => "Maddison", :middle_initial => "D", :last_name => "Howard", :street_address => "57 Mildura Street", :city => "Four Mile Creek", :state => "TAS", :postcode => "7215", :email => "Maddison.D.Howard@pookmail.com", :birthday => "1962/12/3 00:00:00"
-Person.create :gender => "female", :first_name => "Bethany", :middle_initial => "A", :last_name => "Hamilton", :street_address => "91 Amiens Road", :city => "Erudgere", :state => "NSW", :postcode => "2850", :email => "Bethany.A.Hamilton@trashymail.com", :birthday => "1965/5/20 00:00:00"
-Person.create :gender => "male", :first_name => "Christopher", :middle_initial => "C", :last_name => "Duffy", :street_address => "3 Glen William Road", :city => "Crystalbrook", :state => "QLD", :postcode => "4871", :email => "Christopher.C.Duffy@trashymail.com", :birthday => "1966/4/25 00:00:00"
-Person.create :gender => "male", :first_name => "Rhys", :middle_initial => "E", :last_name => "Bird", :street_address => "12 Daly Terrace", :city => "Caraban", :state => "WA", :postcode => "6041", :email => "Rhys.E.Bird@spambob.com", :birthday => "1946/10/7 00:00:00"
-Person.create :gender => "male", :first_name => "Jake", :middle_initial => "S", :last_name => "Moss", :street_address => "42 Quoin Road", :city => "Karoola", :state => "TAS", :postcode => "7267", :email => "Jake.S.Moss@spambob.com", :birthday => "1961/1/2 00:00:00"
-Person.create :gender => "female", :first_name => "Lily", :middle_initial => "T", :last_name => "Harris", :street_address => "23 Feather Street", :city => "Dayboro", :state => "QLD", :postcode => "4521", :email => "Lily.T.Harris@trashymail.com", :birthday => "1950/3/20 00:00:00"
-Person.create :gender => "female", :first_name => "Imogen", :middle_initial => "D", :last_name => "Fox", :street_address => "80 Porana Place", :city => "Tardun", :state => "WA", :postcode => "6628", :email => "Imogen.D.Fox@dodgit.com", :birthday => "1963/6/24 00:00:00"
-Person.create :gender => "female", :first_name => "Eve", :middle_initial => "A", :last_name => "Barber", :street_address => "40 Frouds Road", :city => "Bundara", :state => "VIC", :postcode => "3898", :email => "Eve.A.Barber@dodgit.com", :birthday => "1979/11/11 00:00:00"
-Person.create :gender => "female", :first_name => "Bethany", :middle_initial => "L", :last_name => "Moran", :street_address => "79 Norton Street", :city => "Brooklyn", :state => "NSW", :postcode => "2083", :email => "Bethany.L.Moran@trashymail.com", :birthday => "1954/11/24 00:00:00"
-Person.create :gender => "female", :first_name => "Ava", :middle_initial => "R", :last_name => "Marshall", :street_address => "7 Duff Street", :city => "Orange Springs", :state => "WA", :postcode => "6503", :email => "Ava.R.Marshall@pookmail.com", :birthday => "1964/12/26 00:00:00"
-Person.create :gender => "female", :first_name => "Scarlett", :middle_initial => "I", :last_name => "Byrne", :street_address => "33 Henley Beach Road", :city => "Trott Park", :state => "SA", :postcode => "5158", :email => "Scarlett.I.Byrne@dodgit.com", :birthday => "1976/8/3 00:00:00"
-Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "L", :last_name => "Conway", :street_address => "67 Gregory Way", :city => "Allanson", :state => "WA", :postcode => "6225", :email => "Tegan.L.Conway@dodgit.com", :birthday => "1954/2/9 00:00:00"
-Person.create :gender => "female", :first_name => "Katherine", :middle_initial => "R", :last_name => "Lucas", :street_address => "95 Bathurst Road", :city => "Wattle Flat", :state => "NSW", :postcode => "2795", :email => "Katherine.R.Lucas@dodgit.com", :birthday => "1947/9/4 00:00:00"
-Person.create :gender => "female", :first_name => "Amy", :middle_initial => "S", :last_name => "Chamberlain", :street_address => "32 Marx Hill Road", :city => "Upper Kalang", :state => "NSW", :postcode => "2454", :email => "Amy.S.Chamberlain@spambob.com", :birthday => "1982/2/17 00:00:00"
-Person.create :gender => "male", :first_name => "Tom", :middle_initial => "N", :last_name => "McLean", :street_address => "3 Mt Berryman Road", :city => "Lockyer", :state => "QLD", :postcode => "4344", :email => "Tom.N.McLean@pookmail.com", :birthday => "1984/2/20 00:00:00"
-Person.create :gender => "male", :first_name => "Owen", :middle_initial => "E", :last_name => "George", :street_address => "47 Holthouse Road", :city => "Forreston", :state => "SA", :postcode => "5233", :email => "Owen.E.George@spambob.com", :birthday => "1970/5/11 00:00:00"
-Person.create :gender => "male", :first_name => "Finlay", :middle_initial => "P", :last_name => "Black", :street_address => "64 Taylor Street", :city => "Naring", :state => "VIC", :postcode => "3636", :email => "Finlay.P.Black@spambob.com", :birthday => "1980/7/2 00:00:00"
-Person.create :gender => "male", :first_name => "George", :middle_initial => "A", :last_name => "Rees", :street_address => "39 Millicent Drive", :city => "Brownmore", :state => "NSW", :postcode => "2420", :email => "George.A.Rees@dodgit.com", :birthday => "1974/5/26 00:00:00"
-Person.create :gender => "male", :first_name => "Cameron", :middle_initial => "L", :last_name => "Walker", :street_address => "26 Wallis Street", :city => "Dover Heights", :state => "NSW", :postcode => "2030", :email => "Cameron.L.Walker@dodgit.com", :birthday => "1963/4/12 00:00:00"
-Person.create :gender => "female", :first_name => "Shannon", :middle_initial => "J", :last_name => "Howarth", :street_address => "68 Flinstone Drive", :city => "Hermitage", :state => "TAS", :postcode => "7030", :email => "Shannon.J.Howarth@pookmail.com", :birthday => "1952/11/19 00:00:00"
-Person.create :gender => "female", :first_name => "Aaliyah", :middle_initial => "R", :last_name => "Austin", :street_address => "6 Faulkner Street", :city => "Duval", :state => "NSW", :postcode => "2350", :email => "Aaliyah.R.Austin@dodgit.com", :birthday => "1970/6/28 00:00:00"
-Person.create :gender => "male", :first_name => "Louis", :middle_initial => "C", :last_name => "Kennedy", :street_address => "10 Old Tenterfield Road", :city => "Mummulgum", :state => "NSW", :postcode => "2469", :email => "Louis.C.Kennedy@dodgit.com", :birthday => "1960/5/28 00:00:00"
-Person.create :gender => "female", :first_name => "Bethany", :middle_initial => "A", :last_name => "Hunt", :street_address => "81 Reynolds Road", :city => "Lake Borumba", :state => "QLD", :postcode => "4570", :email => "Bethany.A.Hunt@dodgit.com", :birthday => "1962/5/2 00:00:00"
-Person.create :gender => "male", :first_name => "Joseph", :middle_initial => "A", :last_name => "Poole", :street_address => "57 Lewin Street", :city => "Mount Crystal", :state => "NSW", :postcode => "2665", :email => "Joseph.A.Poole@trashymail.com", :birthday => "1960/12/13 00:00:00"
-Person.create :gender => "male", :first_name => "Luca", :middle_initial => "G", :last_name => "Harper", :street_address => "87 Villeneuve Street", :city => "Eurobin", :state => "VIC", :postcode => "3739", :email => "Luca.G.Harper@spambob.com", :birthday => "1965/11/26 00:00:00"
-Person.create :gender => "female", :first_name => "Georgia", :middle_initial => "E", :last_name => "Bull", :street_address => "66 Cedar Street", :city => "Trebonne", :state => "QLD", :postcode => "4850", :email => "Georgia.E.Bull@dodgit.com", :birthday => "1983/4/19 00:00:00"
-Person.create :gender => "male", :first_name => "Jude", :middle_initial => "L", :last_name => "Kemp", :street_address => "70 Hunter Street", :city => "North Toowoomba", :state => "QLD", :postcode => "4350", :email => "Jude.L.Kemp@spambob.com", :birthday => "1956/12/20 00:00:00"
-Person.create :gender => "female", :first_name => "Eva", :middle_initial => "B", :last_name => "Anderson", :street_address => "82 Campbells River Road", :city => "Cryon", :state => "NSW", :postcode => "2832", :email => "Eva.B.Anderson@dodgit.com", :birthday => "1958/10/18 00:00:00"
-Person.create :gender => "female", :first_name => "Poppy", :middle_initial => "A", :last_name => "Lynch", :street_address => "19 Darwinia Loop", :city => "Gingerah", :state => "WA", :postcode => "6725", :email => "Poppy.A.Lynch@spambob.com", :birthday => "1957/11/15 00:00:00"
-Person.create :gender => "female", :first_name => "Katherine", :middle_initial => "J", :last_name => "Power", :street_address => "7 Peterho Boulevard", :city => "Concordia", :state => "SA", :postcode => "5118", :email => "Katherine.J.Power@spambob.com", :birthday => "1965/4/7 00:00:00"
-Person.create :gender => "female", :first_name => "Ellie", :middle_initial => "B", :last_name => "Cross", :street_address => "18 Boughtman Street", :city => "Hopetoun Gardens", :state => "VIC", :postcode => "3162", :email => "Ellie.B.Cross@dodgit.com", :birthday => "1983/5/4 00:00:00"
-Person.create :gender => "female", :first_name => "Eleanor", :middle_initial => "A", :last_name => "Riley", :street_address => "67 Boland Drive", :city => "Lennox Head", :state => "NSW", :postcode => "2478", :email => "Eleanor.A.Riley@mailinator.com", :birthday => "1965/2/26 00:00:00"
-Person.create :gender => "female", :first_name => "Lola", :middle_initial => "S", :last_name => "Reynolds", :street_address => "92 Grey Street", :city => "Carbla", :state => "WA", :postcode => "6701", :email => "Lola.S.Reynolds@mailinator.com", :birthday => "1957/7/13 00:00:00"
-Person.create :gender => "male", :first_name => "Hayden", :middle_initial => "L", :last_name => "Burton", :street_address => "48 Millicent Drive", :city => "Torryburn", :state => "NSW", :postcode => "2421", :email => "Hayden.L.Burton@spambob.com", :birthday => "1959/1/6 00:00:00"
-Person.create :gender => "female", :first_name => "Elizabeth", :middle_initial => "P", :last_name => "O'eill", :street_address => "71 Grey Street", :city => "Brown Range", :state => "WA", :postcode => "6701", :email => "Elizabeth.P.O'eill@dodgit.com", :birthday => "1983/6/3 00:00:00"
-Person.create :gender => "female", :first_name => "Ruby", :middle_initial => "H", :last_name => "Rose", :street_address => "9 Gadd Avenue", :city => "Port Pirie", :state => "SA", :postcode => "5540", :email => "Ruby.H.Rose@spambob.com", :birthday => "1968/2/2 00:00:00"
-Person.create :gender => "male", :first_name => "Tyler", :middle_initial => "T", :last_name => "Hargreaves", :street_address => "89 Plantation Place", :city => "Arkell", :state => "NSW", :postcode => "2795", :email => "Tyler.T.Hargreaves@trashymail.com", :birthday => "1963/2/13 00:00:00"
-Person.create :gender => "male", :first_name => "Joseph", :middle_initial => "E", :last_name => "Thomas", :street_address => "61 Wigley Street", :city => "South Plympton", :state => "SA", :postcode => "5038", :email => "Joseph.E.Thomas@mailinator.com", :birthday => "1974/4/16 00:00:00"
-Person.create :gender => "female", :first_name => "Lilly", :middle_initial => "J", :last_name => "Power", :street_address => "31 Dalgarno Street", :city => "Maules Creek", :state => "NSW", :postcode => "2382", :email => "Lilly.J.Power@dodgit.com", :birthday => "1976/7/28 00:00:00"
-Person.create :gender => "female", :first_name => "Caitlin", :middle_initial => "D", :last_name => "Wright", :street_address => "73 Saggers Road", :city => "Dudinin", :state => "WA", :postcode => "6363", :email => "Caitlin.D.Wright@dodgit.com", :birthday => "1970/6/18 00:00:00"
-Person.create :gender => "female", :first_name => "Eve", :middle_initial => "D", :last_name => "Pope", :street_address => "66 Kopkes Road", :city => "Carngham", :state => "VIC", :postcode => "3351", :email => "Eve.D.Pope@dodgit.com", :birthday => "1942/2/11 00:00:00"
-Person.create :gender => "male", :first_name => "Taylor", :middle_initial => "T", :last_name => "Dunn", :street_address => "50 Cambridge Street", :city => "Central Colo", :state => "NSW", :postcode => "2756", :email => "Taylor.T.Dunn@dodgit.com", :birthday => "1973/3/19 00:00:00"
-Person.create :gender => "female", :first_name => "Alice", :middle_initial => "H", :last_name => "Cox", :street_address => "48 Chatsworth Drive", :city => "Seville Grove", :state => "WA", :postcode => "6112", :email => "Alice.H.Cox@mailinator.com", :birthday => "1973/4/25 00:00:00"
-Person.create :gender => "female", :first_name => "Eva", :middle_initial => "A", :last_name => "Kemp", :street_address => "37 Healy Road", :city => "Lowden", :state => "WA", :postcode => "6240", :email => "Eva.A.Kemp@mailinator.com", :birthday => "1963/7/12 00:00:00"
-Person.create :gender => "female", :first_name => "Maisie", :middle_initial => "K", :last_name => "Doyle", :street_address => "61 Martens Place", :city => "Amity Point", :state => "QLD", :postcode => "4183", :email => "Maisie.K.Doyle@dodgit.com", :birthday => "1951/9/5 00:00:00"
-Person.create :gender => "male", :first_name => "Mohammad", :middle_initial => "L", :last_name => "Ellis", :street_address => "85 Little Myers Street", :city => "Coimadai", :state => "VIC", :postcode => "3340", :email => "Mohammad.L.Ellis@spambob.com", :birthday => "1962/10/20 00:00:00"
-Person.create :gender => "male", :first_name => "Tyler", :middle_initial => "K", :last_name => "Foster", :street_address => "85 Ashton Road", :city => "Boolading", :state => "WA", :postcode => "6392", :email => "Tyler.K.Foster@trashymail.com", :birthday => "1977/4/18 00:00:00"
-Person.create :gender => "male", :first_name => "Benjamin", :middle_initial => "F", :last_name => "Ross", :street_address => "45 Fernleigh Ave", :city => "Goodooga", :state => "NSW", :postcode => "2831", :email => "Benjamin.F.Ross@pookmail.com", :birthday => "1944/11/8 00:00:00"
-Person.create :gender => "female", :first_name => "Kate", :middle_initial => "C", :last_name => "Pope", :street_address => "55 Punchs Creek Road", :city => "Silver Spur", :state => "QLD", :postcode => "4385", :email => "Kate.C.Pope@trashymail.com", :birthday => "1971/5/25 00:00:00"
-Person.create :gender => "male", :first_name => "Ewan", :middle_initial => "K", :last_name => "Whittaker", :street_address => "61 Quoin Road", :city => "Gravelly Beach", :state => "TAS", :postcode => "7276", :email => "Ewan.K.Whittaker@dodgit.com", :birthday => "1963/4/11 00:00:00"
-Person.create :gender => "male", :first_name => "Jamie", :middle_initial => "E", :last_name => "Andrews", :street_address => "52 Little Myers Street", :city => "Larralea", :state => "VIC", :postcode => "3325", :email => "Jamie.E.Andrews@mailinator.com", :birthday => "1950/7/15 00:00:00"
-Person.create :gender => "female", :first_name => "Amelia", :middle_initial => "R", :last_name => "Smith", :street_address => "51 Wharf St", :city => "East Gosford", :state => "NSW", :postcode => "2250", :email => "Amelia.R.Smith@pookmail.com", :birthday => "1983/6/19 00:00:00"
-Person.create :gender => "male", :first_name => "Sean", :middle_initial => "J", :last_name => "Savage", :street_address => "68 Main Street", :city => "Woodleigh", :state => "SA", :postcode => "5311", :email => "Sean.J.Savage@mailinator.com", :birthday => "1962/4/16 00:00:00"
-Person.create :gender => "male", :first_name => "Callum", :middle_initial => "L", :last_name => "Cook", :street_address => "42 Moruya Street", :city => "Doon Doon", :state => "NSW", :postcode => "2484", :email => "Callum.L.Cook@dodgit.com", :birthday => "1965/2/12 00:00:00"
-Person.create :gender => "male", :first_name => "Jacob", :middle_initial => "E", :last_name => "Atkinson", :street_address => "36 Mildura Street", :city => "Goulds Country", :state => "TAS", :postcode => "7216", :email => "Jacob.E.Atkinson@trashymail.com", :birthday => "1945/6/21 00:00:00"
-Person.create :gender => "female", :first_name => "Shannon", :middle_initial => "B", :last_name => "Francis", :street_address => "72 Chapman Avenue", :city => "Bumbaldry", :state => "NSW", :postcode => "2794", :email => "Shannon.B.Francis@mailinator.com", :birthday => "1960/7/2 00:00:00"
-Person.create :gender => "male", :first_name => "Louie", :middle_initial => "A", :last_name => "Chambers", :street_address => "66 McPherson Road", :city => "Mount Beauty", :state => "VIC", :postcode => "3699", :email => "Louie.A.Chambers@spambob.com", :birthday => "1957/8/5 00:00:00"
-Person.create :gender => "female", :first_name => "Caitlin", :middle_initial => "C", :last_name => "Butler", :street_address => "61 Bayley Street", :city => "Dixons Creek", :state => "VIC", :postcode => "3775", :email => "Caitlin.C.Butler@mailinator.com", :birthday => "1954/8/13 00:00:00"
-Person.create :gender => "male", :first_name => "Thomas", :middle_initial => "I", :last_name => "Ellis", :street_address => "3 Ugoa Street", :city => "Forster Shopping Village", :state => "NSW", :postcode => "2428", :email => "Thomas.I.Ellis@spambob.com", :birthday => "1981/1/21 00:00:00"
-Person.create :gender => "male", :first_name => "William", :middle_initial => "N", :last_name => "Brennan", :street_address => "64 Bayley Street", :city => "Whittlesea", :state => "VIC", :postcode => "3757", :email => "William.N.Brennan@spambob.com", :birthday => "1956/1/6 00:00:00"
-Person.create :gender => "male", :first_name => "Hayden", :middle_initial => "A", :last_name => "Barker", :street_address => "44 Boland Drive", :city => "Meerschaum Vale", :state => "NSW", :postcode => "2477", :email => "Hayden.A.Barker@pookmail.com", :birthday => "1958/11/22 00:00:00"
-Person.create :gender => "male", :first_name => "Aaron", :middle_initial => "T", :last_name => "Phillips", :street_address => "68 McLaughlin Road", :city => "Pine Mountain", :state => "QLD", :postcode => "4306", :email => "Aaron.T.Phillips@pookmail.com", :birthday => "1956/5/4 00:00:00"
-Person.create :gender => "female", :first_name => "Madeleine", :middle_initial => "G", :last_name => "Flynn", :street_address => "51 Begley Street", :city => "Howitt", :state => "QLD", :postcode => "4890", :email => "Madeleine.G.Flynn@pookmail.com", :birthday => "1959/6/30 00:00:00"
-Person.create :gender => "male", :first_name => "Spencer", :middle_initial => "M", :last_name => "Harris", :street_address => "43 Nerrigundah Drive", :city => "Monomeith", :state => "VIC", :postcode => "3984", :email => "Spencer.M.Harris@spambob.com", :birthday => "1970/2/7 00:00:00"
-Person.create :gender => "female", :first_name => "Paige", :middle_initial => "S", :last_name => "Chan", :street_address => "53 Queen Street", :city => "Oxford Falls", :state => "NSW", :postcode => "2100", :email => "Paige.S.Chan@trashymail.com", :birthday => "1944/3/1 00:00:00"
-Person.create :gender => "female", :first_name => "Francesca", :middle_initial => "J", :last_name => "Harrison", :street_address => "12 South Molle Boulevard", :city => "Damper Creek", :state => "QLD", :postcode => "4849", :email => "Francesca.J.Harrison@spambob.com", :birthday => "1978/2/21 00:00:00"
-Person.create :gender => "female", :first_name => "Alexandra", :middle_initial => "T", :last_name => "Curtis", :street_address => "39 Kaesler Road", :city => "Clay Wells", :state => "SA", :postcode => "5280", :email => "Alexandra.T.Curtis@trashymail.com", :birthday => "1962/7/16 00:00:00"
-Person.create :gender => "male", :first_name => "Alexander", :middle_initial => "H", :last_name => "Todd", :street_address => "48 Healy Road", :city => "Boyup Brook", :state => "WA", :postcode => "6244", :email => "Alexander.H.Todd@mailinator.com", :birthday => "1948/1/11 00:00:00"
-Person.create :gender => "male", :first_name => "Louis", :middle_initial => "K", :last_name => "Lloyd", :street_address => "16 Little Myers Street", :city => "Cressy", :state => "VIC", :postcode => "3322", :email => "Louis.K.Lloyd@pookmail.com", :birthday => "1984/6/15 00:00:00"
-Person.create :gender => "female", :first_name => "Katherine", :middle_initial => "B", :last_name => "Brennan", :street_address => "37 Spring Creek Road", :city => "Labertouche", :state => "VIC", :postcode => "3816", :email => "Katherine.B.Brennan@trashymail.com", :birthday => "1943/8/31 00:00:00"
-Person.create :gender => "female", :first_name => "Sarah", :middle_initial => "B", :last_name => "Palmer", :street_address => "97 Kintyre Street", :city => "Springwood", :state => "QLD", :postcode => "4127", :email => "Sarah.B.Palmer@spambob.com", :birthday => "1962/12/8 00:00:00"
-Person.create :gender => "female", :first_name => "Phoebe", :middle_initial => "J", :last_name => "Hartley", :street_address => "41 Rimbanda Road", :city => "Moggs Swamp", :state => "NSW", :postcode => "2370", :email => "Phoebe.J.Hartley@spambob.com", :birthday => "1977/1/29 00:00:00"
-Person.create :gender => "female", :first_name => "Caitlin", :middle_initial => "O", :last_name => "Phillips", :street_address => "4 Henry Street", :city => "Highton", :state => "VIC", :postcode => "3216", :email => "Caitlin.O.Phillips@spambob.com", :birthday => "1968/6/8 00:00:00"
-Person.create :gender => "male", :first_name => "Sam", :middle_initial => "M", :last_name => "Patel", :street_address => "29 Barker Street", :city => "Coblinine", :state => "WA", :postcode => "6317", :email => "Sam.M.Patel@mailinator.com", :birthday => "1983/11/3 00:00:00"
-Person.create :gender => "male", :first_name => "Spencer", :middle_initial => "I", :last_name => "Bell", :street_address => "41 Quoin Road", :city => "Beaconsfield", :state => "TAS", :postcode => "7270", :email => "Spencer.I.Bell@spambob.com", :birthday => "1965/1/17 00:00:00"
-Person.create :gender => "female", :first_name => "Kayleigh", :middle_initial => "A", :last_name => "Duncan", :street_address => "59 Corio Street", :city => "Barongarook West", :state => "VIC", :postcode => "3249", :email => "Kayleigh.A.Duncan@trashymail.com", :birthday => "1965/6/10 00:00:00"
-Person.create :gender => "male", :first_name => "Muhammad", :middle_initial => "A", :last_name => "Duncan", :street_address => "65 Loris Way", :city => "Dumberning", :state => "WA", :postcode => "6312", :email => "Muhammad.A.Duncan@dodgit.com", :birthday => "1943/4/22 00:00:00"
-Person.create :gender => "female", :first_name => "Isabella", :middle_initial => "S", :last_name => "Leach", :street_address => "27 Grey Street", :city => "Woodleigh", :state => "WA", :postcode => "6701", :email => "Isabella.S.Leach@mailinator.com", :birthday => "1959/3/19 00:00:00"
-Person.create :gender => "male", :first_name => "Louie", :middle_initial => "E", :last_name => "Steele", :street_address => "89 Shirley Street", :city => "Studio Village", :state => "QLD", :postcode => "4210", :email => "Louie.E.Steele@pookmail.com", :birthday => "1954/1/13 00:00:00"
-Person.create :gender => "male", :first_name => "William", :middle_initial => "C", :last_name => "Joyce", :street_address => "40 Cherry Grove", :city => "Scopus", :state => "TAS", :postcode => "7330", :email => "William.C.Joyce@mailinator.com", :birthday => "1964/10/24 00:00:00"
-Person.create :gender => "male", :first_name => "Riley", :middle_initial => "C", :last_name => "Hyde", :street_address => "23 Edmundsons Road", :city => "Leigh Creek", :state => "VIC", :postcode => "3352", :email => "Riley.C.Hyde@spambob.com", :birthday => "1985/7/22 00:00:00"
-Person.create :gender => "female", :first_name => "Madeleine", :middle_initial => "W", :last_name => "Coles", :street_address => "12 Rose Street", :city => "Clematis", :state => "VIC", :postcode => "3782", :email => "Madeleine.W.Coles@trashymail.com", :birthday => "1961/2/18 00:00:00"
-Person.create :gender => "female", :first_name => "Erin", :middle_initial => "B", :last_name => "Conway", :street_address => "71 Lapko Road", :city => "Cowalellup", :state => "WA", :postcode => "6336", :email => "Erin.B.Conway@mailinator.com", :birthday => "1978/10/12 00:00:00"
-Person.create :gender => "male", :first_name => "Kyle", :middle_initial => "Z", :last_name => "King", :street_address => "85 McLachlan Street", :city => "Brimpaen", :state => "VIC", :postcode => "3401", :email => "Kyle.Z.King@mailinator.com", :birthday => "1977/12/16 00:00:00"
-Person.create :gender => "male", :first_name => "Charles", :middle_initial => "K", :last_name => "Bowen", :street_address => "32 Walpole Avenue", :city => "Nullawarre North", :state => "VIC", :postcode => "3268", :email => "Charles.K.Bowen@spambob.com", :birthday => "1945/11/24 00:00:00"
-Person.create :gender => "male", :first_name => "Louie", :middle_initial => "F", :last_name => "Ferguson", :street_address => "62 Eshelby Drive", :city => "Rosslea", :state => "QLD", :postcode => "4812", :email => "Louie.F.Ferguson@mailinator.com", :birthday => "1953/11/20 00:00:00"
-Person.create :gender => "male", :first_name => "Liam", :middle_initial => "G", :last_name => "McCarthy", :street_address => "81 Spencer Street", :city => "Pomona", :state => "QLD", :postcode => "4568", :email => "Liam.G.McCarthy@trashymail.com", :birthday => "1942/5/5 00:00:00"
-Person.create :gender => "female", :first_name => "Holly", :middle_initial => "J", :last_name => "Kennedy", :street_address => "31 Learmouth Street", :city => "Lyons", :state => "VIC", :postcode => "3304", :email => "Holly.J.Kennedy@trashymail.com", :birthday => "1985/12/2 00:00:00"
-Person.create :gender => "female", :first_name => "Lily", :middle_initial => "R", :last_name => "Hope", :street_address => "52 Shell Road", :city => "Glenaire", :state => "VIC", :postcode => "3238", :email => "Lily.R.Hope@mailinator.com", :birthday => "1969/1/18 00:00:00"
-Person.create :gender => "male", :first_name => "Kyle", :middle_initial => "I", :last_name => "Hope", :street_address => "15 Peninsula Drive", :city => "Burraneer", :state => "NSW", :postcode => "2230", :email => "Kyle.I.Hope@pookmail.com", :birthday => "1982/3/18 00:00:00"
-Person.create :gender => "male", :first_name => "Adam", :middle_initial => "A", :last_name => "Mann", :street_address => "14 Thyme Avenue", :city => "Spring Creek", :state => "QLD", :postcode => "4361", :email => "Adam.A.Mann@trashymail.com", :birthday => "1976/3/9 00:00:00"
-Person.create :gender => "female", :first_name => "Isabel", :middle_initial => "N", :last_name => "Lowe", :street_address => "43 Thyme Avenue", :city => "Cottonvale", :state => "QLD", :postcode => "4375", :email => "Isabel.N.Lowe@dodgit.com", :birthday => "1982/9/1 00:00:00"
-Person.create :gender => "male", :first_name => "Benjamin", :middle_initial => "S", :last_name => "Carter", :street_address => "10 Garden Place", :city => "Korong Vale", :state => "VIC", :postcode => "3520", :email => "Benjamin.S.Carter@dodgit.com", :birthday => "1952/8/27 00:00:00"
-Person.create :gender => "female", :first_name => "Laura", :middle_initial => "J", :last_name => "Burns", :street_address => "46 Junction St", :city => "Conargo", :state => "NSW", :postcode => "2710", :email => "Laura.J.Burns@dodgit.com", :birthday => "1962/12/14 00:00:00"
-Person.create :gender => "female", :first_name => "Maya", :middle_initial => "T", :last_name => "Ferguson", :street_address => "20 Glendonbrook Road", :city => "Upper Bylong", :state => "NSW", :postcode => "2849", :email => "Maya.T.Ferguson@dodgit.com", :birthday => "1980/8/19 00:00:00"
-Person.create :gender => "male", :first_name => "Edward", :middle_initial => "A", :last_name => "Kerr", :street_address => "50 Thule Drive", :city => "Australia Plains", :state => "SA", :postcode => "5374", :email => "Edward.A.Kerr@trashymail.com", :birthday => "1975/4/8 00:00:00"
-Person.create :gender => "male", :first_name => "Tom", :middle_initial => "S", :last_name => "Winter", :street_address => "17 Myrtle Street", :city => "Wilby", :state => "VIC", :postcode => "3728", :email => "Tom.S.Winter@dodgit.com", :birthday => "1968/4/6 00:00:00"
-Person.create :gender => "female", :first_name => "Cerys", :middle_initial => "N", :last_name => "Jenkins", :street_address => "39 Purcell Place", :city => "Copmanhurst", :state => "NSW", :postcode => "2460", :email => "Cerys.N.Jenkins@mailinator.com", :birthday => "1957/3/5 00:00:00"
-Person.create :gender => "male", :first_name => "Henry", :middle_initial => "L", :last_name => "Fisher", :street_address => "19 Redesdale Rd", :city => "Bendigo Forward", :state => "VIC", :postcode => "3551", :email => "Henry.L.Fisher@dodgit.com", :birthday => "1974/1/25 00:00:00"
-Person.create :gender => "female", :first_name => "Matilda", :middle_initial => "S", :last_name => "Dean", :street_address => "28 Walters Street", :city => "Upotipotpon", :state => "VIC", :postcode => "3673", :email => "Matilda.S.Dean@spambob.com", :birthday => "1983/7/5 00:00:00"
-Person.create :gender => "male", :first_name => "Leon", :middle_initial => "L", :last_name => "Hussain", :street_address => "78 Albert Street", :city => "Woodhill", :state => "QLD", :postcode => "4285", :email => "Leon.L.Hussain@mailinator.com", :birthday => "1970/3/11 00:00:00"
-Person.create :gender => "male", :first_name => "Ellis", :middle_initial => "P", :last_name => "Hamilton", :street_address => "18 Cherokee Road", :city => "Daylesford", :state => "VIC", :postcode => "3460", :email => "Ellis.P.Hamilton@pookmail.com", :birthday => "1965/3/7 00:00:00"
-Person.create :gender => "female", :first_name => "Kayleigh", :middle_initial => "M", :last_name => "Bull", :street_address => "80 Murphy Street", :city => "Cunjardine", :state => "WA", :postcode => "6401", :email => "Kayleigh.M.Bull@spambob.com", :birthday => "1979/7/29 00:00:00"
-Person.create :gender => "female", :first_name => "Isabella", :middle_initial => "E", :last_name => "Collins", :street_address => "84 English Street", :city => "Long Flat", :state => "SA", :postcode => "5253", :email => "Isabella.E.Collins@mailinator.com", :birthday => "1944/8/5 00:00:00"
-Person.create :gender => "female", :first_name => "Amber", :middle_initial => "C", :last_name => "Patel", :street_address => "15 Adavale Road", :city => "Six Mile Flat", :state => "NSW", :postcode => "2580", :email => "Amber.C.Patel@pookmail.com", :birthday => "1968/8/9 00:00:00"
-Person.create :gender => "female", :first_name => "Charlotte", :middle_initial => "L", :last_name => "Sanders", :street_address => "18 Fitzroy Street", :city => "Linton", :state => "VIC", :postcode => "3360", :email => "Charlotte.L.Sanders@mailinator.com", :birthday => "1945/7/20 00:00:00"
-Person.create :gender => "male", :first_name => "Finley", :middle_initial => "P", :last_name => "Long", :street_address => "20 Frencham Street", :city => "Collendina", :state => "NSW", :postcode => "2646", :email => "Finley.P.Long@dodgit.com", :birthday => "1945/10/7 00:00:00"
-Person.create :gender => "male", :first_name => "Louie", :middle_initial => "V", :last_name => "Marshall", :street_address => "28 Harris Street", :city => "Woodfield", :state => "VIC", :postcode => "3715", :email => "Louie.V.Marshall@dodgit.com", :birthday => "1982/7/6 00:00:00"
-Person.create :gender => "female", :first_name => "Alexandra", :middle_initial => "K", :last_name => "Thomson", :street_address => "15 Oak Street", :city => "Coraki", :state => "NSW", :postcode => "2471", :email => "Alexandra.K.Thomson@spambob.com", :birthday => "1978/2/28 00:00:00"
-Person.create :gender => "female", :first_name => "Chloe", :middle_initial => "J", :last_name => "Mills", :street_address => "11 Shadforth Street", :city => "Teal Point", :state => "VIC", :postcode => "3579", :email => "Chloe.J.Mills@trashymail.com", :birthday => "1958/10/4 00:00:00"
-Person.create :gender => "female", :first_name => "Maya", :middle_initial => "J", :last_name => "Wallis", :street_address => "54 Carlisle Street", :city => "Katamatite", :state => "VIC", :postcode => "3649", :email => "Maya.J.Wallis@spambob.com", :birthday => "1951/11/19 00:00:00"
-Person.create :gender => "female", :first_name => "Lara", :middle_initial => "N", :last_name => "Ashton", :street_address => "35 Kaesler Road", :city => "Smithville", :state => "SA", :postcode => "5302", :email => "Lara.N.Ashton@trashymail.com", :birthday => "1962/2/18 00:00:00"
-Person.create :gender => "female", :first_name => "Lauren", :middle_initial => "J", :last_name => "Doherty", :street_address => "30 Wagga Road", :city => "Tolland", :state => "NSW", :postcode => "2650", :email => "Lauren.J.Doherty@dodgit.com", :birthday => "1956/12/17 00:00:00"
-Person.create :gender => "male", :first_name => "Joe", :middle_initial => "L", :last_name => "Kirk", :street_address => "68 Clifton Street", :city => "Goulburn Weir", :state => "VIC", :postcode => "3608", :email => "Joe.L.Kirk@trashymail.com", :birthday => "1957/2/15 00:00:00"
-Person.create :gender => "male", :first_name => "Louis", :middle_initial => "A", :last_name => "Davies", :street_address => "98 Paradise Falls Road", :city => "Cheshunt South", :state => "VIC", :postcode => "3678", :email => "Louis.A.Davies@spambob.com", :birthday => "1974/10/17 00:00:00"
-Person.create :gender => "male", :first_name => "Jordan", :middle_initial => "I", :last_name => "Bishop", :street_address => "48 Boulter Close", :city => "South Innisfail", :state => "QLD", :postcode => "4860", :email => "Jordan.I.Bishop@trashymail.com", :birthday => "1955/5/23 00:00:00"
-Person.create :gender => "male", :first_name => "Dylan", :middle_initial => "L", :last_name => "Waters", :street_address => "72 Wigley Street", :city => "North Plympton", :state => "SA", :postcode => "5037", :email => "Dylan.L.Waters@trashymail.com", :birthday => "1981/11/21 00:00:00"
-Person.create :gender => "male", :first_name => "Jacob", :middle_initial => "S", :last_name => "Hooper", :street_address => "85 Queen Street", :city => "Taylors Point", :state => "NSW", :postcode => "2107", :email => "Jacob.S.Hooper@mailinator.com", :birthday => "1974/5/16 00:00:00"
-Person.create :gender => "male", :first_name => "Zak", :middle_initial => "S", :last_name => "Donnelly", :street_address => "43 Chapman Avenue", :city => "Yosemite", :state => "NSW", :postcode => "2780", :email => "Zak.S.Donnelly@spambob.com", :birthday => "1967/2/12 00:00:00"
-Person.create :gender => "male", :first_name => "Anthony", :middle_initial => "A", :last_name => "Bentley", :street_address => "50 Rockhampton Qld", :city => "Rossmoya", :state => "QLD", :postcode => "4702", :email => "Anthony.A.Bentley@trashymail.com", :birthday => "1967/5/22 00:00:00"
-Person.create :gender => "male", :first_name => "Elliot", :middle_initial => "S", :last_name => "Spencer", :street_address => "83 Weemala Avenue", :city => "Garland", :state => "NSW", :postcode => "2797", :email => "Elliot.S.Spencer@spambob.com", :birthday => "1974/5/27 00:00:00"
-Person.create :gender => "male", :first_name => "James", :middle_initial => "M", :last_name => "Lowe", :street_address => "66 Garden Place", :city => "Kurraca West", :state => "VIC", :postcode => "3518", :email => "James.M.Lowe@trashymail.com", :birthday => "1985/1/25 00:00:00"
-Person.create :gender => "male", :first_name => "Charles", :middle_initial => "K", :last_name => "O'ullivan", :street_address => "63 Auricht Road", :city => "Keppoch", :state => "SA", :postcode => "5271", :email => "Charles.K.O'ullivan@pookmail.com", :birthday => "1984/11/11 00:00:00"
-Person.create :gender => "male", :first_name => "Christopher", :middle_initial => "A", :last_name => "Birch", :street_address => "49 Weemala Avenue", :city => "Koorawatha", :state => "NSW", :postcode => "2807", :email => "Christopher.A.Birch@spambob.com", :birthday => "1979/9/16 00:00:00"
-Person.create :gender => "male", :first_name => "Alfie", :middle_initial => "C", :last_name => "Heath", :street_address => "91 Clifton Street", :city => "Wahring", :state => "VIC", :postcode => "3608", :email => "Alfie.C.Heath@pookmail.com", :birthday => "1957/10/11 00:00:00"
-Person.create :gender => "female", :first_name => "Alisha", :middle_initial => "J", :last_name => "Dixon", :street_address => "31 Raglan Street", :city => "Boyneside", :state => "QLD", :postcode => "4610", :email => "Alisha.J.Dixon@dodgit.com", :birthday => "1974/7/14 00:00:00"
-Person.create :gender => "male", :first_name => "Bailey", :middle_initial => "M", :last_name => "Simpson", :street_address => "13 Halsey Road", :city => "Goolwa", :state => "SA", :postcode => "5214", :email => "Bailey.M.Simpson@trashymail.com", :birthday => "1949/5/8 00:00:00"
-Person.create :gender => "male", :first_name => "Joshua", :middle_initial => "A", :last_name => "Hope", :street_address => "43 Springhill Bottom Road", :city => "Cluan", :state => "TAS", :postcode => "7303", :email => "Joshua.A.Hope@trashymail.com", :birthday => "1955/7/11 00:00:00"
-Person.create :gender => "male", :first_name => "Kian", :middle_initial => "L", :last_name => "O'onnor", :street_address => "64 Loris Way", :city => "Yilliminning", :state => "WA", :postcode => "6312", :email => "Kian.L.O'onnor@spambob.com", :birthday => "1974/9/9 00:00:00"
-Person.create :gender => "male", :first_name => "John", :middle_initial => "H", :last_name => "Collins", :street_address => "39 Peterho Boulevard", :city => "Buckland Park", :state => "SA", :postcode => "5120", :email => "John.H.Collins@dodgit.com", :birthday => "1962/12/2 00:00:00"
-Person.create :gender => "male", :first_name => "George", :middle_initial => "E", :last_name => "O'rien", :street_address => "48 Woolnough Road", :city => "Waterfall Gully", :state => "SA", :postcode => "5066", :email => "George.E.O'rien@mailinator.com", :birthday => "1973/8/27 00:00:00"
-Person.create :gender => "male", :first_name => "Leon", :middle_initial => "L", :last_name => "Holmes", :street_address => "53 Bayview Close", :city => "Tieri", :state => "QLD", :postcode => "4709", :email => "Leon.L.Holmes@trashymail.com", :birthday => "1976/9/25 00:00:00"
-Person.create :gender => "female", :first_name => "Eloise", :middle_initial => "M", :last_name => "King", :street_address => "75 Southwell Crescent", :city => "Charley Creek", :state => "WA", :postcode => "6239", :email => "Eloise.M.King@trashymail.com", :birthday => "1950/3/19 00:00:00"
-Person.create :gender => "male", :first_name => "Jude", :middle_initial => "E", :last_name => "Iqbal", :street_address => "36 Bailey Street", :city => "Yarpturk", :state => "VIC", :postcode => "3283", :email => "Jude.E.Iqbal@dodgit.com", :birthday => "1970/7/24 00:00:00"
-Person.create :gender => "male", :first_name => "Michael", :middle_initial => "A", :last_name => "Stephens", :street_address => "78 Old Tenterfield Road", :city => "Mookima Wybra", :state => "NSW", :postcode => "2469", :email => "Michael.A.Stephens@mailinator.com", :birthday => "1955/7/9 00:00:00"
-Person.create :gender => "female", :first_name => "Chelsea", :middle_initial => "S", :last_name => "Hilton", :street_address => "58 Chatsworth Drive", :city => "St James", :state => "WA", :postcode => "6102", :email => "Chelsea.S.Hilton@mailinator.com", :birthday => "1967/10/12 00:00:00"
-Person.create :gender => "male", :first_name => "Sam", :middle_initial => "A", :last_name => "Cunningham", :street_address => "6 Southwell Crescent", :city => "Withers", :state => "WA", :postcode => "6230", :email => "Sam.A.Cunningham@trashymail.com", :birthday => "1973/4/9 00:00:00"
-Person.create :gender => "male", :first_name => "Spencer", :middle_initial => "E", :last_name => "Thomson", :street_address => "16 Barker Street", :city => "Broomehill", :state => "WA", :postcode => "6318", :email => "Spencer.E.Thomson@trashymail.com", :birthday => "1977/3/15 00:00:00"
-Person.create :gender => "female", :first_name => "Katie", :middle_initial => "R", :last_name => "Dennis", :street_address => "57 Healy Road", :city => "Queenwood", :state => "WA", :postcode => "6239", :email => "Katie.R.Dennis@trashymail.com", :birthday => "1942/3/13 00:00:00"
-Person.create :gender => "female", :first_name => "Matilda", :middle_initial => "S", :last_name => "Parry", :street_address => "91 Begley Street", :city => "Tarzali", :state => "QLD", :postcode => "4885", :email => "Matilda.S.Parry@trashymail.com", :birthday => "1985/2/4 00:00:00"
-Person.create :gender => "male", :first_name => "Lewis", :middle_initial => "J", :last_name => "Gregory", :street_address => "22 Gaffney Street", :city => "Lara", :state => "VIC", :postcode => "3212", :email => "Lewis.J.Gregory@spambob.com", :birthday => "1968/4/30 00:00:00"
-Person.create :gender => "male", :first_name => "Jake", :middle_initial => "B", :last_name => "McCarthy", :street_address => "35 Albert Street", :city => "Christmas Creek", :state => "QLD", :postcode => "4285", :email => "Jake.B.McCarthy@dodgit.com", :birthday => "1960/1/6 00:00:00"
-Person.create :gender => "male", :first_name => "Leon", :middle_initial => "F", :last_name => "Rees", :street_address => "79 Ashton Road", :city => "Pantapin", :state => "WA", :postcode => "6384", :email => "Leon.F.Rees@trashymail.com", :birthday => "1960/12/3 00:00:00"
-Person.create :gender => "female", :first_name => "Maya", :middle_initial => "P", :last_name => "Wright", :street_address => "50 Armstrong Street", :city => "Mincha West", :state => "VIC", :postcode => "3568", :email => "Maya.P.Wright@mailinator.com", :birthday => "1962/5/31 00:00:00"
-Person.create :gender => "female", :first_name => "Francesca", :middle_initial => "C", :last_name => "Hargreaves", :street_address => "97 Elizabeth Street", :city => "Widgee", :state => "QLD", :postcode => "4570", :email => "Francesca.C.Hargreaves@dodgit.com", :birthday => "1940/10/22 00:00:00"
-Person.create :gender => "male", :first_name => "Zachary", :middle_initial => "C", :last_name => "Norton", :street_address => "7 Elgin Street", :city => "Windella", :state => "NSW", :postcode => "2320", :email => "Zachary.C.Norton@dodgit.com", :birthday => "1954/9/9 00:00:00"
-Person.create :gender => "female", :first_name => "Matilda", :middle_initial => "A", :last_name => "Weston", :street_address => "32 Reynolds Road", :city => "Banks Pocket", :state => "QLD", :postcode => "4570", :email => "Matilda.A.Weston@dodgit.com", :birthday => "1977/5/25 00:00:00"
-Person.create :gender => "male", :first_name => "Morgan", :middle_initial => "I", :last_name => "McKenzie", :street_address => "72 Shamrock Avenue", :city => "Tomakin", :state => "NSW", :postcode => "2537", :email => "Morgan.I.McKenzie@dodgit.com", :birthday => "1963/3/13 00:00:00"
-Person.create :gender => "male", :first_name => "Bradley", :middle_initial => "A", :last_name => "Akhtar", :street_address => "85 Argyle Street", :city => "Rookhurst", :state => "NSW", :postcode => "2422", :email => "Bradley.A.Akhtar@pookmail.com", :birthday => "1941/4/3 00:00:00"
-Person.create :gender => "female", :first_name => "Laura", :middle_initial => "A", :last_name => "Turner", :street_address => "18 Dora Creek", :city => "Woodlawn", :state => "NSW", :postcode => "2480", :email => "Laura.A.Turner@pookmail.com", :birthday => "1962/9/30 00:00:00"
-Person.create :gender => "male", :first_name => "Jake", :middle_initial => "L", :last_name => "Stephenson", :street_address => "92 Scenic Road", :city => "Berridale", :state => "NSW", :postcode => "2628", :email => "Jake.L.Stephenson@trashymail.com", :birthday => "1943/9/5 00:00:00"
-Person.create :gender => "male", :first_name => "Alfie", :middle_initial => "C", :last_name => "Cole", :street_address => "33 Gadd Avenue", :city => "Snowtown", :state => "SA", :postcode => "5520", :email => "Alfie.C.Cole@dodgit.com", :birthday => "1958/10/11 00:00:00"
-Person.create :gender => "male", :first_name => "Ethan", :middle_initial => "G", :last_name => "Joyce", :street_address => "85 Hillsdale Road", :city => "Brovinia", :state => "QLD", :postcode => "4626", :email => "Ethan.G.Joyce@dodgit.com", :birthday => "1958/3/26 00:00:00"
-Person.create :gender => "female", :first_name => "Ellie", :middle_initial => "M", :last_name => "Quinn", :street_address => "17 Springhill Bottom Road", :city => "Paradise", :state => "TAS", :postcode => "7306", :email => "Ellie.M.Quinn@trashymail.com", :birthday => "1968/12/21 00:00:00"
-Person.create :gender => "female", :first_name => "Gracie", :middle_initial => "I", :last_name => "Davies", :street_address => "67 Carlisle Street", :city => "Tarcombe", :state => "VIC", :postcode => "3666", :email => "Gracie.I.Davies@spambob.com", :birthday => "1961/12/23 00:00:00"
-Person.create :gender => "male", :first_name => "Kieran", :middle_initial => "C", :last_name => "Field", :street_address => "29 Avondale Drive", :city => "Windang", :state => "NSW", :postcode => "2528", :email => "Kieran.C.Field@mailinator.com", :birthday => "1950/7/4 00:00:00"
-Person.create :gender => "male", :first_name => "Owen", :middle_initial => "J", :last_name => "Harvey", :street_address => "61 Alfred Street", :city => "Esperance", :state => "WA", :postcode => "6450", :email => "Owen.J.Harvey@mailinator.com", :birthday => "1952/11/21 00:00:00"
-Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "J", :last_name => "Young", :street_address => "36 Tooraweenah Road", :city => "Nanardine", :state => "NSW", :postcode => "2870", :email => "Tegan.J.Young@spambob.com", :birthday => "1953/8/15 00:00:00"
-Person.create :gender => "female", :first_name => "Gracie", :middle_initial => "T", :last_name => "Adams", :street_address => "67 Southwell Crescent", :city => "Waterloo", :state => "WA", :postcode => "6228", :email => "Gracie.T.Adams@pookmail.com", :birthday => "1950/6/6 00:00:00"
-Person.create :gender => "male", :first_name => "Archie", :middle_initial => "M", :last_name => "Whitehouse", :street_address => "85 Maintongoon Road", :city => "Moe South", :state => "VIC", :postcode => "3825", :email => "Archie.M.Whitehouse@dodgit.com", :birthday => "1961/2/22 00:00:00"
-Person.create :gender => "female", :first_name => "Libby", :middle_initial => "D", :last_name => "Marsh", :street_address => "81 Barker Street", :city => "Kenmare", :state => "WA", :postcode => "6316", :email => "Libby.D.Marsh@pookmail.com", :birthday => "1980/9/17 00:00:00"
-Person.create :gender => "male", :first_name => "Harry", :middle_initial => "E", :last_name => "Hargreaves", :street_address => "79 Patton Street", :city => "Warranwood", :state => "VIC", :postcode => "3134", :email => "Harry.E.Hargreaves@trashymail.com", :birthday => "1959/12/24 00:00:00" -Person.create :gender => "female", :first_name => "Madison", :middle_initial => "J", :last_name => "Gilbert", :street_address => "68 Rose Street", :city => "Kallista", :state => "VIC", :postcode => "3791", :email => "Madison.J.Gilbert@dodgit.com", :birthday => "1978/10/11 00:00:00" -Person.create :gender => "male", :first_name => "Henry", :middle_initial => "S", :last_name => "Nicholson", :street_address => "97 Parkes Road", :city => "Ardeer", :state => "VIC", :postcode => "3022", :email => "Henry.S.Nicholson@mailinator.com", :birthday => "1953/10/9 00:00:00" -Person.create :gender => "female", :first_name => "Samantha", :middle_initial => "J", :last_name => "Warren", :street_address => "63 McGregor Street", :city => "Bonython", :state => "ACT", :postcode => "2905", :email => "Samantha.J.Warren@spambob.com", :birthday => "1948/8/10 00:00:00" -Person.create :gender => "male", :first_name => "Noah", :middle_initial => "R", :last_name => "Palmer", :street_address => "42 Sydney Road", :city => "Maitland Bar", :state => "NSW", :postcode => "2850", :email => "Noah.R.Palmer@mailinator.com", :birthday => "1941/8/22 00:00:00" -Person.create :gender => "male", :first_name => "Morgan", :middle_initial => "N", :last_name => "Bird", :street_address => "34 Taylor Street", :city => "Bunbartha", :state => "VIC", :postcode => "3634", :email => "Morgan.N.Bird@dodgit.com", :birthday => "1980/6/29 00:00:00" -Person.create :gender => "female", :first_name => "Charlotte", :middle_initial => "S", :last_name => "Watts", :street_address => "9 Queen Street", :city => "Collaroy Beach", :state => "NSW", :postcode => "2097", :email => "Charlotte.S.Watts@trashymail.com", :birthday => "1964/9/7 00:00:00" -Person.create :gender => "female", :first_name => "Libby", :middle_initial => "S", :last_name => "Carter", :street_address => "60 Woerdens Road", :city => "Raymond Terrace", :state => "NSW", :postcode => "2324", :email => "Libby.S.Carter@mailinator.com", :birthday => "1951/6/26 00:00:00" -Person.create :gender => "female", :first_name => "Jessica", :middle_initial => "E", :last_name => "Begum", :street_address => "77 Mnimbah Road", :city => "Gresford", :state => "NSW", :postcode => "2311", :email => "Jessica.E.Begum@pookmail.com", :birthday => "1981/12/28 00:00:00" -Person.create :gender => "male", :first_name => "Sebastian", :middle_initial => "H", :last_name => "Storey", :street_address => "45 Edmundsons Road", :city => "Lamplough", :state => "VIC", :postcode => "3352", :email => "Sebastian.H.Storey@mailinator.com", :birthday => "1956/1/6 00:00:00" -Person.create :gender => "male", :first_name => "Kyle", :middle_initial => "S", :last_name => "Morris", :street_address => "88 Girvan Grove", :city => "Eaglehawk North", :state => "VIC", :postcode => "3556", :email => "Kyle.S.Morris@dodgit.com", :birthday => "1944/10/18 00:00:00" -Person.create :gender => "male", :first_name => "Finlay", :middle_initial => "G", :last_name => "Lewis", :street_address => "38 Panorama Road", :city => "Kingswood", :state => "NSW", :postcode => "2340", :email => "Finlay.G.Lewis@pookmail.com", :birthday => "1977/5/31 00:00:00" -Person.create :gender => "female", :first_name => "Lydia", :middle_initial => "H", :last_name => "Fitzgerald", :street_address => 
"49 Bungana Drive", :city => "Polish Hill River", :state => "SA", :postcode => "5453", :email => "Lydia.H.Fitzgerald@trashymail.com", :birthday => "1943/6/27 00:00:00" -Person.create :gender => "female", :first_name => "Caitlin", :middle_initial => "B", :last_name => "Atkins", :street_address => "4 Saggers Road", :city => "Forrestania", :state => "WA", :postcode => "6359", :email => "Caitlin.B.Atkins@dodgit.com", :birthday => "1982/2/15 00:00:00" -Person.create :gender => "male", :first_name => "Billy", :middle_initial => "S", :last_name => "Page", :street_address => "25 Sunraysia Road", :city => "Cardigan", :state => "VIC", :postcode => "3352", :email => "Billy.S.Page@trashymail.com", :birthday => "1983/11/4 00:00:00" -Person.create :gender => "female", :first_name => "Charlie", :middle_initial => "E", :last_name => "Khan", :street_address => "72 Dalgarno Street", :city => "Nowley", :state => "NSW", :postcode => "2386", :email => "Charlie.E.Khan@dodgit.com", :birthday => "1951/12/12 00:00:00" -Person.create :gender => "male", :first_name => "Owen", :middle_initial => "H", :last_name => "Ali", :street_address => "90 Fitzroy Street", :city => "Ballarat East", :state => "VIC", :postcode => "3350", :email => "Owen.H.Ali@spambob.com", :birthday => "1958/9/5 00:00:00" -Person.create :gender => "female", :first_name => "Mia", :middle_initial => "A", :last_name => "Gould", :street_address => "33 Thule Drive", :city => "Julia", :state => "SA", :postcode => "5374", :email => "Mia.A.Gould@spambob.com", :birthday => "1959/10/7 00:00:00" -Person.create :gender => "male", :first_name => "Joe", :middle_initial => "P", :last_name => "Miah", :street_address => "83 South Street", :city => "Tinderbox", :state => "TAS", :postcode => "7054", :email => "Joe.P.Miah@spambob.com", :birthday => "1940/8/17 00:00:00" -Person.create :gender => "male", :first_name => "Jamie", :middle_initial => "S", :last_name => "Mills", :street_address => "18 Chapman Avenue", :city => "Mozart", :state => "NSW", :postcode => "2787", :email => "Jamie.S.Mills@mailinator.com", :birthday => "1980/6/10 00:00:00" -Person.create :gender => "male", :first_name => "Louie", :middle_initial => "A", :last_name => "Carter", :street_address => "35 Larissa Court", :city => "Merrinee", :state => "VIC", :postcode => "3496", :email => "Louie.A.Carter@pookmail.com", :birthday => "1971/9/5 00:00:00" -Person.create :gender => "female", :first_name => "Grace", :middle_initial => "M", :last_name => "Wade", :street_address => "89 Black Range Road", :city => "New Buildings", :state => "NSW", :postcode => "2550", :email => "Grace.M.Wade@spambob.com", :birthday => "1945/2/9 00:00:00" -Person.create :gender => "female", :first_name => "Sarah", :middle_initial => "Z", :last_name => "Robson", :street_address => "29 Glenpark Road", :city => "North Boambee Valley", :state => "NSW", :postcode => "2450", :email => "Sarah.Z.Robson@pookmail.com", :birthday => "1957/10/2 00:00:00" -Person.create :gender => "female", :first_name => "Abbie", :middle_initial => "J", :last_name => "Dobson", :street_address => "56 Porana Place", :city => "Tenindewa", :state => "WA", :postcode => "6632", :email => "Abbie.J.Dobson@pookmail.com", :birthday => "1952/1/12 00:00:00" -Person.create :gender => "male", :first_name => "Michael", :middle_initial => "S", :last_name => "O'ullivan", :street_address => "17 Jones Road", :city => "Dutton Park", :state => "QLD", :postcode => "4102", :email => "Michael.S.O'ullivan@pookmail.com", :birthday => "1959/7/22 00:00:00" -Person.create :gender => 
"male", :first_name => "Oliver", :middle_initial => "S", :last_name => "Johnston", :street_address => "69 Healy Road", :city => "Greenbushes", :state => "WA", :postcode => "6254", :email => "Oliver.S.Johnston@dodgit.com", :birthday => "1975/8/18 00:00:00" -Person.create :gender => "female", :first_name => "Amber", :middle_initial => "A", :last_name => "Bull", :street_address => "86 Wilson Street", :city => "Towaninny South", :state => "VIC", :postcode => "3527", :email => "Amber.A.Bull@dodgit.com", :birthday => "1941/1/26 00:00:00" -Person.create :gender => "female", :first_name => "Harriet", :middle_initial => "T", :last_name => "Ryan", :street_address => "25 Carlisle Street", :city => "Mangalore", :state => "VIC", :postcode => "3663", :email => "Harriet.T.Ryan@mailinator.com", :birthday => "1950/11/11 00:00:00" -Person.create :gender => "female", :first_name => "Samantha", :middle_initial => "C", :last_name => "Hooper", :street_address => "21 Aquatic Road", :city => "Dumaresq Island", :state => "NSW", :postcode => "2430", :email => "Samantha.C.Hooper@dodgit.com", :birthday => "1974/7/9 00:00:00" -Person.create :gender => "male", :first_name => "Mohammad", :middle_initial => "S", :last_name => "Webb", :street_address => "17 Aquatic Road", :city => "Taree West", :state => "NSW", :postcode => "2430", :email => "Mohammad.S.Webb@pookmail.com", :birthday => "1962/7/21 00:00:00" -Person.create :gender => "male", :first_name => "Billy", :middle_initial => "D", :last_name => "Robertson", :street_address => "32 Delan Road", :city => "Mungy", :state => "QLD", :postcode => "4671", :email => "Billy.D.Robertson@trashymail.com", :birthday => "1940/4/20 00:00:00" -Person.create :gender => "female", :first_name => "Aimee", :middle_initial => "T", :last_name => "Williamson", :street_address => "53 Hunter Street", :city => "Drayton North", :state => "QLD", :postcode => "4350", :email => "Aimee.T.Williamson@dodgit.com", :birthday => "1960/10/8 00:00:00" -Person.create :gender => "male", :first_name => "Max", :middle_initial => "A", :last_name => "Tomlinson", :street_address => "99 Bellion Drive", :city => "Northcliffe", :state => "WA", :postcode => "6262", :email => "Max.A.Tomlinson@spambob.com", :birthday => "1947/7/11 00:00:00" -Person.create :gender => "male", :first_name => "Reece", :middle_initial => "C", :last_name => "Ryan", :street_address => "77 Edgewater Close", :city => "Bream Beach", :state => "NSW", :postcode => "2540", :email => "Reece.C.Ryan@mailinator.com", :birthday => "1960/12/9 00:00:00" -Person.create :gender => "male", :first_name => "Jake", :middle_initial => "S", :last_name => "Cameron", :street_address => "46 Cherokee Road", :city => "Bradford", :state => "VIC", :postcode => "3463", :email => "Jake.S.Cameron@spambob.com", :birthday => "1980/1/21 00:00:00" -Person.create :gender => "male", :first_name => "Christopher", :middle_initial => "H", :last_name => "Owens", :street_address => "24 Sunset Drive", :city => "Maneroo", :state => "QLD", :postcode => "4730", :email => "Christopher.H.Owens@spambob.com", :birthday => "1984/10/6 00:00:00" -Person.create :gender => "male", :first_name => "Elliot", :middle_initial => "H", :last_name => "Pratt", :street_address => "77 Boughtman Street", :city => "Oakleigh South", :state => "VIC", :postcode => "3167", :email => "Elliot.H.Pratt@mailinator.com", :birthday => "1940/1/28 00:00:00" -Person.create :gender => "female", :first_name => "Leah", :middle_initial => "W", :last_name => "Pearson", :street_address => "67 Larissa Court", :city => "Linga", 
:state => "VIC", :postcode => "3509", :email => "Leah.W.Pearson@trashymail.com", :birthday => "1947/3/21 00:00:00" -Person.create :gender => "male", :first_name => "Sebastian", :middle_initial => "E", :last_name => "Poole", :street_address => "96 Amiens Road", :city => "Carcalgong", :state => "NSW", :postcode => "2850", :email => "Sebastian.E.Poole@spambob.com", :birthday => "1963/11/21 00:00:00" -Person.create :gender => "female", :first_name => "Brooke", :middle_initial => "Z", :last_name => "Byrne", :street_address => "30 Queen Street", :city => "Oxford Falls", :state => "NSW", :postcode => "2100", :email => "Brooke.Z.Byrne@pookmail.com", :birthday => "1966/9/16 00:00:00" -Person.create :gender => "female", :first_name => "Lily", :middle_initial => "C", :last_name => "Rowe", :street_address => "46 Redesdale Rd", :city => "Painswick", :state => "VIC", :postcode => "3551", :email => "Lily.C.Rowe@spambob.com", :birthday => "1966/10/15 00:00:00" -Person.create :gender => "female", :first_name => "Ella", :middle_initial => "S", :last_name => "Cole", :street_address => "82 Yangan Drive", :city => "Tintinhull", :state => "NSW", :postcode => "2352", :email => "Ella.S.Cole@pookmail.com", :birthday => "1978/7/19 00:00:00" -Person.create :gender => "female", :first_name => "Nicole", :middle_initial => "J", :last_name => "Bray", :street_address => "85 Wilson Street", :city => "Dumosa", :state => "VIC", :postcode => "3527", :email => "Nicole.J.Bray@pookmail.com", :birthday => "1955/8/15 00:00:00" -Person.create :gender => "male", :first_name => "Zachary", :middle_initial => "A", :last_name => "Hurst", :street_address => "51 Maintongoon Road", :city => "Toorongo", :state => "VIC", :postcode => "3833", :email => "Zachary.A.Hurst@trashymail.com", :birthday => "1985/7/16 00:00:00" -Person.create :gender => "female", :first_name => "Melissa", :middle_initial => "S", :last_name => "Miles", :street_address => "29 Grandis Road", :city => "Temagog", :state => "NSW", :postcode => "2440", :email => "Melissa.S.Miles@pookmail.com", :birthday => "1973/4/15 00:00:00" -Person.create :gender => "male", :first_name => "Christopher", :middle_initial => "G", :last_name => "Nixon", :street_address => "67 Mt Berryman Road", :city => "Ringwood", :state => "QLD", :postcode => "4343", :email => "Christopher.G.Nixon@spambob.com", :birthday => "1965/3/14 00:00:00" -Person.create :gender => "male", :first_name => "John", :middle_initial => "A", :last_name => "Marsh", :street_address => "88 Mendooran Road", :city => "Delroy Gardens", :state => "NSW", :postcode => "2830", :email => "John.A.Marsh@dodgit.com", :birthday => "1970/5/25 00:00:00" -Person.create :gender => "female", :first_name => "Danielle", :middle_initial => "L", :last_name => "Nolan", :street_address => "37 McLeans Road", :city => "Mungungo", :state => "QLD", :postcode => "4630", :email => "Danielle.L.Nolan@trashymail.com", :birthday => "1974/9/7 00:00:00" -Person.create :gender => "female", :first_name => "Maisie", :middle_initial => "J", :last_name => "Davison", :street_address => "71 Kerma Crescent", :city => "Bowenfels", :state => "NSW", :postcode => "2790", :email => "Maisie.J.Davison@dodgit.com", :birthday => "1962/3/15 00:00:00" -Person.create :gender => "male", :first_name => "Oliver", :middle_initial => "E", :last_name => "Fuller", :street_address => "40 Gaffney Street", :city => "Belvedere Park", :state => "VIC", :postcode => "3198", :email => "Oliver.E.Fuller@spambob.com", :birthday => "1941/10/15 00:00:00" -Person.create :gender => "male", :first_name 
=> "Sam", :middle_initial => "C", :last_name => "Watson", :street_address => "2 Wilson Street", :city => "Lalbert", :state => "VIC", :postcode => "3542", :email => "Sam.C.Watson@dodgit.com", :birthday => "1952/4/11 00:00:00" -Person.create :gender => "female", :first_name => "Jodie", :middle_initial => "E", :last_name => "Faulkner", :street_address => "87 Auricht Road", :city => "Monbulla", :state => "SA", :postcode => "5277", :email => "Jodie.E.Faulkner@mailinator.com", :birthday => "1974/10/2 00:00:00" -Person.create :gender => "female", :first_name => "Francesca", :middle_initial => "S", :last_name => "Parsons", :street_address => "32 Purcell Place", :city => "Grafton West", :state => "NSW", :postcode => "2460", :email => "Francesca.S.Parsons@spambob.com", :birthday => "1966/1/5 00:00:00" -Person.create :gender => "male", :first_name => "Alexander", :middle_initial => "S", :last_name => "Hooper", :street_address => "51 Davenport Street", :city => "Ournie", :state => "NSW", :postcode => "2640", :email => "Alexander.S.Hooper@mailinator.com", :birthday => "1969/6/17 00:00:00" -Person.create :gender => "male", :first_name => "Lucas", :middle_initial => "M", :last_name => "Berry", :street_address => "59 Loris Way", :city => "Pumphreys Bridge", :state => "WA", :postcode => "6308", :email => "Lucas.M.Berry@pookmail.com", :birthday => "1940/2/19 00:00:00" -Person.create :gender => "male", :first_name => "Scott", :middle_initial => "D", :last_name => "Watson", :street_address => "96 Henry Moss Court", :city => "Lewiston", :state => "SA", :postcode => "5501", :email => "Scott.D.Watson@spambob.com", :birthday => "1952/7/8 00:00:00" -Person.create :gender => "female", :first_name => "Alisha", :middle_initial => "T", :last_name => "Perkins", :street_address => "10 Shannon Court", :city => "Erskine", :state => "SA", :postcode => "5422", :email => "Alisha.T.Perkins@trashymail.com", :birthday => "1975/10/11 00:00:00" -Person.create :gender => "female", :first_name => "Cerys", :middle_initial => "R", :last_name => "Thomson", :street_address => "16 Dabinett Road", :city => "Cowirra", :state => "SA", :postcode => "5238", :email => "Cerys.R.Thomson@pookmail.com", :birthday => "1970/8/18 00:00:00" -Person.create :gender => "male", :first_name => "David", :middle_initial => "I", :last_name => "Campbell", :street_address => "73 Frouds Road", :city => "Hastings", :state => "VIC", :postcode => "3915", :email => "David.I.Campbell@spambob.com", :birthday => "1973/7/22 00:00:00" -Person.create :gender => "female", :first_name => "Hannah", :middle_initial => "D", :last_name => "Norton", :street_address => "79 Gadd Avenue", :city => "Thrington", :state => "SA", :postcode => "5552", :email => "Hannah.D.Norton@mailinator.com", :birthday => "1966/8/3 00:00:00" -Person.create :gender => "male", :first_name => "Luca", :middle_initial => "R", :last_name => "Iqbal", :street_address => "38 Goldfields Road", :city => "Cutella", :state => "QLD", :postcode => "4352", :email => "Luca.R.Iqbal@trashymail.com", :birthday => "1942/8/12 00:00:00" -Person.create :gender => "male", :first_name => "Nicholas", :middle_initial => "A", :last_name => "Burrows", :street_address => "86 Devon Street", :city => "Marleston", :state => "SA", :postcode => "5033", :email => "Nicholas.A.Burrows@pookmail.com", :birthday => "1957/7/2 00:00:00" -Person.create :gender => "male", :first_name => "Tyler", :middle_initial => "T", :last_name => "Norman", :street_address => "84 Carolina Park Road", :city => "Forresters Beach", :state => "NSW", :postcode => 
"2260", :email => "Tyler.T.Norman@spambob.com", :birthday => "1984/7/17 00:00:00" -Person.create :gender => "male", :first_name => "Christopher", :middle_initial => "S", :last_name => "Perkins", :street_address => "90 Woerdens Road", :city => "Seaham", :state => "NSW", :postcode => "2324", :email => "Christopher.S.Perkins@pookmail.com", :birthday => "1965/3/28 00:00:00" -Person.create :gender => "male", :first_name => "Harvey", :middle_initial => "G", :last_name => "Scott", :street_address => "36 Sinclair Street", :city => "Alford", :state => "SA", :postcode => "5555", :email => "Harvey.G.Scott@pookmail.com", :birthday => "1950/1/5 00:00:00" -Person.create :gender => "female", :first_name => "Isabelle", :middle_initial => "H", :last_name => "Jordan", :street_address => "40 Rose Street", :city => "Guys Hill", :state => "VIC", :postcode => "3807", :email => "Isabelle.H.Jordan@mailinator.com", :birthday => "1948/7/14 00:00:00" -Person.create :gender => "male", :first_name => "Muhammad", :middle_initial => "L", :last_name => "Smart", :street_address => "93 Denison Road", :city => "Crib Point", :state => "VIC", :postcode => "3919", :email => "Muhammad.L.Smart@pookmail.com", :birthday => "1948/8/25 00:00:00" -Person.create :gender => "male", :first_name => "Zachary", :middle_initial => "K", :last_name => "Sheppard", :street_address => "5 Alfred Street", :city => "Salmon Gums", :state => "WA", :postcode => "6445", :email => "Zachary.K.Sheppard@pookmail.com", :birthday => "1985/10/27 00:00:00" -Person.create :gender => "male", :first_name => "Joel", :middle_initial => "J", :last_name => "Chamberlain", :street_address => "8 English Street", :city => "Burdett", :state => "SA", :postcode => "5253", :email => "Joel.J.Chamberlain@dodgit.com", :birthday => "1965/11/27 00:00:00" -Person.create :gender => "female", :first_name => "Jennifer", :middle_initial => "J", :last_name => "Rice", :street_address => "88 Benny Street", :city => "Erriba", :state => "TAS", :postcode => "7310", :email => "Jennifer.J.Rice@mailinator.com", :birthday => "1964/1/15 00:00:00" -Person.create :gender => "female", :first_name => "Scarlett", :middle_initial => "A", :last_name => "Ward", :street_address => "49 Chester Street", :city => "Cobaki", :state => "NSW", :postcode => "2486", :email => "Scarlett.A.Ward@dodgit.com", :birthday => "1969/2/16 00:00:00" -Person.create :gender => "male", :first_name => "Dominic", :middle_initial => "S", :last_name => "Higgins", :street_address => "7 Bailey Street", :city => "Warrabkook", :state => "VIC", :postcode => "3286", :email => "Dominic.S.Higgins@mailinator.com", :birthday => "1953/1/21 00:00:00" -Person.create :gender => "male", :first_name => "Alfie", :middle_initial => "J", :last_name => "Matthews", :street_address => "53 Derry Street", :city => "Strathpine", :state => "QLD", :postcode => "4500", :email => "Alfie.J.Matthews@spambob.com", :birthday => "1949/10/13 00:00:00" -Person.create :gender => "female", :first_name => "Jasmine", :middle_initial => "A", :last_name => "Morrison", :street_address => "71 Tanner Street", :city => "Barrow Creek", :state => "NT", :postcode => "0872", :email => "Jasmine.A.Morrison@dodgit.com", :birthday => "1979/3/14 00:00:00" -Person.create :gender => "male", :first_name => "Hayden", :middle_initial => "A", :last_name => "Farmer", :street_address => "27 Banksia Street", :city => "Useless Loop", :state => "WA", :postcode => "6537", :email => "Hayden.A.Farmer@pookmail.com", :birthday => "1974/5/12 00:00:00" -Person.create :gender => "female", :first_name => 
"Sienna", :middle_initial => "S", :last_name => "Dyer", :street_address => "5 Thule Drive", :city => "Templers", :state => "SA", :postcode => "5371", :email => "Sienna.S.Dyer@trashymail.com", :birthday => "1954/5/31 00:00:00" -Person.create :gender => "male", :first_name => "Louis", :middle_initial => "R", :last_name => "Turnbull", :street_address => "27 Moruya Street", :city => "Cobargo", :state => "NSW", :postcode => "2550", :email => "Louis.R.Turnbull@spambob.com", :birthday => "1958/4/10 00:00:00" -Person.create :gender => "female", :first_name => "Elise", :middle_initial => "D", :last_name => "Alexander", :street_address => "25 Normans Road", :city => "Patyah", :state => "VIC", :postcode => "3318", :email => "Elise.D.Alexander@pookmail.com", :birthday => "1960/4/29 00:00:00" -Person.create :gender => "male", :first_name => "Jake", :middle_initial => "N", :last_name => "Evans", :street_address => "48 Denison Road", :city => "Silverleaves", :state => "VIC", :postcode => "3922", :email => "Jake.N.Evans@trashymail.com", :birthday => "1972/1/21 00:00:00" -Person.create :gender => "male", :first_name => "Josh", :middle_initial => "E", :last_name => "Bentley", :street_address => "32 Balonne Street", :city => "Gum Scrub", :state => "NSW", :postcode => "2441", :email => "Josh.E.Bentley@pookmail.com", :birthday => "1969/8/9 00:00:00" -Person.create :gender => "male", :first_name => "Leo", :middle_initial => "A", :last_name => "Dale", :street_address => "10 Boorie Road", :city => "Gordonbrook", :state => "QLD", :postcode => "4610", :email => "Leo.A.Dale@pookmail.com", :birthday => "1956/3/5 00:00:00" -Person.create :gender => "male", :first_name => "Alexander", :middle_initial => "G", :last_name => "Gallagher", :street_address => "89 Raglan Street", :city => "Crownthorpe", :state => "QLD", :postcode => "4605", :email => "Alexander.G.Gallagher@spambob.com", :birthday => "1958/1/13 00:00:00" -Person.create :gender => "male", :first_name => "David", :middle_initial => "H", :last_name => "Matthews", :street_address => "96 Village Drive", :city => "Smithfield West", :state => "NSW", :postcode => "2164", :email => "David.H.Matthews@trashymail.com", :birthday => "1957/7/18 00:00:00" -Person.create :gender => "female", :first_name => "Lilly", :middle_initial => "W", :last_name => "Bibi", :street_address => "37 Chatsworth Road", :city => "Camira", :state => "NSW", :postcode => "2469", :email => "Lilly.W.Bibi@pookmail.com", :birthday => "1965/11/10 00:00:00" -Person.create :gender => "male", :first_name => "Mason", :middle_initial => "Z", :last_name => "Rees", :street_address => "42 Shirley Street", :city => "Bahrs Scrub", :state => "QLD", :postcode => "4207", :email => "Mason.Z.Rees@trashymail.com", :birthday => "1971/8/11 00:00:00" -Person.create :gender => "male", :first_name => "Ethan", :middle_initial => "E", :last_name => "Rees", :street_address => "59 High Street", :city => "Port Julia", :state => "SA", :postcode => "5575", :email => "Ethan.E.Rees@trashymail.com", :birthday => "1943/5/3 00:00:00" -Person.create :gender => "male", :first_name => "Joel", :middle_initial => "M", :last_name => "Woods", :street_address => "88 Taltarni Road", :city => "St Arnaud", :state => "VIC", :postcode => "3478", :email => "Joel.M.Woods@pookmail.com", :birthday => "1979/6/13 00:00:00" -Person.create :gender => "male", :first_name => "Bradley", :middle_initial => "L", :last_name => "Cook", :street_address => "49 Derry Street", :city => "Weengallon", :state => "QLD", :postcode => "4497", :email => 
"Bradley.L.Cook@trashymail.com", :birthday => "1959/7/19 00:00:00" -Person.create :gender => "female", :first_name => "Nicole", :middle_initial => "Z", :last_name => "Walters", :street_address => "33 Inglewood Court", :city => "Metcalfe East", :state => "VIC", :postcode => "3444", :email => "Nicole.Z.Walters@trashymail.com", :birthday => "1953/7/24 00:00:00" -Person.create :gender => "female", :first_name => "Rebecca", :middle_initial => "C", :last_name => "Anderson", :street_address => "90 Dossiter Street", :city => "Brooks Bay", :state => "TAS", :postcode => "7116", :email => "Rebecca.C.Anderson@trashymail.com", :birthday => "1968/11/24 00:00:00" -Person.create :gender => "female", :first_name => "Yasmin", :middle_initial => "N", :last_name => "Hooper", :street_address => "96 Shell Road", :city => "Ferguson", :state => "VIC", :postcode => "3237", :email => "Yasmin.N.Hooper@mailinator.com", :birthday => "1971/4/5 00:00:00" -Person.create :gender => "female", :first_name => "Ava", :middle_initial => "C", :last_name => "Burns", :street_address => "68 McGregor Street", :city => "Richardson", :state => "ACT", :postcode => "2905", :email => "Ava.C.Burns@dodgit.com", :birthday => "1968/6/30 00:00:00" -Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "R", :last_name => "Goodwin", :street_address => "80 Cedar Street", :city => "Valley Of Lagoons", :state => "QLD", :postcode => "4850", :email => "Tegan.R.Goodwin@mailinator.com", :birthday => "1976/7/11 00:00:00" -Person.create :gender => "female", :first_name => "Courtney", :middle_initial => "A", :last_name => "Wong", :street_address => "19 Ugoa Street", :city => "Booti Booti", :state => "NSW", :postcode => "2428", :email => "Courtney.A.Wong@pookmail.com", :birthday => "1973/3/23 00:00:00" -Person.create :gender => "male", :first_name => "Joshua", :middle_initial => "R", :last_name => "Cook", :street_address => "29 Arthur Street", :city => "Wuuluman", :state => "NSW", :postcode => "2820", :email => "Joshua.R.Cook@dodgit.com", :birthday => "1949/9/21 00:00:00" -Person.create :gender => "female", :first_name => "Niamh", :middle_initial => "B", :last_name => "Bailey", :street_address => "63 Swanston Street", :city => "Glenorchy", :state => "VIC", :postcode => "3385", :email => "Niamh.B.Bailey@mailinator.com", :birthday => "1958/2/6 00:00:00" -Person.create :gender => "female", :first_name => "Molly", :middle_initial => "A", :last_name => "Edwards", :street_address => "47 Barnett Street", :city => "Montumana", :state => "TAS", :postcode => "7321", :email => "Molly.A.Edwards@pookmail.com", :birthday => "1941/4/2 00:00:00" -Person.create :gender => "female", :first_name => "Alicia", :middle_initial => "H", :last_name => "Mitchell", :street_address => "40 Devon Street", :city => "Semaphore South", :state => "SA", :postcode => "5019", :email => "Alicia.H.Mitchell@mailinator.com", :birthday => "1950/9/27 00:00:00" -Person.create :gender => "male", :first_name => "Luke", :middle_initial => "M", :last_name => "Henry", :street_address => "68 Yangan Drive", :city => "Manilla", :state => "NSW", :postcode => "2346", :email => "Luke.M.Henry@trashymail.com", :birthday => "1946/2/19 00:00:00" -Person.create :gender => "female", :first_name => "Madison", :middle_initial => "R", :last_name => "Murray", :street_address => "44 Yangan Drive", :city => "Mayvale", :state => "NSW", :postcode => "2347", :email => "Madison.R.Murray@mailinator.com", :birthday => "1942/8/12 00:00:00" -Person.create :gender => "female", :first_name => "Hannah", 
:middle_initial => "J", :last_name => "Hayes", :street_address => "90 Larissa Court", :city => "Paringi", :state => "NSW", :postcode => "3500", :email => "Hannah.J.Hayes@spambob.com", :birthday => "1983/9/15 00:00:00" -Person.create :gender => "female", :first_name => "Maddison", :middle_initial => "C", :last_name => "Brady", :street_address => "74 High Street", :city => "Tiddy Widdy Beach", :state => "SA", :postcode => "5571", :email => "Maddison.C.Brady@trashymail.com", :birthday => "1982/11/22 00:00:00" -Person.create :gender => "male", :first_name => "Ellis", :middle_initial => "I", :last_name => "Walsh", :street_address => "42 Banksia Street", :city => "Denham", :state => "WA", :postcode => "6537", :email => "Ellis.I.Walsh@trashymail.com", :birthday => "1963/4/18 00:00:00" -Person.create :gender => "male", :first_name => "Benjamin", :middle_initial => "M", :last_name => "Preston", :street_address => "44 Cofton Close", :city => "Dorrigo", :state => "NSW", :postcode => "2453", :email => "Benjamin.M.Preston@mailinator.com", :birthday => "1982/4/16 00:00:00" -Person.create :gender => "female", :first_name => "Amber", :middle_initial => "D", :last_name => "Henry", :street_address => "64 Forrest Road", :city => "Coolah", :state => "NSW", :postcode => "2843", :email => "Amber.D.Henry@dodgit.com", :birthday => "1945/6/1 00:00:00" -Person.create :gender => "female", :first_name => "Poppy", :middle_initial => "T", :last_name => "Burke", :street_address => "71 Springhill Bottom Road", :city => "Quamby Bend", :state => "TAS", :postcode => "7292", :email => "Poppy.T.Burke@mailinator.com", :birthday => "1967/2/1 00:00:00" -Person.create :gender => "female", :first_name => "Victoria", :middle_initial => "J", :last_name => "Austin", :street_address => "47 Ageston Road", :city => "Cannon Creek", :state => "QLD", :postcode => "4310", :email => "Victoria.J.Austin@trashymail.com", :birthday => "1984/8/25 00:00:00" -Person.create :gender => "female", :first_name => "Madison", :middle_initial => "W", :last_name => "Norris", :street_address => "47 Cubbine Road", :city => "Baandee", :state => "WA", :postcode => "6412", :email => "Madison.W.Norris@trashymail.com", :birthday => "1974/9/3 00:00:00" -Person.create :gender => "female", :first_name => "Kate", :middle_initial => "T", :last_name => "Houghton", :street_address => "94 Kaesler Road", :city => "Karoonda", :state => "SA", :postcode => "5307", :email => "Kate.T.Houghton@pookmail.com", :birthday => "1966/8/20 00:00:00" -Person.create :gender => "male", :first_name => "Tyler", :middle_initial => "K", :last_name => "Carter", :street_address => "65 Acheron Road", :city => "Sale", :state => "VIC", :postcode => "3850", :email => "Tyler.K.Carter@spambob.com", :birthday => "1985/9/25 00:00:00" -Person.create :gender => "male", :first_name => "Josh", :middle_initial => "E", :last_name => "Gill", :street_address => "42 Walpole Avenue", :city => "Curdievale", :state => "VIC", :postcode => "3268", :email => "Josh.E.Gill@trashymail.com", :birthday => "1975/11/18 00:00:00" -Person.create :gender => "female", :first_name => "Ava", :middle_initial => "L", :last_name => "Kaur", :street_address => "52 Dalgarno Street", :city => "Breeza", :state => "NSW", :postcode => "2381", :email => "Ava.L.Kaur@pookmail.com", :birthday => "1960/3/17 00:00:00" -Person.create :gender => "male", :first_name => "Charlie", :middle_initial => "E", :last_name => "Gibbons", :street_address => "88 Dora Creek", :city => "Tuntable Creek", :state => "NSW", :postcode => "2480", :email => 
"Charlie.E.Gibbons@dodgit.com", :birthday => "1955/10/3 00:00:00" -Person.create :gender => "male", :first_name => "Morgan", :middle_initial => "J", :last_name => "Pickering", :street_address => "68 Sullivan Court", :city => "Boinka", :state => "VIC", :postcode => "3490", :email => "Morgan.J.Pickering@mailinator.com", :birthday => "1942/6/24 00:00:00" -Person.create :gender => "male", :first_name => "Jake", :middle_initial => "D", :last_name => "Buckley", :street_address => "54 Kooljak Rd", :city => "Ambergate", :state => "WA", :postcode => "6280", :email => "Jake.D.Buckley@dodgit.com", :birthday => "1962/1/5 00:00:00" -Person.create :gender => "male", :first_name => "Matthew", :middle_initial => "L", :last_name => "Chamberlain", :street_address => "7 Noalimba Avenue", :city => "Yarrowyck", :state => "NSW", :postcode => "2358", :email => "Matthew.L.Chamberlain@spambob.com", :birthday => "1963/12/28 00:00:00" -Person.create :gender => "male", :first_name => "Sam", :middle_initial => "A", :last_name => "Austin", :street_address => "24 Mildura Street", :city => "Rocherlea", :state => "TAS", :postcode => "7248", :email => "Sam.A.Austin@mailinator.com", :birthday => "1955/5/27 00:00:00" -Person.create :gender => "male", :first_name => "Sam", :middle_initial => "S", :last_name => "Lowe", :street_address => "18 Loris Way", :city => "Morbinning", :state => "WA", :postcode => "6304", :email => "Sam.S.Lowe@spambob.com", :birthday => "1946/9/19 00:00:00" -Person.create :gender => "male", :first_name => "Bailey", :middle_initial => "K", :last_name => "Anderson", :street_address => "49 McLachlan Street", :city => "Toolondo", :state => "VIC", :postcode => "3401", :email => "Bailey.K.Anderson@pookmail.com", :birthday => "1956/6/23 00:00:00" -Person.create :gender => "female", :first_name => "Sofia", :middle_initial => "J", :last_name => "Gough", :street_address => "31 English Street", :city => "Murray Bridge East", :state => "SA", :postcode => "5253", :email => "Sofia.J.Gough@pookmail.com", :birthday => "1974/10/7 00:00:00" -Person.create :gender => "female", :first_name => "Maddison", :middle_initial => "F", :last_name => "Tyler", :street_address => "12 Boobialla Street", :city => "Hay South", :state => "NSW", :postcode => "2711", :email => "Maddison.F.Tyler@trashymail.com", :birthday => "1971/1/27 00:00:00" -Person.create :gender => "female", :first_name => "Aimee", :middle_initial => "J", :last_name => "Pratt", :street_address => "91 Taylor Street", :city => "Picola", :state => "VIC", :postcode => "3639", :email => "Aimee.J.Pratt@dodgit.com", :birthday => "1980/2/14 00:00:00" -Person.create :gender => "female", :first_name => "Cerys", :middle_initial => "J", :last_name => "Hobbs", :street_address => "15 Ridge Road", :city => "Baffle Creek", :state => "QLD", :postcode => "4674", :email => "Cerys.J.Hobbs@dodgit.com", :birthday => "1944/9/1 00:00:00" -Person.create :gender => "male", :first_name => "Freddie", :middle_initial => "Y", :last_name => "Harding", :street_address => "64 Sinclair Street", :city => "Wallaroo Plain", :state => "SA", :postcode => "5556", :email => "Freddie.Y.Harding@pookmail.com", :birthday => "1956/2/2 00:00:00" -Person.create :gender => "female", :first_name => "Naomi", :middle_initial => "R", :last_name => "Fry", :street_address => "9 Roseda-Tinamba Road", :city => "Port Franklin", :state => "VIC", :postcode => "3964", :email => "Naomi.R.Fry@spambob.com", :birthday => "1984/11/15 00:00:00" -Person.create :gender => "male", :first_name => "Freddie", :middle_initial => "S", 
:last_name => "Mitchell", :street_address => "41 Walter Crescent", :city => "Benandarah", :state => "NSW", :postcode => "2536", :email => "Freddie.S.Mitchell@dodgit.com", :birthday => "1980/7/1 00:00:00" -Person.create :gender => "female", :first_name => "Elise", :middle_initial => "N", :last_name => "Browne", :street_address => "59 Kaesler Road", :city => "Koorine", :state => "SA", :postcode => "5279", :email => "Elise.N.Browne@pookmail.com", :birthday => "1982/3/20 00:00:00" -Person.create :gender => "female", :first_name => "Ella", :middle_initial => "C", :last_name => "Hammond", :street_address => "86 Cofton Close", :city => "North Dorrigo", :state => "NSW", :postcode => "2453", :email => "Ella.C.Hammond@spambob.com", :birthday => "1972/9/7 00:00:00" -Person.create :gender => "female", :first_name => "Leah", :middle_initial => "H", :last_name => "Miah", :street_address => "5 Rockhampton Qld", :city => "Plum Tree", :state => "QLD", :postcode => "4702", :email => "Leah.H.Miah@dodgit.com", :birthday => "1985/4/27 00:00:00" -Person.create :gender => "male", :first_name => "Daniel", :middle_initial => "R", :last_name => "Pratt", :street_address => "28 Sunraysia Road", :city => "Addington", :state => "VIC", :postcode => "3352", :email => "Daniel.R.Pratt@trashymail.com", :birthday => "1952/1/23 00:00:00" -Person.create :gender => "female", :first_name => "Ruby", :middle_initial => "J", :last_name => "Conway", :street_address => "50 Oriana Street", :city => "Lake Munmorah", :state => "NSW", :postcode => "2259", :email => "Ruby.J.Conway@trashymail.com", :birthday => "1963/11/17 00:00:00" -Person.create :gender => "male", :first_name => "Ellis", :middle_initial => "E", :last_name => "Day", :street_address => "90 Boobialla Street", :city => "Carrathool", :state => "NSW", :postcode => "2711", :email => "Ellis.E.Day@pookmail.com", :birthday => "1947/4/6 00:00:00" -Person.create :gender => "female", :first_name => "Millie", :middle_initial => "A", :last_name => "Hurst", :street_address => "1 Edgewater Close", :city => "Berrara", :state => "NSW", :postcode => "2540", :email => "Millie.A.Hurst@pookmail.com", :birthday => "1948/6/30 00:00:00" -Person.create :gender => "male", :first_name => "Ben", :middle_initial => "A", :last_name => "Dyer", :street_address => "68 Woodwark Crescent", :city => "Erub", :state => "QLD", :postcode => "4875", :email => "Ben.A.Dyer@dodgit.com", :birthday => "1966/7/28 00:00:00" -Person.create :gender => "female", :first_name => "Ellie", :middle_initial => "O", :last_name => "Savage", :street_address => "81 Shadforth Street", :city => "Kerang East", :state => "VIC", :postcode => "3579", :email => "Ellie.O.Savage@trashymail.com", :birthday => "1947/7/10 00:00:00" -Person.create :gender => "male", :first_name => "John", :middle_initial => "C", :last_name => "Blackburn", :street_address => "86 Hebbard Street", :city => "Dandenong North", :state => "VIC", :postcode => "3175", :email => "John.C.Blackburn@spambob.com", :birthday => "1984/4/22 00:00:00" -Person.create :gender => "female", :first_name => "Ellie", :middle_initial => "H", :last_name => "George", :street_address => "55 Atkinson Way", :city => "Roebourne", :state => "WA", :postcode => "6718", :email => "Ellie.H.George@dodgit.com", :birthday => "1952/5/19 00:00:00" -Person.create :gender => "male", :first_name => "Andrew", :middle_initial => "E", :last_name => "Whittaker", :street_address => "91 Springhill Bottom Road", :city => "Hawley Beach", :state => "TAS", :postcode => "7307", :email => 
"Andrew.E.Whittaker@mailinator.com", :birthday => "1960/8/7 00:00:00" -Person.create :gender => "male", :first_name => "Toby", :middle_initial => "C", :last_name => "O'ullivan", :street_address => "98 Hill Street", :city => "York Plains", :state => "TAS", :postcode => "7120", :email => "Toby.C.O'ullivan@pookmail.com", :birthday => "1942/5/5 00:00:00" -Person.create :gender => "male", :first_name => "Reece", :middle_initial => "M", :last_name => "Vaughan", :street_address => "73 Foreshore Road", :city => "Stratton", :state => "WA", :postcode => "6056", :email => "Reece.M.Vaughan@pookmail.com", :birthday => "1941/6/2 00:00:00" -Person.create :gender => "female", :first_name => "Maya", :middle_initial => "A", :last_name => "Morley", :street_address => "22 Bayview Road", :city => "Yaninee", :state => "SA", :postcode => "5653", :email => "Maya.A.Morley@spambob.com", :birthday => "1966/1/15 00:00:00" -Person.create :gender => "male", :first_name => "Ethan", :middle_initial => "S", :last_name => "Hicks", :street_address => "53 Norton Street", :city => "Neutral Bay", :state => "NSW", :postcode => "2089", :email => "Ethan.S.Hicks@dodgit.com", :birthday => "1959/3/6 00:00:00" -Person.create :gender => "male", :first_name => "Cameron", :middle_initial => "N", :last_name => "Wood", :street_address => "71 Brown Street", :city => "Osborne Park", :state => "NSW", :postcode => "2066", :email => "Cameron.N.Wood@trashymail.com", :birthday => "1951/3/17 00:00:00" -Person.create :gender => "female", :first_name => "Bethany", :middle_initial => "A", :last_name => "Francis", :street_address => "57 Marloo Street", :city => "Hackney", :state => "SA", :postcode => "5069", :email => "Bethany.A.Francis@trashymail.com", :birthday => "1966/9/19 00:00:00" -Person.create :gender => "male", :first_name => "Alexander", :middle_initial => "C", :last_name => "Watkins", :street_address => "18 Daly Terrace", :city => "Mount Lawley", :state => "WA", :postcode => "6050", :email => "Alexander.C.Watkins@pookmail.com", :birthday => "1958/7/8 00:00:00" -Person.create :gender => "female", :first_name => "Aaliyah", :middle_initial => "R", :last_name => "Hanson", :street_address => "31 Frencham Street", :city => "Rand", :state => "NSW", :postcode => "2642", :email => "Aaliyah.R.Hanson@trashymail.com", :birthday => "1974/10/28 00:00:00" -Person.create :gender => "female", :first_name => "Erin", :middle_initial => "G", :last_name => "Lees", :street_address => "50 Carolina Park Road", :city => "Berkeley Vale", :state => "NSW", :postcode => "2261", :email => "Erin.G.Lees@trashymail.com", :birthday => "1968/10/26 00:00:00" -Person.create :gender => "female", :first_name => "Libby", :middle_initial => "F", :last_name => "Birch", :street_address => "63 Ageston Road", :city => "Anthony", :state => "QLD", :postcode => "4310", :email => "Libby.F.Birch@spambob.com", :birthday => "1973/1/22 00:00:00" -Person.create :gender => "female", :first_name => "Amelia", :middle_initial => "E", :last_name => "Hancock", :street_address => "63 Campbells River Road", :city => "Pine Grove", :state => "NSW", :postcode => "2829", :email => "Amelia.E.Hancock@spambob.com", :birthday => "1961/2/24 00:00:00" -Person.create :gender => "male", :first_name => "Harrison", :middle_initial => "L", :last_name => "Newman", :street_address => "6 Faulkner Street", :city => "Wards Mistake", :state => "NSW", :postcode => "2350", :email => "Harrison.L.Newman@spambob.com", :birthday => "1969/11/2 00:00:00" -Person.create :gender => "male", :first_name => "Declan", :middle_initial 
=> "C", :last_name => "Whitehead", :street_address => "39 Blairgowrie Avenue", :city => "Ironmungy", :state => "NSW", :postcode => "2630", :email => "Declan.C.Whitehead@spambob.com", :birthday => "1971/1/2 00:00:00" -Person.create :gender => "male", :first_name => "Sebastian", :middle_initial => "T", :last_name => "Scott", :street_address => "98 Berambing Crescent", :city => "Stanhope Gardens", :state => "NSW", :postcode => "2768", :email => "Sebastian.T.Scott@mailinator.com", :birthday => "1962/3/14 00:00:00" -Person.create :gender => "male", :first_name => "Taylor", :middle_initial => "T", :last_name => "Morley", :street_address => "70 Round Drive", :city => "Eleebana", :state => "NSW", :postcode => "2282", :email => "Taylor.T.Morley@dodgit.com", :birthday => "1951/7/31 00:00:00" -Person.create :gender => "male", :first_name => "Sam", :middle_initial => "A", :last_name => "Lucas", :street_address => "58 Spencer Street", :city => "Cooloola Cove", :state => "QLD", :postcode => "4580", :email => "Sam.A.Lucas@spambob.com", :birthday => "1953/1/15 00:00:00" -Person.create :gender => "female", :first_name => "Grace", :middle_initial => "D", :last_name => "Hope", :street_address => "16 Wallum Court", :city => "Dunbible", :state => "NSW", :postcode => "2484", :email => "Grace.D.Hope@pookmail.com", :birthday => "1957/8/19 00:00:00" -Person.create :gender => "male", :first_name => "Aidan", :middle_initial => "M", :last_name => "Savage", :street_address => "64 Burnley Street", :city => "Willunga Hill", :state => "SA", :postcode => "5172", :email => "Aidan.M.Savage@trashymail.com", :birthday => "1946/3/3 00:00:00" -Person.create :gender => "female", :first_name => "Georgina", :middle_initial => "L", :last_name => "Macdonald", :street_address => "94 Norton Street", :city => "Bar Point", :state => "NSW", :postcode => "2083", :email => "Georgina.L.Macdonald@mailinator.com", :birthday => "1954/7/16 00:00:00" -Person.create :gender => "male", :first_name => "Luca", :middle_initial => "N", :last_name => "Lane", :street_address => "36 Berambing Crescent", :city => "Hawkesbury Heights", :state => "NSW", :postcode => "2777", :email => "Luca.N.Lane@trashymail.com", :birthday => "1970/8/3 00:00:00" -Person.create :gender => "female", :first_name => "Amy", :middle_initial => "D", :last_name => "Birch", :street_address => "9 Kerma Crescent", :city => "Clarence", :state => "NSW", :postcode => "2790", :email => "Amy.D.Birch@mailinator.com", :birthday => "1967/6/26 00:00:00" -Person.create :gender => "male", :first_name => "Riley", :middle_initial => "O", :last_name => "Cooper", :street_address => "22 Jacolite Street", :city => "Upper Swan", :state => "WA", :postcode => "6069", :email => "Riley.O.Cooper@spambob.com", :birthday => "1954/8/10 00:00:00" -Person.create :gender => "male", :first_name => "Samuel", :middle_initial => "A", :last_name => "Marsh", :street_address => "30 Shamrock Avenue", :city => "Mossy Point", :state => "NSW", :postcode => "2537", :email => "Samuel.A.Marsh@spambob.com", :birthday => "1985/1/7 00:00:00" -Person.create :gender => "female", :first_name => "Brooke", :middle_initial => "R", :last_name => "Wong", :street_address => "34 Davenport Street", :city => "Coolumbooka", :state => "NSW", :postcode => "2632", :email => "Brooke.R.Wong@spambob.com", :birthday => "1941/5/13 00:00:00" -Person.create :gender => "female", :first_name => "Mollie", :middle_initial => "J", :last_name => "Heath", :street_address => "56 Seninis Road", :city => "Idalia", :state => "QLD", :postcode => "4811", :email => 
"Mollie.J.Heath@spambob.com", :birthday => "1977/10/18 00:00:00" -Person.create :gender => "male", :first_name => "Harrison", :middle_initial => "T", :last_name => "Richards", :street_address => "13 Main Street", :city => "Devlins Pound", :state => "SA", :postcode => "5330", :email => "Harrison.T.Richards@pookmail.com", :birthday => "1965/10/31 00:00:00" -Person.create :gender => "male", :first_name => "Samuel", :middle_initial => "J", :last_name => "Glover", :street_address => "37 Gaggin Street", :city => "Tarro", :state => "NSW", :postcode => "2322", :email => "Samuel.J.Glover@pookmail.com", :birthday => "1971/1/18 00:00:00" -Person.create :gender => "male", :first_name => "Jacob", :middle_initial => "M", :last_name => "Lord", :street_address => "89 Correa Place", :city => "Pinelands", :state => "NT", :postcode => "0829", :email => "Jacob.M.Lord@trashymail.com", :birthday => "1956/7/1 00:00:00" -Person.create :gender => "female", :first_name => "Charlotte", :middle_initial => "J", :last_name => "Thomson", :street_address => "19 Main Street", :city => "Schell Well", :state => "SA", :postcode => "5311", :email => "Charlotte.J.Thomson@spambob.com", :birthday => "1959/10/21 00:00:00" -Person.create :gender => "male", :first_name => "Kai", :middle_initial => "E", :last_name => "Rowley", :street_address => "86 Shell Road", :city => "Lavers Hill", :state => "VIC", :postcode => "3238", :email => "Kai.E.Rowley@pookmail.com", :birthday => "1961/11/6 00:00:00" -Person.create :gender => "male", :first_name => "Muhammad", :middle_initial => "P", :last_name => "Simpson", :street_address => "40 Porana Place", :city => "East Damboring", :state => "WA", :postcode => "6608", :email => "Muhammad.P.Simpson@dodgit.com", :birthday => "1982/4/6 00:00:00" -Person.create :gender => "male", :first_name => "Sebastian", :middle_initial => "A", :last_name => "Murray", :street_address => "82 Warren Avenue", :city => "Bonnells Bay", :state => "NSW", :postcode => "2264", :email => "Sebastian.A.Murray@mailinator.com", :birthday => "1954/11/4 00:00:00" -Person.create :gender => "female", :first_name => "Aaliyah", :middle_initial => "A", :last_name => "White", :street_address => "86 Wynyard Street", :city => "Jones Bridge", :state => "NSW", :postcode => "2720", :email => "Aaliyah.A.White@pookmail.com", :birthday => "1944/12/12 00:00:00" -Person.create :gender => "female", :first_name => "Faith", :middle_initial => "J", :last_name => "Ingram", :street_address => "48 Sale-Heyfield Road", :city => "Jeetho", :state => "VIC", :postcode => "3945", :email => "Faith.J.Ingram@dodgit.com", :birthday => "1953/5/27 00:00:00" -Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "B", :last_name => "Peters", :street_address => "34 Henry Street", :city => "Whittington", :state => "VIC", :postcode => "3219", :email => "Tegan.B.Peters@mailinator.com", :birthday => "1961/4/17 00:00:00" -Person.create :gender => "female", :first_name => "Maddison", :middle_initial => "J", :last_name => "Doherty", :street_address => "28 Woerdens Road", :city => "Mangoola", :state => "NSW", :postcode => "2328", :email => "Maddison.J.Doherty@dodgit.com", :birthday => "1984/1/12 00:00:00" -Person.create :gender => "male", :first_name => "Lewis", :middle_initial => "N", :last_name => "Shepherd", :street_address => "97 Corio Street", :city => "Nalangil", :state => "VIC", :postcode => "3249", :email => "Lewis.N.Shepherd@dodgit.com", :birthday => "1950/1/4 00:00:00" -Person.create :gender => "female", :first_name => "Sophie", 
:middle_initial => "C", :last_name => "Baker", :street_address => "92 Dossiter Street", :city => "Levendale", :state => "TAS", :postcode => "7120", :email => "Sophie.C.Baker@dodgit.com", :birthday => "1957/10/8 00:00:00" -Person.create :gender => "male", :first_name => "Patrick", :middle_initial => "M", :last_name => "Ahmed", :street_address => "65 Link Road", :city => "Legerwood", :state => "TAS", :postcode => "7263", :email => "Patrick.M.Ahmed@trashymail.com", :birthday => "1978/8/20 00:00:00" -Person.create :gender => "male", :first_name => "Scott", :middle_initial => "L", :last_name => "Kennedy", :street_address => "97 Grandis Road", :city => "Rainbow Reach", :state => "NSW", :postcode => "2440", :email => "Scott.L.Kennedy@pookmail.com", :birthday => "1948/3/3 00:00:00" -Person.create :gender => "female", :first_name => "Aimee", :middle_initial => "J", :last_name => "Reeves", :street_address => "86 Atkinson Way", :city => "Pardoo", :state => "WA", :postcode => "6721", :email => "Aimee.J.Reeves@dodgit.com", :birthday => "1949/8/29 00:00:00" -Person.create :gender => "male", :first_name => "Evan", :middle_initial => "B", :last_name => "Woods", :street_address => "97 Cambridge Street", :city => "Glossodia", :state => "NSW", :postcode => "2756", :email => "Evan.B.Woods@mailinator.com", :birthday => "1966/4/2 00:00:00" -Person.create :gender => "female", :first_name => "Jennifer", :middle_initial => "J", :last_name => "Baldwin", :street_address => "35 Thule Drive", :city => "Truro", :state => "SA", :postcode => "5356", :email => "Jennifer.J.Baldwin@trashymail.com", :birthday => "1972/7/27 00:00:00" -Person.create :gender => "male", :first_name => "Louis", :middle_initial => "O", :last_name => "Holloway", :street_address => "25 Patton Street", :city => "Kooyong", :state => "VIC", :postcode => "3144", :email => "Louis.O.Holloway@mailinator.com", :birthday => "1948/4/11 00:00:00" -Person.create :gender => "male", :first_name => "Tom", :middle_initial => "S", :last_name => "Lane", :street_address => "88 Carba Road", :city => "Mount Gambier", :state => "SA", :postcode => "5291", :email => "Tom.S.Lane@dodgit.com", :birthday => "1969/12/8 00:00:00" -Person.create :gender => "female", :first_name => "Elise", :middle_initial => "R", :last_name => "Spencer", :street_address => "48 Carlisle Street", :city => "Longwood", :state => "VIC", :postcode => "3665", :email => "Elise.R.Spencer@trashymail.com", :birthday => "1965/3/9 00:00:00" -Person.create :gender => "female", :first_name => "Holly", :middle_initial => "D", :last_name => "Fisher", :street_address => "48 Mandible Street", :city => "Parkside", :state => "QLD", :postcode => "4825", :email => "Holly.D.Fisher@spambob.com", :birthday => "1982/2/6 00:00:00" -Person.create :gender => "female", :first_name => "Megan", :middle_initial => "D", :last_name => "Wilkins", :street_address => "89 Gaggin Street", :city => "Morpeth", :state => "NSW", :postcode => "2321", :email => "Megan.D.Wilkins@dodgit.com", :birthday => "1980/1/2 00:00:00" -Person.create :gender => "male", :first_name => "Leon", :middle_initial => "S", :last_name => "Dickinson", :street_address => "87 Foreshore Road", :city => "Herne Hill", :state => "WA", :postcode => "6056", :email => "Leon.S.Dickinson@trashymail.com", :birthday => "1962/4/19 00:00:00" -Person.create :gender => "female", :first_name => "Evie", :middle_initial => "H", :last_name => "Chamberlain", :street_address => "22 Maritime Avenue", :city => "Gnarabup", :state => "WA", :postcode => "6285", :email => 
"Evie.H.Chamberlain@spambob.com", :birthday => "1976/7/13 00:00:00" -Person.create :gender => "male", :first_name => "Oliver", :middle_initial => "H", :last_name => "Sharp", :street_address => "15 Acheron Road", :city => "Willung South", :state => "VIC", :postcode => "3844", :email => "Oliver.H.Sharp@spambob.com", :birthday => "1968/7/16 00:00:00" -Person.create :gender => "female", :first_name => "Isabella", :middle_initial => "J", :last_name => "Porter", :street_address => "65 Sale-Heyfield Road", :city => "Koorooman", :state => "VIC", :postcode => "3953", :email => "Isabella.J.Porter@dodgit.com", :birthday => "1972/8/7 00:00:00" -Person.create :gender => "male", :first_name => "Max", :middle_initial => "P", :last_name => "Myers", :street_address => "67 Ugoa Street", :city => "South West Rocks", :state => "NSW", :postcode => "2431", :email => "Max.P.Myers@spambob.com", :birthday => "1964/11/28 00:00:00" -Person.create :gender => "male", :first_name => "David", :middle_initial => "L", :last_name => "Ashton", :street_address => "72 Friar John Way", :city => "Safety Bay", :state => "WA", :postcode => "6169", :email => "David.L.Ashton@dodgit.com", :birthday => "1968/12/14 00:00:00" -Person.create :gender => "female", :first_name => "Morgan", :middle_initial => "S", :last_name => "Barton", :street_address => "75 Walters Street", :city => "Baddaginnie", :state => "VIC", :postcode => "3670", :email => "Morgan.S.Barton@mailinator.com", :birthday => "1946/1/11 00:00:00" -Person.create :gender => "female", :first_name => "Caitlin", :middle_initial => "T", :last_name => "Perkins", :street_address => "22 Wharf St", :city => "Glenworth Valley", :state => "NSW", :postcode => "2250", :email => "Caitlin.T.Perkins@spambob.com", :birthday => "1960/5/7 00:00:00" -Person.create :gender => "male", :first_name => "Matthew", :middle_initial => "K", :last_name => "Bell", :street_address => "13 Muscat Street", :city => "Beacon", :state => "WA", :postcode => "6472", :email => "Matthew.K.Bell@dodgit.com", :birthday => "1969/9/9 00:00:00" -Person.create :gender => "female", :first_name => "Katherine", :middle_initial => "F", :last_name => "Browne", :street_address => "73 Chatsworth Drive", :city => "Cloverdale", :state => "WA", :postcode => "6105", :email => "Katherine.F.Browne@mailinator.com", :birthday => "1948/12/22 00:00:00" -Person.create :gender => "female", :first_name => "Rachel", :middle_initial => "A", :last_name => "Freeman", :street_address => "1 Old Tenterfield Road", :city => "Simpkins Creek", :state => "NSW", :postcode => "2469", :email => "Rachel.A.Freeman@mailinator.com", :birthday => "1946/10/31 00:00:00" -Person.create :gender => "male", :first_name => "Jacob", :middle_initial => "I", :last_name => "Lewis", :street_address => "31 Elizabeth Street", :city => "Marys Creek", :state => "QLD", :postcode => "4570", :email => "Jacob.I.Lewis@spambob.com", :birthday => "1961/11/30 00:00:00" -Person.create :gender => "male", :first_name => "Leo", :middle_initial => "B", :last_name => "Tomlinson", :street_address => "27 Daly Terrace", :city => "Bassendean Dc", :state => "WA", :postcode => "6054", :email => "Leo.B.Tomlinson@trashymail.com", :birthday => "1975/10/31 00:00:00" -Person.create :gender => "female", :first_name => "Emily", :middle_initial => "J", :last_name => "Wallis", :street_address => "48 Eungella Road", :city => "Bogie", :state => "QLD", :postcode => "4805", :email => "Emily.J.Wallis@mailinator.com", :birthday => "1968/3/6 00:00:00" -Person.create :gender => "male", :first_name => "Hayden", 
:middle_initial => "P", :last_name => "Preston", :street_address => "52 Punchs Creek Road", :city => "Billa Billa", :state => "QLD", :postcode => "4390", :email => "Hayden.P.Preston@mailinator.com", :birthday => "1942/11/14 00:00:00" -Person.create :gender => "male", :first_name => "Harvey", :middle_initial => "J", :last_name => "Preston", :street_address => "10 Bette McNee Street", :city => "Wetuppa", :state => "NSW", :postcode => "2734", :email => "Harvey.J.Preston@trashymail.com", :birthday => "1944/8/25 00:00:00" -Person.create :gender => "male", :first_name => "Peter", :middle_initial => "Z", :last_name => "Gibson", :street_address => "14 Feather Street", :city => "Samford Valley", :state => "QLD", :postcode => "4520", :email => "Peter.Z.Gibson@mailinator.com", :birthday => "1968/1/29 00:00:00" -Person.create :gender => "female", :first_name => "Naomi", :middle_initial => "D", :last_name => "Morris", :street_address => "66 Border Drive", :city => "Gelston Park", :state => "NSW", :postcode => "2650", :email => "Naomi.D.Morris@pookmail.com", :birthday => "1976/3/8 00:00:00" -Person.create :gender => "female", :first_name => "Isobel", :middle_initial => "C", :last_name => "Hale", :street_address => "4 Cherokee Road", :city => "Shepherds Flat", :state => "VIC", :postcode => "3461", :email => "Isobel.C.Hale@spambob.com", :birthday => "1975/4/9 00:00:00" -Person.create :gender => "male", :first_name => "Robert", :middle_initial => "L", :last_name => "Long", :street_address => "86 Moruya Road", :city => "Marlowe", :state => "NSW", :postcode => "2622", :email => "Robert.L.Long@dodgit.com", :birthday => "1966/6/17 00:00:00" -Person.create :gender => "female", :first_name => "Laura", :middle_initial => "B", :last_name => "Campbell", :street_address => "59 Lowe Street", :city => "Warkon", :state => "QLD", :postcode => "4417", :email => "Laura.B.Campbell@pookmail.com", :birthday => "1945/8/26 00:00:00" -Person.create :gender => "female", :first_name => "Madeleine", :middle_initial => "Z", :last_name => "Young", :street_address => "86 Frencham Street", :city => "Geehi", :state => "NSW", :postcode => "2642", :email => "Madeleine.Z.Young@mailinator.com", :birthday => "1955/8/2 00:00:00" -Person.create :gender => "male", :first_name => "Lucas", :middle_initial => "B", :last_name => "Walton", :street_address => "16 Paradise Falls Road", :city => "Carboor", :state => "VIC", :postcode => "3678", :email => "Lucas.B.Walton@mailinator.com", :birthday => "1960/5/10 00:00:00" -Person.create :gender => "female", :first_name => "Rebecca", :middle_initial => "J", :last_name => "Dawson", :street_address => "31 Monteagle Road", :city => "Kippax", :state => "ACT", :postcode => "2615", :email => "Rebecca.J.Dawson@pookmail.com", :birthday => "1942/8/6 00:00:00" -Person.create :gender => "female", :first_name => "Eva", :middle_initial => "C", :last_name => "Ryan", :street_address => "63 Monteagle Road", :city => "Stirling", :state => "ACT", :postcode => "2611", :email => "Eva.C.Ryan@mailinator.com", :birthday => "1967/1/19 00:00:00" -Person.create :gender => "female", :first_name => "Harriet", :middle_initial => "A", :last_name => "Kaur", :street_address => "29 Yangan Drive", :city => "Colly Blue", :state => "NSW", :postcode => "2343", :email => "Harriet.A.Kaur@dodgit.com", :birthday => "1984/11/29 00:00:00" -Person.create :gender => "female", :first_name => "Sarah", :middle_initial => "Z", :last_name => "Conway", :street_address => "77 Shirley Street", :city => "Bahrs Scrub", :state => "QLD", :postcode => "4207", 
:email => "Sarah.Z.Conway@mailinator.com", :birthday => "1946/9/17 00:00:00" -Person.create :gender => "male", :first_name => "Freddie", :middle_initial => "E", :last_name => "Howells", :street_address => "21 McDowall Street", :city => "Highworth", :state => "QLD", :postcode => "4560", :email => "Freddie.E.Howells@spambob.com", :birthday => "1971/10/6 00:00:00" -Person.create :gender => "female", :first_name => "Ava", :middle_initial => "E", :last_name => "Perkins", :street_address => "30 Avondale Drive", :city => "Albion Park Rail", :state => "NSW", :postcode => "2527", :email => "Ava.E.Perkins@dodgit.com", :birthday => "1956/1/25 00:00:00" -Person.create :gender => "male", :first_name => "Matthew", :middle_initial => "K", :last_name => "Gallagher", :street_address => "44 Kogil Street", :city => "Warialda", :state => "NSW", :postcode => "2402", :email => "Matthew.K.Gallagher@dodgit.com", :birthday => "1964/6/28 00:00:00" -Person.create :gender => "male", :first_name => "Jude", :middle_initial => "A", :last_name => "Sharpe", :street_address => "6 Crofts Road", :city => "Wulgulmerang East", :state => "VIC", :postcode => "3885", :email => "Jude.A.Sharpe@trashymail.com", :birthday => "1955/1/10 00:00:00" -Person.create :gender => "female", :first_name => "Gracie", :middle_initial => "N", :last_name => "Flynn", :street_address => "31 Jacabina Court", :city => "Coalcliff", :state => "NSW", :postcode => "2508", :email => "Gracie.N.Flynn@pookmail.com", :birthday => "1966/12/23 00:00:00" -Person.create :gender => "male", :first_name => "Kai", :middle_initial => "I", :last_name => "Richardson", :street_address => "97 Ferny Avenue", :city => "Elliott Heads", :state => "QLD", :postcode => "4670", :email => "Kai.I.Richardson@pookmail.com", :birthday => "1963/1/28 00:00:00" -Person.create :gender => "male", :first_name => "Sebastian", :middle_initial => "L", :last_name => "Khan", :street_address => "80 Ulomogo Street", :city => "Mugga Hill", :state => "NSW", :postcode => "2830", :email => "Sebastian.L.Khan@dodgit.com", :birthday => "1940/1/21 00:00:00" -Person.create :gender => "male", :first_name => "Louie", :middle_initial => "G", :last_name => "Parkin", :street_address => "70 Tooraweenah Road", :city => "Jemalong", :state => "NSW", :postcode => "2871", :email => "Louie.G.Parkin@spambob.com", :birthday => "1946/5/15 00:00:00" -Person.create :gender => "male", :first_name => "Bailey", :middle_initial => "E", :last_name => "Barton", :street_address => "27 Cambridge Street", :city => "Glossodia", :state => "NSW", :postcode => "2756", :email => "Bailey.E.Barton@pookmail.com", :birthday => "1983/9/19 00:00:00" -Person.create :gender => "male", :first_name => "Charles", :middle_initial => "L", :last_name => "Curtis", :street_address => "15 Wharf St", :city => "East Gosford", :state => "NSW", :postcode => "2250", :email => "Charles.L.Curtis@spambob.com", :birthday => "1971/7/13 00:00:00" -Person.create :gender => "female", :first_name => "Lucy", :middle_initial => "G", :last_name => "Carey", :street_address => "89 Myrtle Street", :city => "Youarang", :state => "VIC", :postcode => "3728", :email => "Lucy.G.Carey@trashymail.com", :birthday => "1945/1/10 00:00:00" -Person.create :gender => "female", :first_name => "Samantha", :middle_initial => "K", :last_name => "Palmer", :street_address => "61 Henley Beach Road", :city => "Happy Valley", :state => "SA", :postcode => "5159", :email => "Samantha.K.Palmer@trashymail.com", :birthday => "1952/7/11 00:00:00" -Person.create :gender => "female", :first_name => 
"Maisie", :middle_initial => "H", :last_name => "Gibbs", :street_address => "20 Horsington Street", :city => "Mitcham", :state => "VIC", :postcode => "3132", :email => "Maisie.H.Gibbs@trashymail.com", :birthday => "1944/8/18 00:00:00" -Person.create :gender => "female", :first_name => "Chelsea", :middle_initial => "J", :last_name => "Patterson", :street_address => "86 Noalimba Avenue", :city => "Purlewaugh", :state => "NSW", :postcode => "2357", :email => "Chelsea.J.Patterson@mailinator.com", :birthday => "1955/1/5 00:00:00" -Person.create :gender => "female", :first_name => "Katie", :middle_initial => "L", :last_name => "Storey", :street_address => "6 Sunset Drive", :city => "Mexico", :state => "QLD", :postcode => "4728", :email => "Katie.L.Storey@pookmail.com", :birthday => "1943/4/21 00:00:00" -Person.create :gender => "male", :first_name => "Sam", :middle_initial => "F", :last_name => "Hunt", :street_address => "77 Ross Street", :city => "Numinbah Valley", :state => "QLD", :postcode => "4211", :email => "Sam.F.Hunt@pookmail.com", :birthday => "1970/3/19 00:00:00" -Person.create :gender => "female", :first_name => "Amelie", :middle_initial => "W", :last_name => "Brady", :street_address => "80 Jones Road", :city => "Doolandella", :state => "QLD", :postcode => "4077", :email => "Amelie.W.Brady@trashymail.com", :birthday => "1953/1/5 00:00:00" -Person.create :gender => "male", :first_name => "Ethan", :middle_initial => "I", :last_name => "Hussain", :street_address => "49 Davis Street", :city => "Toowong", :state => "QLD", :postcode => "4066", :email => "Ethan.I.Hussain@spambob.com", :birthday => "1946/9/25 00:00:00" -Person.create :gender => "female", :first_name => "Evie", :middle_initial => "M", :last_name => "Dickinson", :street_address => "64 Yulara Drive", :city => "Wanarn", :state => "NT", :postcode => "0872", :email => "Evie.M.Dickinson@mailinator.com", :birthday => "1950/12/6 00:00:00" -Person.create :gender => "female", :first_name => "Naomi", :middle_initial => "R", :last_name => "Owen", :street_address => "46 Prince Street", :city => "Mcphersons Crossing", :state => "NSW", :postcode => "2460", :email => "Naomi.R.Owen@trashymail.com", :birthday => "1953/12/31 00:00:00" -Person.create :gender => "female", :first_name => "Emma", :middle_initial => "J", :last_name => "Barlow", :street_address => "82 Patton Street", :city => "Armadale", :state => "VIC", :postcode => "3143", :email => "Emma.J.Barlow@mailinator.com", :birthday => "1967/3/24 00:00:00" -Person.create :gender => "female", :first_name => "Sophia", :middle_initial => "J", :last_name => "Lloyd", :street_address => "61 Taylor Street", :city => "Kotupna", :state => "VIC", :postcode => "3638", :email => "Sophia.J.Lloyd@pookmail.com", :birthday => "1951/2/25 00:00:00" -Person.create :gender => "female", :first_name => "Summer", :middle_initial => "Z", :last_name => "Norris", :street_address => "15 Romawi Road", :city => "Clifton Creek", :state => "VIC", :postcode => "3875", :email => "Summer.Z.Norris@mailinator.com", :birthday => "1978/4/16 00:00:00" -Person.create :gender => "female", :first_name => "Poppy", :middle_initial => "A", :last_name => "Hancock", :street_address => "56 Spencer Street", :city => "Inskip", :state => "QLD", :postcode => "4581", :email => "Poppy.A.Hancock@spambob.com", :birthday => "1983/1/10 00:00:00" -Person.create :gender => "male", :first_name => "Leo", :middle_initial => "M", :last_name => "Russell", :street_address => "28 Boulter Close", :city => "Bamboo Creek", :state => "QLD", :postcode => "4860", 
:email => "Leo.M.Russell@pookmail.com", :birthday => "1951/11/10 00:00:00" -Person.create :gender => "female", :first_name => "Lucy", :middle_initial => "S", :last_name => "Connolly", :street_address => "40 Harris Street", :city => "Howqua", :state => "VIC", :postcode => "3723", :email => "Lucy.S.Connolly@pookmail.com", :birthday => "1967/4/18 00:00:00" -Person.create :gender => "female", :first_name => "Lola", :middle_initial => "J", :last_name => "Ryan", :street_address => "55 Flax Court", :city => "Kergunyah", :state => "VIC", :postcode => "3691", :email => "Lola.J.Ryan@trashymail.com", :birthday => "1949/5/19 00:00:00" -Person.create :gender => "female", :first_name => "Maisie", :middle_initial => "H", :last_name => "Daly", :street_address => "79 Woolnough Road", :city => "Burnside", :state => "SA", :postcode => "5066", :email => "Maisie.H.Daly@trashymail.com", :birthday => "1979/6/22 00:00:00" -Person.create :gender => "female", :first_name => "Libby", :middle_initial => "N", :last_name => "Kirk", :street_address => "20 Shell Road", :city => "Petticoat Creek", :state => "VIC", :postcode => "3233", :email => "Libby.N.Kirk@spambob.com", :birthday => "1948/4/1 00:00:00" -Person.create :gender => "female", :first_name => "Matilda", :middle_initial => "C", :last_name => "Ali", :street_address => "97 Normans Road", :city => "Tarrenlea", :state => "VIC", :postcode => "3315", :email => "Matilda.C.Ali@mailinator.com", :birthday => "1948/1/1 00:00:00" -Person.create :gender => "male", :first_name => "Finley", :middle_initial => "A", :last_name => "Wong", :street_address => "28 Marloo Street", :city => "Glynde", :state => "SA", :postcode => "5070", :email => "Finley.A.Wong@dodgit.com", :birthday => "1957/1/13 00:00:00" -Person.create :gender => "female", :first_name => "Isabelle", :middle_initial => "T", :last_name => "McCarthy", :street_address => "48 Marx Hill Road", :city => "Bielsdown Hills", :state => "NSW", :postcode => "2454", :email => "Isabelle.T.McCarthy@spambob.com", :birthday => "1946/5/5 00:00:00" -Person.create :gender => "male", :first_name => "Joshua", :middle_initial => "M", :last_name => "Miah", :street_address => "83 Cassinia Street", :city => "Four Corners", :state => "NSW", :postcode => "2716", :email => "Joshua.M.Miah@trashymail.com", :birthday => "1970/5/14 00:00:00" -Person.create :gender => "female", :first_name => "Holly", :middle_initial => "B", :last_name => "Anderson", :street_address => "68 Sale Street", :city => "Mullion Creek", :state => "NSW", :postcode => "2800", :email => "Holly.B.Anderson@spambob.com", :birthday => "1962/9/16 00:00:00" -Person.create :gender => "male", :first_name => "Zak", :middle_initial => "G", :last_name => "Moran", :street_address => "39 Taylor Street", :city => "Shepparton North", :state => "VIC", :postcode => "3631", :email => "Zak.G.Moran@mailinator.com", :birthday => "1975/7/12 00:00:00" -Person.create :gender => "male", :first_name => "Louis", :middle_initial => "E", :last_name => "Pickering", :street_address => "21 Farrar Parade", :city => "Coorow", :state => "WA", :postcode => "6515", :email => "Louis.E.Pickering@pookmail.com", :birthday => "1943/9/22 00:00:00" -Person.create :gender => "female", :first_name => "Eve", :middle_initial => "Z", :last_name => "Patterson", :street_address => "74 Walpole Avenue", :city => "Nirranda", :state => "VIC", :postcode => "3268", :email => "Eve.Z.Patterson@mailinator.com", :birthday => "1966/7/23 00:00:00" -Person.create :gender => "female", :first_name => "Rosie", :middle_initial => "H", 
:last_name => "Cooke", :street_address => "20 Romawi Road", :city => "Melwood", :state => "VIC", :postcode => "3875", :email => "Rosie.H.Cooke@mailinator.com", :birthday => "1943/6/9 00:00:00" -Person.create :gender => "female", :first_name => "Alexandra", :middle_initial => "R", :last_name => "Mahmood", :street_address => "53 Carlisle Street", :city => "Moglonemby", :state => "VIC", :postcode => "3666", :email => "Alexandra.R.Mahmood@trashymail.com", :birthday => "1979/6/23 00:00:00" -Person.create :gender => "male", :first_name => "Thomas", :middle_initial => "M", :last_name => "Joyce", :street_address => "44 Larissa Court", :city => "Mildura South", :state => "VIC", :postcode => "3501", :email => "Thomas.M.Joyce@spambob.com", :birthday => "1953/3/22 00:00:00" -Person.create :gender => "female", :first_name => "Lara", :middle_initial => "C", :last_name => "Ferguson", :street_address => "6 Jacabina Court", :city => "Tumbulgum", :state => "NSW", :postcode => "2490", :email => "Lara.C.Ferguson@pookmail.com", :birthday => "1977/11/24 00:00:00" -Person.create :gender => "female", :first_name => "Katherine", :middle_initial => "P", :last_name => "Sykes", :street_address => "40 Rose Street", :city => "Big Pats Creek", :state => "VIC", :postcode => "3799", :email => "Katherine.P.Sykes@mailinator.com", :birthday => "1972/9/27 00:00:00" -Person.create :gender => "male", :first_name => "Hayden", :middle_initial => "V", :last_name => "Todd", :street_address => "75 Carnegie Avenue", :city => "Burma Road", :state => "WA", :postcode => "6532", :email => "Hayden.V.Todd@dodgit.com", :birthday => "1973/6/13 00:00:00" -Person.create :gender => "female", :first_name => "Lilly", :middle_initial => "L", :last_name => "Macdonald", :street_address => "9 Romawi Road", :city => "Merrijig", :state => "VIC", :postcode => "3875", :email => "Lilly.L.Macdonald@pookmail.com", :birthday => "1961/4/23 00:00:00" -Person.create :gender => "female", :first_name => "Amelie", :middle_initial => "S", :last_name => "Stanley", :street_address => "98 McPherson Road", :city => "Thowgla Valley", :state => "VIC", :postcode => "3707", :email => "Amelie.S.Stanley@pookmail.com", :birthday => "1973/11/27 00:00:00" -Person.create :gender => "female", :first_name => "Eve", :middle_initial => "E", :last_name => "Sinclair", :street_address => "13 Little Myers Street", :city => "Long Forest", :state => "VIC", :postcode => "3340", :email => "Eve.E.Sinclair@spambob.com", :birthday => "1965/7/30 00:00:00" -Person.create :gender => "female", :first_name => "Jade", :middle_initial => "L", :last_name => "Davies", :street_address => "78 Kogil Street", :city => "Mungle", :state => "NSW", :postcode => "2408", :email => "Jade.L.Davies@pookmail.com", :birthday => "1972/12/29 00:00:00" -Person.create :gender => "male", :first_name => "Ethan", :middle_initial => "R", :last_name => "Barton", :street_address => "15 Border Drive", :city => "Borambola", :state => "NSW", :postcode => "2650", :email => "Ethan.R.Barton@pookmail.com", :birthday => "1965/4/22 00:00:00" -Person.create :gender => "male", :first_name => "Jake", :middle_initial => "V", :last_name => "Scott", :street_address => "11 Bailey Street", :city => "Toolong", :state => "VIC", :postcode => "3285", :email => "Jake.V.Scott@dodgit.com", :birthday => "1975/1/26 00:00:00" -Person.create :gender => "male", :first_name => "Tom", :middle_initial => "A", :last_name => "Owens", :street_address => "21 Ugoa Street", :city => "Mount George", :state => "NSW", :postcode => "2424", :email => 
"Tom.A.Owens@mailinator.com", :birthday => "1985/5/4 00:00:00" -Person.create :gender => "male", :first_name => "Freddie", :middle_initial => "A", :last_name => "Kemp", :street_address => "87 Kooljak Rd", :city => "Yelverton", :state => "WA", :postcode => "6280", :email => "Freddie.A.Kemp@dodgit.com", :birthday => "1970/7/30 00:00:00" -Person.create :gender => "male", :first_name => "Luke", :middle_initial => "E", :last_name => "Fuller", :street_address => "91 Ronald Crescent", :city => "South Trees", :state => "QLD", :postcode => "4680", :email => "Luke.E.Fuller@mailinator.com", :birthday => "1959/1/10 00:00:00" -Person.create :gender => "female", :first_name => "Tilly", :middle_initial => "T", :last_name => "Bray", :street_address => "32 Noalimba Avenue", :city => "Yarrowyck", :state => "NSW", :postcode => "2358", :email => "Tilly.T.Bray@pookmail.com", :birthday => "1941/10/20 00:00:00" -Person.create :gender => "male", :first_name => "Joel", :middle_initial => "H", :last_name => "Middleton", :street_address => "94 Hereford Avenue", :city => "Sandergrove", :state => "SA", :postcode => "5255", :email => "Joel.H.Middleton@spambob.com", :birthday => "1950/6/30 00:00:00" -Person.create :gender => "male", :first_name => "William", :middle_initial => "J", :last_name => "Boyle", :street_address => "63 Cherokee Road", :city => "Sailors Hill", :state => "VIC", :postcode => "3461", :email => "William.J.Boyle@dodgit.com", :birthday => "1948/12/9 00:00:00" -Person.create :gender => "male", :first_name => "Jonathan", :middle_initial => "S", :last_name => "Field", :street_address => "96 Yangan Drive", :city => "Parraweena", :state => "NSW", :postcode => "2339", :email => "Jonathan.S.Field@spambob.com", :birthday => "1984/4/26 00:00:00" -Person.create :gender => "male", :first_name => "Sam", :middle_initial => "S", :last_name => "Birch", :street_address => "42 Cornish Street", :city => "Travancore", :state => "VIC", :postcode => "3032", :email => "Sam.S.Birch@mailinator.com", :birthday => "1972/8/13 00:00:00" -Person.create :gender => "female", :first_name => "Jennifer", :middle_initial => "L", :last_name => "Nolan", :street_address => "3 Old Gayndah Road", :city => "Pallas Street Maryborough", :state => "QLD", :postcode => "4650", :email => "Jennifer.L.Nolan@spambob.com", :birthday => "1973/7/19 00:00:00" -Person.create :gender => "male", :first_name => "Benjamin", :middle_initial => "H", :last_name => "Gibson", :street_address => "38 Carlisle Street", :city => "Balmattum", :state => "VIC", :postcode => "3666", :email => "Benjamin.H.Gibson@trashymail.com", :birthday => "1960/10/1 00:00:00" -Person.create :gender => "male", :first_name => "Gabriel", :middle_initial => "C", :last_name => "Humphreys", :street_address => "11 Bresnahans Lane", :city => "Medway", :state => "NSW", :postcode => "2577", :email => "Gabriel.C.Humphreys@pookmail.com", :birthday => "1964/3/25 00:00:00" -Person.create :gender => "male", :first_name => "Morgan", :middle_initial => "F", :last_name => "Barton", :street_address => "24 Raglan Street", :city => "Warnung", :state => "QLD", :postcode => "4605", :email => "Morgan.F.Barton@mailinator.com", :birthday => "1985/9/26 00:00:00" -Person.create :gender => "female", :first_name => "Sophia", :middle_initial => "J", :last_name => "Talbot", :street_address => "59 Rimbanda Road", :city => "Yarrowford", :state => "NSW", :postcode => "2370", :email => "Sophia.J.Talbot@mailinator.com", :birthday => "1949/9/18 00:00:00" -Person.create :gender => "male", :first_name => "Gabriel", 
:middle_initial => "M", :last_name => "Finch", :street_address => "60 Banksia Street", :city => "Old Plains", :state => "WA", :postcode => "6569", :email => "Gabriel.M.Finch@mailinator.com", :birthday => "1966/2/13 00:00:00" -Person.create :gender => "male", :first_name => "Dylan", :middle_initial => "G", :last_name => "Riley", :street_address => "91 Campbells River Road", :city => "Combara", :state => "NSW", :postcode => "2829", :email => "Dylan.G.Riley@pookmail.com", :birthday => "1976/3/8 00:00:00" -Person.create :gender => "female", :first_name => "Maddison", :middle_initial => "J", :last_name => "Giles", :street_address => "79 Milbrodale Road", :city => "Obanvale", :state => "NSW", :postcode => "2330", :email => "Maddison.J.Giles@trashymail.com", :birthday => "1940/5/13 00:00:00" -Person.create :gender => "male", :first_name => "Nathan", :middle_initial => "A", :last_name => "Allen", :street_address => "80 McLachlan Street", :city => "Dooen", :state => "VIC", :postcode => "3401", :email => "Nathan.A.Allen@dodgit.com", :birthday => "1976/9/11 00:00:00" -Person.create :gender => "female", :first_name => "Chelsea", :middle_initial => "R", :last_name => "Leonard", :street_address => "2 Burnley Street", :city => "Hackham", :state => "SA", :postcode => "5163", :email => "Chelsea.R.Leonard@mailinator.com", :birthday => "1961/11/1 00:00:00" -Person.create :gender => "female", :first_name => "Francesca", :middle_initial => "J", :last_name => "Cross", :street_address => "72 Chapel Close", :city => "Brinsmead", :state => "QLD", :postcode => "4870", :email => "Francesca.J.Cross@pookmail.com", :birthday => "1943/9/30 00:00:00" -Person.create :gender => "male", :first_name => "Andrew", :middle_initial => "D", :last_name => "Atkins", :street_address => "59 Feather Street", :city => "Jollys Lookout", :state => "QLD", :postcode => "4520", :email => "Andrew.D.Atkins@pookmail.com", :birthday => "1954/4/3 00:00:00" -Person.create :gender => "female", :first_name => "Tegan", :middle_initial => "D", :last_name => "Rowley", :street_address => "37 Isaac Road", :city => "Victoria Valley", :state => "TAS", :postcode => "7140", :email => "Tegan.D.Rowley@pookmail.com", :birthday => "1951/9/9 00:00:00" -Person.create :gender => "female", :first_name => "Molly", :middle_initial => "L", :last_name => "Perry", :street_address => "12 Old Tenterfield Road", :city => "Lower Peacock", :state => "NSW", :postcode => "2469", :email => "Molly.L.Perry@trashymail.com", :birthday => "1970/2/5 00:00:00" -Person.create :gender => "female", :first_name => "Kate", :middle_initial => "K", :last_name => "Stephenson", :street_address => "32 Henley Beach Road", :city => "Cleland", :state => "SA", :postcode => "5152", :email => "Kate.K.Stephenson@pookmail.com", :birthday => "1960/8/9 00:00:00" -Person.create :gender => "female", :first_name => "Natasha", :middle_initial => "B", :last_name => "Short", :street_address => "14 Glen William Road", :city => "Gununa", :state => "QLD", :postcode => "4871", :email => "Natasha.B.Short@trashymail.com", :birthday => "1983/12/10 00:00:00" -Person.create :gender => "female", :first_name => "Madison", :middle_initial => "H", :last_name => "Butler", :street_address => "19 Grandis Road", :city => "Greenhill", :state => "NSW", :postcode => "2440", :email => "Madison.H.Butler@dodgit.com", :birthday => "1960/12/9 00:00:00" -Person.create :gender => "male", :first_name => "Sam", :middle_initial => "S", :last_name => "Bolton", :street_address => "44 Yarra Street", :city => "Newlyn North", :state => "VIC", 
:postcode => "3364", :email => "Sam.S.Bolton@mailinator.com", :birthday => "1974/11/16 00:00:00" -Person.create :gender => "female", :first_name => "Daisy", :middle_initial => "L", :last_name => "Rahman", :street_address => "2 Crofts Road", :city => "Brodribb River", :state => "VIC", :postcode => "3888", :email => "Daisy.L.Rahman@mailinator.com", :birthday => "1946/8/14 00:00:00" -Person.create :gender => "female", :first_name => "Eva", :middle_initial => "H", :last_name => "Lees", :street_address => "8 Bowden Street", :city => "Lilyfield", :state => "NSW", :postcode => "2040", :email => "Eva.H.Lees@dodgit.com", :birthday => "1968/8/9 00:00:00" -Person.create :gender => "female", :first_name => "Holly", :middle_initial => "E", :last_name => "Hart", :street_address => "5 Parkes Road", :city => "Bonner", :state => "ACT", :postcode => "2914", :email => "Holly.E.Hart@pookmail.com", :birthday => "1947/5/5 00:00:00" -Person.create :gender => "female", :first_name => "Charlotte", :middle_initial => "C", :last_name => "Bryan", :street_address => "71 Black Range Road", :city => "Doctor George Mountain", :state => "NSW", :postcode => "2550", :email => "Charlotte.C.Bryan@dodgit.com", :birthday => "1960/12/27 00:00:00" -Person.create :gender => "male", :first_name => "Scott", :middle_initial => "I", :last_name => "Richardson", :street_address => "12 Old Tenterfield Road", :city => "Paddys Flat", :state => "NSW", :postcode => "2469", :email => "Scott.I.Richardson@mailinator.com", :birthday => "1985/8/20 00:00:00" -Person.create :gender => "female", :first_name => "Sophia", :middle_initial => "T", :last_name => "Hicks", :street_address => "94 Whitehaven Crescent", :city => "Kamma", :state => "QLD", :postcode => "4865", :email => "Sophia.T.Hicks@dodgit.com", :birthday => "1953/7/22 00:00:00" -Person.create :gender => "female", :first_name => "Brooke", :middle_initial => "C", :last_name => "Lyons", :street_address => "4 Stillwater Avenue", :city => "Wandana", :state => "WA", :postcode => "6532", :email => "Brooke.C.Lyons@spambob.com", :birthday => "1983/4/7 00:00:00" -Person.create :gender => "female", :first_name => "Lydia", :middle_initial => "N", :last_name => "Simmons", :street_address => "19 Jones Street", :city => "Reynolds Neck", :state => "TAS", :postcode => "7304", :email => "Lydia.N.Simmons@pookmail.com", :birthday => "1972/4/25 00:00:00" -Person.create :gender => "female", :first_name => "Isabella", :middle_initial => "B", :last_name => "Skinner", :street_address => "6 Hart Street", :city => "Parkville", :state => "NSW", :postcode => "2337", :email => "Isabella.B.Skinner@pookmail.com", :birthday => "1965/3/8 00:00:00" diff --git a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_posts.rb b/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_posts.rb deleted file mode 100644 index a583d05..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_posts.rb +++ /dev/null @@ -1,6 +0,0 @@ -ActiveRecord::Base.connection.create_table :posts, :force => true do |t| - t.column :subject, :string, :null => false - t.column :content, :text -end - -Post.create :subject => "Hello World", :content => "Um Text" diff --git a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_thetas.rb b/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_thetas.rb deleted file mode 100644 index 1712599..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/migrations/create_thetas.rb +++ /dev/null @@ -1,16 +0,0 @@ 
-ActiveRecord::Base.connection.create_table :thetas, :force => true do |t| - t.column :name, :string, :null => false - t.column :created_at, :datetime, :null => false - t.column :updated_at, :datetime, :null => false -end - -Theta.create :name => "one" -Theta.create :name => "two" -Theta.create :name => "three" -Theta.create :name => "four" -Theta.create :name => "five" -Theta.create :name => "six" -Theta.create :name => "seven" -Theta.create :name => "eight" -Theta.create :name => "nine" -Theta.create :name => "ten" diff --git a/vendor/plugins/thinking-sphinx/features/support/db/mysql.rb b/vendor/plugins/thinking-sphinx/features/support/db/mysql.rb deleted file mode 100644 index 0e999c9..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/mysql.rb +++ /dev/null @@ -1,4 +0,0 @@ -require 'active_record' -require 'active_record/connection_adapters/mysql_adapter' - -Database = 'mysql' \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/support/db/postgresql.rb b/vendor/plugins/thinking-sphinx/features/support/db/postgresql.rb deleted file mode 100644 index e3f7629..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/db/postgresql.rb +++ /dev/null @@ -1,4 +0,0 @@ -require 'active_record' -require 'active_record/connection_adapters/postgresql_adapter' - -Database = 'postgresql' \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/support/env.rb b/vendor/plugins/thinking-sphinx/features/support/env.rb deleted file mode 100644 index c837d8c..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/env.rb +++ /dev/null @@ -1,6 +0,0 @@ -require 'rubygems' -require 'cucumber' -require 'spec' -require 'fileutils' -require 'ginger' -require 'will_paginate' \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/support/models/alpha.rb b/vendor/plugins/thinking-sphinx/features/support/models/alpha.rb deleted file mode 100644 index 0973c1c..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/alpha.rb +++ /dev/null @@ -1,9 +0,0 @@ -class Alpha < ActiveRecord::Base - define_index do - indexes :name, :sortable => true - - has value, cost, created_at, created_on - - set_property :field_weights => {"name" => 10} - end -end diff --git a/vendor/plugins/thinking-sphinx/features/support/models/animal.rb b/vendor/plugins/thinking-sphinx/features/support/models/animal.rb deleted file mode 100644 index 75f8cc5..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/animal.rb +++ /dev/null @@ -1,5 +0,0 @@ -class Animal < ActiveRecord::Base - define_index do - indexes name, :sortable => true - end -end diff --git a/vendor/plugins/thinking-sphinx/features/support/models/beta.rb b/vendor/plugins/thinking-sphinx/features/support/models/beta.rb deleted file mode 100644 index 096a78c..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/beta.rb +++ /dev/null @@ -1,7 +0,0 @@ -class Beta < ActiveRecord::Base - define_index do - indexes :name, :sortable => true - - set_property :delta => true - end -end diff --git a/vendor/plugins/thinking-sphinx/features/support/models/box.rb b/vendor/plugins/thinking-sphinx/features/support/models/box.rb deleted file mode 100644 index 6a01402..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/box.rb +++ /dev/null @@ -1,7 +0,0 @@ -class Box < ActiveRecord::Base - define_index do - indexes width, :as => :width_field - - has width, length, depth - end -end \ No newline at end of file diff --git 
a/vendor/plugins/thinking-sphinx/features/support/models/cat.rb b/vendor/plugins/thinking-sphinx/features/support/models/cat.rb deleted file mode 100644 index f6cf75a..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/cat.rb +++ /dev/null @@ -1,3 +0,0 @@ -class Cat < Animal - # -end diff --git a/vendor/plugins/thinking-sphinx/features/support/models/comment.rb b/vendor/plugins/thinking-sphinx/features/support/models/comment.rb deleted file mode 100644 index 662b7db..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/comment.rb +++ /dev/null @@ -1,3 +0,0 @@ -class Comment < ActiveRecord::Base - belongs_to :post -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/support/models/delayed_beta.rb b/vendor/plugins/thinking-sphinx/features/support/models/delayed_beta.rb deleted file mode 100644 index ccd9846..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/delayed_beta.rb +++ /dev/null @@ -1,7 +0,0 @@ -class DelayedBeta < ActiveRecord::Base - define_index do - indexes :name, :sortable => true - - set_property :delta => :delayed - end -end diff --git a/vendor/plugins/thinking-sphinx/features/support/models/developer.rb b/vendor/plugins/thinking-sphinx/features/support/models/developer.rb deleted file mode 100644 index c12e9d2..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/developer.rb +++ /dev/null @@ -1,8 +0,0 @@ -class Developer < ActiveRecord::Base - define_index do - indexes country, :facet => true - indexes state, :facet => true - has age, :facet => true - facet city - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/support/models/gamma.rb b/vendor/plugins/thinking-sphinx/features/support/models/gamma.rb deleted file mode 100644 index 5e9d259..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/gamma.rb +++ /dev/null @@ -1,5 +0,0 @@ -class Gamma < ActiveRecord::Base - define_index do - indexes :name, :sortable => true - end -end diff --git a/vendor/plugins/thinking-sphinx/features/support/models/person.rb b/vendor/plugins/thinking-sphinx/features/support/models/person.rb deleted file mode 100644 index 1b81aa5..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/person.rb +++ /dev/null @@ -1,8 +0,0 @@ -class Person < ActiveRecord::Base - define_index do - indexes first_name, last_name, :sortable => true - - has [first_name, middle_initial, last_name], :as => :name_sort - has birthday - end -end diff --git a/vendor/plugins/thinking-sphinx/features/support/models/post.rb b/vendor/plugins/thinking-sphinx/features/support/models/post.rb deleted file mode 100644 index 8cdb78b..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/post.rb +++ /dev/null @@ -1,8 +0,0 @@ -class Post < ActiveRecord::Base - has_many :comments - - define_index do - indexes subject - indexes content - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/features/support/models/theta.rb b/vendor/plugins/thinking-sphinx/features/support/models/theta.rb deleted file mode 100644 index 0ce91d6..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/models/theta.rb +++ /dev/null @@ -1,7 +0,0 @@ -class Theta < ActiveRecord::Base - define_index do - indexes :name, :sortable => true - - set_property :delta => :datetime, :threshold => 1.hour - end -end diff --git a/vendor/plugins/thinking-sphinx/features/support/post_database.rb b/vendor/plugins/thinking-sphinx/features/support/post_database.rb 
deleted file mode 100644 index 51cefb5..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/post_database.rb +++ /dev/null @@ -1,37 +0,0 @@ -$:.unshift File.dirname(__FILE__) + '/../../lib' - -require 'lib/thinking_sphinx' - -%w( tmp/config tmp/log tmp/db/sphinx/development ).each do |path| - FileUtils.mkdir_p "#{Dir.pwd}/#{path}" -end - -Kernel.const_set :RAILS_ROOT, "#{Dir.pwd}/tmp" unless defined?(RAILS_ROOT) - -at_exit do - ThinkingSphinx::Configuration.instance.controller.stop - sleep(1) # Ensure Sphinx has shut down completely - FileUtils.rm_r "#{Dir.pwd}/tmp" -end - -# Add log file -ActiveRecord::Base.logger = Logger.new open("tmp/active_record.log", "a") - -ThinkingSphinx.deltas_enabled = false - -# Load Models -Dir["features/support/models/*.rb"].sort.each do |file| - require file.gsub(/\.rb$/, '') -end - -# Set up database tables and records -Dir["features/support/db/migrations/*.rb"].each do |file| - require file.gsub(/\.rb$/, '') -end - -ThinkingSphinx.deltas_enabled = true -ThinkingSphinx.suppress_delta_output = true - -ThinkingSphinx::Configuration.instance.build -ThinkingSphinx::Configuration.instance.controller.index -ThinkingSphinx::Configuration.instance.controller.start diff --git a/vendor/plugins/thinking-sphinx/features/support/z.rb b/vendor/plugins/thinking-sphinx/features/support/z.rb deleted file mode 100644 index e7ad8b3..0000000 --- a/vendor/plugins/thinking-sphinx/features/support/z.rb +++ /dev/null @@ -1,19 +0,0 @@ -# This file exists because Cucumber likes to auto-load all ruby files -puts <<-MESSAGE -Cucumber 0.1.13 defaults to loading all ruby files within the features folder, -with something approaching reverse-alphabetical order, and preferring the -features/support folder over everything else. This is annoying, because some -files need to be loaded before others (and others perhaps not at all, given -missing dependencies). Hence this place-holder imaginatively named 'z.rb', to -force this message. - -A work-around is to use cucumber profiles. You will find the default profile in -cucumber.yml should serve your needs fine, unless you add new step definitions. -When you do that, you can regenerate the YAML file by running: -rake cucumber_defaults - -And then run specific features as follows is slightly more verbose, but it -works, whereas this doesn't. 
-cucumber -p default features/something.feature -MESSAGE -exit 0 \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/ginger_scenarios.rb b/vendor/plugins/thinking-sphinx/ginger_scenarios.rb deleted file mode 100644 index 811b891..0000000 --- a/vendor/plugins/thinking-sphinx/ginger_scenarios.rb +++ /dev/null @@ -1,24 +0,0 @@ -require 'ginger' - -Ginger.configure do |config| - config.aliases["active_record"] = "activerecord" - config.aliases["active_support"] = "activesupport" - - ar_1_2_6 = Ginger::Scenario.new - ar_1_2_6[/^active_?support$/] = "1.4.4" - ar_1_2_6[/^active_?record$/] = "1.15.6" - - ar_2_0_4 = Ginger::Scenario.new - ar_2_0_4[/^active_?support$/] = "2.0.4" - ar_2_0_4[/^active_?record$/] = "2.0.4" - - ar_2_1_2 = Ginger::Scenario.new - ar_2_1_2[/^active_?support$/] = "2.1.2" - ar_2_1_2[/^active_?record$/] = "2.1.2" - - ar_2_2_0 = Ginger::Scenario.new - ar_2_2_0[/^active_?support$/] = "2.2.0" - ar_2_2_0[/^active_?record$/] = "2.2.0" - - config.scenarios << ar_1_2_6 << ar_2_0_4 << ar_2_1_2 << ar_2_2_0 -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/init.rb b/vendor/plugins/thinking-sphinx/init.rb deleted file mode 100644 index a7e3920..0000000 --- a/vendor/plugins/thinking-sphinx/init.rb +++ /dev/null @@ -1,12 +0,0 @@ -require 'thinking_sphinx' - -if Rails::VERSION::STRING.to_f < 2.1 - ThinkingSphinx::Configuration.instance.load_models -end - -if Rails::VERSION::STRING.to_f > 1.2 - require 'action_controller/dispatcher' - ActionController::Dispatcher.to_prepare :thinking_sphinx do - ThinkingSphinx::Configuration.instance.load_models - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx.rb deleted file mode 100644 index 2b72358..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx.rb +++ /dev/null @@ -1,144 +0,0 @@ -Dir[File.join(File.dirname(__FILE__), '../vendor/*/lib')].each do |path| - $LOAD_PATH.unshift path -end - -require 'active_record' -require 'riddle' -require 'after_commit' - -require 'thinking_sphinx/core/string' -require 'thinking_sphinx/active_record' -require 'thinking_sphinx/association' -require 'thinking_sphinx/attribute' -require 'thinking_sphinx/collection' -require 'thinking_sphinx/configuration' -require 'thinking_sphinx/facet' -require 'thinking_sphinx/facet_collection' -require 'thinking_sphinx/field' -require 'thinking_sphinx/index' -require 'thinking_sphinx/rails_additions' -require 'thinking_sphinx/search' -require 'thinking_sphinx/deltas' - -require 'thinking_sphinx/adapters/abstract_adapter' -require 'thinking_sphinx/adapters/mysql_adapter' -require 'thinking_sphinx/adapters/postgresql_adapter' - -ActiveRecord::Base.send(:include, ThinkingSphinx::ActiveRecord) - -Merb::Plugins.add_rakefiles( - File.join(File.dirname(__FILE__), "thinking_sphinx", "tasks") -) if defined?(Merb) - -module ThinkingSphinx - module Version #:nodoc: - Major = 1 - Minor = 1 - Tiny = 3 - - String = [Major, Minor, Tiny].join('.') - end - - # A ConnectionError will get thrown when a connection to Sphinx can't be - # made. - class ConnectionError < StandardError - end - - # A StaleIdsException is thrown by Collection.instances_from_matches if there - # are records in Sphinx but not in the database, so the search can be retried. - class StaleIdsException < StandardError - attr_accessor :ids - def initialize(ids) - self.ids = ids - end - end - - # The collection of indexed models. 
Keep in mind that Rails lazily loads - # its classes, so this may not actually be populated with _all_ the models - # that have Sphinx indexes. - def self.indexed_models - @@indexed_models ||= [] - end - - # Check if index definition is disabled. - # - def self.define_indexes? - @@define_indexes = true unless defined?(@@define_indexes) - @@define_indexes == true - end - - # Enable/disable indexes - you may want to do this while migrating data. - # - # ThinkingSphinx.define_indexes = false - # - def self.define_indexes=(value) - @@define_indexes = value - end - - @@deltas_enabled = nil - - # Check if delta indexing is enabled. - # - def self.deltas_enabled? - @@deltas_enabled = (ThinkingSphinx::Configuration.environment != 'test') if @@deltas_enabled.nil? - @@deltas_enabled - end - - # Enable/disable all delta indexing. - # - # ThinkingSphinx.deltas_enabled = false - # - def self.deltas_enabled=(value) - @@deltas_enabled = value - end - - @@updates_enabled = nil - - # Check if updates are enabled. True by default, unless within the test - # environment. - # - def self.updates_enabled? - @@updates_enabled = (ThinkingSphinx::Configuration.environment != 'test') if @@updates_enabled.nil? - @@updates_enabled - end - - # Enable/disable updates to Sphinx - # - # ThinkingSphinx.updates_enabled = false - # - def self.updates_enabled=(value) - @@updates_enabled = value - end - - @@suppress_delta_output = false - - def self.suppress_delta_output? - @@suppress_delta_output - end - - def self.suppress_delta_output=(value) - @@suppress_delta_output = value - end - - # Checks to see if MySQL will allow simplistic GROUP BY statements. If not, - # or if not using MySQL, this will return false. - # - def self.use_group_by_shortcut? - ::ActiveRecord::ConnectionAdapters.constants.include?("MysqlAdapter") && - ::ActiveRecord::Base.connection.is_a?( - ::ActiveRecord::ConnectionAdapters::MysqlAdapter - ) && - ::ActiveRecord::Base.connection.select_all( - "SELECT @@global.sql_mode, @@session.sql_mode;" - ).all? { |key,value| value.nil? || value[/ONLY_FULL_GROUP_BY/].nil? } - end - - def self.sphinx_running? - !!sphinx_pid - end - - def self.sphinx_pid - pid_file = ThinkingSphinx::Configuration.instance.pid_file - `cat #{pid_file}`[/\d+/] if File.exists?(pid_file) - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record.rb deleted file mode 100644 index 8fc6694..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record.rb +++ /dev/null @@ -1,245 +0,0 @@ -require 'thinking_sphinx/active_record/delta' -require 'thinking_sphinx/active_record/search' -require 'thinking_sphinx/active_record/has_many_association' - -module ThinkingSphinx - # Core additions to ActiveRecord models - define_index for creating indexes - # for models. If you want to interrogate the index objects created for the - # model, you can use the class-level accessor :sphinx_indexes. - # - module ActiveRecord - def self.included(base) - base.class_eval do - class_inheritable_array :sphinx_indexes, :sphinx_facets - class << self - # Allows creation of indexes for Sphinx. If you don't do this, there - # isn't much point trying to search (or using this plugin at all, - # really). - # - # An example or two: - # - # define_index - # indexes :id, :as => :model_id - # indexes name - # end - # - # You can also grab fields from associations - multiple levels deep - # if necessary. 
- # - # define_index do - # indexes tags.name, :as => :tag - # indexes articles.content - # indexes orders.line_items.product.name, :as => :product - # end - # - # And it will automatically concatenate multiple fields: - # - # define_index do - # indexes [author.first_name, author.last_name], :as => :author - # end - # - # The #indexes method is for fields - if you want attributes, use - # #has instead. All the same rules apply - but keep in mind that - # attributes are for sorting, grouping and filtering, not searching. - # - # define_index do - # # fields ... - # - # has created_at, updated_at - # end - # - # One last feature is the delta index. This requires the model to - # have a boolean field named 'delta', and is enabled as follows: - # - # define_index do - # # fields ... - # # attributes ... - # - # set_property :delta => true - # end - # - # Check out the more detailed documentation for each of these methods - # at ThinkingSphinx::Index::Builder. - # - def define_index(&block) - return unless ThinkingSphinx.define_indexes? - - self.sphinx_indexes ||= [] - index = Index.new(self, &block) - - self.sphinx_indexes << index - unless ThinkingSphinx.indexed_models.include?(self.name) - ThinkingSphinx.indexed_models << self.name - end - - if index.delta? - before_save :toggle_delta - after_commit :index_delta - end - - after_destroy :toggle_deleted - - index - end - alias_method :sphinx_index, :define_index - - def sphinx_index_options - sphinx_indexes.last.options - end - - # Generate a unique CRC value for the model's name, to use to - # determine which Sphinx documents belong to which AR records. - # - # Really only written for internal use - but hey, if it's useful to - # you in some other way, awesome. - # - def to_crc32 - self.name.to_crc32 - end - - def to_crc32s - (subclasses << self).collect { |klass| klass.to_crc32 } - end - - def source_of_sphinx_index - possible_models = self.sphinx_indexes.collect { |index| index.model } - return self if possible_models.include?(self) - - parent = self.superclass - while !possible_models.include?(parent) && parent != ::ActiveRecord::Base - parent = parent.superclass - end - - return parent - end - - def to_riddle(offset) - sphinx_database_adapter.setup - - indexes = [to_riddle_for_core(offset)] - indexes << to_riddle_for_delta(offset) if sphinx_delta? - indexes << to_riddle_for_distributed - end - - def sphinx_database_adapter - @sphinx_database_adapter ||= - ThinkingSphinx::AbstractAdapter.detect(self) - end - - private - - def sphinx_name - self.name.underscore.tr(':/\\', '_') - end - - def sphinx_delta? - self.sphinx_indexes.any? { |index| index.delta? } - end - - def to_riddle_for_core(offset) - index = Riddle::Configuration::Index.new("#{sphinx_name}_core") - index.path = File.join( - ThinkingSphinx::Configuration.instance.searchd_file_path, index.name - ) - - set_configuration_options_for_indexes index - set_field_settings_for_indexes index - - self.sphinx_indexes.select { |ts_index| - ts_index.model == self - }.each_with_index do |ts_index, i| - index.sources << ts_index.to_riddle_for_core(offset, i) - end - - index - end - - def to_riddle_for_delta(offset) - index = Riddle::Configuration::Index.new("#{sphinx_name}_delta") - index.parent = "#{sphinx_name}_core" - index.path = File.join(ThinkingSphinx::Configuration.instance.searchd_file_path, index.name) - - self.sphinx_indexes.each_with_index do |ts_index, i| - index.sources << ts_index.to_riddle_for_delta(offset, i) if ts_index.delta? 
- end - - index - end - - def to_riddle_for_distributed - index = Riddle::Configuration::DistributedIndex.new(sphinx_name) - index.local_indexes << "#{sphinx_name}_core" - index.local_indexes.unshift "#{sphinx_name}_delta" if sphinx_delta? - index - end - - def set_configuration_options_for_indexes(index) - ThinkingSphinx::Configuration.instance.index_options.each do |key, value| - index.send("#{key}=".to_sym, value) - end - - self.sphinx_indexes.each do |ts_index| - ts_index.options.each do |key, value| - index.send("#{key}=".to_sym, value) if ThinkingSphinx::Configuration::IndexOptions.include?(key.to_s) && !value.nil? - end - end - end - - def set_field_settings_for_indexes(index) - field_names = lambda { |field| field.unique_name.to_s } - - self.sphinx_indexes.each do |ts_index| - index.prefix_field_names += ts_index.prefix_fields.collect(&field_names) - index.infix_field_names += ts_index.infix_fields.collect(&field_names) - end - end - end - end - - base.send(:include, ThinkingSphinx::ActiveRecord::Delta) - base.send(:include, ThinkingSphinx::ActiveRecord::Search) - - ::ActiveRecord::Associations::HasManyAssociation.send( - :include, ThinkingSphinx::ActiveRecord::HasManyAssociation - ) - ::ActiveRecord::Associations::HasManyThroughAssociation.send( - :include, ThinkingSphinx::ActiveRecord::HasManyAssociation - ) - end - - def in_core_index? - self.class.search_for_id( - self.sphinx_document_id, - "#{self.class.source_of_sphinx_index.name.underscore.tr(':/\\', '_')}_core" - ) - end - - def toggle_deleted - return unless ThinkingSphinx.updates_enabled? && ThinkingSphinx.sphinx_running? - - config = ThinkingSphinx::Configuration.instance - client = Riddle::Client.new config.address, config.port - - client.update( - "#{self.class.sphinx_indexes.first.name}_core", - ['sphinx_deleted'], - {self.sphinx_document_id => 1} - ) if self.in_core_index? - - client.update( - "#{self.class.sphinx_indexes.first.name}_delta", - ['sphinx_deleted'], - {self.sphinx_document_id => 1} - ) if ThinkingSphinx.deltas_enabled? && - self.class.sphinx_indexes.any? { |index| index.delta? } && - self.delta - rescue ::ThinkingSphinx::ConnectionError - # nothing - end - - def sphinx_document_id - (self.id * ThinkingSphinx.indexed_models.size) + - ThinkingSphinx.indexed_models.index(self.class.source_of_sphinx_index.name) - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record/delta.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record/delta.rb deleted file mode 100644 index a8fe96f..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record/delta.rb +++ /dev/null @@ -1,74 +0,0 @@ -module ThinkingSphinx - module ActiveRecord - # This module contains all the delta-related code for models. There isn't - # really anything you need to call manually in here - except perhaps - # index_delta, but not sure what reason why. - # - module Delta - # Code for after_commit callback is written by Eli Miller: - # http://elimiller.blogspot.com/2007/06/proper-cache-expiry-with-aftercommit.html - # with slight modification from Joost Hietbrink. - # - def self.included(base) - base.class_eval do - class << self - # Temporarily disable delta indexing inside a block, then perform a single - # rebuild of index at the end. - # - # Useful when performing updates to batches of models to prevent - # the delta index being rebuilt after each individual update. - # - # In the following example, the delta index will only be rebuilt once, - # not 10 times. 
- # - # SomeModel.suspended_delta do - # 10.times do - # SomeModel.create( ... ) - # end - # end - # - def suspended_delta(reindex_after = true, &block) - original_setting = ThinkingSphinx.deltas_enabled? - ThinkingSphinx.deltas_enabled = false - begin - yield - ensure - ThinkingSphinx.deltas_enabled = original_setting - self.index_delta if reindex_after - end - end - - # Build the delta index for the related model. This won't be called - # if running in the test environment. - # - def index_delta(instance = nil) - delta_object.index(self, instance) - end - - def delta_object - self.sphinx_indexes.first.delta_object - end - end - - private - - # Set the delta value for the model to be true. - def toggle_delta - self.class.delta_object.toggle(self) if should_toggle_delta? - end - - # Build the delta index for the related model. This won't be called - # if running in the test environment. - # - def index_delta - self.class.index_delta(self) if self.class.delta_object.toggled(self) - end - - def should_toggle_delta? - !self.respond_to?(:changed?) || self.changed? || self.new_record? - end - end - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record/has_many_association.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record/has_many_association.rb deleted file mode 100644 index 44b25c0..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record/has_many_association.rb +++ /dev/null @@ -1,29 +0,0 @@ -module ThinkingSphinx - module ActiveRecord - module HasManyAssociation - def search(*args) - foreign_key = @reflection.primary_key_name - stack = [@reflection.options[:through]].compact - - attribute = nil - (@reflection.klass.sphinx_indexes || []).each do |index| - attribute = index.attributes.detect { |attrib| - attrib.columns.length == 1 && - attrib.columns.first.__name == foreign_key.to_sym && - attrib.columns.first.__stack == stack - } - break if attribute - end - - raise "Missing Attribute for Foreign Key #{foreign_key}" unless attribute - - options = args.extract_options! - options[:with] ||= {} - options[:with][attribute.unique_name] = @owner.id - - args << options - @reflection.klass.search(*args) - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record/search.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record/search.rb deleted file mode 100644 index fc3f2b4..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/active_record/search.rb +++ /dev/null @@ -1,57 +0,0 @@ -module ThinkingSphinx - module ActiveRecord - # This module covers the specific model searches - but the syntax is - # exactly the same as the core Search class - so use that as your refence - # point. - # - module Search - def self.included(base) - base.class_eval do - class << self - # Searches for results that match the parameters provided. Will only - # return the ids for the matching objects. See - # ThinkingSphinx::Search#search for syntax examples. - # - def search_for_ids(*args) - options = args.extract_options! - options[:class] = self - args << options - ThinkingSphinx::Search.search_for_ids(*args) - end - - # Searches for results limited to a single model. See - # ThinkingSphinx::Search#search for syntax examples. - # - def search(*args) - options = args.extract_options! - options[:class] = self - args << options - ThinkingSphinx::Search.search(*args) - end - - def search_count(*args) - options = args.extract_options! 
- options[:class] = self - args << options - ThinkingSphinx::Search.count(*args) - end - - def search_for_id(*args) - options = args.extract_options! - options[:class] = self - args << options - ThinkingSphinx::Search.search_for_id(*args) - end - - def facets(*args) - options = args.extract_options! - options[:class] = self - args << options - ThinkingSphinx::Search.facets(*args) - end - end - end - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/adapters/abstract_adapter.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/adapters/abstract_adapter.rb deleted file mode 100644 index dc92a00..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/adapters/abstract_adapter.rb +++ /dev/null @@ -1,34 +0,0 @@ -module ThinkingSphinx - class AbstractAdapter - def initialize(model) - @model = model - end - - def setup - # Deliberately blank - subclasses should do something though. Well, if - # they need to. - end - - def self.detect(model) - case model.connection.class.name - when "ActiveRecord::ConnectionAdapters::MysqlAdapter", - "ActiveRecord::ConnectionAdapters::MysqlplusAdapter" - ThinkingSphinx::MysqlAdapter.new model - when "ActiveRecord::ConnectionAdapters::PostgreSQLAdapter" - ThinkingSphinx::PostgreSQLAdapter.new model - else - raise "Invalid Database Adapter: Sphinx only supports MySQL and PostgreSQL" - end - end - - def quote_with_table(column) - "#{@model.quoted_table_name}.#{@model.connection.quote_column_name(column)}" - end - - protected - - def connection - @connection ||= @model.connection - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/adapters/mysql_adapter.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/adapters/mysql_adapter.rb deleted file mode 100644 index e65fe46..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/adapters/mysql_adapter.rb +++ /dev/null @@ -1,53 +0,0 @@ -module ThinkingSphinx - class MysqlAdapter < AbstractAdapter - def setup - # Does MySQL actually need to do anything? - end - - def sphinx_identifier - "mysql" - end - - def concatenate(clause, separator = ' ') - "CONCAT_WS('#{separator}', #{clause})" - end - - def group_concatenate(clause, separator = ' ') - "GROUP_CONCAT(#{clause} SEPARATOR '#{separator}')" - end - - def cast_to_string(clause) - "CAST(#{clause} AS CHAR)" - end - - def cast_to_datetime(clause) - "UNIX_TIMESTAMP(#{clause})" - end - - def cast_to_unsigned(clause) - "CAST(#{clause} AS UNSIGNED)" - end - - def convert_nulls(clause, default = '') - default = "'#{default}'" if default.is_a?(String) - - "IFNULL(#{clause}, #{default})" - end - - def boolean(value) - value ? 
1 : 0 - end - - def crc(clause) - "CRC32(#{clause})" - end - - def utf8_query_pre - "SET NAMES utf8" - end - - def time_difference(diff) - "DATE_SUB(NOW(), INTERVAL #{diff} SECOND)" - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/adapters/postgresql_adapter.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/adapters/postgresql_adapter.rb deleted file mode 100644 index aa1bfe6..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/adapters/postgresql_adapter.rb +++ /dev/null @@ -1,129 +0,0 @@ -module ThinkingSphinx - class PostgreSQLAdapter < AbstractAdapter - def setup - create_array_accum_function - create_crc32_function - end - - def sphinx_identifier - "pgsql" - end - - def concatenate(clause, separator = ' ') - clause.split(', ').collect { |field| - "COALESCE(#{field}, '')" - }.join(" || '#{separator}' || ") - end - - def group_concatenate(clause, separator = ' ') - "array_to_string(array_accum(#{clause}), '#{separator}')" - end - - def cast_to_string(clause) - clause - end - - def cast_to_datetime(clause) - "cast(extract(epoch from #{clause}) as int)" - end - - def cast_to_unsigned(clause) - clause - end - - def convert_nulls(clause, default = '') - default = "'#{default}'" if default.is_a?(String) - - "COALESCE(#{clause}, #{default})" - end - - def boolean(value) - value ? 'TRUE' : 'FALSE' - end - - def crc(clause) - "crc32(#{clause})" - end - - def utf8_query_pre - nil - end - - def time_difference(diff) - "current_timestamp - interval '#{diff} seconds'" - end - - private - - def execute(command, output_error = false) - connection.execute "begin" - connection.execute "savepoint ts" - begin - connection.execute command - rescue StandardError => err - puts err if output_error - connection.execute "rollback to savepoint ts" - end - connection.execute "release savepoint ts" - connection.execute "commit" - end - - def create_array_accum_function - if connection.raw_connection.server_version > 80200 - execute <<-SQL - CREATE AGGREGATE array_accum (anyelement) - ( - sfunc = array_append, - stype = anyarray, - initcond = '{}' - ); - SQL - else - execute <<-SQL - CREATE AGGREGATE array_accum - ( - basetype = anyelement, - sfunc = array_append, - stype = anyarray, - initcond = '{}' - ); - SQL - end - end - - def create_crc32_function - execute "CREATE LANGUAGE 'plpgsql';" - function = <<-SQL - CREATE OR REPLACE FUNCTION crc32(word text) - RETURNS bigint AS $$ - DECLARE tmp bigint; - DECLARE i int; - DECLARE j int; - DECLARE word_array bytea; - BEGIN - i = 0; - tmp = 4294967295; - word_array = decode(replace(word, E'\\\\', E'\\\\\\\\'), 'escape'); - LOOP - tmp = (tmp # get_byte(word_array, i))::bigint; - i = i + 1; - j = 0; - LOOP - tmp = ((tmp >> 1) # (3988292384 * (tmp & 1)))::bigint; - j = j + 1; - IF j >= 8 THEN - EXIT; - END IF; - END LOOP; - IF i >= char_length(word) THEN - EXIT; - END IF; - END LOOP; - return (tmp # 4294967295); - END - $$ IMMUTABLE STRICT LANGUAGE plpgsql; - SQL - execute function, true - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/association.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/association.rb deleted file mode 100644 index 3c2b353..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/association.rb +++ /dev/null @@ -1,144 +0,0 @@ -module ThinkingSphinx - # Association tracks a specific reflection and join to reference data that - # isn't in the base model. 
Very much an internal class for Thinking Sphinx - - # perhaps because I feel it's not as strong (or simple) as most of the rest. - # - class Association - attr_accessor :parent, :reflection, :join - - # Create a new association by passing in the parent association, and the - # corresponding reflection instance. If there is no parent, pass in nil. - # - # top = Association.new nil, top_reflection - # child = Association.new top, child_reflection - # - def initialize(parent, reflection) - @parent, @reflection = parent, reflection - @children = {} - end - - # Get the children associations for a given association name. The only time - # that there'll actually be more than one association is when the - # relationship is polymorphic. To keep things simple though, it will always - # be an Array that gets returned (an empty one if no matches). - # - # # where pages is an association on the class tied to the reflection. - # association.children(:pages) - # - def children(assoc) - @children[assoc] ||= Association.children(@reflection.klass, assoc, self) - end - - # Get the children associations for a given class, association name and - # parent association. Much like the instance method of the same name, it - # will return an empty array if no associations have the name, and only - # have multiple association instances if the underlying relationship is - # polymorphic. - # - # Association.children(User, :pages, user_association) - # - def self.children(klass, assoc, parent=nil) - ref = klass.reflect_on_association(assoc) - - return [] if ref.nil? - return [Association.new(parent, ref)] unless ref.options[:polymorphic] - - # association is polymorphic - create associations for each - # non-polymorphic reflection. - polymorphic_classes(ref).collect { |klass| - Association.new parent, ::ActiveRecord::Reflection::AssociationReflection.new( - ref.macro, - "#{ref.name}_#{klass.name}".to_sym, - casted_options(klass, ref), - ref.active_record - ) - } - end - - # Link up the join for this model from a base join - and set parent - # associations' joins recursively. - # - def join_to(base_join) - parent.join_to(base_join) if parent && parent.join.nil? - - @join ||= ::ActiveRecord::Associations::ClassMethods::JoinDependency::JoinAssociation.new( - @reflection, base_join, parent ? parent.join : base_join.joins.first - ) - end - - # Returns the association's join SQL statements - and it replaces - # ::ts_join_alias:: with the aliased table name so the generated reflection - # join conditions avoid column name collisions. - # - def to_sql - @join.association_join.gsub(/::ts_join_alias::/, - "#{@reflection.klass.connection.quote_table_name(@join.parent.aliased_table_name)}" - ) - end - - # Returns true if the association - or a parent - is a has_many or - # has_and_belongs_to_many. - # - def is_many? - case @reflection.macro - when :has_many, :has_and_belongs_to_many - true - else - @parent ? @parent.is_many? : false - end - end - - # Returns an array of all the associations that lead to this one - starting - # with the top level all the way to the current association object. - # - def ancestors - (parent ? parent.ancestors : []) << self - end - - def has_column?(column) - @reflection.klass.column_names.include?(column.to_s) - end - - private - - # Returns all the objects that could be currently instantiated from a - # polymorphic association. This is pretty damn fast if there's an index on - # the foreign type column - but if there isn't, it can take a while if you - # have a lot of data. 
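[Editorial note on the polymorphic handling above: `children` splits a polymorphic reflection into one association per concrete class, and `polymorphic_classes` (defined next) discovers those classes with a DISTINCT query. A minimal sketch, assuming a hypothetical Comment model; Post/Article are illustrative class names.]

    class Comment < ActiveRecord::Base
      belongs_to :commentable, :polymorphic => true   # hypothetical association
    end

    # Association.children(Comment, :commentable) cannot use the reflection as-is,
    # so polymorphic_classes runs roughly:
    #   SELECT DISTINCT commentable_type FROM comments WHERE commentable_type IS NOT NULL
    # and one Association is built per class found (:commentable_Post,
    # :commentable_Article, ...), each with a casted condition such as
    #   ::ts_join_alias::.`commentable_type` = 'Post'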
- # - def self.polymorphic_classes(ref) - ref.active_record.connection.select_all( - "SELECT DISTINCT #{ref.options[:foreign_type]} " + - "FROM #{ref.active_record.table_name} " + - "WHERE #{ref.options[:foreign_type]} IS NOT NULL" - ).collect { |row| - row[ref.options[:foreign_type]].constantize - } - end - - # Returns a new set of options for an association that mimics an existing - # polymorphic relationship for a specific class. It adds a condition to - # filter by the appropriate object. - # - def self.casted_options(klass, ref) - options = ref.options.clone - options[:polymorphic] = nil - options[:class_name] = klass.name - options[:foreign_key] ||= "#{ref.name}_id" - - quoted_foreign_type = klass.connection.quote_column_name ref.options[:foreign_type] - case options[:conditions] - when nil - options[:conditions] = "::ts_join_alias::.#{quoted_foreign_type} = '#{klass.name}'" - when Array - options[:conditions] << "::ts_join_alias::.#{quoted_foreign_type} = '#{klass.name}'" - when Hash - options[:conditions].merge!(ref.options[:foreign_type] => klass.name) - else - options[:conditions] << " AND ::ts_join_alias::.#{quoted_foreign_type} = '#{klass.name}'" - end - - options - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/attribute.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/attribute.rb deleted file mode 100644 index 2b264ae..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/attribute.rb +++ /dev/null @@ -1,254 +0,0 @@ -module ThinkingSphinx - # Attributes - eternally useful when it comes to filtering, sorting or - # grouping. This class isn't really useful to you unless you're hacking - # around with the internals of Thinking Sphinx - but hey, don't let that - # stop you. - # - # One key thing to remember - if you're using the attribute manually to - # generate SQL statements, you'll need to set the base model, and all the - # associations. Which can get messy. Use Index.link!, it really helps. - # - class Attribute - attr_accessor :alias, :columns, :associations, :model, :faceted - - # To create a new attribute, you'll need to pass in either a single Column - # or an array of them, and some (optional) options. - # - # Valid options are: - # - :as => :alias_name - # - :type => :attribute_type - # - # Alias is only required in three circumstances: when there's - # another attribute or field with the same name, when the column name is - # 'id', or when there's more than one column. - # - # Type is not required, unless you want to force a column to be a certain - # type (but keep in mind the value will not be CASTed in the SQL - # statements). The only time you really need to use this is when the type - # can't be figured out by the column - ie: when not actually using a - # database column as your source. - # - # Example usage: - # - # Attribute.new( - # Column.new(:created_at) - # ) - # - # Attribute.new( - # Column.new(:posts, :id), - # :as => :post_ids - # ) - # - # Attribute.new( - # [Column.new(:pages, :id), Column.new(:articles, :id)], - # :as => :content_ids - # ) - # - # Attribute.new( - # Column.new("NOW()"), - # :as => :indexed_at, - # :type => :datetime - # ) - # - # If you're creating attributes for latitude and longitude, don't forget - # that Sphinx expects these values to be in radians. - # - def initialize(columns, options = {}) - @columns = Array(columns) - @associations = {} - - raise "Cannot define a field with no columns. 
Maybe you are trying to index a field with a reserved name (id, name). You can fix this error by using a symbol rather than a bare name (:id instead of id)." if @columns.empty? || @columns.any? { |column| !column.respond_to?(:__stack) } - - @alias = options[:as] - @type = options[:type] - @faceted = options[:facet] - end - - # Get the part of the SELECT clause related to this attribute. Don't forget - # to set your model and associations first though. - # - # This will concatenate strings and arrays of integers, and convert - # datetimes to timestamps, as needed. - # - def to_select_sql - clause = @columns.collect { |column| - column_with_prefix(column) - }.join(', ') - - separator = all_ints? ? ',' : ' ' - - clause = adapter.concatenate(clause, separator) if concat_ws? - clause = adapter.group_concatenate(clause, separator) if is_many? - clause = adapter.cast_to_datetime(clause) if type == :datetime - clause = adapter.convert_nulls(clause) if type == :string - - "#{clause} AS #{quote_column(unique_name)}" - end - - # Get the part of the GROUP BY clause related to this attribute - if one is - # needed. If not, all you'll get back is nil. The latter will happen if - # there isn't actually a real column to get data from, or if there's - # multiple data values (read: a has_many or has_and_belongs_to_many - # association). - # - def to_group_sql - case - when is_many?, is_string?, ThinkingSphinx.use_group_by_shortcut? - nil - else - @columns.collect { |column| - column_with_prefix(column) - } - end - end - - def type_to_config - { - :multi => :sql_attr_multi, - :datetime => :sql_attr_timestamp, - :string => :sql_attr_str2ordinal, - :float => :sql_attr_float, - :boolean => :sql_attr_bool, - :integer => :sql_attr_uint - }[type] - end - - def config_value - if type == :multi - "uint #{unique_name} from field" - else - unique_name - end - end - - # Returns the unique name of the attribute - which is either the alias of - # the attribute, or the name of the only column - if there is only one. If - # there isn't, there should be an alias. Else things probably won't work. - # Consider yourself warned. - # - def unique_name - if @columns.length == 1 - @alias || @columns.first.__name - else - @alias - end - end - - # Returns the type of the column. If that's not already set, it returns - # :multi if there's the possibility of more than one value, :string if - # there's more than one association, otherwise it figures out what the - # actual column's datatype is and returns that. - def type - @type ||= case - when is_many? - :multi - when @associations.values.flatten.length > 1 - :string - else - translated_type_from_database - end - end - - def to_facet - return nil unless @faceted - - ThinkingSphinx::Facet.new(self) - end - - private - - def adapter - @adapter ||= @model.sphinx_database_adapter - end - - def quote_column(column) - @model.connection.quote_column_name(column) - end - - # Indication of whether the columns should be concatenated with a space - # between each value. True if there's either multiple sources or multiple - # associations. - # - def concat_ws? - multiple_associations? || @columns.length > 1 - end - - # Checks whether any column requires multiple associations (which only - # happens for polymorphic situations). - # - def multiple_associations? - associations.any? { |col,assocs| assocs.length > 1 } - end - - # Builds a column reference tied to the appropriate associations. 
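[To make the attribute type handling above concrete, a rough illustration under the MySQL adapter, assuming a User model with a `created_at` datetime column; `Column.new` follows the shorthand used in the comments above.]

    attribute = ThinkingSphinx::Attribute.new(Column.new(:created_at))
    # Once the model and associations are linked (see Index#link!):
    attribute.type            # => :datetime, read from the database column
    attribute.to_select_sql   # => "UNIX_TIMESTAMP(`users`.`created_at`) AS `created_at`"
    attribute.type_to_config  # => :sql_attr_timestamp, the sphinx.conf setting used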
This - # dives into the associations hash and their corresponding joins to - # figure out how to correctly reference a column in SQL. - # - def column_with_prefix(column) - if column.is_string? - column.__name - elsif associations[column].empty? - "#{@model.quoted_table_name}.#{quote_column(column.__name)}" - else - associations[column].collect { |assoc| - assoc.has_column?(column.__name) ? - "#{@model.connection.quote_table_name(assoc.join.aliased_table_name)}" + - ".#{quote_column(column.__name)}" : - nil - }.compact.join(', ') - end - end - - # Could there be more than one value related to the parent record? If so, - # then this will return true. If not, false. It's that simple. - # - def is_many? - associations.values.flatten.any? { |assoc| assoc.is_many? } - end - - # Returns true if any of the columns are string values, instead of database - # column references. - def is_string? - columns.all? { |col| col.is_string? } - end - - def all_ints? - @columns.all? { |col| - klasses = @associations[col].empty? ? [@model] : - @associations[col].collect { |assoc| assoc.reflection.klass } - klasses.all? { |klass| - column = klass.columns.detect { |column| column.name == col.__name.to_s } - !column.nil? && column.type == :integer - } - } - end - - def type_from_database - klass = @associations.values.flatten.first ? - @associations.values.flatten.first.reflection.klass : @model - - klass.columns.detect { |col| - @columns.collect { |c| c.__name.to_s }.include? col.name - }.type - end - - def translated_type_from_database - case type_from_db = type_from_database - when :datetime, :string, :float, :boolean, :integer - type_from_db - when :decimal - :float - when :timestamp, :date - :datetime - else - raise <<-MESSAGE - -Cannot automatically map column type #{type_from_db} to an equivalent Sphinx -type (integer, float, boolean, datetime, string as ordinal). 
You could try to -explicitly convert the column's value in your define_index block: - has "CAST(column AS INT)", :type => :integer, :as => :column - MESSAGE - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/collection.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/collection.rb deleted file mode 100644 index fb5b171..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/collection.rb +++ /dev/null @@ -1,142 +0,0 @@ -module ThinkingSphinx - class Collection < ::Array - attr_reader :total_entries, :total_pages, :current_page, :per_page - attr_accessor :results - - # Compatibility with older versions of will_paginate - alias_method :page_count, :total_pages - - def initialize(page, per_page, entries, total_entries) - @current_page, @per_page, @total_entries = page, per_page, total_entries - - @total_pages = (entries / @per_page.to_f).ceil - end - - def self.ids_from_results(results, page, limit, options) - collection = self.new(page, limit, - results[:total] || 0, results[:total_found] || 0 - ) - collection.results = results - collection.replace results[:matches].collect { |match| - match[:attributes]["sphinx_internal_id"] - } - return collection - end - - def self.create_from_results(results, page, limit, options) - collection = self.new(page, limit, - results[:total] || 0, results[:total_found] || 0 - ) - collection.results = results - collection.replace instances_from_matches(results[:matches], options) - return collection - end - - def self.instances_from_matches(matches, options = {}) - if klass = options[:class] - instances_from_class klass, matches, options - else - instances_from_classes matches, options - end - end - - def self.instances_from_class(klass, matches, options = {}) - index_options = klass.sphinx_index_options - - ids = matches.collect { |match| match[:attributes]["sphinx_internal_id"] } - instances = ids.length > 0 ? klass.find( - :all, - :conditions => {klass.primary_key.to_sym => ids}, - :include => (options[:include] || index_options[:include]), - :select => (options[:select] || index_options[:select]) - ) : [] - - # Raise an exception if we find records in Sphinx but not in the DB, so - # the search method can retry without them. See - # ThinkingSphinx::Search.retry_search_on_stale_index. - if options[:raise_on_stale] && instances.length < ids.length - stale_ids = ids - instances.map {|i| i.id } - raise StaleIdsException, stale_ids - end - - ids.collect { |obj_id| - instances.detect { |obj| obj.id == obj_id } - } - end - - # Group results by class and call #find(:all) once for each group to reduce - # the number of #find's in multi-model searches. - # - def self.instances_from_classes(matches, options = {}) - groups = matches.group_by { |match| match[:attributes]["class_crc"] } - groups.each do |crc, group| - group.replace( - instances_from_class(class_from_crc(crc), group, options) - ) - end - - matches.collect do |match| - groups.detect { |crc, group| - crc == match[:attributes]["class_crc"] - }[1].detect { |obj| - obj.id == match[:attributes]["sphinx_internal_id"] - } - end - end - - def self.class_from_crc(crc) - @@models_by_crc ||= ThinkingSphinx.indexed_models.inject({}) do |hash, model| - hash[model.constantize.to_crc32] = model - model.constantize.subclasses.each { |subclass| - hash[subclass.to_crc32] = subclass.name - } - hash - end - @@models_by_crc[crc].constantize - end - - def previous_page - current_page > 1 ? 
(current_page - 1) : nil - end - - def next_page - current_page < total_pages ? (current_page + 1): nil - end - - def offset - (current_page - 1) * @per_page - end - - def method_missing(method, *args, &block) - super unless method.to_s[/^each_with_.*/] - - each_with_attribute method.to_s.gsub(/^each_with_/, ''), &block - end - - def each_with_groupby_and_count(&block) - results[:matches].each_with_index do |match, index| - yield self[index], match[:attributes]["@groupby"], match[:attributes]["@count"] - end - end - - def each_with_attribute(attribute, &block) - results[:matches].each_with_index do |match, index| - yield self[index], (match[:attributes][attribute] || match[:attributes]["@#{attribute}"]) - end - end - - def each_with_weighting(&block) - results[:matches].each_with_index do |match, index| - yield self[index], match[:weight] - end - end - - def inject_with_groupby_and_count(initial = nil, &block) - index = -1 - results[:matches].inject(initial) do |memo, match| - index += 1 - yield memo, self[index], match[:attributes]["@groupby"], match[:attributes]["@count"] - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/configuration.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/configuration.rb deleted file mode 100644 index b01717c..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/configuration.rb +++ /dev/null @@ -1,236 +0,0 @@ -require 'erb' -require 'singleton' - -module ThinkingSphinx - # This class both keeps track of the configuration settings for Sphinx and - # also generates the resulting file for Sphinx to use. - # - # Here are the default settings, relative to RAILS_ROOT where relevant: - # - # config file:: config/#{environment}.sphinx.conf - # searchd log file:: log/searchd.log - # query log file:: log/searchd.query.log - # pid file:: log/searchd.#{environment}.pid - # searchd files:: db/sphinx/#{environment}/ - # address:: 127.0.0.1 - # port:: 3312 - # allow star:: false - # min prefix length:: 1 - # min infix length:: 1 - # mem limit:: 64M - # max matches:: 1000 - # morphology:: stem_en - # charset type:: utf-8 - # charset table:: nil - # ignore chars:: nil - # html strip:: false - # html remove elements:: '' - # - # If you want to change these settings, create a YAML file at - # config/sphinx.yml with settings for each environment, in a similar - # fashion to database.yml - using the following keys: config_file, - # searchd_log_file, query_log_file, pid_file, searchd_file_path, port, - # allow_star, enable_star, min_prefix_len, min_infix_len, mem_limit, - # max_matches, # morphology, charset_type, charset_table, ignore_chars, - # html_strip, # html_remove_elements. I think you've got the idea. - # - # Each setting in the YAML file is optional - so only put in the ones you - # want to change. - # - # Keep in mind, if for some particular reason you're using a version of - # Sphinx older than 0.9.8 r871 (that's prior to the proper 0.9.8 release), - # don't set allow_star to true. 
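[A concrete illustration of the config/sphinx.yml file described above, keyed per environment like database.yml; all keys are optional and the values here are purely illustrative.]

    development:
      port: 3313
      morphology: stem_en
    production:
      port: 3312
      mem_limit: 128M
      allow_star: true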
- # - class Configuration - include Singleton - - SourceOptions = %w( mysql_connect_flags sql_range_step sql_query_pre - sql_query_post sql_ranged_throttle sql_query_post_index ) - - IndexOptions = %w( charset_table charset_type docinfo enable_star - exceptions html_index_attrs html_remove_elements html_strip ignore_chars - min_infix_len min_prefix_len min_word_len mlock morphology ngram_chars - ngram_len phrase_boundary phrase_boundary_step preopen stopwords - wordforms ) - - attr_accessor :config_file, :searchd_log_file, :query_log_file, - :pid_file, :searchd_file_path, :address, :port, :allow_star, - :database_yml_file, :app_root, :bin_path, :model_directories - - attr_accessor :source_options, :index_options - - attr_reader :environment, :configuration - - # Load in the configuration settings - this will look for config/sphinx.yml - # and parse it according to the current environment. - # - def initialize(app_root = Dir.pwd) - self.reset - end - - def reset - self.app_root = RAILS_ROOT if defined?(RAILS_ROOT) - self.app_root = Merb.root if defined?(Merb) - self.app_root ||= app_root - - @configuration = Riddle::Configuration.new - @configuration.searchd.address = "127.0.0.1" - @configuration.searchd.port = 3312 - @configuration.searchd.pid_file = "#{self.app_root}/log/searchd.#{environment}.pid" - @configuration.searchd.log = "#{self.app_root}/log/searchd.log" - @configuration.searchd.query_log = "#{self.app_root}/log/searchd.query.log" - - self.database_yml_file = "#{self.app_root}/config/database.yml" - self.config_file = "#{self.app_root}/config/#{environment}.sphinx.conf" - self.searchd_file_path = "#{self.app_root}/db/sphinx/#{environment}" - self.allow_star = false - self.bin_path = "" - self.model_directories = ["#{app_root}/app/models/"] - - self.source_options = {} - self.index_options = { - :charset_type => "utf-8", - :morphology => "stem_en" - } - - parse_config - - self - end - - def self.environment - @@environment ||= ( - defined?(Merb) ? Merb.environment : ENV['RAILS_ENV'] - ) || "development" - end - - def environment - self.class.environment - end - - def controller - @controller ||= Riddle::Controller.new(@configuration, self.config_file) - end - - # Generate the config file for Sphinx by using all the settings defined and - # looping through all the models with indexes to build the relevant - # indexer and searchd configuration, and sources and indexes details. - # - def build(file_path=nil) - load_models - file_path ||= "#{self.config_file}" - - @configuration.indexes.clear - - ThinkingSphinx.indexed_models.each_with_index do |model, model_index| - @configuration.indexes.concat model.constantize.to_riddle(model_index) - end - - open(file_path, "w") do |file| - file.write @configuration.render - end - end - - # Make sure all models are loaded - without reloading any that - # ActiveRecord::Base is already aware of (otherwise we start to hit some - # messy dependencies issues). - # - def load_models - self.model_directories.each do |base| - Dir["#{base}**/*.rb"].each do |file| - model_name = file.gsub(/^#{base}([\w_\/\\]+)\.rb/, '\1') - - next if model_name.nil? - next if ::ActiveRecord::Base.send(:subclasses).detect { |model| - model.name == model_name - } - - begin - model_name.camelize.constantize - rescue LoadError - model_name.gsub!(/.*[\/\\]/, '').nil? ? 
next : retry - rescue NameError - next - end - end - end - end - - def address - @configuration.searchd.address - end - - def address=(address) - @configuration.searchd.address = address - end - - def port - @configuration.searchd.port - end - - def port=(port) - @configuration.searchd.port = port - end - - def pid_file - @configuration.searchd.pid_file - end - - def pid_file=(pid_file) - @configuration.searchd.pid_file = pid_file - end - - def searchd_log_file - @configuration.searchd.log - end - - def searchd_log_file=(file) - @configuration.searchd.log = file - end - - def query_log_file - @configuration.searchd.query_log - end - - def query_log_file=(file) - @configuration.searchd.query_log = file - end - - private - - # Parse the config/sphinx.yml file - if it exists - then use the attribute - # accessors to set the appropriate values. Nothing too clever. - # - def parse_config - path = "#{app_root}/config/sphinx.yml" - return unless File.exists?(path) - - conf = YAML::load(ERB.new(IO.read(path)).result)[environment] - - conf.each do |key,value| - self.send("#{key}=", value) if self.methods.include?("#{key}=") - - set_sphinx_setting self.source_options, key, value, SourceOptions - set_sphinx_setting self.index_options, key, value, IndexOptions - set_sphinx_setting @configuration.searchd, key, value - set_sphinx_setting @configuration.indexer, key, value - end unless conf.nil? - - self.bin_path += '/' unless self.bin_path.blank? - - if self.allow_star - self.index_options[:enable_star] = true - self.index_options[:min_prefix_len] = 1 - end - end - - def set_sphinx_setting(object, key, value, allowed = {}) - if object.is_a?(Hash) - object[key.to_sym] = value if allowed.include?(key.to_s) - else - object.send("#{key}=", value) if object.methods.include?("#{key}") - send("#{key}=", value) if self.methods.include?("#{key}") - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/core/string.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/core/string.rb deleted file mode 100644 index 4642438..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/core/string.rb +++ /dev/null @@ -1,22 +0,0 @@ -module ThinkingSphinx - module Core - module String - - def to_crc32 - result = 0xFFFFFFFF - self.each_byte do |byte| - result ^= byte - 8.times do - result = (result >> 1) ^ (0xEDB88320 * (result & 1)) - end - end - result ^ 0xFFFFFFFF - end - - end - end -end - -class String - include ThinkingSphinx::Core::String -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas.rb deleted file mode 100644 index c92967f..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas.rb +++ /dev/null @@ -1,22 +0,0 @@ -require 'thinking_sphinx/deltas/default_delta' -require 'thinking_sphinx/deltas/delayed_delta' -require 'thinking_sphinx/deltas/datetime_delta' - -module ThinkingSphinx - module Deltas - def self.parse(index, options) - case options.delete(:delta) - when TrueClass, :default - DefaultDelta.new index, options - when :delayed - DelayedDelta.new index, options - when :datetime - DatetimeDelta.new index, options - when FalseClass, nil - nil - else - raise "Unknown delta type" - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/datetime_delta.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/datetime_delta.rb deleted file mode 100644 index 2ee46d4..0000000 --- 
a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/datetime_delta.rb +++ /dev/null @@ -1,50 +0,0 @@ -module ThinkingSphinx - module Deltas - class DatetimeDelta < ThinkingSphinx::Deltas::DefaultDelta - attr_accessor :column, :threshold - - def initialize(index, options) - @index = index - @column = options.delete(:delta_column) || :updated_at - @threshold = options.delete(:threshold) || 1.day - end - - def index(model, instance = nil) - # do nothing - true - end - - def delayed_index(model) - config = ThinkingSphinx::Configuration.instance - rotate = ThinkingSphinx.sphinx_running? ? "--rotate" : "" - - output = `#{config.bin_path}indexer --config #{config.config_file} #{rotate} #{delta_index_name model}` - output += `#{config.bin_path}indexer --config #{config.config_file} #{rotate} --merge #{core_index_name model} #{delta_index_name model} --merge-dst-range sphinx_deleted 0 0` - puts output unless ThinkingSphinx.suppress_delta_output? - - true - end - - def toggle(instance) - # do nothing - end - - def toggled(instance) - instance.send(@column) > @threshold.ago - end - - def reset_query(model) - nil - end - - def clause(model, toggled) - if toggled - "#{model.quoted_table_name}.#{@index.quote_column(@column.to_s)}" + - " > #{adapter.time_difference(@threshold)}" - else - nil - end - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/default_delta.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/default_delta.rb deleted file mode 100644 index 3fa2533..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/default_delta.rb +++ /dev/null @@ -1,65 +0,0 @@ -module ThinkingSphinx - module Deltas - class DefaultDelta - attr_accessor :column - - def initialize(index, options) - @index = index - @column = options.delete(:delta_column) || :delta - end - - def index(model, instance = nil) - return true unless ThinkingSphinx.updates_enabled? && - ThinkingSphinx.deltas_enabled? - - config = ThinkingSphinx::Configuration.instance - client = Riddle::Client.new config.address, config.port - - client.update( - core_index_name(model), - ['sphinx_deleted'], - {instance.sphinx_document_id => [1]} - ) if instance && ThinkingSphinx.sphinx_running? && instance.in_core_index? - - output = `#{config.bin_path}indexer --config #{config.config_file} --rotate #{delta_index_name model}` - puts output unless ThinkingSphinx.suppress_delta_output? 
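[The three delta strategies above are chosen via the :delta property on an index (see ThinkingSphinx::Deltas.parse above). A hedged sketch of the corresponding define_index settings, using a hypothetical Article model.]

    class Article < ActiveRecord::Base            # hypothetical model
      define_index do
        indexes subject, content

        # Plain deltas: requires a boolean `delta` column on the table.
        set_property :delta => true

        # Or queue the reindex through delayed_job (DelayedDelta):
        # set_property :delta => :delayed

        # Or time-window deltas (DatetimeDelta): rows changed within :threshold
        # are picked up when delayed_index runs indexer --merge.
        # set_property :delta => :datetime, :delta_column => :updated_at,
        #              :threshold => 1.hour
      end
    end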
- - true - end - - def toggle(instance) - instance.delta = true - end - - def toggled(instance) - instance.delta - end - - def reset_query(model) - "UPDATE #{model.quoted_table_name} SET " + - "#{@index.quote_column(@column.to_s)} = #{adapter.boolean(false)}" - end - - def clause(model, toggled) - "#{model.quoted_table_name}.#{@index.quote_column(@column.to_s)}" + - " = #{adapter.boolean(toggled)}" - end - - protected - - def core_index_name(model) - "#{model.source_of_sphinx_index.name.underscore.tr(':/\\', '_')}_core" - end - - def delta_index_name(model) - "#{model.source_of_sphinx_index.name.underscore.tr(':/\\', '_')}_delta" - end - - private - - def adapter - @adapter = @index.model.sphinx_database_adapter - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta.rb deleted file mode 100644 index e95298b..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta.rb +++ /dev/null @@ -1,25 +0,0 @@ -require 'delayed/job' - -require 'thinking_sphinx/deltas/delayed_delta/delta_job' -require 'thinking_sphinx/deltas/delayed_delta/flag_as_deleted_job' -require 'thinking_sphinx/deltas/delayed_delta/job' - -module ThinkingSphinx - module Deltas - class DelayedDelta < ThinkingSphinx::Deltas::DefaultDelta - def index(model, instance = nil) - ThinkingSphinx::Deltas::Job.enqueue( - ThinkingSphinx::Deltas::DeltaJob.new(delta_index_name(model)) - ) - - Delayed::Job.enqueue( - ThinkingSphinx::Deltas::FlagAsDeletedJob.new( - core_index_name(model), instance.sphinx_document_id - ) - ) if instance - - true - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta/delta_job.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta/delta_job.rb deleted file mode 100644 index f9511ec..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta/delta_job.rb +++ /dev/null @@ -1,24 +0,0 @@ -module ThinkingSphinx - module Deltas - class DeltaJob - attr_accessor :index - - def initialize(index) - @index = index - end - - def perform - return true unless ThinkingSphinx.updates_enabled? && - ThinkingSphinx.deltas_enabled? - - config = ThinkingSphinx::Configuration.instance - client = Riddle::Client.new config.address, config.port - - output = `#{config.bin_path}indexer --config #{config.config_file} --rotate #{index}` - puts output unless ThinkingSphinx.suppress_delta_output? - - true - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta/flag_as_deleted_job.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta/flag_as_deleted_job.rb deleted file mode 100644 index d6afd27..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta/flag_as_deleted_job.rb +++ /dev/null @@ -1,27 +0,0 @@ -module ThinkingSphinx - module Deltas - class FlagAsDeletedJob - attr_accessor :index, :document_id - - def initialize(index, document_id) - @index, @document_id = index, document_id - end - - def perform - return true unless ThinkingSphinx.updates_enabled? - - config = ThinkingSphinx::Configuration.instance - client = Riddle::Client.new config.address, config.port - - client.update( - @index, - ['sphinx_deleted'], - {@document_id => [1]} - ) if ThinkingSphinx.sphinx_running? 
&& - ThinkingSphinx::Search.search_for_id(@document_id, @index) - - true - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta/job.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta/job.rb deleted file mode 100644 index de0a7cb..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/deltas/delayed_delta/job.rb +++ /dev/null @@ -1,26 +0,0 @@ -module ThinkingSphinx - module Deltas - class Job < Delayed::Job - def self.enqueue(object, priority = 0) - super unless duplicates_exist(object) - end - - def self.cancel_thinking_sphinx_jobs - if connection.tables.include?("delayed_jobs") - delete_all("handler LIKE '--- !ruby/object:ThinkingSphinx::Deltas::%'") - end - end - - private - - def self.duplicates_exist(object) - count( - :conditions => { - :handler => object.to_yaml, - :locked_at => nil - } - ) > 0 - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/facet.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/facet.rb deleted file mode 100644 index 89c86ee..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/facet.rb +++ /dev/null @@ -1,58 +0,0 @@ -module ThinkingSphinx - class Facet - attr_reader :reference - - def initialize(reference) - @reference = reference - - if reference.columns.length != 1 - raise "Can't translate Facets on multiple-column field or attribute" - end - end - - def name - reference.unique_name - end - - def attribute_name - @attribute_name ||= case @reference - when Attribute - @reference.unique_name.to_s - when Field - @reference.unique_name.to_s + "_sort" - end - end - - def value(object, attribute_value) - return translate(object, attribute_value) if @reference.is_a?(Field) - - case @reference.type - when :string, :multi - translate(object, attribute_value) - when :datetime - Time.at(attribute_value) - when :boolean - attribute_value > 0 - else - attribute_value - end - end - - def to_s - name - end - - private - - def translate(object, attribute_value) - column.__stack.each { |method| - object = object.send(method) - } - object.send(column.__name) - end - - def column - @reference.columns.first - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/facet_collection.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/facet_collection.rb deleted file mode 100644 index bf130c6..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/facet_collection.rb +++ /dev/null @@ -1,44 +0,0 @@ -module ThinkingSphinx - class FacetCollection < Hash - attr_accessor :arguments - - def initialize(arguments) - @arguments = arguments.clone - @attribute_values = {} - @facets = [] - end - - def add_from_results(facet, results) - self[facet.name] = {} - @attribute_values[facet.name] = {} - @facets << facet - - results.each_with_groupby_and_count { |result, group, count| - facet_value = facet.value(result, group) - - self[facet.name][facet_value] = count - @attribute_values[facet.name][facet_value] = group - } - end - - def for(hash = {}) - arguments = @arguments.clone - options = arguments.extract_options! 
- options[:with] ||= {} - - hash.each do |key, value| - attrib = facet_for_key(key).attribute_name - options[:with][attrib] = @attribute_values[key][value] - end - - arguments << options - ThinkingSphinx::Search.search *arguments - end - - private - - def facet_for_key(key) - @facets.detect { |facet| facet.name == key } - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/field.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/field.rb deleted file mode 100644 index 9edaede..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/field.rb +++ /dev/null @@ -1,172 +0,0 @@ -module ThinkingSphinx - # Fields - holding the string data which Sphinx indexes for your searches. - # This class isn't really useful to you unless you're hacking around with the - # internals of Thinking Sphinx - but hey, don't let that stop you. - # - # One key thing to remember - if you're using the field manually to - # generate SQL statements, you'll need to set the base model, and all the - # associations. Which can get messy. Use Index.link!, it really helps. - # - class Field - attr_accessor :alias, :columns, :sortable, :associations, :model, :infixes, - :prefixes, :faceted - - # To create a new field, you'll need to pass in either a single Column - # or an array of them, and some (optional) options. The columns are - # references to the data that will make up the field. - # - # Valid options are: - # - :as => :alias_name - # - :sortable => true - # - :infixes => true - # - :prefixes => true - # - # Alias is only required in three circumstances: when there's - # another attribute or field with the same name, when the column name is - # 'id', or when there's more than one column. - # - # Sortable defaults to false - but is quite useful when set to true, as - # it creates an attribute with the same string value (which Sphinx converts - # to an integer value), which can be sorted by. Thinking Sphinx is smart - # enough to realise that when you specify fields in sort statements, you - # mean their respective attributes. - # - # If you have partial matching enabled (ie: enable_star), then you can - # specify certain fields to have their prefixes and infixes indexed. Keep - # in mind, though, that Sphinx's default is _all_ fields - so once you - # highlight a particular field, no other fields in the index will have - # these partial indexes. - # - # Here's some examples: - # - # Field.new( - # Column.new(:name) - # ) - # - # Field.new( - # [Column.new(:first_name), Column.new(:last_name)], - # :as => :name, :sortable => true - # ) - # - # Field.new( - # [Column.new(:posts, :subject), Column.new(:posts, :content)], - # :as => :posts, :prefixes => true - # ) - # - def initialize(columns, options = {}) - @columns = Array(columns) - @associations = {} - - raise "Cannot define a field with no columns. Maybe you are trying to index a field with a reserved name (id, name). You can fix this error by using a symbol rather than a bare name (:id instead of id)." if @columns.empty? || @columns.any? { |column| !column.respond_to?(:__stack) } - - @alias = options[:as] - @sortable = options[:sortable] || false - @infixes = options[:infixes] || false - @prefixes = options[:prefixes] || false - @faceted = options[:facet] || false - end - - # Get the part of the SELECT clause related to this field. Don't forget - # to set your model and associations first though. 
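[Tying Facet and FacetCollection together, a hedged usage sketch; model, columns and counts are illustrative. Note that a faceted field filters through its "_sort" attribute (see Facet#attribute_name above), hence the :sortable option.]

    class Article < ActiveRecord::Base            # hypothetical model
      define_index do
        indexes subject, content
        indexes author.name, :as => :author_name, :sortable => true, :facet => true
      end
    end

    facets = Article.facets("ruby")        # delegates to ThinkingSphinx::Search.facets
    facets[:author_name]                   # => {"Pat" => 12, "Sarah" => 5}  (value => count)
    facets.for(:author_name => "Pat")      # re-runs the search, filtered via :with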
- # - # This will concatenate strings if there's more than one data source or - # multiple data values (has_many or has_and_belongs_to_many associations). - # - def to_select_sql - clause = @columns.collect { |column| - column_with_prefix(column) - }.join(', ') - - clause = adapter.concatenate(clause) if concat_ws? - clause = adapter.group_concatenate(clause) if is_many? - - "#{adapter.cast_to_string clause } AS #{quote_column(unique_name)}" - end - - # Get the part of the GROUP BY clause related to this field - if one is - # needed. If not, all you'll get back is nil. The latter will happen if - # there's multiple data values (read: a has_many or has_and_belongs_to_many - # association). - # - def to_group_sql - case - when is_many?, ThinkingSphinx.use_group_by_shortcut? - nil - else - @columns.collect { |column| - column_with_prefix(column) - } - end - end - - # Returns the unique name of the field - which is either the alias of - # the field, or the name of the only column - if there is only one. If - # there isn't, there should be an alias. Else things probably won't work. - # Consider yourself warned. - # - def unique_name - if @columns.length == 1 - @alias || @columns.first.__name - else - @alias - end - end - - def to_facet - return nil unless @faceted - - ThinkingSphinx::Facet.new(self) - end - - private - - def adapter - @adapter ||= @model.sphinx_database_adapter - end - - def quote_column(column) - @model.connection.quote_column_name(column) - end - - # Indication of whether the columns should be concatenated with a space - # between each value. True if there's either multiple sources or multiple - # associations. - # - def concat_ws? - @columns.length > 1 || multiple_associations? - end - - # Checks whether any column requires multiple associations (which only - # happens for polymorphic situations). - # - def multiple_associations? - associations.any? { |col,assocs| assocs.length > 1 } - end - - # Builds a column reference tied to the appropriate associations. This - # dives into the associations hash and their corresponding joins to - # figure out how to correctly reference a column in SQL. - # - def column_with_prefix(column) - if column.is_string? - column.__name - elsif associations[column].empty? - "#{@model.quoted_table_name}.#{quote_column(column.__name)}" - else - associations[column].collect { |assoc| - assoc.has_column?(column.__name) ? - "#{@model.connection.quote_table_name(assoc.join.aliased_table_name)}" + - ".#{quote_column(column.__name)}" : - nil - }.compact.join(', ') - end - end - - # Could there be more than one value related to the parent record? If so, - # then this will return true. If not, false. It's that simple. - # - def is_many? - associations.values.flatten.any? { |assoc| assoc.is_many? } - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/index.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/index.rb deleted file mode 100644 index c211b7e..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/index.rb +++ /dev/null @@ -1,414 +0,0 @@ -require 'thinking_sphinx/index/builder' -require 'thinking_sphinx/index/faux_column' - -module ThinkingSphinx - # The Index class is a ruby representation of a Sphinx source (not a Sphinx - # index - yes, I know it's a little confusing. You'll manage). This is - # another 'internal' Thinking Sphinx class - if you're using it directly, - # you either know what you're doing, or messing with things beyond your ken. - # Enjoy. 
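[To make the select-clause generation above concrete: under the MySQL adapter, the two-column ":as => :name" example from the Field documentation ends up in the source query roughly as follows, assuming a User model.]

    field = ThinkingSphinx::Field.new(
      [Column.new(:first_name), Column.new(:last_name)],   # Column per the docs above
      :as => :name
    )
    # After Index#link! has set the model and associations:
    field.to_select_sql
    # => "CAST(CONCAT_WS(' ', `users`.`first_name`, `users`.`last_name`) AS CHAR) AS `name`"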
- # - class Index - attr_accessor :model, :fields, :attributes, :conditions, :groupings, - :delta_object, :options - - # Create a new index instance by passing in the model it is tied to, and - # a block to build it with (optional but recommended). For documentation - # on the syntax for inside the block, the Builder class is what you want. - # - # Quick Example: - # - # Index.new(User) do - # indexes login, email - # - # has created_at - # - # set_property :delta => true - # end - # - def initialize(model, &block) - @model = model - @associations = {} - @fields = [] - @attributes = [] - @conditions = [] - @groupings = [] - @options = {} - @delta_object = nil - - initialize_from_builder(&block) if block_given? - end - - def name - self.class.name(@model) - end - - def self.name(model) - model.name.underscore.tr(':/\\', '_') - end - - def to_riddle_for_core(offset, index) - add_internal_attributes - link! - - source = Riddle::Configuration::SQLSource.new( - "#{name}_core_#{index}", adapter.sphinx_identifier - ) - - set_source_database_settings source - set_source_attributes source - set_source_sql source, offset - set_source_settings source - - source - end - - def to_riddle_for_delta(offset, index) - add_internal_attributes - link! - - source = Riddle::Configuration::SQLSource.new( - "#{name}_delta_#{index}", adapter.sphinx_identifier - ) - source.parent = "#{name}_core_#{index}" - - set_source_database_settings source - set_source_attributes source - set_source_sql source, offset, true - - source - end - - # Link all the fields and associations to their corresponding - # associations and joins. This _must_ be called before interrogating - # the index's fields and associations for anything that may reference - # their SQL structure. - # - def link! - base = ::ActiveRecord::Associations::ClassMethods::JoinDependency.new( - @model, [], nil - ) - - @fields.each { |field| - field.model ||= @model - field.columns.each { |col| - field.associations[col] = associations(col.__stack.clone) - field.associations[col].each { |assoc| assoc.join_to(base) } - } - } - - @attributes.each { |attribute| - attribute.model ||= @model - attribute.columns.each { |col| - attribute.associations[col] = associations(col.__stack.clone) - attribute.associations[col].each { |assoc| assoc.join_to(base) } - } - } - end - - # Flag to indicate whether this index has a corresponding delta index. - # - def delta? - !@delta_object.nil? - end - - def adapter - @adapter ||= @model.sphinx_database_adapter - end - - def prefix_fields - @fields.select { |field| field.prefixes } - end - - def infix_fields - @fields.select { |field| field.infixes } - end - - def index_options - all_index_options = ThinkingSphinx::Configuration.instance.index_options.clone - @options.keys.select { |key| - ThinkingSphinx::Configuration::IndexOptions.include?(key.to_s) - }.each { |key| all_index_options[key.to_sym] = @options[key] } - all_index_options - end - - def quote_column(column) - @model.connection.quote_column_name(column) - end - - private - - def utf8? - self.index_options[:charset_type] == "utf-8" - end - - # Does all the magic with the block provided to the base #initialize. - # Creates a new class subclassed from Builder, and evaluates the block - # on it, then pulls all relevant settings - fields, attributes, conditions, - # properties - into the new index. - # - # Also creates a CRC attribute for the model. 
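[For orientation, the naming scheme used by to_riddle_for_core / to_riddle_for_delta above, sketched for a hypothetical BlogPost model as the first indexed model.]

    ThinkingSphinx::Index.name(BlogPost)   # => "blog_post"
    # Configuration#build passes each model's position through as `index`, so the
    # generated sources come out roughly as "blog_post_core_0" and, when deltas are
    # enabled, "blog_post_delta_0" (the delta source's parent being the core source).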
- # - def initialize_from_builder(&block) - builder = Class.new(Builder) - builder.setup - - builder.instance_eval &block - - unless @model.descends_from_active_record? - stored_class = @model.store_full_sti_class ? @model.name : @model.name.demodulize - builder.where("#{@model.quoted_table_name}.#{quote_column(@model.inheritance_column)} = '#{stored_class}'") - end - - set_model = Proc.new { |item| item.model = @model } - - @fields = builder.fields &set_model - @attributes = builder.attributes.each &set_model - @conditions = builder.conditions - @groupings = builder.groupings - @delta_object = ThinkingSphinx::Deltas.parse self, builder.properties - @options = builder.properties - - is_faceted = Proc.new { |item| item.faceted } - add_facet = Proc.new { |item| @model.sphinx_facets << item.to_facet } - - @model.sphinx_facets ||= [] - @fields.select( &is_faceted).each &add_facet - @attributes.select(&is_faceted).each &add_facet - - # We want to make sure that if the database doesn't exist, then Thinking - # Sphinx doesn't mind when running non-TS tasks (like db:create, db:drop - # and db:migrate). It's a bit hacky, but I can't think of a better way. - rescue StandardError => err - case err.class.name - when "Mysql::Error", "ActiveRecord::StatementInvalid" - return - else - raise err - end - end - - # Returns all associations used amongst all the fields and attributes. - # This includes all associations between the model and what the actual - # columns are from. - # - def all_associations - @all_associations ||= ( - # field associations - @fields.collect { |field| - field.associations.values - }.flatten + - # attribute associations - @attributes.collect { |attrib| - attrib.associations.values - }.flatten - ).uniq.collect { |assoc| - # get ancestors as well as column-level associations - assoc.ancestors - }.flatten.uniq - end - - # Gets a stack of associations for a specific path. - # - def associations(path, parent = nil) - assocs = [] - - if parent.nil? - assocs = association(path.shift) - else - assocs = parent.children(path.shift) - end - - until path.empty? - point = path.shift - assocs = assocs.collect { |assoc| - assoc.children(point) - }.flatten - end - - assocs - end - - # Gets the association stack for a specific key. 
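[One detail worth calling out from initialize_from_builder above is the STI handling: an index defined on a subclass is automatically scoped to that subclass. A rough illustration with hypothetical models and MySQL quoting.]

    class Admin < User        # STI subclass, hypothetical
      define_index do
        indexes login
      end
    end
    # initialize_from_builder appends a condition equivalent to
    #   `users`.`type` = 'Admin'
    # so the generated source SQL only selects Admin rows.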
- # - def association(key) - @associations[key] ||= Association.children(@model, key) - end - - def crc_column - if @model.column_names.include?(@model.inheritance_column) - adapter.cast_to_unsigned(adapter.convert_nulls( - adapter.crc(adapter.quote_with_table(@model.inheritance_column)), - @model.to_crc32 - )) - else - @model.to_crc32.to_s - end - end - - def add_internal_attributes - @attributes << Attribute.new( - FauxColumn.new(@model.primary_key.to_sym), - :type => :integer, - :as => :sphinx_internal_id - ) unless @attributes.detect { |attr| attr.alias == :sphinx_internal_id } - - @attributes << Attribute.new( - FauxColumn.new(crc_column), - :type => :integer, - :as => :class_crc - ) unless @attributes.detect { |attr| attr.alias == :class_crc } - - @attributes << Attribute.new( - FauxColumn.new("'" + (@model.send(:subclasses).collect { |klass| - klass.to_crc32.to_s - } << @model.to_crc32.to_s).join(",") + "'"), - :type => :multi, - :as => :subclass_crcs - ) unless @attributes.detect { |attr| attr.alias == :subclass_crcs } - - @attributes << Attribute.new( - FauxColumn.new("0"), - :type => :integer, - :as => :sphinx_deleted - ) unless @attributes.detect { |attr| attr.alias == :sphinx_deleted } - end - - def set_source_database_settings(source) - config = @model.connection.instance_variable_get(:@config) - - source.sql_host = config[:host] || "localhost" - source.sql_user = config[:username] || config[:user] || "" - source.sql_pass = (config[:password].to_s || "").gsub('#', '\#') - source.sql_db = config[:database] - source.sql_port = config[:port] - source.sql_sock = config[:socket] - end - - def set_source_attributes(source) - attributes.each do |attrib| - source.send(attrib.type_to_config) << attrib.config_value - end - end - - def set_source_sql(source, offset, delta = false) - source.sql_query = to_sql(:offset => offset, :delta => delta).gsub(/\n/, ' ') - source.sql_query_range = to_sql_query_range(:delta => delta) - source.sql_query_info = to_sql_query_info(offset) - - source.sql_query_pre += send(!delta ? :sql_query_pre_for_core : :sql_query_pre_for_delta) - - if @options[:group_concat_max_len] - source.sql_query_pre << "SET SESSION group_concat_max_len = #{@options[:group_concat_max_len]}" - end - - source.sql_query_pre += [adapter.utf8_query_pre].compact if utf8? - end - - def set_source_settings(source) - ThinkingSphinx::Configuration.instance.source_options.each do |key, value| - source.send("#{key}=".to_sym, value) - end - - @options.each do |key, value| - source.send("#{key}=".to_sym, value) if ThinkingSphinx::Configuration::SourceOptions.include?(key.to_s) && !value.nil? - end - end - - def sql_query_pre_for_core - if self.delta? && !@delta_object.reset_query(@model).blank? - [@delta_object.reset_query(@model)] - else - [] - end - end - - def sql_query_pre_for_delta - [""] - end - - # Generates the big SQL statement to get the data back for all the fields - # and attributes, using all the relevant association joins. If you want - # the version filtered for delta values, send through :delta => true in the - # options. Won't do much though if the index isn't set up to support a - # delta sibling. - # - # Examples: - # - # index.to_sql - # index.to_sql(:delta => true) - # - def to_sql(options={}) - assocs = all_associations - - where_clause = "" - if self.delta? && !@delta_object.clause(@model, options[:delta]).blank? - where_clause << " AND #{@delta_object.clause(@model, options[:delta])}" - end - unless @conditions.empty? 
- where_clause << " AND " << @conditions.join(" AND ") - end - - internal_groupings = [] - if @model.column_names.include?(@model.inheritance_column) - internal_groupings << "#{@model.quoted_table_name}.#{quote_column(@model.inheritance_column)}" - end - - unique_id_expr = "* #{ThinkingSphinx.indexed_models.size} + #{options[:offset] || 0}" - - sql = <<-SQL -SELECT #{ ( - ["#{@model.quoted_table_name}.#{quote_column(@model.primary_key)} #{unique_id_expr} AS #{quote_column(@model.primary_key)} "] + - @fields.collect { |field| field.to_select_sql } + - @attributes.collect { |attribute| attribute.to_select_sql } -).join(", ") } -FROM #{ @model.table_name } - #{ assocs.collect { |assoc| assoc.to_sql }.join(' ') } -WHERE #{@model.quoted_table_name}.#{quote_column(@model.primary_key)} >= $start - AND #{@model.quoted_table_name}.#{quote_column(@model.primary_key)} <= $end - #{ where_clause } -GROUP BY #{ ( - ["#{@model.quoted_table_name}.#{quote_column(@model.primary_key)}"] + - @fields.collect { |field| field.to_group_sql }.compact + - @attributes.collect { |attribute| attribute.to_group_sql }.compact + - @groupings + internal_groupings -).join(", ") } - SQL - - if @model.connection.class.name == "ActiveRecord::ConnectionAdapters::MysqlAdapter" - sql += " ORDER BY NULL" - end - - sql - end - - # Simple helper method for the query info SQL - which is a statement that - # returns the single row for a corresponding id. - # - def to_sql_query_info(offset) - "SELECT * FROM #{@model.quoted_table_name} WHERE " + - " #{quote_column(@model.primary_key)} = (($id - #{offset}) / #{ThinkingSphinx.indexed_models.size})" - end - - # Simple helper method for the query range SQL - which is a statement that - # returns minimum and maximum id values. These can be filtered by delta - - # so pass in :delta => true to get the delta version of the SQL. - # - def to_sql_query_range(options={}) - min_statement = adapter.convert_nulls( - "MIN(#{quote_column(@model.primary_key)})", 1 - ) - max_statement = adapter.convert_nulls( - "MAX(#{quote_column(@model.primary_key)})", 1 - ) - - sql = "SELECT #{min_statement}, #{max_statement} " + - "FROM #{@model.quoted_table_name} " - if self.delta? && !@delta_object.clause(@model, options[:delta]).blank? - sql << "WHERE #{@delta_object.clause(@model, options[:delta])}" - end - - sql - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/index/builder.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/index/builder.rb deleted file mode 100644 index 918c1d0..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/index/builder.rb +++ /dev/null @@ -1,224 +0,0 @@ -module ThinkingSphinx - class Index - # The Builder class is the core for the index definition block processing. - # There are four methods you really need to pay attention to: - # - indexes (aliased to includes and attribute) - # - has (aliased to attribute) - # - where - # - set_property (aliased to set_properties) - # - # The first two of these methods allow you to define what data makes up - # your indexes. #where provides a method to add manual SQL conditions, and - # set_property allows you to set some settings on a per-index basis. Check - # out each method's documentation for better ideas of usage. - # - class Builder - class << self - # No idea where this is coming from - haven't found it in any ruby or - # rails documentation. It's not needed though, so it gets undef'd. - # Hopefully the list of methods that get in the way doesn't get too - # long. 
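[A worked example of the two helper queries above (to_sql_query_range and to_sql_query_info), assuming a primary key of `id`, a single indexed model, a hypothetical users table, and the MySQL adapter.]

    # to_sql_query_range (non-delta) comes out roughly as:
    #   SELECT IFNULL(MIN(`id`), 1), IFNULL(MAX(`id`), 1) FROM `users`
    # to_sql_query_info(0) comes out roughly as:
    #   SELECT * FROM `users` WHERE `id` = (($id - 0) / 1)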
- HiddenMethods = [:parent, :name, :id, :type].each { |method| - define_method(method) { - caller.grep(/irb.completion/).empty? ? method_missing(method) : super - } - } - - attr_accessor :fields, :attributes, :properties, :conditions, - :groupings - - # Set up all the collections. Consider this the equivalent of an - # instance's initialize method. - # - def setup - @fields = [] - @attributes = [] - @properties = {} - @conditions = [] - @groupings = [] - end - - # This is how you add fields - the strings Sphinx looks at - to your - # index. Technically, to use this method, you need to pass in some - # columns and options - but there's some neat method_missing stuff - # happening, so lets stick to the expected syntax within a define_index - # block. - # - # Expected options are :as, which points to a column alias in symbol - # form, and :sortable, which indicates whether you want to sort by this - # field. - # - # Adding Single-Column Fields: - # - # You can use symbols or methods - and can chain methods together to - # get access down the associations tree. - # - # indexes :id, :as => :my_id - # indexes :name, :sortable => true - # indexes first_name, last_name, :sortable => true - # indexes users.posts.content, :as => :post_content - # indexes users(:id), :as => :user_ids - # - # Keep in mind that if any keywords for Ruby methods - such as id or - # name - clash with your column names, you need to use the symbol - # version (see the first, second and last examples above). - # - # If you specify multiple columns (example #2), a field will be created - # for each. Don't use the :as option in this case. If you want to merge - # those columns together, continue reading. - # - # Adding Multi-Column Fields: - # - # indexes [first_name, last_name], :as => :name - # indexes [location, parent.location], :as => :location - # - # To combine multiple columns into a single field, you need to wrap - # them in an Array, as shown by the above examples. There's no - # limitations on whether they're symbols or methods or what level of - # associations they come from. - # - # Adding SQL Fragment Fields - # - # You can also define a field using an SQL fragment, useful for when - # you would like to index a calculated value. - # - # indexes "age < 18", :as => :minor - # - def indexes(*args) - options = args.extract_options! - args.each do |columns| - fields << Field.new(FauxColumn.coerce(columns), options) - - if fields.last.sortable - attributes << Attribute.new( - fields.last.columns.collect { |col| col.clone }, - options.merge( - :type => :string, - :as => fields.last.unique_name.to_s.concat("_sort").to_sym - ) - ) - end - end - end - alias_method :field, :indexes - alias_method :includes, :indexes - - # This is the method to add attributes to your index (hence why it is - # aliased as 'attribute'). The syntax is the same as #indexes, so use - # that as starting point, but keep in mind the following points. - # - # An attribute can have an alias (the :as option), but it is always - # sortable - so you don't need to explicitly request that. You _can_ - # specify the data type of the attribute (the :type option), but the - # code's pretty good at figuring that out itself from peering into the - # database. - # - # Attributes are limited to the following types: integers, floats, - # datetimes (converted to timestamps), booleans and strings. Don't - # forget that Sphinx converts string attributes to integers, which are - # useful for sorting, but that's about it. 
- # - # You can also have a collection of integers for multi-value attributes - # (MVAs). Generally these would be through a has_many relationship, - # like in this example: - # - # has posts(:id), :as => :post_ids - # - # This allows you to filter on any of the values tied to a specific - # record. Might be best to read through the Sphinx documentation to get - # a better idea of that though. - # - # Adding SQL Fragment Attributes - # - # You can also define an attribute using an SQL fragment, useful for - # when you would like to index a calculated value. Don't forget to set - # the type of the attribute though: - # - # has "age < 18", :as => :minor, :type => :boolean - # - # If you're creating attributes for latitude and longitude, don't - # forget that Sphinx expects these values to be in radians. - # - def has(*args) - options = args.extract_options! - args.each do |columns| - attributes << Attribute.new(FauxColumn.coerce(columns), options) - end - end - alias_method :attribute, :has - - # Use this method to add some manual SQL conditions for your index - # request. You can pass in as many strings as you like, they'll get - # joined together with ANDs later on. - # - # where "user_id = 10" - # where "parent_type = 'Article'", "created_at < NOW()" - # - def where(*args) - @conditions += args - end - - # Use this method to add some manual SQL strings to the GROUP BY - # clause. You can pass in as many strings as you'd like, they'll get - # joined together with commas later on. - # - # group_by "lat", "lng" - # - def group_by(*args) - @groupings += args - end - - # This is what to use to set properties on the index. Chief amongst - # those is the delta property - to allow automatic updates to your - # indexes as new models are added and edited - but also you can - # define search-related properties which will be the defaults for all - # searches on the model. - # - # set_property :delta => true - # set_property :field_weights => {"name" => 100} - # set_property :order => "name ASC" - # set_property :include => :picture - # set_property :select => 'name' - # - # Also, the following two properties are particularly relevant for - # geo-location searching - latitude_attr and longitude_attr. If your - # attributes for these two values are named something other than - # lat/latitude or lon/long/longitude, you can dictate what they are - # when defining the index, so you don't need to specify them for every - # geo-related search. - # - # set_property :latitude_attr => "lt", :longitude_attr => "lg" - # - # Please don't forget to add a boolean field named 'delta' to your - # model's database table if enabling the delta index for it. - # - def set_property(*args) - options = args.extract_options! - if options.empty? - @properties[args[0]] = args[1] - else - @properties.merge!(options) - end - end - alias_method :set_properties, :set_property - - # Handles the generation of new columns for the field and attribute - # definitions. - # - def method_missing(method, *args) - FauxColumn.new(method, *args) - end - - # A method to allow adding fields from associations which have names - # that clash with method names in the Builder class (ie: properties, - # fields, attributes). 
- # - # Example: indexes assoc(:properties).column - # - def assoc(assoc) - FauxColumn.new(method) - end - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/index/faux_column.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/index/faux_column.rb deleted file mode 100644 index 84068de..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/index/faux_column.rb +++ /dev/null @@ -1,110 +0,0 @@ -module ThinkingSphinx - class Index - # Instances of this class represent database columns and the stack of - # associations that lead from the base model to them. - # - # The name and stack are accessible through methods starting with __ to - # avoid conflicting with the method_missing calls that build the stack. - # - class FauxColumn - # Create a new column with a pre-defined stack. The top element in the - # stack will get shifted to be the name value. - # - def initialize(*stack) - @name = stack.pop - @stack = stack - end - - def self.coerce(columns) - case columns - when Symbol, String - FauxColumn.new(columns) - when Array - columns.collect { |col| FauxColumn.coerce(col) } - when FauxColumn - columns - else - nil - end - end - - # Can't use normal method name, as that could be an association or - # column name. - # - def __name - @name - end - - # Can't use normal method name, as that could be an association or - # column name. - # - def __stack - @stack - end - - # Returns true if the stack is empty *and* if the name is a string - - # which is an indication that of raw SQL, as opposed to a value from a - # table's column. - # - def is_string? - @name.is_a?(String) && @stack.empty? - end - - # This handles any 'invalid' method calls and sets them as the name, - # and pushing the previous name into the stack. The object returns - # itself. - # - # If there's a single argument, it becomes the name, and the method - # symbol goes into the stack as well. Multiple arguments means new - # columns with the original stack and new names (from each argument) gets - # returned. - # - # Easier to explain with examples: - # - # col = FauxColumn.new :a, :b, :c - # col.__name #=> :c - # col.__stack #=> [:a, :b] - # - # col.whatever #=> col - # col.__name #=> :whatever - # col.__stack #=> [:a, :b, :c] - # - # col.something(:id) #=> col - # col.__name #=> :id - # col.__stack #=> [:a, :b, :c, :whatever, :something] - # - # cols = col.short(:x, :y, :z) - # cols[0].__name #=> :x - # cols[0].__stack #=> [:a, :b, :c, :whatever, :something, :short] - # cols[1].__name #=> :y - # cols[1].__stack #=> [:a, :b, :c, :whatever, :something, :short] - # cols[2].__name #=> :z - # cols[2].__stack #=> [:a, :b, :c, :whatever, :something, :short] - # - # Also, this allows method chaining to build up a relevant stack: - # - # col = FauxColumn.new :a, :b - # col.__name #=> :b - # col.__stack #=> [:a] - # - # col.one.two.three #=> col - # col.__name #=> :three - # col.__stack #=> [:a, :b, :one, :two] - # - def method_missing(method, *args) - @stack << @name - @name = method - - if (args.empty?) 
- self - elsif (args.length == 1) - method_missing(args.first) - else - args.collect { |arg| - FauxColumn.new(@stack + [@name, arg]) - } - end - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/rails_additions.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/rails_additions.rb deleted file mode 100644 index d528baa..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/rails_additions.rb +++ /dev/null @@ -1,133 +0,0 @@ -module ThinkingSphinx - module HashExcept - # Returns a new hash without the given keys. - def except(*keys) - rejected = Set.new(respond_to?(:convert_key) ? keys.map { |key| convert_key(key) } : keys) - reject { |key,| rejected.include?(key) } - end - - # Replaces the hash without only the given keys. - def except!(*keys) - replace(except(*keys)) - end - end -end - -Hash.send( - :include, ThinkingSphinx::HashExcept -) unless Hash.instance_methods.include?("except") - -module ThinkingSphinx - module ArrayExtractOptions - def extract_options! - last.is_a?(::Hash) ? pop : {} - end - end -end - -Array.send( - :include, ThinkingSphinx::ArrayExtractOptions -) unless Array.instance_methods.include?("extract_options!") - -module ThinkingSphinx - module AbstractQuotedTableName - def quote_table_name(name) - quote_column_name(name) - end - end -end - -ActiveRecord::ConnectionAdapters::AbstractAdapter.send( - :include, ThinkingSphinx::AbstractQuotedTableName -) unless ActiveRecord::ConnectionAdapters::AbstractAdapter.instance_methods.include?("quote_table_name") - -module ThinkingSphinx - module MysqlQuotedTableName - def quote_table_name(name) #:nodoc: - quote_column_name(name).gsub('.', '`.`') - end - end -end - -if ActiveRecord::ConnectionAdapters.constants.include?("MysqlAdapter") - ActiveRecord::ConnectionAdapters::MysqlAdapter.send( - :include, ThinkingSphinx::MysqlQuotedTableName - ) unless ActiveRecord::ConnectionAdapters::MysqlAdapter.instance_methods.include?("quote_table_name") -end - -module ThinkingSphinx - module ActiveRecordQuotedName - def quoted_table_name - self.connection.quote_table_name(self.table_name) - end - end -end - -ActiveRecord::Base.extend( - ThinkingSphinx::ActiveRecordQuotedName -) unless ActiveRecord::Base.respond_to?("quoted_table_name") - -module ThinkingSphinx - module ActiveRecordStoreFullSTIClass - def store_full_sti_class - false - end - end -end - -ActiveRecord::Base.extend( - ThinkingSphinx::ActiveRecordStoreFullSTIClass -) unless ActiveRecord::Base.respond_to?(:store_full_sti_class) - -module ThinkingSphinx - module ClassAttributeMethods - def cattr_reader(*syms) - syms.flatten.each do |sym| - next if sym.is_a?(Hash) - class_eval(<<-EOS, __FILE__, __LINE__) - unless defined? @@#{sym} - @@#{sym} = nil - end - - def self.#{sym} - @@#{sym} - end - - def #{sym} - @@#{sym} - end - EOS - end - end - - def cattr_writer(*syms) - options = syms.extract_options! - syms.flatten.each do |sym| - class_eval(<<-EOS, __FILE__, __LINE__) - unless defined? 
@@#{sym} - @@#{sym} = nil - end - - def self.#{sym}=(obj) - @@#{sym} = obj - end - - #{" - def #{sym}=(obj) - @@#{sym} = obj - end - " unless options[:instance_writer] == false } - EOS - end - end - - def cattr_accessor(*syms) - cattr_reader(*syms) - cattr_writer(*syms) - end - end -end - -Class.extend( - ThinkingSphinx::ClassAttributeMethods -) unless Class.respond_to?(:cattr_reader) diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/search.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/search.rb deleted file mode 100644 index d57022c..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/search.rb +++ /dev/null @@ -1,631 +0,0 @@ -module ThinkingSphinx - # Once you've got those indexes in and built, this is the stuff that - # matters - how to search! This class provides a generic search - # interface - which you can use to search all your indexed models at once. - # Most times, you will just want a specific model's results - to search and - # search_for_ids methods will do the job in exactly the same manner when - # called from a model. - # - class Search - class << self - # Searches for results that match the parameters provided. Will only - # return the ids for the matching objects. See #search for syntax - # examples. - # - # Note that this only searches the Sphinx index, with no ActiveRecord - # queries. Thus, if your index is not in sync with the database, this - # method may return ids that no longer exist there. - # - def search_for_ids(*args) - results, client = search_results(*args.clone) - - options = args.extract_options! - page = options[:page] ? options[:page].to_i : 1 - - ThinkingSphinx::Collection.ids_from_results(results, page, client.limit, options) - end - - # Searches through the Sphinx indexes for relevant matches. There's - # various ways to search, sort, group and filter - which are covered - # below. - # - # Also, if you have WillPaginate installed, the search method can be used - # just like paginate. The same parameters - :page and :per_page - work as - # expected, and the returned result set can be used by the will_paginate - # helper. - # - # == Basic Searching - # - # The simplest way of searching is straight text. - # - # ThinkingSphinx::Search.search "pat" - # ThinkingSphinx::Search.search "google" - # User.search "pat", :page => (params[:page] || 1) - # Article.search "relevant news issue of the day" - # - # If you specify :include, like in an #find call, this will be respected - # when loading the relevant models from the search results. - # - # User.search "pat", :include => :posts - # - # == Match Modes - # - # Sphinx supports 5 different matching modes. By default Thinking Sphinx - # uses :all, which unsurprisingly requires all the supplied search terms - # to match a result. - # - # Alternative modes include: - # - # User.search "pat allan", :match_mode => :any - # User.search "pat allan", :match_mode => :phrase - # User.search "pat | allan", :match_mode => :boolean - # User.search "@name pat | @username pat", :match_mode => :extended - # - # Any will find results with any of the search terms. Phrase treats the search - # terms a single phrase instead of individual words. Boolean and extended allow - # for more complex query syntax, refer to the sphinx documentation for further - # details. - # - # == Weighting - # - # Sphinx has support for weighting, where matches in one field can be considered - # more important than in another. Weights are integers, with 1 as the default. 
- # They can be set per-search like this: - # - # User.search "pat allan", :field_weights => { :alias => 4, :aka => 2 } - # - # If you're searching multiple models, you can set per-index weights: - # - # ThinkingSphinx::Search.search "pat", :index_weights => { User => 10 } - # - # See http://sphinxsearch.com/doc.html#weighting for further details. - # - # == Searching by Fields - # - # If you want to step it up a level, you can limit your search terms to - # specific fields: - # - # User.search :conditions => {:name => "pat"} - # - # This uses Sphinx's extended match mode, unless you specify a different - # match mode explicitly (but then this way of searching won't work). Also - # note that you don't need to put in a search string. - # - # == Searching by Attributes - # - # Also known as filters, you can limit your searches to documents that - # have specific values for their attributes. There are two ways to do - # this. The first is one that works in all scenarios - using the :with - # option. - # - # ThinkingSphinx::Search.search :with => {:parent_id => 10} - # - # The second is only viable if you're searching with a specific model - # (not multi-model searching). With a single model, Thinking Sphinx - # can figure out what attributes and fields are available, so you can - # put it all in the :conditions hash, and it will sort it out. - # - # Node.search :conditions => {:parent_id => 10} - # - # Filters can be single values, arrays of values, or ranges. - # - # Article.search "East Timor", :conditions => {:rating => 3..5} - # - # == Excluding by Attributes - # - # Sphinx also supports negative filtering - where the filters are of - # attribute values to exclude. This is done with the :without option: - # - # User.search :without => {:role_id => 1} - # - # == Excluding by Primary Key - # - # There is a shortcut to exclude records by their ActiveRecord primary key: - # - # User.search :without_ids => 1 - # - # Pass an array or a single value. - # - # The primary key must be an integer as a negative filter is used. Note - # that for multi-model search, an id may occur in more than one model. - # - # == Infix (Star) Searching - # - # By default, Sphinx uses English stemming, e.g. matching "shoes" if you - # search for "shoe". It won't find "Melbourne" if you search for - # "elbourn", though. - # - # Enable infix searching by something like this in config/sphinx.yml: - # - # development: - # enable_star: 1 - # min_infix_length: 2 - # - # Note that this will make indexing take longer. - # - # With those settings (and after reindexing), wildcard asterisks can be used - # in queries: - # - # Location.search "*elbourn*" - # - # To automatically add asterisks around every token (but not operators), - # pass the :star option: - # - # Location.search "elbourn -ustrali", :star => true, :match_mode => :boolean - # - # This would become "*elbourn* -*ustrali*". The :star option only adds the - # asterisks. You need to make the config/sphinx.yml changes yourself. - # - # By default, the tokens are assumed to match the regular expression /\w+/u. - # If you've modified the charset_table, pass another regular expression, e.g. - # - # User.search("oo@bar.c", :star => /[\w@.]+/u) - # - # to search for "*oo@bar.c*" and not "*oo*@*bar*.*c*". - # - # == Sorting - # - # Sphinx can only sort by attributes, so generally you will need to avoid - # using field names in your :order option. 
However, if you're searching - # on a single model, and have specified some fields as sortable, you can - # use those field names and Thinking Sphinx will interpret accordingly. - # Remember: this will only happen for single-model searches, and only - # through the :order option. - # - # Location.search "Melbourne", :order => :state - # User.search :conditions => {:role_id => 2}, :order => "name ASC" - # - # Keep in mind that if you use a string, you *must* specify the direction - # (ASC or DESC) else Sphinx won't return any results. If you use a symbol - # then Thinking Sphinx assumes ASC, but if you wish to state otherwise, - # use the :sort_mode option: - # - # Location.search "Melbourne", :order => :state, :sort_mode => :desc - # - # Of course, there are other sort modes - check out the Sphinx - # documentation[http://sphinxsearch.com/doc.html] for that level of - # detail though. - # - # == Grouping - # - # For this you can use the group_by, group_clause and group_function - # options - which are all directly linked to Sphinx's expectations. No - # magic from Thinking Sphinx. It can get a little tricky, so make sure - # you read all the relevant - # documentation[http://sphinxsearch.com/doc.html#clustering] first. - # - # Yes this section will be expanded, but this is a start. - # - # == Geo/Location Searching - # - # Sphinx - and therefore Thinking Sphinx - has the facility to search - # around a geographical point, using a given latitude and longitude. To - # take advantage of this, you will need to have both of those values in - # attributes. To search with that point, you can then use one of the - # following syntax examples: - # - # Address.search "Melbourne", :geo => [1.4, -2.217], :order => "@geodist asc" - # Address.search "Australia", :geo => [-0.55, 3.108], :order => "@geodist asc" - # :latitude_attr => "latit", :longitude_attr => "longit" - # - # The first example applies when your latitude and longitude attributes - # are named any of lat, latitude, lon, long or longitude. If that's not - # the case, you will need to explicitly state them in your search, _or_ - # you can do so in your model: - # - # define_index do - # has :latit # Float column, stored in radians - # has :longit # Float column, stored in radians - # - # set_property :latitude_attr => "latit" - # set_property :longitude_attr => "longit" - # end - # - # Now, geo-location searching really only has an affect if you have a - # filter, sort or grouping clause related to it - otherwise it's just a - # normal search, and _will not_ return a distance value otherwise. To - # make use of the positioning difference, use the special attribute - # "@geodist" in any of your filters or sorting or grouping clauses. - # - # And don't forget - both the latitude and longitude you use in your - # search, and the values in your indexes, need to be stored as a float in radians, - # _not_ degrees. Keep in mind that if you do this conversion in SQL - # you will need to explicitly declare a column type of :float. - # - # define_index do - # has 'RADIANS(lat)', :as => :lat, :type => :float - # # ... - # end - # - # Once you've got your results set, you can access the distances as - # follows: - # - # @results.each_with_geodist do |result, distance| - # # ... - # end - # - # The distance value is returned as a float, representing the distance in - # metres. - # - # == Handling a Stale Index - # - # Especially if you don't use delta indexing, you risk having records in the - # Sphinx index that are no longer in the database. 
By default, those will simply - # come back as nils: - # - # >> pat_user.delete - # >> User.search("pat") - # Sphinx Result: [1,2] - # => [nil, <#User id: 2>] - # - # (If you search across multiple models, you'll get ActiveRecord::RecordNotFound.) - # - # You can simply Array#compact these results or handle the nils in some other way, but - # Sphinx will still report two results, and the missing records may upset your layout. - # - # If you pass :retry_stale => true to a single-model search, missing records will - # cause Thinking Sphinx to retry the query but excluding those records. Since search - # is paginated, the new search could potentially include missing records as well, so by - # default Thinking Sphinx will retry three times. Pass :retry_stale => 5 to retry five - # times, and so on. If there are still missing ids on the last retry, they are - # shown as nils. - # - def search(*args) - query = args.clone # an array - options = query.extract_options! - - retry_search_on_stale_index(query, options) do - results, client = search_results(*(query + [options])) - - ::ActiveRecord::Base.logger.error( - "Sphinx Error: #{results[:error]}" - ) if results[:error] - - klass = options[:class] - page = options[:page] ? options[:page].to_i : 1 - - ThinkingSphinx::Collection.create_from_results(results, page, client.limit, options) - end - end - - def retry_search_on_stale_index(query, options, &block) - stale_ids = [] - stale_retries_left = case options[:retry_stale] - when true: 3 # default to three retries - when nil, false: 0 # no retries - else options[:retry_stale].to_i - end - begin - # Passing this in an option so Collection.create_from_results can see it. - # It should only raise on stale records if there are any retries left. - options[:raise_on_stale] = stale_retries_left > 0 - block.call - # If ThinkingSphinx::Collection.create_from_results found records in Sphinx but not - # in the DB and the :raise_on_stale option is set, this exception is raised. We retry - # a limited number of times, excluding the stale ids from the search. - rescue StaleIdsException => e - stale_retries_left -= 1 - - stale_ids |= e.ids # For logging - options[:without_ids] = Array(options[:without_ids]) | e.ids # Actual exclusion - - tries = stale_retries_left - ::ActiveRecord::Base.logger.debug("Sphinx Stale Ids (%s %s left): %s" % [ - tries, (tries==1 ? 'try' : 'tries'), stale_ids.join(', ') - ]) - - retry - end - end - - def count(*args) - results, client = search_results(*args.clone) - results[:total_found] || 0 - end - - # Checks if a document with the given id exists within a specific index. - # Expected parameters: - # - # - ID of the document - # - Index to check within - # - Options hash (defaults to {}) - # - # Example: - # - # ThinkingSphinx::Search.search_for_id(10, "user_core", :class => User) - # - def search_for_id(*args) - options = args.extract_options! - client = client_from_options options - - query, filters = search_conditions( - options[:class], options[:conditions] || {} - ) - client.filters += filters - client.match_mode = :extended unless query.empty? - client.id_range = args.first..args.first - - begin - return client.query(query, args[1])[:matches].length > 0 - rescue Errno::ECONNREFUSED => err - raise ThinkingSphinx::ConnectionError, "Connection to Sphinx Daemon (searchd) failed." - end - end - - def facets(*args) - hash = ThinkingSphinx::FacetCollection.new args - options = args.extract_options!.clone.merge! 
:group_function => :attr - - options[:class].sphinx_facets.inject(hash) do |hash, facet| - options[:group_by] = facet.attribute_name - - hash.add_from_results facet, search(*(args + [options])) - hash - end - end - - private - - # This method handles the common search functionality, and returns both - # the result hash and the client. Not super elegant, but it'll do for - # the moment. - # - def search_results(*args) - options = args.extract_options! - query = args.join(' ') - client = client_from_options options - - query = star_query(query, options[:star]) if options[:star] - - extra_query, filters = search_conditions( - options[:class], options[:conditions] || {} - ) - client.filters += filters - client.match_mode = :extended unless extra_query.empty? - query = [query, extra_query].join(' ') - query.strip! # Because "" and " " are not equivalent - - set_sort_options! client, options - - client.limit = options[:per_page].to_i if options[:per_page] - page = options[:page] ? options[:page].to_i : 1 - client.offset = (page - 1) * client.limit - - begin - ::ActiveRecord::Base.logger.debug "Sphinx: #{query}" - results = client.query query - ::ActiveRecord::Base.logger.debug "Sphinx Result: #{results[:matches].collect{|m| m[:attributes]["sphinx_internal_id"]}.inspect}" - rescue Errno::ECONNREFUSED => err - raise ThinkingSphinx::ConnectionError, "Connection to Sphinx Daemon (searchd) failed." - end - - return results, client - end - - # Set all the appropriate settings for the client, using the provided - # options hash. - # - def client_from_options(options = {}) - config = ThinkingSphinx::Configuration.instance - client = Riddle::Client.new config.address, config.port - klass = options[:class] - index_options = klass ? klass.sphinx_index_options : {} - - # The Riddle default is per-query max_matches=1000. If we set the - # per-server max to a smaller value in sphinx.yml, we need to override - # the Riddle default or else we get search errors like - # "per-query max_matches=1000 out of bounds (per-server max_matches=200)" - if per_server_max_matches = config.configuration.searchd.max_matches - options[:max_matches] ||= per_server_max_matches - end - - # Turn :index_weights => { "foo" => 2, User => 1 } - # into :index_weights => { "foo" => 2, "user_core" => 1, "user_delta" => 1 } - if iw = options[:index_weights] - options[:index_weights] = iw.inject({}) do |hash, (index,weight)| - if index.is_a?(Class) - name = ThinkingSphinx::Index.name(index) - hash["#{name}_core"] = weight - hash["#{name}_delta"] = weight - else - hash[index] = weight - end - hash - end - end - - [ - :max_matches, :match_mode, :sort_mode, :sort_by, :id_range, - :group_by, :group_function, :group_clause, :group_distinct, :cut_off, - :retry_count, :retry_delay, :index_weights, :rank_mode, - :max_query_time, :field_weights, :filters, :anchor, :limit - ].each do |key| - client.send( - key.to_s.concat("=").to_sym, - options[key] || index_options[key] || client.send(key) - ) - end - - options[:classes] = [klass] if klass - - client.anchor = anchor_conditions(klass, options) || {} if client.anchor.empty? 
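# Editor's annotation (descriptive note, not part of the deleted plugin source):
# the rest of this method appends Riddle::Client::Filter objects in layers --
# a "sphinx_deleted" = 0 filter so documents flagged deleted via toggle_deleted
# are never returned, a "class_crc" filter restricting results to the requested
# model classes, inclusive filters built from the :with option, and exclusive
# filters built from :without and :without_ids (the latter matching the
# sphinx_internal_id attribute).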
- - client.filters << Riddle::Client::Filter.new( - "sphinx_deleted", [0] - ) - - # class filters - client.filters << Riddle::Client::Filter.new( - "class_crc", options[:classes].collect { |k| k.to_crc32s }.flatten - ) if options[:classes] - - # normal attribute filters - client.filters += options[:with].collect { |attr,val| - Riddle::Client::Filter.new attr.to_s, filter_value(val) - } if options[:with] - - # exclusive attribute filters - client.filters += options[:without].collect { |attr,val| - Riddle::Client::Filter.new attr.to_s, filter_value(val), true - } if options[:without] - - # exclusive attribute filter on primary key - client.filters += Array(options[:without_ids]).collect { |id| - Riddle::Client::Filter.new 'sphinx_internal_id', filter_value(id), true - } if options[:without_ids] - - client - end - - def star_query(query, custom_token = nil) - token = custom_token.is_a?(Regexp) ? custom_token : /\w+/u - - query.gsub(/("#{token}(.*?#{token})?"|(?![!-])#{token})/u) do - pre, proper, post = $`, $&, $' - is_operator = pre.match(%r{(\W|^)[@~/]\Z}) # E.g. "@foo", "/2", "~3", but not as part of a token - is_quote = proper.starts_with?('"') && proper.ends_with?('"') # E.g. "foo bar", with quotes - has_star = pre.ends_with?("*") || post.starts_with?("*") - if is_operator || is_quote || has_star - proper - else - "*#{proper}*" - end - end - end - - def filter_value(value) - case value - when Range - value.first.is_a?(Time) ? timestamp(value.first)..timestamp(value.last) : value - when Array - value.collect { |val| val.is_a?(Time) ? timestamp(val) : val } - else - Array(value) - end - end - - # Returns the integer timestamp for a Time object. - # - # If using Rails 2.1+, need to handle timezones to translate them back to - # UTC, as that's what datetimes will be stored as by MySQL. - # - # in_time_zone is a method that was added for the timezone support in - # Rails 2.1, which is why it's used for testing. I'm sure there's better - # ways, but this does the job. - # - def timestamp(value) - value.respond_to?(:in_time_zone) ? value.utc.to_i : value.to_i - end - - # Translate field and attribute conditions to the relevant search string - # and filters. - # - def search_conditions(klass, conditions={}) - attributes = klass ? klass.sphinx_indexes.collect { |index| - index.attributes.collect { |attrib| attrib.unique_name } - }.flatten : [] - - search_string = [] - filters = [] - - conditions.each do |key,val| - if attributes.include?(key.to_sym) - filters << Riddle::Client::Filter.new( - key.to_s, filter_value(val) - ) - else - search_string << "@#{key} #{val}" - end - end - - return search_string.join(' '), filters - end - - # Return the appropriate latitude and longitude values, depending on - # whether the relevant attributes have been defined, and also whether - # there's actually any values. - # - def anchor_conditions(klass, options) - attributes = klass ? klass.sphinx_indexes.collect { |index| - index.attributes.collect { |attrib| attrib.unique_name } - }.flatten : [] - - lat_attr = klass ? klass.sphinx_indexes.collect { |index| - index.options[:latitude_attr] - }.compact.first : nil - - lon_attr = klass ? 
klass.sphinx_indexes.collect { |index| - index.options[:longitude_attr] - }.compact.first : nil - - lat_attr = options[:latitude_attr] if options[:latitude_attr] - lat_attr ||= :lat if attributes.include?(:lat) - lat_attr ||= :latitude if attributes.include?(:latitude) - - lon_attr = options[:longitude_attr] if options[:longitude_attr] - lon_attr ||= :lng if attributes.include?(:lng) - lon_attr ||= :lon if attributes.include?(:lon) - lon_attr ||= :long if attributes.include?(:long) - lon_attr ||= :longitude if attributes.include?(:longitude) - - lat = options[:lat] - lon = options[:lon] - - if options[:geo] - lat = options[:geo].first - lon = options[:geo].last - end - - lat && lon ? { - :latitude_attribute => lat_attr.to_s, - :latitude => lat, - :longitude_attribute => lon_attr.to_s, - :longitude => lon - } : nil - end - - # Set the sort options using the :order key as well as the appropriate - # Riddle settings. - # - def set_sort_options!(client, options) - klass = options[:class] - fields = klass ? klass.sphinx_indexes.collect { |index| - index.fields.collect { |field| field.unique_name } - }.flatten : [] - index_options = klass ? klass.sphinx_index_options : {} - - order = options[:order] || index_options[:order] - case order - when Symbol - client.sort_mode = :attr_asc if client.sort_mode == :relevance || client.sort_mode.nil? - if fields.include?(order) - client.sort_by = order.to_s.concat("_sort") - else - client.sort_by = order.to_s - end - when String - client.sort_mode = :extended - client.sort_by = sorted_fields_to_attributes(order, fields) - else - # do nothing - end - - client.sort_mode = :attr_asc if client.sort_mode == :asc - client.sort_mode = :attr_desc if client.sort_mode == :desc - end - - # Search through a collection of fields and translate any appearances - # of them in a string to their attribute equivalent for sorting. - # - def sorted_fields_to_attributes(string, fields) - fields.each { |field| - string.gsub!(/(^|\s)#{field}(,?\s|$)/) { |match| - match.gsub field.to_s, field.to_s.concat("_sort") - } - } - - string - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/tasks.rb b/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/tasks.rb deleted file mode 100644 index 6c80c71..0000000 --- a/vendor/plugins/thinking-sphinx/lib/thinking_sphinx/tasks.rb +++ /dev/null @@ -1,128 +0,0 @@ -require 'fileutils' - -namespace :thinking_sphinx do - task :app_env do - Rake::Task[:environment].invoke if defined?(RAILS_ROOT) - Rake::Task[:merb_env].invoke if defined?(Merb) - end - - desc "Stop if running, then start a Sphinx searchd daemon using Thinking Sphinx's settings" - task :running_start => :app_env do - Rake::Task["thinking_sphinx:stop"].invoke if sphinx_running? - Rake::Task["thinking_sphinx:start"].invoke - end - - desc "Start a Sphinx searchd daemon using Thinking Sphinx's settings" - task :start => :app_env do - config = ThinkingSphinx::Configuration.instance - - FileUtils.mkdir_p config.searchd_file_path - raise RuntimeError, "searchd is already running." if sphinx_running? - - Dir["#{config.searchd_file_path}/*.spl"].each { |file| File.delete(file) } - - cmd = "#{config.bin_path}searchd --pidfile --config #{config.config_file}" - puts cmd - system cmd - - sleep(2) - - if sphinx_running? - puts "Started successfully (pid #{sphinx_pid})." - else - puts "Failed to start searchd daemon. Check #{config.searchd_log_file}." 
- end - end - - desc "Stop Sphinx using Thinking Sphinx's settings" - task :stop => :app_env do - raise RuntimeError, "searchd is not running." unless sphinx_running? - config = ThinkingSphinx::Configuration.instance - pid = sphinx_pid - system "searchd --stop --config #{config.config_file}" - puts "Stopped search daemon (pid #{pid})." - end - - desc "Restart Sphinx" - task :restart => [:app_env, :stop, :start] - - desc "Generate the Sphinx configuration file using Thinking Sphinx's settings" - task :configure => :app_env do - config = ThinkingSphinx::Configuration.instance - puts "Generating Configuration to #{config.config_file}" - config.build - end - - desc "Index data for Sphinx using Thinking Sphinx's settings" - task :index => :app_env do - ThinkingSphinx::Deltas::Job.cancel_thinking_sphinx_jobs - - config = ThinkingSphinx::Configuration.instance - unless ENV["INDEX_ONLY"] == "true" - puts "Generating Configuration to #{config.config_file}" - config.build - end - - FileUtils.mkdir_p config.searchd_file_path - cmd = "#{config.bin_path}indexer --config #{config.config_file} --all" - cmd << " --rotate" if sphinx_running? - puts cmd - system cmd - end - - namespace :index do - task :delta => :app_env do - ThinkingSphinx.indexed_models.select { |model| - model.constantize.sphinx_indexes.any? { |index| index.delta? } - }.each do |model| - model.constantize.sphinx_indexes.select { |index| - index.delta? && index.delta_object.respond_to?(:delayed_index) - }.each { |index| - index.delta_object.delayed_index(index.model) - } - end - end - end - - desc "Process stored delta index requests" - task :delayed_delta => :app_env do - require 'delayed/worker' - - Delayed::Worker.new( - :min_priority => ENV['MIN_PRIORITY'], - :max_priority => ENV['MAX_PRIORITY'] - ).start - end -end - -namespace :ts do - desc "Stop if running, then start a Sphinx searchd daemon using Thinking Sphinx's settings" - task :run => "thinking_sphinx:running_start" - desc "Start a Sphinx searchd daemon using Thinking Sphinx's settings" - task :start => "thinking_sphinx:start" - desc "Stop Sphinx using Thinking Sphinx's settings" - task :stop => "thinking_sphinx:stop" - desc "Index data for Sphinx using Thinking Sphinx's settings" - task :in => "thinking_sphinx:index" - namespace :in do - desc "Index Thinking Sphinx datetime delta indexes" - task :delta => "thinking_sphinx:index:delta" - end - task :index => "thinking_sphinx:index" - desc "Restart Sphinx" - task :restart => "thinking_sphinx:restart" - desc "Generate the Sphinx configuration file using Thinking Sphinx's settings" - task :conf => "thinking_sphinx:configure" - desc "Generate the Sphinx configuration file using Thinking Sphinx's settings" - task :config => "thinking_sphinx:configure" - desc "Process stored delta index requests" - task :dd => "thinking_sphinx:delayed_delta" -end - -def sphinx_pid - ThinkingSphinx.sphinx_pid -end - -def sphinx_running? - ThinkingSphinx.sphinx_running? 
-end diff --git a/vendor/plugins/thinking-sphinx/rails/init.rb b/vendor/plugins/thinking-sphinx/rails/init.rb deleted file mode 100644 index 82ea0ee..0000000 --- a/vendor/plugins/thinking-sphinx/rails/init.rb +++ /dev/null @@ -1,6 +0,0 @@ -require 'thinking_sphinx' -require 'action_controller/dispatcher' - -ActionController::Dispatcher.to_prepare :thinking_sphinx do - ThinkingSphinx::Configuration.instance.load_models -end diff --git a/vendor/plugins/thinking-sphinx/spec/fixtures/data.sql b/vendor/plugins/thinking-sphinx/spec/fixtures/data.sql deleted file mode 100644 index d5ec579..0000000 --- a/vendor/plugins/thinking-sphinx/spec/fixtures/data.sql +++ /dev/null @@ -1,32 +0,0 @@ -insert into `people` (gender, first_name, middle_initial, last_name, street_address, city, state, postcode, email, birthday, team_id, team_type) values('female','Ellie','K','Ford','38 Mills Street','Eagle Farm Bc','QLD','4009','Ellie.K.Ford@mailinator.com','1970/1/23 00:00:00', 3, 'CricketTeam'); -insert into `people` (gender, first_name, middle_initial, last_name, street_address, city, state, postcode, email, birthday, team_id, team_type) values('female','Aaliyah','E','Allen','71 Murphy Street','Wyola West','WA','6407','Aaliyah.E.Allen@dodgit.com','1980/3/23 00:00:00', 3, 'CricketTeam'); -insert into `people` (gender, first_name, middle_initial, last_name, street_address, city, state, postcode, email, birthday, team_id, team_type) values('male','Callum','C','Miah','89 Dalgarno Street','Bullawa Creek','NSW','2390','Callum.C.Miah@trashymail.com','1973/3/25 00:00:00', 3, 'CricketTeam'); -insert into `people` (gender, first_name, middle_initial, last_name, street_address, city, state, postcode, email, birthday, team_id, team_type) values('male','Finley','L','Buckley','18 Queen Street','Manly Vale','NSW','2093','Finley.L.Buckley@spambob.com','1962/11/20 00:00:00', 3, 'CricketTeam'); -insert into `people` (gender, first_name, middle_initial, last_name, street_address, city, state, postcode, email, birthday, team_id, team_type) values('female','Poppy','A','Hilton','36 Nerrigundah Drive','Nyora','VIC','3987','Poppy.A.Hilton@dodgit.com','1972/10/30 00:00:00', 3, 'CricketTeam'); -insert into `people` (gender, first_name, middle_initial, last_name, street_address, city, state, postcode, email, birthday, team_id, team_type) values('female','Eloise','Z','Kennedy','18 Mt Berryman Road','Lilydale','QLD','4344','Eloise.Z.Kennedy@spambob.com','1973/9/28 00:00:00', 3, 'CricketTeam'); -insert into `people` (gender, first_name, middle_initial, last_name, street_address, city, state, postcode, email, birthday, team_id, team_type) values('female','Shannon','L','Manning','60 Ocean Pde','Greenvale','QLD','4816','Shannon.L.Manning@dodgit.com','1956/6/13 00:00:00', 3, 'CricketTeam'); -insert into `people` (gender, first_name, middle_initial, last_name, street_address, city, state, postcode, email, birthday, team_id, team_type) values('male','Oscar','C','Lawson','43 Feather Street','Battery Hill','QLD','4551','Oscar.C.Lawson@spambob.com','1979/10/17 00:00:00', 3, 'CricketTeam'); -insert into `people` (gender, first_name, middle_initial, last_name, street_address, city, state, postcode, email, birthday, team_id, team_type) values('female','Sofia','K','Bray','26 Clifton Street','Pental Island','VIC','3586','Sofia.K.Bray@mailinator.com','1970/5/10 00:00:00', 3, 'CricketTeam'); -insert into `people` (gender, first_name, middle_initial, last_name, street_address, city, state, postcode, email, birthday, team_id, team_type) 
values('male','Andrew','N','Byrne','35 Cecil Street','Monash Park','NSW','2111','Andrew.N.Byrne@spambob.com','1983/2/16 00:00:00', 3, 'CricketTeam'); - -insert into `alphas` (name) values ('one'); -insert into `alphas` (name) values ('two'); -insert into `alphas` (name) values ('three'); -insert into `alphas` (name) values ('four'); -insert into `alphas` (name) values ('five'); -insert into `alphas` (name) values ('six'); -insert into `alphas` (name) values ('seven'); -insert into `alphas` (name) values ('eight'); -insert into `alphas` (name) values ('nine'); -insert into `alphas` (name) values ('ten'); - -insert into `betas` (name) values ('one'); -insert into `betas` (name) values ('two'); -insert into `betas` (name) values ('three'); -insert into `betas` (name) values ('four'); -insert into `betas` (name) values ('five'); -insert into `betas` (name) values ('six'); -insert into `betas` (name) values ('seven'); -insert into `betas` (name) values ('eight'); -insert into `betas` (name) values ('nine'); -insert into `betas` (name) values ('ten'); diff --git a/vendor/plugins/thinking-sphinx/spec/fixtures/database.yml.default b/vendor/plugins/thinking-sphinx/spec/fixtures/database.yml.default deleted file mode 100644 index dfad2a6..0000000 --- a/vendor/plugins/thinking-sphinx/spec/fixtures/database.yml.default +++ /dev/null @@ -1,3 +0,0 @@ -username: root -password: -host: localhost \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/spec/fixtures/models.rb b/vendor/plugins/thinking-sphinx/spec/fixtures/models.rb deleted file mode 100644 index 6e962ac..0000000 --- a/vendor/plugins/thinking-sphinx/spec/fixtures/models.rb +++ /dev/null @@ -1,81 +0,0 @@ -class Person < ActiveRecord::Base - belongs_to :team, :polymorphic => :true - has_many :contacts - - has_many :friendships - has_many :friends, :through => :friendships - - define_index do - indexes [first_name, middle_initial, last_name], :as => :name - indexes team.name, :as => :team_name - indexes contacts.phone_number, :as => :phone_numbers - indexes city, :prefixes => true - indexes state, :infixes => true - - has [first_name, middle_initial, last_name], :as => :name_sort - has team.name, :as => :team_name_sort - - has [:id, :team_id], :as => :ids - has team(:id), :as => :team_id - - has contacts.phone_number, :as => :phone_number_sort - has contacts(:id), :as => :contact_ids - - has birthday - - has friendships.person_id, :as => :friendly_ids - - set_property :delta => true - end -end - -class Parent < Person -end - -class Child < Person - belongs_to :parent - define_index do - indexes [parent.first_name, parent.middle_initial, parent.last_name], :as => :parent_name - end -end - -class Contact < ActiveRecord::Base - belongs_to :person -end - -class FootballTeam < ActiveRecord::Base - # -end - -class CricketTeam < ActiveRecord::Base - # -end - -class Friendship < ActiveRecord::Base - belongs_to :person - belongs_to :friend, :class_name => "Person", :foreign_key => :friend_id - - define_index do - has person_id, friend_id - end -end - -class Alpha < ActiveRecord::Base - define_index do - indexes :name, :sortable => true - - set_property :field_weights => {"name" => 10} - end -end - -class Beta < ActiveRecord::Base - define_index do - indexes :name, :sortable => true - - set_property :delta => true - end -end - -class Search < ActiveRecord::Base - # -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/spec/fixtures/structure.sql b/vendor/plugins/thinking-sphinx/spec/fixtures/structure.sql deleted 
file mode 100644 index 000215a..0000000 --- a/vendor/plugins/thinking-sphinx/spec/fixtures/structure.sql +++ /dev/null @@ -1,84 +0,0 @@ -DROP TABLE IF EXISTS `people`; - -CREATE TABLE `people` ( - `id` int(11) NOT NULL auto_increment, - `first_name` varchar(50) NULL, - `middle_initial` varchar(10) NULL, - `last_name` varchar(50) NULL, - `gender` varchar(10) NULL, - `street_address` varchar(200) NULL, - `city` varchar(100) NULL, - `state` varchar(100) NULL, - `postcode` varchar(10) NULL, - `email` varchar(100) NULL, - `birthday` datetime NULL, - `team_id` int(11) NULL, - `team_type` varchar(50) NULL, - `type` varchar(50) NULL, - `parent_id` varchar(50) NULL, - `delta` tinyint(1) NOT NULL DEFAULT 0, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -DROP TABLE IF EXISTS `friendships`; - -CREATE TABLE `friendships` ( - `id` int(11) NOT NULL auto_increment, - `person_id` int(11) NOT NULL, - `friend_id` int(11) NOT NULL, - `created_at` datetime NOT NULL, - `updated_at` datetime NOT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -DROP TABLE IF EXISTS `football_teams`; - -CREATE TABLE `football_teams` ( - `id` int(11) NOT NULL auto_increment, - `name` varchar(50) NOT NULL, - `state` varchar(50) NOT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -DROP TABLE IF EXISTS `cricket_teams`; - -CREATE TABLE `cricket_teams` ( - `id` int(11) NOT NULL auto_increment, - `name` varchar(50) NOT NULL, - `state` varchar(50) NOT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -DROP TABLE IF EXISTS `contacts`; - -CREATE TABLE `contacts` ( - `id` int(11) NOT NULL auto_increment, - `phone_number` varchar(50) NOT NULL, - `person_id` int(11) NOT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -DROP TABLE IF EXISTS `alphas`; - -CREATE TABLE `alphas` ( - `id` int(11) NOT NULL auto_increment, - `name` varchar(50) NOT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -DROP TABLE IF EXISTS `betas`; - -CREATE TABLE `betas` ( - `id` int(11) NOT NULL auto_increment, - `name` varchar(50) NOT NULL, - `delta` tinyint(1) NOT NULL DEFAULT 0, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; - -DROP TABLE IF EXISTS `searches`; - -CREATE TABLE `searches` ( - `id` int(11) NOT NULL auto_increment, - `name` varchar(50) NOT NULL, - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8; diff --git a/vendor/plugins/thinking-sphinx/spec/spec_helper.rb b/vendor/plugins/thinking-sphinx/spec/spec_helper.rb deleted file mode 100644 index f66bf5e..0000000 --- a/vendor/plugins/thinking-sphinx/spec/spec_helper.rb +++ /dev/null @@ -1,54 +0,0 @@ -$:.unshift File.dirname(__FILE__) + '/../lib' - -require 'rubygems' -require 'fileutils' -require 'ginger' -require 'not_a_mock' -require 'will_paginate' - -require 'lib/thinking_sphinx' -require 'spec/sphinx_helper' - -ActiveRecord::Base.logger = Logger.new(StringIO.new) - -Spec::Runner.configure do |config| - %w( tmp tmp/config tmp/log tmp/db ).each do |path| - FileUtils.mkdir_p "#{Dir.pwd}/#{path}" - end - - Kernel.const_set :RAILS_ROOT, "#{Dir.pwd}/tmp" unless defined?(RAILS_ROOT) - - sphinx = SphinxHelper.new - sphinx.setup_mysql - - require 'spec/fixtures/models' - - config.before :all do - %w( tmp tmp/config tmp/log tmp/db ).each do |path| - FileUtils.mkdir_p "#{Dir.pwd}/#{path}" - end - - ThinkingSphinx.updates_enabled = true - ThinkingSphinx.deltas_enabled = true - ThinkingSphinx.suppress_delta_output = true - - ThinkingSphinx::Configuration.instance.reset - 
ThinkingSphinx::Configuration.instance.database_yml_file = "spec/fixtures/sphinx/database.yml" - - # Ensure after_commit plugin is loaded correctly - Object.subclasses_of(ActiveRecord::ConnectionAdapters::AbstractAdapter).each { |klass| - unless klass.ancestors.include?(AfterCommit::ConnectionAdapters) - klass.send(:include, AfterCommit::ConnectionAdapters) - end - } - end - - config.after :each do - NotAMock::CallRecorder.instance.reset - NotAMock::Stubber.instance.reset - end - - config.after :all do - FileUtils.rm_r "#{Dir.pwd}/tmp" rescue nil - end -end diff --git a/vendor/plugins/thinking-sphinx/spec/sphinx_helper.rb b/vendor/plugins/thinking-sphinx/spec/sphinx_helper.rb deleted file mode 100644 index abaaee2..0000000 --- a/vendor/plugins/thinking-sphinx/spec/sphinx_helper.rb +++ /dev/null @@ -1,109 +0,0 @@ -require 'active_record' -require 'active_record/connection_adapters/mysql_adapter' -begin - require 'active_record/connection_adapters/postgresql_adapter' -rescue LoadError - # No postgres? no prob... -end -require 'yaml' - -class SphinxHelper - attr_accessor :host, :username, :password - attr_reader :path - - def initialize - @host = "localhost" - @username = "thinking_sphinx" - @password = "" - - if File.exist?("spec/fixtures/database.yml") - config = YAML.load(File.open("spec/fixtures/database.yml")) - @host = config["host"] - @username = config["username"] - @password = config["password"] - end - - @path = File.expand_path(File.dirname(__FILE__)) - end - - def setup_mysql - ActiveRecord::Base.establish_connection( - :adapter => 'mysql', - :database => 'thinking_sphinx', - :username => @username, - :password => @password, - :host => @host - ) - ActiveRecord::Base.logger = Logger.new(File.open("tmp/activerecord.log", "a")) - - structure = File.open("spec/fixtures/structure.sql") { |f| f.read.chomp } - structure.split(';').each { |table| - ActiveRecord::Base.connection.execute table - } - - File.open("spec/fixtures/data.sql") { |f| - while line = f.gets - ActiveRecord::Base.connection.execute line unless line.blank? - end - } - end - - def setup_sphinx - @configuration = ThinkingSphinx::Configuration.instance.reset - File.open("spec/fixtures/sphinx/database.yml", "w") do |file| - YAML.dump({@configuration.environment => { - :adapter => 'mysql', - :host => @host, - :database => "thinking_sphinx", - :username => @username, - :password => @password - }}, file) - end - FileUtils.mkdir_p(@configuration.searchd_file_path) - - @configuration.database_yml_file = "spec/fixtures/sphinx/database.yml" - @configuration.build - - index - end - - def reset - setup_mysql - end - - def index - cmd = "indexer --config #{@configuration.config_file} --all" - cmd << " --rotate" if running? - `#{cmd}` - end - - def start - return if running? - - cmd = "searchd --config #{@configuration.config_file}" - `#{cmd}` - - sleep(1) - - unless running? - puts "Failed to start searchd daemon. Check #{@configuration.searchd_log_file}." - end - end - - def stop - return unless running? - `kill #{pid}` - end - - def pid - if File.exists?("#{@configuration.pid_file}") - `cat #{@configuration.pid_file}`[/\d+/] - else - nil - end - end - - def running? 
- pid && `ps #{pid} | wc -l`.to_i > 1 - end -end diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record/delta_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record/delta_spec.rb deleted file mode 100644 index 29ce1ac..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record/delta_spec.rb +++ /dev/null @@ -1,136 +0,0 @@ -require 'spec/spec_helper' - -describe "ThinkingSphinx::ActiveRecord::Delta" do - it "should call the toggle_delta method after a save" do - @beta = Beta.new(:name => 'beta') - @beta.stub_method(:toggle_delta => true) - - @beta.save - - @beta.should have_received(:toggle_delta) - end - - it "should call the toggle_delta method after a save!" do - @beta = Beta.new(:name => 'beta') - @beta.stub_method(:toggle_delta => true) - - @beta.save! - - @beta.should have_received(:toggle_delta) - end - - describe "suspended_delta method" do - before :each do - ThinkingSphinx.stub_method(:deltas_enabled? => true) - Person.sphinx_indexes.first.delta_object.stub_method(:` => "") - end - - it "should execute the argument block with deltas disabled" do - ThinkingSphinx.should_receive(:deltas_enabled=).once.with(false) - ThinkingSphinx.should_receive(:deltas_enabled=).once.with(true) - lambda { Person.suspended_delta { raise 'i was called' } }.should( - raise_error(Exception) - ) - end - - it "should restore deltas_enabled to its original setting" do - ThinkingSphinx.stub_method(:deltas_enabled? => false) - ThinkingSphinx.should_receive(:deltas_enabled=).twice.with(false) - Person.suspended_delta { 'no-op' } - end - - it "should restore deltas_enabled to its original setting even if there was an exception" do - ThinkingSphinx.stub_method(:deltas_enabled? => false) - ThinkingSphinx.should_receive(:deltas_enabled=).twice.with(false) - lambda { Person.suspended_delta { raise 'bad error' } }.should( - raise_error(Exception) - ) - end - - it "should reindex by default after the code block is run" do - Person.should_receive(:index_delta) - Person.suspended_delta { 'no-op' } - end - - it "should not reindex after the code block if false is passed in" do - Person.should_not_receive(:index_delta) - Person.suspended_delta(false) { 'no-op' } - end - end - - describe "toggle_delta method" do - it "should set the delta value to true" do - @person = Person.new - - @person.delta.should be_false - @person.send(:toggle_delta) - @person.delta.should be_true - end - end - - describe "index_delta method" do - before :each do - ThinkingSphinx::Configuration.stub_method(:environment => "spec") - ThinkingSphinx.stub_method(:deltas_enabled? => true, :sphinx_running? => true) - Person.delta_object.stub_methods(:` => "", :toggled => true) - - @person = Person.new - @person.stub_method( - :in_core_index? => false, - :sphinx_document_id => 1 - ) - - @client = Riddle::Client.stub_instance(:update => true) - Riddle::Client.stub_method(:new => @client) - end - - it "shouldn't index if delta indexing is disabled" do - ThinkingSphinx.stub_method(:deltas_enabled? => false) - - @person.send(:index_delta) - - Person.sphinx_indexes.first.delta_object.should_not have_received(:`) - @client.should_not have_received(:update) - end - - it "shouldn't index if index updating is disabled" do - ThinkingSphinx.stub_method(:updates_enabled? 
=> false) - - @person.send(:index_delta) - - Person.sphinx_indexes.first.delta_object.should_not have_received(:`) - end - - it "shouldn't index if the environment is 'test'" do - ThinkingSphinx.unstub_method(:deltas_enabled?) - ThinkingSphinx.deltas_enabled = nil - ThinkingSphinx::Configuration.stub_method(:environment => "test") - - @person.send(:index_delta) - - Person.sphinx_indexes.first.delta_object.should_not have_received(:`) - end - - it "should call indexer for the delta index" do - @person.send(:index_delta) - - Person.sphinx_indexes.first.delta_object.should have_received(:`).with( - "#{ThinkingSphinx::Configuration.instance.bin_path}indexer --config #{ThinkingSphinx::Configuration.instance.config_file} --rotate person_delta" - ) - end - - it "shouldn't update the deleted attribute if not in the index" do - @person.send(:index_delta) - - @client.should_not have_received(:update) - end - - it "should update the deleted attribute if in the core index" do - @person.stub_method(:in_core_index? => true) - - @person.send(:index_delta) - - @client.should have_received(:update) - end - end -end diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record/has_many_association_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record/has_many_association_spec.rb deleted file mode 100644 index b37ac9f..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record/has_many_association_spec.rb +++ /dev/null @@ -1,53 +0,0 @@ -require 'spec/spec_helper' - -describe 'ThinkingSphinx::ActiveRecord::HasManyAssociation' do - describe "search method" do - before :each do - Friendship.stub_method(:search => true) - - @person = Person.find(:first) - @index = Friendship.sphinx_indexes.first - end - - it "should raise an error if the required attribute doesn't exist" do - @index.stub_method(:attributes => []) - - lambda { @person.friendships.search "test" }.should raise_error(RuntimeError) - - @index.unstub_method(:attributes) - end - - it "should add a filter for the attribute into a normal search call" do - @person.friendships.search "test" - - Friendship.should have_received(:search).with( - "test", :with => {:person_id => @person.id} - ) - end - end - - describe "search method for has_many :through" do - before :each do - Person.stub_method(:search => true) - - @person = Person.find(:first) - @index = Person.sphinx_indexes.first - end - - it "should raise an error if the required attribute doesn't exist" do - @index.stub_method(:attributes => []) - - lambda { @person.friends.search "test" }.should raise_error(RuntimeError) - - @index.unstub_method(:attributes) - end - - it "should add a filter for the attribute into a normal search call" do - @person.friends.search "test" - - Person.should have_received(:search).with( - "test", :with => {:friendly_ids => @person.id} - ) - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record/search_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record/search_spec.rb deleted file mode 100644 index 49c59e1..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record/search_spec.rb +++ /dev/null @@ -1,107 +0,0 @@ -require 'spec/spec_helper' - -describe "ThinkingSphinx::ActiveRecord::Search" do - it "should add search_for_ids to ActiveRecord::Base" do - ActiveRecord::Base.methods.should include("search_for_ids") - end - - it "should add search_for_ids to ActiveRecord::Base" do - 
ActiveRecord::Base.methods.should include("search") - end - - it "should add search_count to ActiveRecord::Base" do - ActiveRecord::Base.methods.should include("search_count") - end - - it "should add search_for_id to ActiveRecord::Base" do - ActiveRecord::Base.methods.should include("search_for_id") - end - - describe "search_for_ids method" do - before :each do - ThinkingSphinx::Search.stub_method(:search_for_ids => true) - end - - it "should call ThinkingSphinx::Search#search_for_ids with the class option set" do - Person.search_for_ids("search") - - ThinkingSphinx::Search.should have_received(:search_for_ids).with( - "search", :class => Person - ) - end - - it "should override the class option" do - Person.search_for_ids("search", :class => Friendship) - - ThinkingSphinx::Search.should have_received(:search_for_ids).with( - "search", :class => Person - ) - end - end - - describe "search method" do - before :each do - ThinkingSphinx::Search.stub_method(:search => true) - end - - it "should call ThinkingSphinx::Search#search with the class option set" do - Person.search("search") - - ThinkingSphinx::Search.should have_received(:search).with( - "search", :class => Person - ) - end - - it "should override the class option" do - Person.search("search", :class => Friendship) - - ThinkingSphinx::Search.should have_received(:search).with( - "search", :class => Person - ) - end - end - - describe "search_for_id method" do - before :each do - ThinkingSphinx::Search.stub_method(:search_for_id => true) - end - - it "should call ThinkingSphinx::Search#search with the class option set" do - Person.search_for_id(10) - - ThinkingSphinx::Search.should have_received(:search_for_id).with( - 10, :class => Person - ) - end - - it "should override the class option" do - Person.search_for_id(10, :class => Friendship) - - ThinkingSphinx::Search.should have_received(:search_for_id).with( - 10, :class => Person - ) - end - end - - describe "search_count method" do - before :each do - ThinkingSphinx::Search.stub_method(:count => true) - end - - it "should call ThinkingSphinx::Search#search with the class option set" do - Person.search_count("search") - - ThinkingSphinx::Search.should have_received(:count).with( - "search", :class => Person - ) - end - - it "should override the class option" do - Person.search_count("search", :class => Friendship) - - ThinkingSphinx::Search.should have_received(:count).with( - "search", :class => Person - ) - end - end -end diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record_spec.rb deleted file mode 100644 index c8d6736..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/active_record_spec.rb +++ /dev/null @@ -1,256 +0,0 @@ -require 'spec/spec_helper' - -describe "ThinkingSphinx::ActiveRecord" do - describe "define_index method" do - before :each do - module TestModule - class TestModel < ActiveRecord::Base; end - end - - TestModule::TestModel.stub_methods( - :before_save => true, - :after_commit => true, - :after_destroy => true - ) - - @index = ThinkingSphinx::Index.stub_instance(:delta? => false) - ThinkingSphinx::Index.stub_method(:new => @index) - end - - after :each do - # Remove the class so we can redefine it - TestModule.send(:remove_const, :TestModel) - - ThinkingSphinx.indexed_models.delete "TestModule::TestModel" - end - - it "should return nil and do nothing if indexes are disabled" do - ThinkingSphinx.stub_method(:define_indexes? 
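The removed search_spec above pins down a single delegation rule: model-level search calls hand off to ThinkingSphinx::Search and always force :class to the receiving model, even when the caller passes one. A minimal standalone sketch of that rule, using made-up SearchBackend and DelegatedSearch stand-ins rather than the plugin's real classes:

# Standalone sketch of the delegation rule covered by the removed
# search_spec; SearchBackend is a stand-in, not the plugin's API.
module SearchBackend
  def self.search(query, options = {})
    [query, options] # echo the arguments so the example is observable
  end
end

module DelegatedSearch
  def search(query, options = {})
    # :class is always overridden with the receiving model
    SearchBackend.search(query, options.merge(:class => self))
  end
end

class Person; extend DelegatedSearch; end
class Friendship; end

p Person.search("test")                        # => ["test", {:class=>Person}]
p Person.search("test", :class => Friendship)  # => ["test", {:class=>Person}]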
=> false) - - TestModule::TestModel.define_index {}.should be_nil - ThinkingSphinx::Index.should_not have_received(:new) - - ThinkingSphinx.unstub_method(:define_indexes?) - end - - it "should add a new index to the model" do - TestModule::TestModel.define_index do; end - - TestModule::TestModel.sphinx_indexes.length.should == 1 - end - - it "should add to ThinkingSphinx.indexed_models if the model doesn't already exist in the array" do - TestModule::TestModel.define_index do; end - - ThinkingSphinx.indexed_models.should include("TestModule::TestModel") - end - - it "shouldn't add to ThinkingSphinx.indexed_models if the model already exists in the array" do - TestModule::TestModel.define_index do; end - - ThinkingSphinx.indexed_models.select { |model| - model == "TestModule::TestModel" - }.length.should == 1 - - TestModule::TestModel.define_index do; end - - ThinkingSphinx.indexed_models.select { |model| - model == "TestModule::TestModel" - }.length.should == 1 - end - - it "should add before_save and after_commit hooks to the model if delta indexing is enabled" do - @index.stub_method(:delta? => true) - - TestModule::TestModel.define_index do; end - - TestModule::TestModel.should have_received(:before_save) - TestModule::TestModel.should have_received(:after_commit) - end - - it "should not add before_save and after_commit hooks to the model if delta indexing is disabled" do - TestModule::TestModel.define_index do; end - - TestModule::TestModel.should_not have_received(:before_save) - TestModule::TestModel.should_not have_received(:after_commit) - end - - it "should add an after_destroy hook with delta indexing enabled" do - @index.stub_method(:delta? => true) - - TestModule::TestModel.define_index do; end - - TestModule::TestModel.should have_received(:after_destroy).with(:toggle_deleted) - end - - it "should add an after_destroy hook with delta indexing disabled" do - TestModule::TestModel.define_index do; end - - TestModule::TestModel.should have_received(:after_destroy).with(:toggle_deleted) - end - - it "should return the new index" do - TestModule::TestModel.define_index.should == @index - end - end - - describe "to_crc32 method" do - it "should return an integer" do - Person.to_crc32.should be_a_kind_of(Integer) - end - end - - describe "toggle_deleted method" do - before :each do - ThinkingSphinx.stub_method(:sphinx_running? => true) - - @configuration = ThinkingSphinx::Configuration.instance - @configuration.stub_methods( - :address => "an address", - :port => 123 - ) - @client = Riddle::Client.stub_instance(:update => true) - @person = Person.find(:first) - - Riddle::Client.stub_method(:new => @client) - Person.sphinx_indexes.each { |index| index.stub_method(:delta? => false) } - @person.stub_method(:in_core_index? => true) - end - - it "should create a client using the Configuration's address and port" do - @person.toggle_deleted - - Riddle::Client.should have_received(:new).with( - @configuration.address, @configuration.port - ) - end - - it "should update the core index's deleted flag if in core index" do - @person.toggle_deleted - - @client.should have_received(:update).with( - "person_core", ["sphinx_deleted"], {@person.sphinx_document_id => 1} - ) - end - - it "shouldn't update the core index's deleted flag if the record isn't in it" do - @person.stub_method(:in_core_index? 
=> false) - - @person.toggle_deleted - - @client.should_not have_received(:update).with( - "person_core", ["sphinx_deleted"], {@person.sphinx_document_id => 1} - ) - end - - it "shouldn't attempt to update the deleted flag if sphinx isn't running" do - ThinkingSphinx.stub_method(:sphinx_running? => false) - - @person.toggle_deleted - - @person.should_not have_received(:in_core_index?) - @client.should_not have_received(:update) - end - - it "should update the delta index's deleted flag if delta indexes are enabled and the instance's delta is true" do - ThinkingSphinx.stub_method(:deltas_enabled? => true) - Person.sphinx_indexes.each { |index| index.stub_method(:delta? => true) } - @person.delta = true - - @person.toggle_deleted - - @client.should have_received(:update).with( - "person_delta", ["sphinx_deleted"], {@person.sphinx_document_id => 1} - ) - end - - it "should not update the delta index's deleted flag if delta indexes are enabled and the instance's delta is false" do - ThinkingSphinx.stub_method(:deltas_enabled? => true) - Person.sphinx_indexes.each { |index| index.stub_method(:delta? => true) } - @person.delta = false - - @person.toggle_deleted - - @client.should_not have_received(:update).with( - "person_delta", ["sphinx_deleted"], {@person.sphinx_document_id => 1} - ) - end - - it "should not update the delta index's deleted flag if delta indexes are enabled and the instance's delta is equivalent to false" do - ThinkingSphinx.stub_method(:deltas_enabled? => true) - Person.sphinx_indexes.each { |index| index.stub_method(:delta? => true) } - @person.delta = 0 - - @person.toggle_deleted - - @client.should_not have_received(:update).with( - "person_delta", ["sphinx_deleted"], {@person.sphinx_document_id => 1} - ) - end - - it "shouldn't update the delta index if delta indexes are disabled" do - ThinkingSphinx.stub_method(:deltas_enabled? => true) - @person.toggle_deleted - - @client.should_not have_received(:update).with( - "person_delta", ["sphinx_deleted"], {@person.sphinx_document_id => 1} - ) - end - - it "should not update the delta index if delta indexing is disabled" do - ThinkingSphinx.stub_method(:deltas_enabled? => false) - Person.sphinx_indexes.each { |index| index.stub_method(:delta? => true) } - @person.delta = true - - @person.toggle_deleted - - @client.should_not have_received(:update).with( - "person_delta", ["sphinx_deleted"], {@person.sphinx_document_id => 1} - ) - end - - it "should not update either index if updates are disabled" do - ThinkingSphinx.stub_methods( - :updates_enabled? => false, - :deltas_enabled => true - ) - Person.sphinx_indexes.each { |index| index.stub_method(:delta? => true) } - @person.delta = true - - @person.toggle_deleted - - @client.should_not have_received(:update) - end - end - - describe "sphinx_indexes in the inheritance chain (STI)" do - it "should hand defined indexes on a class down to its child classes" do - Child.sphinx_indexes.should include(*Person.sphinx_indexes) - end - - it "should allow associations to other STI models" do - Child.sphinx_indexes.last.link! 
- sql = Child.sphinx_indexes.last.to_riddle_for_core(0, 0).sql_query - sql.gsub!('$start', '0').gsub!('$end', '100') - lambda { Child.connection.execute(sql) }.should_not raise_error(ActiveRecord::StatementInvalid) - end - end - - it "should return the sphinx document id as expected" do - person = Person.find(:first) - model_count = ThinkingSphinx.indexed_models.length - offset = ThinkingSphinx.indexed_models.index("Person") - - (person.id * model_count + offset).should == person.sphinx_document_id - - alpha = Alpha.find(:first) - offset = ThinkingSphinx.indexed_models.index("Alpha") - - (alpha.id * model_count + offset).should == alpha.sphinx_document_id - - beta = Beta.find(:first) - offset = ThinkingSphinx.indexed_models.index("Beta") - - (beta.id * model_count + offset).should == beta.sphinx_document_id - end -end diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/association_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/association_spec.rb deleted file mode 100644 index 4b92a8b..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/association_spec.rb +++ /dev/null @@ -1,247 +0,0 @@ -require 'spec/spec_helper' - -describe ThinkingSphinx::Association do - describe "class-level children method" do - before :each do - @normal_reflection = ::ActiveRecord::Reflection::AssociationReflection.stub_instance( - :options => {:polymorphic => false} - ) - @normal_association = ThinkingSphinx::Association.stub_instance - @poly_reflection = ::ActiveRecord::Reflection::AssociationReflection.stub_instance( - :options => {:polymorphic => true}, - :macro => :has_many, - :name => "polly", - :active_record => "AR" - ) - @non_poly_reflection = ::ActiveRecord::Reflection::AssociationReflection.stub_instance - - Person.stub_method(:reflect_on_association => @normal_reflection) - ThinkingSphinx::Association.stub_methods( - :new => @normal_association, - :polymorphic_classes => [Person, Person], - :casted_options => {:casted => :options} - ) - ::ActiveRecord::Reflection::AssociationReflection.stub_method( - :new => @non_poly_reflection - ) - end - - it "should return an empty array if no association exists" do - Person.stub_method(:reflect_on_association => nil) - - ThinkingSphinx::Association.children(Person, :assoc).should == [] - end - - it "should return a single association instance in an array if assocation isn't polymorphic" do - ThinkingSphinx::Association.children(Person, :assoc).should == [@normal_association] - end - - it "should return multiple association instances for polymorphic associations" do - Person.stub_method(:reflect_on_association => @poly_reflection) - - ThinkingSphinx::Association.children(Person, :assoc).should == - [@normal_association, @normal_association] - end - - it "should generate non-polymorphic 'casted' associations for each polymorphic possibility" do - Person.stub_method(:reflect_on_association => @poly_reflection) - - ThinkingSphinx::Association.children(Person, :assoc) - - ThinkingSphinx::Association.should have_received(:casted_options).with( - Person, @poly_reflection - ).twice - - ::ActiveRecord::Reflection::AssociationReflection.should have_received(:new).with( - :has_many, :polly_Person, {:casted => :options}, "AR" - ).twice - - ThinkingSphinx::Association.should have_received(:new).with( - nil, @non_poly_reflection - ).twice - end - end - - describe "instance-level children method" do - it "should return the children associations for the given association" do - @reflection = 
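The document-id expectation in the removed active_record_spec reduces to arithmetic: record id times the number of indexed models, plus that model's position in the indexed-model list. A small illustration, with the model names and the record id assumed purely for the example:

# Illustration only; the indexed-model ordering and the id 42 are assumed.
indexed_models = %w(Person Alpha Beta)
model_count    = indexed_models.length

def sphinx_document_id(record_id, model_count, offset)
  record_id * model_count + offset
end

puts sphinx_document_id(42, model_count, indexed_models.index("Person")) # 42 * 3 + 0 => 126
puts sphinx_document_id(42, model_count, indexed_models.index("Beta"))   # 42 * 3 + 2 => 128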
::ActiveRecord::Reflection::AssociationReflection.stub_instance( - :klass => :klass - ) - @association = ThinkingSphinx::Association.new(nil, @reflection) - ThinkingSphinx::Association.stub_method(:children => :result) - - @association.children(:assoc).should == :result - ThinkingSphinx::Association.should have_received(:children).with(:klass, :assoc, @association) - end - end - - describe "join_to method" do - before :each do - @parent_join = ::ActiveRecord::Associations::ClassMethods::JoinDependency::JoinAssociation.stub_instance - @join = ::ActiveRecord::Associations::ClassMethods::JoinDependency::JoinAssociation.stub_instance - @parent = ThinkingSphinx::Association.stub_instance(:join_to => true, :join => nil) - @base_join = Object.stub_instance(:joins => [:a, :b, :c]) - - ::ActiveRecord::Associations::ClassMethods::JoinDependency::JoinAssociation.stub_method(:new => @join) - end - - it "should call the parent's join_to if parent has no join" do - @assoc = ThinkingSphinx::Association.new(@parent, :ref) - - @assoc.join_to(@base_join) - - @parent.should have_received(:join_to).with(@base_join) - end - - it "should not call the parent's join_to if it already has a join" do - @assoc = ThinkingSphinx::Association.new(@parent, :ref) - @parent.stub_method(:join => @parent_join) - - @assoc.join_to(@base_join) - - @parent.should_not have_received(:join_to) - end - - it "should define the join association with a JoinAssociation instance" do - @assoc = ThinkingSphinx::Association.new(@parent, :ref) - - @assoc.join_to(@base_join).should == @join - @assoc.join.should == @join - end - end - - describe "to_sql method" do - before :each do - @reflection = ::ActiveRecord::Reflection::AssociationReflection.stub_instance( - :klass => Person - ) - @association = ThinkingSphinx::Association.new(nil, @reflection) - @parent = Object.stub_instance(:aliased_table_name => "ALIAS TABLE NAME") - @join = ::ActiveRecord::Associations::ClassMethods::JoinDependency::JoinAssociation.stub_instance( - :association_join => "full association join SQL", - :parent => @parent - ) - @association.join = @join - end - - it "should return the join's association join value" do - @association.to_sql.should == "full association join SQL" - end - - it "should replace ::ts_join_alias:: with the aliased table name" do - @join.stub_method(:association_join => "text with ::ts_join_alias:: gone") - - @association.to_sql.should == "text with `ALIAS TABLE NAME` gone" - end - end - - describe "is_many? method" do - before :each do - @parent = ThinkingSphinx::Association.stub_instance( - :is_many? 
=> :parent_is_many - ) - @reflection = ::ActiveRecord::Reflection::AssociationReflection.stub_instance( - :macro => :has_many - ) - end - - it "should return true if association is either a has_many or a habtm" do - association = ThinkingSphinx::Association.new(@parent, @reflection) - association.is_many?.should be_true - - @reflection.stub_method(:macro => :has_and_belongs_to_many) - association.is_many?.should be_true - end - - it "should return the parent value if not a has many or habtm and there is a parent" do - association = ThinkingSphinx::Association.new(@parent, @reflection) - @reflection.stub_method(:macro => :belongs_to) - association.is_many?.should == :parent_is_many - end - - it "should return false if no parent and not a has many or habtm" do - association = ThinkingSphinx::Association.new(nil, @reflection) - @reflection.stub_method(:macro => :belongs_to) - association.is_many?.should be_false - end - end - - describe "ancestors method" do - it "should return an array of associations - including all parents" do - parent = ThinkingSphinx::Association.stub_instance(:ancestors => [:all, :ancestors]) - association = ThinkingSphinx::Association.new(parent, @reflection) - association.ancestors.should == [:all, :ancestors, association] - end - end - - describe "polymorphic_classes method" do - it "should return all the polymorphic result types as classes" do - Person.connection.stub_method(:select_all => [ - {"person_type" => "Person"}, - {"person_type" => "Friendship"} - ]) - ref = Object.stub_instance( - :active_record => Person, - :options => {:foreign_type => "person_type"} - ) - - ThinkingSphinx::Association.send(:polymorphic_classes, ref).should == [Person, Friendship] - end - end - - describe "casted_options method" do - before :each do - @options = { - :foreign_key => "thing_id", - :foreign_type => "thing_type", - :polymorphic => true - } - @reflection = ::ActiveRecord::Reflection::AssociationReflection.stub_instance( - :options => @options - ) - end - - it "should return a new options set for a specific class" do - ThinkingSphinx::Association.send(:casted_options, Person, @reflection).should == { - :polymorphic => nil, - :class_name => "Person", - :foreign_key => "thing_id", - :foreign_type => "thing_type", - :conditions => "::ts_join_alias::.`thing_type` = 'Person'" - } - end - - it "should append to existing Array of conditions" do - @options[:conditions] = ["first condition"] - ThinkingSphinx::Association.send(:casted_options, Person, @reflection).should == { - :polymorphic => nil, - :class_name => "Person", - :foreign_key => "thing_id", - :foreign_type => "thing_type", - :conditions => ["first condition", "::ts_join_alias::.`thing_type` = 'Person'"] - } - end - - it "should merge to an existing Hash of conditions" do - @options[:conditions] = {"field" => "value"} - ThinkingSphinx::Association.send(:casted_options, Person, @reflection).should == { - :polymorphic => nil, - :class_name => "Person", - :foreign_key => "thing_id", - :foreign_type => "thing_type", - :conditions => {"field" => "value", "thing_type" => "Person"} - } - end - - it "should append to an existing String of conditions" do - @options[:conditions] = "first condition" - ThinkingSphinx::Association.send(:casted_options, Person, @reflection).should == { - :polymorphic => nil, - :class_name => "Person", - :foreign_key => "thing_id", - :foreign_type => "thing_type", - :conditions => "first condition AND ::ts_join_alias::.`thing_type` = 'Person'" - } - end - end -end \ No newline at end of file diff --git 
a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/attribute_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/attribute_spec.rb deleted file mode 100644 index a25b582..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/attribute_spec.rb +++ /dev/null @@ -1,212 +0,0 @@ -require 'spec/spec_helper' - -describe ThinkingSphinx::Attribute do - describe '#initialize' do - it 'raises if no columns are provided so that configuration errors are easier to track down' do - lambda { - ThinkingSphinx::Attribute.new([]) - }.should raise_error(RuntimeError) - end - - it 'raises if an element of the columns param is an integer - as happens when you use id instead of :id - so that configuration errors are easier to track down' do - lambda { - ThinkingSphinx::Attribute.new([1234]) - }.should raise_error(RuntimeError) - end - end - - describe "unique_name method" do - before :each do - @attribute = ThinkingSphinx::Attribute.new [ - Object.stub_instance(:__stack => [], :__name => "col_name") - ] - end - - it "should use the alias if there is one" do - @attribute.alias = "alias" - @attribute.unique_name.should == "alias" - end - - it "should use the alias if there's multiple columns" do - @attribute.columns << Object.stub_instance(:__stack => [], :__name => "col_name") - @attribute.unique_name.should be_nil - - @attribute.alias = "alias" - @attribute.unique_name.should == "alias" - end - - it "should use the column name if there's no alias and just one column" do - @attribute.unique_name.should == "col_name" - end - end - - describe "column_with_prefix method" do - before :each do - @attribute = ThinkingSphinx::Attribute.new [ - ThinkingSphinx::Index::FauxColumn.new(:col_name) - ] - @attribute.columns.each { |col| @attribute.associations[col] = [] } - @attribute.model = Person - - @first_join = Object.stub_instance(:aliased_table_name => "tabular") - @second_join = Object.stub_instance(:aliased_table_name => "data") - - @first_assoc = ThinkingSphinx::Association.stub_instance( - :join => @first_join, :has_column? => true - ) - @second_assoc = ThinkingSphinx::Association.stub_instance( - :join => @second_join, :has_column? => true - ) - end - - it "should return the column name if the column is a string" do - @attribute.columns = [ThinkingSphinx::Index::FauxColumn.new("string")] - @attribute.send(:column_with_prefix, @attribute.columns.first).should == "string" - end - - it "should return the column with model's table prefix if there's no associations for the column" do - @attribute.send(:column_with_prefix, @attribute.columns.first).should == "`people`.`col_name`" - end - - it "should return the column with its join table prefix if an association exists" do - column = @attribute.columns.first - @attribute.associations[column] = [@first_assoc] - @attribute.send(:column_with_prefix, column).should == "`tabular`.`col_name`" - end - - it "should return multiple columns concatenated if more than one association exists" do - column = @attribute.columns.first - @attribute.associations[column] = [@first_assoc, @second_assoc] - @attribute.send(:column_with_prefix, column).should == "`tabular`.`col_name`, `data`.`col_name`" - end - end - - describe "is_many? method" do - before :each do - @assoc_a = Object.stub_instance(:is_many? => true) - @assoc_b = Object.stub_instance(:is_many? => true) - @assoc_c = Object.stub_instance(:is_many? 
=> true) - - @attribute = ThinkingSphinx::Attribute.new( - [ThinkingSphinx::Index::FauxColumn.new(:col_name)] - ) - @attribute.associations = { - :a => @assoc_a, :b => @assoc_b, :c => @assoc_c - } - end - - it "should return true if all associations return true to is_many?" do - @attribute.send(:is_many?).should be_true - end - - it "should return true if one association returns true to is_many?" do - @assoc_b.stub_method(:is_many? => false) - @assoc_c.stub_method(:is_many? => false) - - @attribute.send(:is_many?).should be_true - end - - it "should return false if all associations return false to is_many?" do - @assoc_a.stub_method(:is_many? => false) - @assoc_b.stub_method(:is_many? => false) - @assoc_c.stub_method(:is_many? => false) - - @attribute.send(:is_many?).should be_false - end - end - - describe "is_string? method" do - before :each do - @col_a = ThinkingSphinx::Index::FauxColumn.new("a") - @col_b = ThinkingSphinx::Index::FauxColumn.new("b") - @col_c = ThinkingSphinx::Index::FauxColumn.new("c") - - @attribute = ThinkingSphinx::Attribute.new( - [@col_a, @col_b, @col_c] - ) - end - - it "should return true if all columns return true to is_string?" do - @attribute.send(:is_string?).should be_true - end - - it "should return false if one column returns true to is_string?" do - @col_a.send(:instance_variable_set, :@name, :a) - @attribute.send(:is_string?).should be_false - end - - it "should return false if all columns return false to is_string?" do - @col_a.send(:instance_variable_set, :@name, :a) - @col_b.send(:instance_variable_set, :@name, :b) - @col_c.send(:instance_variable_set, :@name, :c) - @attribute.send(:is_string?).should be_false - end - end - - describe "type method" do - before :each do - @column = ThinkingSphinx::Index::FauxColumn.new(:col_name) - @attribute = ThinkingSphinx::Attribute.new([@column]) - @attribute.model = Person - @attribute.stub_method(:is_many? => false) - end - - it "should return :multi if is_many? is true" do - @attribute.stub_method(:is_many? => true) - @attribute.send(:type).should == :multi - end - - it "should return :string if there's more than one association" do - @attribute.associations = {:a => :assoc, :b => :assoc} - @attribute.send(:type).should == :string - end - - it "should return the column type from the database if not :multi or more than one association" do - @column.send(:instance_variable_set, :@name, "birthday") - @attribute.send(:type).should == :datetime - - @attribute.send(:instance_variable_set, :@type, nil) - @column.send(:instance_variable_set, :@name, "first_name") - @attribute.send(:type).should == :string - - @attribute.send(:instance_variable_set, :@type, nil) - @column.send(:instance_variable_set, :@name, "id") - @attribute.send(:type).should == :integer - end - end - - describe "all_ints? 
method" do - it "should return true if all columns are integers" do - attribute = ThinkingSphinx::Attribute.new( - [ ThinkingSphinx::Index::FauxColumn.new(:id), - ThinkingSphinx::Index::FauxColumn.new(:team_id) ] - ) - attribute.model = Person - attribute.columns.each { |col| attribute.associations[col] = [] } - - attribute.send(:all_ints?).should be_true - end - - it "should return false if only some columns are integers" do - attribute = ThinkingSphinx::Attribute.new( - [ ThinkingSphinx::Index::FauxColumn.new(:id), - ThinkingSphinx::Index::FauxColumn.new(:first_name) ] - ) - attribute.model = Person - attribute.columns.each { |col| attribute.associations[col] = [] } - - attribute.send(:all_ints?).should be_false - end - - it "should return false if no columns are integers" do - attribute = ThinkingSphinx::Attribute.new( - [ ThinkingSphinx::Index::FauxColumn.new(:first_name), - ThinkingSphinx::Index::FauxColumn.new(:last_name) ] - ) - attribute.model = Person - attribute.columns.each { |col| attribute.associations[col] = [] } - - attribute.send(:all_ints?).should be_false - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/collection_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/collection_spec.rb deleted file mode 100644 index b13c769..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/collection_spec.rb +++ /dev/null @@ -1,14 +0,0 @@ -require 'spec/spec_helper' - -describe ThinkingSphinx::Collection do - it "should behave like WillPaginate::Collection" do - ThinkingSphinx::Collection.instance_methods.should include("previous_page") - ThinkingSphinx::Collection.instance_methods.should include("next_page") - ThinkingSphinx::Collection.instance_methods.should include("current_page") - ThinkingSphinx::Collection.instance_methods.should include("total_pages") - ThinkingSphinx::Collection.instance_methods.should include("total_entries") - ThinkingSphinx::Collection.instance_methods.should include("offset") - - ThinkingSphinx::Collection.ancestors.should include(Array) - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/configuration_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/configuration_spec.rb deleted file mode 100644 index 12e2775..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/configuration_spec.rb +++ /dev/null @@ -1,136 +0,0 @@ -require 'spec/spec_helper' - -describe ThinkingSphinx::Configuration do - describe "environment class method" do - before :each do - ThinkingSphinx::Configuration.send(:class_variable_set, :@@environment, nil) - - ENV["RAILS_ENV"] = nil - end - - it "should use the Merb environment value if set" do - unless defined?(Merb) - module Merb; end - end - - ThinkingSphinx::Configuration.stub_method(:defined? 
=> true) - Merb.stub_method(:environment => "merb_production") - ThinkingSphinx::Configuration.environment.should == "merb_production" - - Object.send(:remove_const, :Merb) - end - - it "should use the Rails environment value if set" do - ENV["RAILS_ENV"] = "rails_production" - ThinkingSphinx::Configuration.environment.should == "rails_production" - end - - it "should default to development" do - ThinkingSphinx::Configuration.environment.should == "development" - end - end - - describe "parse_config method" do - before :each do - @settings = { - "development" => { - "config_file" => "tmp/config/development.sphinx.conf", - "searchd_log_file" => "searchd_log_file.log", - "query_log_file" => "query_log_file.log", - "pid_file" => "pid_file.pid", - "searchd_file_path" => "searchd/file/path", - "address" => "127.0.0.1", - "port" => 3333, - "min_prefix_len" => 2, - "min_infix_len" => 3, - "mem_limit" => "128M", - "max_matches" => 1001, - "morphology" => "stem_ru", - "charset_type" => "latin1", - "charset_table" => "table", - "ignore_chars" => "e" - } - } - - open("#{RAILS_ROOT}/config/sphinx.yml", "w") do |f| - f.write YAML.dump(@settings) - end - end - - it "should use the accessors to set the configuration values" do - config = ThinkingSphinx::Configuration.instance - config.send(:parse_config) - - %w(config_file searchd_log_file query_log_file pid_file searchd_file_path - address port).each do |key| - config.send(key).should == @settings["development"][key] - end - end - - after :each do - FileUtils.rm "#{RAILS_ROOT}/config/sphinx.yml" - end - end - - describe "initialisation" do - it "should have a default bin_path of nothing" do - ThinkingSphinx::Configuration.instance.bin_path.should == "" - end - - it "should append a / to bin_path if one is supplied" do - @settings = { - "development" => { - "bin_path" => "path/to/somewhere" - } - } - - open("#{RAILS_ROOT}/config/sphinx.yml", "w") do |f| - f.write YAML.dump(@settings) - end - - ThinkingSphinx::Configuration.instance.send(:parse_config) - ThinkingSphinx::Configuration.instance.bin_path.should match(/\/$/) - end - end - - it "should insert set index options into the configuration file" do - config = ThinkingSphinx::Configuration.instance - ThinkingSphinx::Configuration::IndexOptions.each do |option| - config.index_options[option.to_sym] = "something" - config.build - - file = open(config.config_file) { |f| f.read } - file.should match(/#{option}\s+= something/) - - config.index_options[option.to_sym] = nil - end - end - - it "should insert set source options into the configuration file" do - config = ThinkingSphinx::Configuration.instance - ThinkingSphinx::Configuration::SourceOptions.each do |option| - config.source_options[option.to_sym] = "something" - config.build - - file = open(config.config_file) { |f| f.read } - file.should match(/#{option}\s+= something/) - - config.source_options[option.to_sym] = nil - end - end - - it "should set any explicit prefixed or infixed fields" do - file = open(ThinkingSphinx::Configuration.instance.config_file) { |f| - f.read - } - file.should match(/prefix_fields\s+= city/) - file.should match(/infix_fields\s+= state/) - end - - it "should not have prefix fields in indexes where nothing is set" do - file = open(ThinkingSphinx::Configuration.instance.config_file) { |f| - f.read - } - file.should_not match(/index alpha_core\s+\{\s+[^\}]*prefix_fields\s+=[^\}]*\}/m) - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/core/string_spec.rb 
b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/core/string_spec.rb deleted file mode 100644 index 26f813c..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/core/string_spec.rb +++ /dev/null @@ -1,9 +0,0 @@ -require 'spec/spec_helper' - -describe String do - describe "to_crc32 instance method" do - it "should return an integer" do - 'to_crc32'.to_crc32.should be_a_kind_of(Integer) - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/field_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/field_spec.rb deleted file mode 100644 index 770f749..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/field_spec.rb +++ /dev/null @@ -1,145 +0,0 @@ -require 'spec/spec_helper' - -describe ThinkingSphinx::Field do - describe '#initialize' do - it 'raises if no columns are provided so that configuration errors are easier to track down' do - lambda { - ThinkingSphinx::Field.new([]) - }.should raise_error(RuntimeError) - end - - it 'raises if an element of the columns param is an integer - as happens when you use id instead of :id - so that configuration errors are easier to track down' do - lambda { - ThinkingSphinx::Field.new([1234]) - }.should raise_error(RuntimeError) - end - end - - describe "unique_name method" do - before :each do - @field = ThinkingSphinx::Field.new [ - Object.stub_instance(:__stack => [], :__name => "col_name") - ] - end - - it "should use the alias if there is one" do - @field.alias = "alias" - @field.unique_name.should == "alias" - end - - it "should use the alias if there's multiple columns" do - @field.columns << Object.stub_instance(:__stack => [], :__name => "col_name") - @field.unique_name.should be_nil - - @field.alias = "alias" - @field.unique_name.should == "alias" - end - - it "should use the column name if there's no alias and just one column" do - @field.unique_name.should == "col_name" - end - end - - describe "prefixes method" do - it "should default to false" do - @field = ThinkingSphinx::Field.new([Object.stub_instance(:__stack => [])]) - @field.prefixes.should be_false - end - - it "should be true if the corresponding option is set" do - @field = ThinkingSphinx::Field.new( - [Object.stub_instance(:__stack => [])], :prefixes => true - ) - @field.prefixes.should be_true - end - end - - describe "infixes method" do - it "should default to false" do - @field = ThinkingSphinx::Field.new([Object.stub_instance(:__stack => [])]) - @field.infixes.should be_false - end - - it "should be true if the corresponding option is set" do - @field = ThinkingSphinx::Field.new( - [Object.stub_instance(:__stack => [])], :infixes => true - ) - @field.infixes.should be_true - end - end - - describe "column_with_prefix method" do - before :each do - @field = ThinkingSphinx::Field.new [ - ThinkingSphinx::Index::FauxColumn.new(:col_name) - ] - @field.columns.each { |col| @field.associations[col] = [] } - @field.model = Person - - @first_join = Object.stub_instance(:aliased_table_name => "tabular") - @second_join = Object.stub_instance(:aliased_table_name => "data") - - @first_assoc = ThinkingSphinx::Association.stub_instance( - :join => @first_join, :has_column? => true - ) - @second_assoc = ThinkingSphinx::Association.stub_instance( - :join => @second_join, :has_column? 
=> true - ) - end - - it "should return the column name if the column is a string" do - @field.columns = [ThinkingSphinx::Index::FauxColumn.new("string")] - @field.send(:column_with_prefix, @field.columns.first).should == "string" - end - - it "should return the column with model's table prefix if there's no associations for the column" do - @field.send(:column_with_prefix, @field.columns.first).should == "`people`.`col_name`" - end - - it "should return the column with its join table prefix if an association exists" do - column = @field.columns.first - @field.associations[column] = [@first_assoc] - @field.send(:column_with_prefix, column).should == "`tabular`.`col_name`" - end - - it "should return multiple columns concatenated if more than one association exists" do - column = @field.columns.first - @field.associations[column] = [@first_assoc, @second_assoc] - @field.send(:column_with_prefix, column).should == "`tabular`.`col_name`, `data`.`col_name`" - end - end - - describe "is_many? method" do - before :each do - @assoc_a = Object.stub_instance(:is_many? => true) - @assoc_b = Object.stub_instance(:is_many? => true) - @assoc_c = Object.stub_instance(:is_many? => true) - - @field = ThinkingSphinx::Field.new( - [ThinkingSphinx::Index::FauxColumn.new(:col_name)] - ) - @field.associations = { - :a => @assoc_a, :b => @assoc_b, :c => @assoc_c - } - end - - it "should return true if all associations return true to is_many?" do - @field.send(:is_many?).should be_true - end - - it "should return true if one association returns true to is_many?" do - @assoc_b.stub_method(:is_many? => false) - @assoc_c.stub_method(:is_many? => false) - - @field.send(:is_many?).should be_true - end - - it "should return false if all associations return false to is_many?" do - @assoc_a.stub_method(:is_many? => false) - @assoc_b.stub_method(:is_many? => false) - @assoc_c.stub_method(:is_many? 
=> false) - - @field.send(:is_many?).should be_false - end - end -end diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/index_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/index_spec.rb deleted file mode 100644 index 7e079fb..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/index_spec.rb +++ /dev/null @@ -1,54 +0,0 @@ -require 'spec/spec_helper' - -describe ThinkingSphinx::Index do - describe "generated sql_query" do - it "should include explicit groupings if requested" do - @index = ThinkingSphinx::Index.new(Person) - - @index.groupings << "custom_sql" - @index.to_riddle_for_core(0, 0).sql_query.should match(/GROUP BY.+custom_sql/) - end - end - - describe "prefix_fields method" do - before :each do - @index = ThinkingSphinx::Index.new(Person) - - @field_a = ThinkingSphinx::Field.stub_instance(:prefixes => true) - @field_b = ThinkingSphinx::Field.stub_instance(:prefixes => false) - @field_c = ThinkingSphinx::Field.stub_instance(:prefixes => true) - - @index.fields = [@field_a, @field_b, @field_c] - end - - it "should return fields that are flagged as prefixed" do - @index.prefix_fields.should include(@field_a) - @index.prefix_fields.should include(@field_c) - end - - it "should not return fields that aren't flagged as prefixed" do - @index.prefix_fields.should_not include(@field_b) - end - end - - describe "infix_fields method" do - before :each do - @index = ThinkingSphinx::Index.new(Person) - - @field_a = ThinkingSphinx::Field.stub_instance(:infixes => true) - @field_b = ThinkingSphinx::Field.stub_instance(:infixes => false) - @field_c = ThinkingSphinx::Field.stub_instance(:infixes => true) - - @index.fields = [@field_a, @field_b, @field_c] - end - - it "should return fields that are flagged as infixed" do - @index.infix_fields.should include(@field_a) - @index.infix_fields.should include(@field_c) - end - - it "should not return fields that aren't flagged as infixed" do - @index.infix_fields.should_not include(@field_b) - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/search_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/search_spec.rb deleted file mode 100644 index dd85138..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx/search_spec.rb +++ /dev/null @@ -1,59 +0,0 @@ -require 'spec/spec_helper' -require 'will_paginate/collection' - -describe ThinkingSphinx::Search do - describe "search method" do - before :each do - @client = Riddle::Client.stub_instance( - :filters => [], - :filters= => true, - :id_range= => true, - :sort_mode => :asc, - :limit => 5, - :offset= => 0, - :sort_mode= => true, - :query => { - :matches => [], - :total => 50 - } - ) - - ThinkingSphinx::Search.stub_methods( - :client_from_options => @client, - :search_conditions => ["", []] - ) - end - - describe ":star option" do - - it "should not apply by default" do - ThinkingSphinx::Search.search "foo bar" - @client.should have_received(:query).with("foo bar") - end - - it "should apply when passed, and handle full extended syntax" do - input = %{a b* c (d | e) 123 5&6 (f_f g) !h "i j" "k l"~10 "m n"/3 @o p -(q|r)} - expected = %{*a* b* *c* (*d* | *e*) *123* *5*&*6* (*f_f* *g*) !*h* "i j" "k l"~10 "m n"/3 @o *p* -(*q*|*r*)} - ThinkingSphinx::Search.search input, :star => true - @client.should have_received(:query).with(expected) - end - - it "should default to /\w+/ as token" do - ThinkingSphinx::Search.search "foo@bar.com", :star => true - 
@client.should have_received(:query).with("*foo*@*bar*.*com*") - end - - it "should honour custom token" do - ThinkingSphinx::Search.search "foo@bar.com -foo-bar", :star => /[\w@.-]+/u - @client.should have_received(:query).with("*foo@bar.com* -*foo-bar*") - end - - end - end -end - -describe ThinkingSphinx::Search, "playing nice with Search model" do - it "should not conflict with models called Search" do - lambda { Search.find(:all) }.should_not raise_error - end -end diff --git a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx_spec.rb b/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx_spec.rb deleted file mode 100644 index 2f88c52..0000000 --- a/vendor/plugins/thinking-sphinx/spec/unit/thinking_sphinx_spec.rb +++ /dev/null @@ -1,129 +0,0 @@ -require 'spec/spec_helper' - -describe ThinkingSphinx do - it "should define indexes by default" do - ThinkingSphinx.define_indexes?.should be_true - end - - it "should disable index definition" do - ThinkingSphinx.define_indexes = false - ThinkingSphinx.define_indexes?.should be_false - end - - it "should enable index definition" do - ThinkingSphinx.define_indexes = false - ThinkingSphinx.define_indexes?.should be_false - ThinkingSphinx.define_indexes = true - ThinkingSphinx.define_indexes?.should be_true - end - - it "should index deltas by default" do - ThinkingSphinx.deltas_enabled = nil - ThinkingSphinx.deltas_enabled?.should be_true - end - - it "should disable delta indexing" do - ThinkingSphinx.deltas_enabled = false - ThinkingSphinx.deltas_enabled?.should be_false - end - - it "should enable delta indexing" do - ThinkingSphinx.deltas_enabled = false - ThinkingSphinx.deltas_enabled?.should be_false - ThinkingSphinx.deltas_enabled = true - ThinkingSphinx.deltas_enabled?.should be_true - end - - it "should update indexes by default" do - ThinkingSphinx.updates_enabled = nil - ThinkingSphinx.updates_enabled?.should be_true - end - - it "should disable index updating" do - ThinkingSphinx.updates_enabled = false - ThinkingSphinx.updates_enabled?.should be_false - end - - it "should enable index updating" do - ThinkingSphinx.updates_enabled = false - ThinkingSphinx.updates_enabled?.should be_false - ThinkingSphinx.updates_enabled = true - ThinkingSphinx.updates_enabled?.should be_true - end - - describe "use_group_by_shortcut? 
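The :star examples in the removed search_spec amount to wrapping each token that matches a token pattern in asterisks. A deliberately simplified approximation, not the plugin's actual implementation (which also special-cases operators, quoted phrases and already-starred terms):

# Simplified approximation of the :star behaviour shown in the removed
# search_spec; the real code handles extended query syntax that this
# toy version does not attempt.
def starred(query, token = /\w+/)
  query.gsub(token) { |t| "*#{t}*" }
end

puts starred("foo bar")                  # => "*foo* *bar*"
puts starred("foo@bar.com")              # => "*foo*@*bar*.*com*" (the spec's default-token case)
puts starred("foo@bar.com", /[\w@.-]+/)  # => "*foo@bar.com*" with a custom token pattern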
method" do - before :each do - unless ::ActiveRecord::ConnectionAdapters.const_defined?(:MysqlAdapter) - pending "No MySQL" - return - end - - @connection = ::ActiveRecord::ConnectionAdapters::MysqlAdapter.stub_instance( - :select_all => true - ) - ::ActiveRecord::Base.stub_method( - :connection => @connection - ) - end - - it "should return true if no ONLY_FULL_GROUP_BY" do - @connection.stub_method( - :select_all => {:a => "OTHER SETTINGS"} - ) - - ThinkingSphinx.use_group_by_shortcut?.should be_true - end - - it "should return true if NULL value" do - @connection.stub_method( - :select_all => {:a => nil} - ) - - ThinkingSphinx.use_group_by_shortcut?.should be_true - end - - it "should return false if ONLY_FULL_GROUP_BY is set" do - @connection.stub_method( - :select_all => {:a => "OTHER SETTINGS,ONLY_FULL_GROUP_BY,blah"} - ) - - ThinkingSphinx.use_group_by_shortcut?.should be_false - end - - it "should return false if ONLY_FULL_GROUP_BY is set in any of the values" do - @connection.stub_method( - :select_all => { - :a => "OTHER SETTINGS", - :b => "ONLY_FULL_GROUP_BY" - } - ) - - ThinkingSphinx.use_group_by_shortcut?.should be_false - end - - describe "if not using MySQL" do - before :each do - unless ::ActiveRecord::ConnectionAdapters.const_defined?(:PostgreSQLAdapter) - pending "No PostgreSQL" - return - end - @connection = ::ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.stub_instance( - :select_all => true - ) - ::ActiveRecord::Base.stub_method( - :connection => @connection - ) - end - - it "should return false" do - ThinkingSphinx.use_group_by_shortcut?.should be_false - end - - it "should not call select_all" do - ThinkingSphinx.use_group_by_shortcut? - - @connection.should_not have_received(:select_all) - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/tasks/distribution.rb b/vendor/plugins/thinking-sphinx/tasks/distribution.rb deleted file mode 100644 index 54ea964..0000000 --- a/vendor/plugins/thinking-sphinx/tasks/distribution.rb +++ /dev/null @@ -1,48 +0,0 @@ -require 'rake/rdoctask' -require 'rake/gempackagetask' - -$LOAD_PATH.unshift File.dirname(__FILE__) + '/../lib' -require 'thinking_sphinx' - -desc 'Generate documentation' -Rake::RDocTask.new(:rdoc) do |rdoc| - rdoc.rdoc_dir = 'rdoc' - rdoc.title = 'Thinking Sphinx - ActiveRecord Sphinx Plugin' - rdoc.options << '--line-numbers' << '--inline-source' - rdoc.rdoc_files.include('README') - rdoc.rdoc_files.include('lib/**/*.rb') -end - -spec = Gem::Specification.new do |s| - s.name = "thinking-sphinx" - s.version = ThinkingSphinx::Version::String - s.summary = "A concise and easy-to-use Ruby library that connects ActiveRecord to the Sphinx search daemon, managing configuration, indexing and searching." - s.description = "A concise and easy-to-use Ruby library that connects ActiveRecord to the Sphinx search daemon, managing configuration, indexing and searching." 
- s.author = "Pat Allan" - s.email = "pat@freelancing-gods.com" - s.homepage = "http://ts.freelancing-gods.com" - s.has_rdoc = true - s.rdoc_options << "--title" << "Thinking Sphinx -- Rails/Merb Sphinx Plugin" << - "--line-numbers" - s.rubyforge_project = "thinking-sphinx" - s.test_files = FileList["spec/**/*_spec.rb"] - s.files = FileList[ - "lib/**/*.rb", - "LICENCE", - "README", - "tasks/**/*.rb", - "tasks/**/*.rake", - "vendor/**/*" - ] -end - -Rake::GemPackageTask.new(spec) do |p| - p.gem_spec = spec - p.need_tar = true - p.need_zip = true -end - -desc "Build gemspec file" -task :build do - File.open('thinking-sphinx.gemspec', 'w') { |f| f.write spec.to_ruby } -end diff --git a/vendor/plugins/thinking-sphinx/tasks/rails.rake b/vendor/plugins/thinking-sphinx/tasks/rails.rake deleted file mode 100644 index 47fa76c..0000000 --- a/vendor/plugins/thinking-sphinx/tasks/rails.rake +++ /dev/null @@ -1 +0,0 @@ -require File.join(File.dirname(__FILE__), '/../lib/thinking_sphinx/tasks') \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/tasks/testing.rb b/vendor/plugins/thinking-sphinx/tasks/testing.rb deleted file mode 100644 index 9cb83ba..0000000 --- a/vendor/plugins/thinking-sphinx/tasks/testing.rb +++ /dev/null @@ -1,86 +0,0 @@ -require 'rubygems' -require 'spec/rake/spectask' -require 'cucumber/rake/task' - -desc "Run the specs under spec" -Spec::Rake::SpecTask.new do |t| - t.spec_files = FileList['spec/**/*_spec.rb'] - t.spec_opts << "-c" -end - -desc "Run all feature-set configurations" -task :features do |t| - puts "rake features:mysql" - system "rake features:mysql" - puts "rake features:postgresql" - system "rake features:postgresql" -end - -namespace :features do - def add_task(name, description) - Cucumber::Rake::Task.new(name, description) do |t| - t.cucumber_opts = "--format pretty" - t.step_pattern = [ - "features/support/env", - "features/support/db/#{name}", - "features/support/db/active_record", - "features/support/post_database", - "features/step_definitions/**.rb" - ] - end - end - - add_task :mysql, "Run feature-set against MySQL" - add_task :postgresql, "Run feature-set against PostgreSQL" -end - -desc "Generate RCov reports" -Spec::Rake::SpecTask.new(:rcov) do |t| - t.libs << 'lib' - t.spec_files = FileList['spec/**/*_spec.rb'] - t.rcov = true - t.rcov_opts = ['--exclude', 'spec', '--exclude', 'gems', '--exclude', 'riddle'] -end - -namespace :rcov do - def add_task(name, description) - Cucumber::Rake::Task.new(name, description) do |t| - t.cucumber_opts = "--format pretty" - t.step_pattern = [ - "features/support/env", - "features/support/db/#{name}", - "features/support/db/active_record", - "features/support/post_database", - "features/step_definitions/**.rb" - ] - t.rcov = true - t.rcov_opts = [ - '--exclude', 'spec', - '--exclude', 'gems', - '--exclude', 'riddle', - '--exclude', 'features' - ] - end - end - - add_task :mysql, "Run feature-set against MySQL with rcov" - add_task :postgresql, "Run feature-set against PostgreSQL with rcov" -end - -desc "Build cucumber.yml file" -task :cucumber_defaults do - default_requires = %w( - --require features/support/env.rb - --require features/support/db/mysql.rb - --require features/support/db/active_record.rb - --require features/support/post_database.rb - ).join(" ") - - step_definitions = FileList["features/step_definitions/**.rb"].collect { |path| - "--require #{path}" - }.join(" ") - - File.open('cucumber.yml', 'w') { |f| - f.write "default: \"#{default_requires} #{step_definitions}\"" - } -end diff 
--git a/vendor/plugins/thinking-sphinx/thinking-sphinx.gemspec b/vendor/plugins/thinking-sphinx/thinking-sphinx.gemspec deleted file mode 100644 index 03b2145..0000000 --- a/vendor/plugins/thinking-sphinx/thinking-sphinx.gemspec +++ /dev/null @@ -1,31 +0,0 @@ -# -*- encoding: utf-8 -*- - -Gem::Specification.new do |s| - s.name = %q{thinking-sphinx} - s.version = "1.1.3" - - s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= - s.authors = ["Pat Allan"] - s.date = %q{2009-01-17} - s.description = %q{A concise and easy-to-use Ruby library that connects ActiveRecord to the Sphinx search daemon, managing configuration, indexing and searching.} - s.email = %q{pat@freelancing-gods.com} - s.files = ["lib/thinking_sphinx/active_record/delta.rb", "lib/thinking_sphinx/active_record/has_many_association.rb", "lib/thinking_sphinx/active_record/search.rb", "lib/thinking_sphinx/active_record.rb", "lib/thinking_sphinx/adapters/abstract_adapter.rb", "lib/thinking_sphinx/adapters/mysql_adapter.rb", "lib/thinking_sphinx/adapters/postgresql_adapter.rb", "lib/thinking_sphinx/association.rb", "lib/thinking_sphinx/attribute.rb", "lib/thinking_sphinx/collection.rb", "lib/thinking_sphinx/configuration.rb", "lib/thinking_sphinx/core/string.rb", "lib/thinking_sphinx/deltas/datetime_delta.rb", "lib/thinking_sphinx/deltas/default_delta.rb", "lib/thinking_sphinx/deltas/delayed_delta/delta_job.rb", "lib/thinking_sphinx/deltas/delayed_delta/flag_as_deleted_job.rb", "lib/thinking_sphinx/deltas/delayed_delta/job.rb", "lib/thinking_sphinx/deltas/delayed_delta.rb", "lib/thinking_sphinx/deltas.rb", "lib/thinking_sphinx/field.rb", "lib/thinking_sphinx/index/builder.rb", "lib/thinking_sphinx/index/faux_column.rb", "lib/thinking_sphinx/index.rb", "lib/thinking_sphinx/rails_additions.rb", "lib/thinking_sphinx/search.rb", "lib/thinking_sphinx.rb", "LICENCE", "README", "tasks/distribution.rb", "tasks/testing.rb", "tasks/thinking_sphinx_tasks.rb", "tasks/thinking_sphinx_tasks.rake", "vendor/after_commit", "vendor/after_commit/init.rb", "vendor/after_commit/lib", "vendor/after_commit/lib/after_commit", "vendor/after_commit/lib/after_commit/active_record.rb", "vendor/after_commit/lib/after_commit/connection_adapters.rb", "vendor/after_commit/lib/after_commit.rb", "vendor/after_commit/LICENSE", "vendor/after_commit/Rakefile", "vendor/after_commit/README", "vendor/after_commit/test", "vendor/after_commit/test/after_commit_test.rb", "vendor/delayed_job", "vendor/delayed_job/lib", "vendor/delayed_job/lib/delayed", "vendor/delayed_job/lib/delayed/job.rb", "vendor/delayed_job/lib/delayed/message_sending.rb", "vendor/delayed_job/lib/delayed/performable_method.rb", "vendor/delayed_job/lib/delayed/worker.rb", "vendor/riddle", "vendor/riddle/lib", "vendor/riddle/lib/riddle", "vendor/riddle/lib/riddle/client", "vendor/riddle/lib/riddle/client/filter.rb", "vendor/riddle/lib/riddle/client/message.rb", "vendor/riddle/lib/riddle/client/response.rb", "vendor/riddle/lib/riddle/client.rb", "vendor/riddle/lib/riddle/configuration", "vendor/riddle/lib/riddle/configuration/distributed_index.rb", "vendor/riddle/lib/riddle/configuration/index.rb", "vendor/riddle/lib/riddle/configuration/indexer.rb", "vendor/riddle/lib/riddle/configuration/remote_index.rb", "vendor/riddle/lib/riddle/configuration/searchd.rb", "vendor/riddle/lib/riddle/configuration/section.rb", "vendor/riddle/lib/riddle/configuration/source.rb", "vendor/riddle/lib/riddle/configuration/sql_source.rb", 
"vendor/riddle/lib/riddle/configuration/xml_source.rb", "vendor/riddle/lib/riddle/configuration.rb", "vendor/riddle/lib/riddle/controller.rb", "vendor/riddle/lib/riddle.rb", "spec/unit/thinking_sphinx/active_record/delta_spec.rb", "spec/unit/thinking_sphinx/active_record/has_many_association_spec.rb", "spec/unit/thinking_sphinx/active_record/search_spec.rb", "spec/unit/thinking_sphinx/active_record_spec.rb", "spec/unit/thinking_sphinx/association_spec.rb", "spec/unit/thinking_sphinx/attribute_spec.rb", "spec/unit/thinking_sphinx/collection_spec.rb", "spec/unit/thinking_sphinx/configuration_spec.rb", "spec/unit/thinking_sphinx/core/string_spec.rb", "spec/unit/thinking_sphinx/field_spec.rb", "spec/unit/thinking_sphinx/index/builder_spec.rb", "spec/unit/thinking_sphinx/index/faux_column_spec.rb", "spec/unit/thinking_sphinx/index_spec.rb", "spec/unit/thinking_sphinx/search_spec.rb", "spec/unit/thinking_sphinx_spec.rb"] - s.has_rdoc = true - s.homepage = %q{http://ts.freelancing-gods.com} - s.rdoc_options = ["--title", "Thinking Sphinx -- Rails/Merb Sphinx Plugin", "--line-numbers"] - s.require_paths = ["lib"] - s.rubyforge_project = %q{thinking-sphinx} - s.rubygems_version = %q{1.3.0} - s.summary = %q{A concise and easy-to-use Ruby library that connects ActiveRecord to the Sphinx search daemon, managing configuration, indexing and searching.} - s.test_files = ["spec/unit/thinking_sphinx/active_record/delta_spec.rb", "spec/unit/thinking_sphinx/active_record/has_many_association_spec.rb", "spec/unit/thinking_sphinx/active_record/search_spec.rb", "spec/unit/thinking_sphinx/active_record_spec.rb", "spec/unit/thinking_sphinx/association_spec.rb", "spec/unit/thinking_sphinx/attribute_spec.rb", "spec/unit/thinking_sphinx/collection_spec.rb", "spec/unit/thinking_sphinx/configuration_spec.rb", "spec/unit/thinking_sphinx/core/string_spec.rb", "spec/unit/thinking_sphinx/field_spec.rb", "spec/unit/thinking_sphinx/index/builder_spec.rb", "spec/unit/thinking_sphinx/index/faux_column_spec.rb", "spec/unit/thinking_sphinx/index_spec.rb", "spec/unit/thinking_sphinx/search_spec.rb", "spec/unit/thinking_sphinx_spec.rb"] - - if s.respond_to? 
:specification_version then - current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION - s.specification_version = 2 - - if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then - else - end - else - end -end diff --git a/vendor/plugins/thinking-sphinx/vendor/after_commit/.gitignore b/vendor/plugins/thinking-sphinx/vendor/after_commit/.gitignore deleted file mode 100644 index daabe26..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/after_commit/.gitignore +++ /dev/null @@ -1 +0,0 @@ -test.sqlite3 diff --git a/vendor/plugins/thinking-sphinx/vendor/after_commit/LICENSE b/vendor/plugins/thinking-sphinx/vendor/after_commit/LICENSE deleted file mode 100644 index f4913c7..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/after_commit/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2008 Nick Muerdter - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/plugins/thinking-sphinx/vendor/after_commit/README b/vendor/plugins/thinking-sphinx/vendor/after_commit/README deleted file mode 100644 index d99d58b..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/after_commit/README +++ /dev/null @@ -1,16 +0,0 @@ -after_commit -=========== - -A Ruby on Rails plugin to add after_commit callbacks. The callbacks that are provided can be used -to trigger events that run only after the entire transaction is complete. This is beneficial -in situations where you are doing asynchronous processing and need committed objects. - -The following callbacks are provided: - - * (1) after_commit - * (2) after_commit_on_create - * (3) after_commit_on_update - * (4) after_commit_on_destroy - -The after_commit callback is run for any object that has just been committed. You can obtain finer -callback control by using the additional after_commit_on_* callbacks. diff --git a/vendor/plugins/thinking-sphinx/vendor/after_commit/Rakefile b/vendor/plugins/thinking-sphinx/vendor/after_commit/Rakefile deleted file mode 100644 index c481278..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/after_commit/Rakefile +++ /dev/null @@ -1,22 +0,0 @@ -require 'rake' -require 'rake/testtask' -require 'rake/rdoctask' - -desc 'Default: run unit tests.' -task :default => :test - -desc 'Test the after_commit plugin.' -Rake::TestTask.new(:test) do |t| - t.libs << 'lib' - t.pattern = 'test/**/*_test.rb' - t.verbose = true -end - -desc 'Generate documentation for the after_commit plugin.' 
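The removed after_commit README describes hooks that fire only once the surrounding transaction has actually committed, and that are skipped on rollback. A toy, framework-free illustration of that idea only; it is not the plugin's implementation, which wires into ActiveRecord's connection adapters as the deleted files below show.

# Toy illustration: callbacks queued during a "transaction" run only on
# commit and are discarded on rollback. Names are invented for the example.
class ToyTransaction
  def initialize
    @after_commit = []
  end

  def after_commit(&block)
    @after_commit << block
  end

  def run
    yield self
    @after_commit.each(&:call)  # commit: fire the queued callbacks
  rescue => error
    @after_commit.clear         # rollback: nothing queued should fire
    puts "rolled back: #{error.message}"
  end
end

ToyTransaction.new.run do |txn|
  txn.after_commit { puts "notify search indexer" }  # printed
end

ToyTransaction.new.run do |txn|
  txn.after_commit { puts "never printed" }
  raise "boom"                                       # rollback path
end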
-Rake::RDocTask.new(:rdoc) do |rdoc| - rdoc.rdoc_dir = 'rdoc' - rdoc.title = 'AfterCommit' - rdoc.options << '--line-numbers' << '--inline-source' - rdoc.rdoc_files.include('README') - rdoc.rdoc_files.include('lib/**/*.rb') -end diff --git a/vendor/plugins/thinking-sphinx/vendor/after_commit/init.rb b/vendor/plugins/thinking-sphinx/vendor/after_commit/init.rb deleted file mode 100644 index 137b69e..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/after_commit/init.rb +++ /dev/null @@ -1,5 +0,0 @@ -ActiveRecord::Base.send(:include, AfterCommit::ActiveRecord) - -Object.subclasses_of(ActiveRecord::ConnectionAdapters::AbstractAdapter).each do |klass| - klass.send(:include, AfterCommit::ConnectionAdapters) -end diff --git a/vendor/plugins/thinking-sphinx/vendor/after_commit/lib/after_commit.rb b/vendor/plugins/thinking-sphinx/vendor/after_commit/lib/after_commit.rb deleted file mode 100644 index 2ce054b..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/after_commit/lib/after_commit.rb +++ /dev/null @@ -1,42 +0,0 @@ -require 'after_commit/active_record' -require 'after_commit/connection_adapters' - -module AfterCommit - def self.committed_records - @@committed_records ||= [] - end - - def self.committed_records=(committed_records) - @@committed_records = committed_records - end - - def self.committed_records_on_create - @@committed_records_on_create ||= [] - end - - def self.committed_records_on_create=(committed_records) - @@committed_records_on_create = committed_records - end - - def self.committed_records_on_update - @@committed_records_on_update ||= [] - end - - def self.committed_records_on_update=(committed_records) - @@committed_records_on_update = committed_records - end - - def self.committed_records_on_destroy - @@committed_records_on_destroy ||= [] - end - - def self.committed_records_on_destroy=(committed_records) - @@committed_records_on_destroy = committed_records - end -end - -ActiveRecord::Base.send(:include, AfterCommit::ActiveRecord) - -Object.subclasses_of(ActiveRecord::ConnectionAdapters::AbstractAdapter).each do |klass| - klass.send(:include, AfterCommit::ConnectionAdapters) -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/after_commit/lib/after_commit/active_record.rb b/vendor/plugins/thinking-sphinx/vendor/after_commit/lib/after_commit/active_record.rb deleted file mode 100644 index 3bc4857..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/after_commit/lib/after_commit/active_record.rb +++ /dev/null @@ -1,91 +0,0 @@ -module AfterCommit - module ActiveRecord - # Based on the code found in Thinking Sphinx: - # http://ts.freelancing-gods.com/ which was based on code written by Eli - # Miller: - # http://elimiller.blogspot.com/2007/06/proper-cache-expiry-with-aftercommit.html - # with slight modification from Joost Hietbrink. And now me! Whew. - def self.included(base) - base.class_eval do - # The define_callbacks method was added post Rails 2.0.2 - if it - # doesn't exist, we define the callback manually - if respond_to?(:define_callbacks) - define_callbacks :after_commit, - :after_commit_on_create, - :after_commit_on_update, - :after_commit_on_destroy - else - class << self - # Handle after_commit callbacks - call all the registered callbacks. - def after_commit(*callbacks, &block) - callbacks << block if block_given? - write_inheritable_array(:after_commit, callbacks) - end - - def after_commit_on_create(*callbacks, &block) - callbacks << block if block_given? 
- write_inheritable_array(:after_commit_on_create, callbacks) - end - - def after_commit_on_update(*callbacks, &block) - callbacks << block if block_given? - write_inheritable_array(:after_commit_on_update, callbacks) - end - - def after_commit_on_destroy(*callbacks, &block) - callbacks << block if block_given? - write_inheritable_array(:after_commit_on_destroy, callbacks) - end - end - end - - after_save :add_committed_record - after_create :add_committed_record_on_create - after_update :add_committed_record_on_update - after_destroy :add_committed_record_on_destroy - - # We need to keep track of records that have been saved or destroyed - # within this transaction. - def add_committed_record - AfterCommit.committed_records << self - end - - def add_committed_record_on_create - AfterCommit.committed_records_on_create << self - end - - def add_committed_record_on_update - AfterCommit.committed_records_on_update << self - end - - def add_committed_record_on_destroy - AfterCommit.committed_records << self - AfterCommit.committed_records_on_destroy << self - end - - def after_commit - # Deliberately blank. - end - - # Wraps a call to the private callback method so that the the - # after_commit callback can be made from the ConnectionAdapters when - # the commit for the transaction has finally succeeded. - def after_commit_callback - callback(:after_commit) - end - - def after_commit_on_create_callback - callback(:after_commit_on_create) - end - - def after_commit_on_update_callback - callback(:after_commit_on_update) - end - - def after_commit_on_destroy_callback - callback(:after_commit_on_destroy) - end - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/vendor/after_commit/lib/after_commit/connection_adapters.rb b/vendor/plugins/thinking-sphinx/vendor/after_commit/lib/after_commit/connection_adapters.rb deleted file mode 100644 index 02bea17..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/after_commit/lib/after_commit/connection_adapters.rb +++ /dev/null @@ -1,103 +0,0 @@ -module AfterCommit - module ConnectionAdapters - def self.included(base) - base.class_eval do - # The commit_db_transaction method gets called when the outermost - # transaction finishes and everything inside commits. We want to - # override it so that after this happens, any records that were saved - # or destroyed within this transaction now get their after_commit - # callback fired. - def commit_db_transaction_with_callback - commit_db_transaction_without_callback - trigger_after_commit_callbacks - trigger_after_commit_on_create_callbacks - trigger_after_commit_on_update_callbacks - trigger_after_commit_on_destroy_callbacks - end - alias_method_chain :commit_db_transaction, :callback - - # In the event the transaction fails and rolls back, nothing inside - # should recieve the after_commit callback. - def rollback_db_transaction_with_callback - rollback_db_transaction_without_callback - - AfterCommit.committed_records = [] - AfterCommit.committed_records_on_create = [] - AfterCommit.committed_records_on_update = [] - AfterCommit.committed_records_on_destroy = [] - end - alias_method_chain :rollback_db_transaction, :callback - - protected - def trigger_after_commit_callbacks - # Trigger the after_commit callback for each of the committed - # records. - if AfterCommit.committed_records.any? 
- AfterCommit.committed_records.each do |record| - begin - record.after_commit_callback - rescue - end - end - end - - # Make sure we clear out our list of committed records now that we've - # triggered the callbacks for each one. - AfterCommit.committed_records = [] - end - - def trigger_after_commit_on_create_callbacks - # Trigger the after_commit_on_create callback for each of the committed - # records. - if AfterCommit.committed_records_on_create.any? - AfterCommit.committed_records_on_create.each do |record| - begin - record.after_commit_on_create_callback - rescue - end - end - end - - # Make sure we clear out our list of committed records now that we've - # triggered the callbacks for each one. - AfterCommit.committed_records_on_create = [] - end - - def trigger_after_commit_on_update_callbacks - # Trigger the after_commit_on_update callback for each of the committed - # records. - if AfterCommit.committed_records_on_update.any? - AfterCommit.committed_records_on_update.each do |record| - begin - record.after_commit_on_update_callback - rescue - end - end - end - - # Make sure we clear out our list of committed records now that we've - # triggered the callbacks for each one. - AfterCommit.committed_records_on_update = [] - end - - def trigger_after_commit_on_destroy_callbacks - # Trigger the after_commit_on_destroy callback for each of the committed - # records. - if AfterCommit.committed_records_on_destroy.any? - AfterCommit.committed_records_on_destroy.each do |record| - begin - record.after_commit_on_destroy_callback - rescue - end - end - end - - # Make sure we clear out our list of committed records now that we've - # triggered the callbacks for each one. - AfterCommit.committed_records_on_destroy = [] - end - #end protected - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/vendor/after_commit/test/after_commit_test.rb b/vendor/plugins/thinking-sphinx/vendor/after_commit/test/after_commit_test.rb deleted file mode 100644 index f8b42cd..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/after_commit/test/after_commit_test.rb +++ /dev/null @@ -1,53 +0,0 @@ -$LOAD_PATH.unshift(File.dirname(__FILE__) + '/../lib') -require 'test/unit' -require 'rubygems' -require 'activerecord' -require 'after_commit' -require 'after_commit/active_record' -require 'after_commit/connection_adapters' - -ActiveRecord::Base.establish_connection({"adapter" => "sqlite3", "database" => 'test.sqlite3'}) -begin - ActiveRecord::Base.connection.execute("drop table mock_records"); -rescue -end -ActiveRecord::Base.connection.execute("create table mock_records(id int)"); - -require File.dirname(__FILE__) + '/../init.rb' - -class MockRecord < ActiveRecord::Base - attr_accessor :after_commit_on_create_called - attr_accessor :after_commit_on_update_called - attr_accessor :after_commit_on_destroy_called - - after_commit_on_create :do_create - def do_create - self.after_commit_on_create_called = true - end - - after_commit_on_update :do_update - def do_update - self.after_commit_on_update_called = true - end - - after_commit_on_create :do_destroy - def do_destroy - self.after_commit_on_destroy_called = true - end -end - -class AfterCommitTest < Test::Unit::TestCase - def test_after_commit_on_create_is_called - assert_equal true, MockRecord.create!.after_commit_on_create_called - end - - def test_after_commit_on_update_is_called - record = MockRecord.create! 
- record.save - assert_equal true, record.after_commit_on_update_called - end - - def test_after_commit_on_destroy_is_called - assert_equal true, MockRecord.create!.destroy.after_commit_on_destroy_called - end -end diff --git a/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/job.rb b/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/job.rb deleted file mode 100644 index cb62bab..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/job.rb +++ /dev/null @@ -1,251 +0,0 @@ -module Delayed - - class DeserializationError < StandardError - end - - class Job < ActiveRecord::Base - MAX_ATTEMPTS = 25 - MAX_RUN_TIME = 4.hours - set_table_name :delayed_jobs - - # By default failed jobs are destroyed after too many attempts. - # If you want to keep them around (perhaps to inspect the reason - # for the failure), set this to false. - cattr_accessor :destroy_failed_jobs - self.destroy_failed_jobs = true - - # Every worker has a unique name which by default is the pid of the process. - # There are some advantages to overriding this with something which survives worker retarts: - # Workers can safely resume working on tasks which are locked by themselves. The worker will assume that it crashed before. - cattr_accessor :worker_name - self.worker_name = "host:#{Socket.gethostname} pid:#{Process.pid}" rescue "pid:#{Process.pid}" - - NextTaskSQL = '(run_at <= ? AND (locked_at IS NULL OR locked_at < ?) OR (locked_by = ?)) AND failed_at IS NULL' - NextTaskOrder = 'priority DESC, run_at ASC' - - ParseObjectFromYaml = /\!ruby\/\w+\:([^\s]+)/ - - cattr_accessor :min_priority, :max_priority - self.min_priority = nil - self.max_priority = nil - - class LockError < StandardError - end - - def self.clear_locks! - update_all("locked_by = null, locked_at = null", ["locked_by = ?", worker_name]) - end - - def failed? - failed_at - end - alias_method :failed, :failed? - - def payload_object - @payload_object ||= deserialize(self['handler']) - end - - def name - @name ||= begin - payload = payload_object - if payload.respond_to?(:display_name) - payload.display_name - else - payload.class.name - end - end - end - - def payload_object=(object) - self['handler'] = object.to_yaml - end - - def reschedule(message, backtrace = [], time = nil) - if self.attempts < MAX_ATTEMPTS - time ||= Job.db_time_now + (attempts ** 4) + 5 - - self.attempts += 1 - self.run_at = time - self.last_error = message + "\n" + backtrace.join("\n") - self.unlock - save! - else - logger.info "* [JOB] PERMANENTLY removing #{self.name} because of #{attempts} consequetive failures." - destroy_failed_jobs ? destroy : update_attribute(:failed_at, Time.now) - end - end - - def self.enqueue(*args, &block) - object = block_given? ? EvaledJob.new(&block) : args.shift - - unless object.respond_to?(:perform) || block_given? 
- raise ArgumentError, 'Cannot enqueue items which do not respond to perform' - end - - priority = args[0] || 0 - run_at = args[1] - - Job.create(:payload_object => object, :priority => priority.to_i, :run_at => run_at) - end - - def self.find_available(limit = 5, max_run_time = MAX_RUN_TIME) - - time_now = db_time_now - - sql = NextTaskSQL.dup - - conditions = [time_now, time_now - max_run_time, worker_name] - - if self.min_priority - sql << ' AND (priority >= ?)' - conditions << min_priority - end - - if self.max_priority - sql << ' AND (priority <= ?)' - conditions << max_priority - end - - conditions.unshift(sql) - - records = ActiveRecord::Base.silence do - find(:all, :conditions => conditions, :order => NextTaskOrder, :limit => limit) - end - - records.sort_by { rand() } - end - - # Get the payload of the next job we can get an exclusive lock on. - # If no jobs are left we return nil - def self.reserve(max_run_time = MAX_RUN_TIME, &block) - - # We get up to 5 jobs from the db. In face we cannot get exclusive access to a job we try the next. - # this leads to a more even distribution of jobs across the worker processes - find_available(5, max_run_time).each do |job| - begin - logger.info "* [JOB] aquiring lock on #{job.name}" - job.lock_exclusively!(max_run_time, worker_name) - runtime = Benchmark.realtime do - invoke_job(job.payload_object, &block) - job.destroy - end - logger.info "* [JOB] #{job.name} completed after %.4f" % runtime - - return job - rescue LockError - # We did not get the lock, some other worker process must have - logger.warn "* [JOB] failed to aquire exclusive lock for #{job.name}" - rescue StandardError => e - job.reschedule e.message, e.backtrace - log_exception(job, e) - return job - end - end - - nil - end - - # This method is used internally by reserve method to ensure exclusive access - # to the given job. It will rise a LockError if it cannot get this lock. - def lock_exclusively!(max_run_time, worker = worker_name) - now = self.class.db_time_now - affected_rows = if locked_by != worker - # We don't own this job so we will update the locked_by name and the locked_at - self.class.update_all(["locked_at = ?, locked_by = ?", now, worker], ["id = ? and (locked_at is null or locked_at < ?)", id, (now - max_run_time.to_i)]) - else - # We already own this job, this may happen if the job queue crashes. - # Simply resume and update the locked_at - self.class.update_all(["locked_at = ?", now], ["id = ? and locked_by = ?", id, worker]) - end - raise LockError.new("Attempted to aquire exclusive lock failed") unless affected_rows == 1 - - self.locked_at = now - self.locked_by = worker - end - - def unlock - self.locked_at = nil - self.locked_by = nil - end - - # This is a good hook if you need to report job processing errors in additional or different ways - def self.log_exception(job, error) - logger.error "* [JOB] #{job.name} failed with #{error.class.name}: #{error.message} - #{job.attempts} failed attempts" - logger.error(error) - end - - def self.work_off(num = 100) - success, failure = 0, 0 - - num.times do - job = self.reserve do |j| - begin - j.perform - success += 1 - rescue - failure += 1 - raise - end - end - - break if job.nil? - end - - return [success, failure] - end - - # Moved into its own method so that new_relic can trace it. - def self.invoke_job(job, &block) - block.call(job) - end - - private - - def deserialize(source) - handler = YAML.load(source) rescue nil - - unless handler.respond_to?(:perform) - if handler.nil? 
&& source =~ ParseObjectFromYaml - handler_class = $1 - end - attempt_to_load(handler_class || handler.class) - handler = YAML.load(source) - end - - return handler if handler.respond_to?(:perform) - - raise DeserializationError, - 'Job failed to load: Unknown handler. Try to manually require the appropiate file.' - rescue TypeError, LoadError, NameError => e - raise DeserializationError, - "Job failed to load: #{e.message}. Try to manually require the required file." - end - - # Constantize the object so that ActiveSupport can attempt - # its auto loading magic. Will raise LoadError if not successful. - def attempt_to_load(klass) - klass.constantize - end - - def self.db_time_now - (ActiveRecord::Base.default_timezone == :utc) ? Time.now.utc : Time.now - end - - protected - - def before_save - self.run_at ||= self.class.db_time_now - end - - end - - class EvaledJob - def initialize - @job = yield - end - - def perform - eval(@job) - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/message_sending.rb b/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/message_sending.rb deleted file mode 100644 index 80a02f3..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/message_sending.rb +++ /dev/null @@ -1,7 +0,0 @@ -module Delayed - module MessageSending - def send_later(method, *args) - Delayed::Job.enqueue Delayed::PerformableMethod.new(self, method.to_sym, args) - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/performable_method.rb b/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/performable_method.rb deleted file mode 100644 index 18bc77a..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/performable_method.rb +++ /dev/null @@ -1,55 +0,0 @@ -module Delayed - class PerformableMethod < Struct.new(:object, :method, :args) - CLASS_STRING_FORMAT = /^CLASS\:([A-Z][\w\:]+)$/ - AR_STRING_FORMAT = /^AR\:([A-Z][\w\:]+)\:(\d+)$/ - - def initialize(object, method, args) - raise NoMethodError, "undefined method `#{method}' for #{self.inspect}" unless object.respond_to?(method) - - self.object = dump(object) - self.args = args.map { |a| dump(a) } - self.method = method.to_sym - end - - def display_name - case self.object - when CLASS_STRING_FORMAT then "#{$1}.#{method}" - when AR_STRING_FORMAT then "#{$1}##{method}" - else "Unknown##{method}" - end - end - - def perform - load(object).send(method, *args.map{|a| load(a)}) - rescue ActiveRecord::RecordNotFound - # We cannot do anything about objects which were deleted in the meantime - true - end - - private - - def load(arg) - case arg - when CLASS_STRING_FORMAT then $1.constantize - when AR_STRING_FORMAT then $1.constantize.find($2) - else arg - end - end - - def dump(arg) - case arg - when Class then class_to_string(arg) - when ActiveRecord::Base then ar_to_string(arg) - else arg - end - end - - def ar_to_string(obj) - "AR:#{obj.class}:#{obj.id}" - end - - def class_to_string(obj) - "CLASS:#{obj.name}" - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/worker.rb b/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/worker.rb deleted file mode 100644 index 9ae6726..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/delayed_job/lib/delayed/worker.rb +++ /dev/null @@ -1,54 +0,0 @@ -module Delayed - class Worker - SLEEP = 5 - - cattr_accessor :logger - self.logger = 
if defined?(Merb::Logger) - Merb.logger - elsif defined?(RAILS_DEFAULT_LOGGER) - RAILS_DEFAULT_LOGGER - end - - def initialize(options={}) - @quiet = options[:quiet] - Delayed::Job.min_priority = options[:min_priority] if options.has_key?(:min_priority) - Delayed::Job.max_priority = options[:max_priority] if options.has_key?(:max_priority) - end - - def start - say "*** Starting job worker #{Delayed::Job.worker_name}" - - trap('TERM') { say 'Exiting...'; $exit = true } - trap('INT') { say 'Exiting...'; $exit = true } - - loop do - result = nil - - realtime = Benchmark.realtime do - result = Delayed::Job.work_off - end - - count = result.sum - - break if $exit - - if count.zero? - sleep(SLEEP) - else - say "#{count} jobs processed at %.4f j/s, %d failed ..." % [count / realtime, result.last] - end - - break if $exit - end - - ensure - Delayed::Job.clear_locks! - end - - def say(text) - puts text unless @quiet - logger.info text if logger - end - - end -end diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle.rb deleted file mode 100644 index 2f1801a..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle.rb +++ /dev/null @@ -1,30 +0,0 @@ -require 'socket' -require 'timeout' - -require 'riddle/client' -require 'riddle/configuration' -require 'riddle/controller' - -module Riddle #:nodoc: - class ConnectionError < StandardError #:nodoc: - end - - module Version #:nodoc: - Major = 0 - Minor = 9 - Tiny = 8 - # Revision number for RubyForge's sake, taken from what Sphinx - # outputs to the command line. - Rev = 1533 - # Release number to mark my own fixes, beyond feature parity with - # Sphinx itself. - Release = 4 - - String = [Major, Minor, Tiny].join('.') - GemVersion = [Major, Minor, Tiny, Rev, Release].join('.') - end - - def self.escape(string) - string.gsub(/[\(\)\|\-!@~"&\/]/) { |char| "\\#{char}" } - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client.rb deleted file mode 100644 index 1895bba..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client.rb +++ /dev/null @@ -1,619 +0,0 @@ -require 'riddle/client/filter' -require 'riddle/client/message' -require 'riddle/client/response' - -module Riddle - class VersionError < StandardError; end - class ResponseError < StandardError; end - - # This class was heavily based on the existing Client API by Dmytro Shteflyuk - # and Alexy Kovyrin. Their code worked fine, I just wanted something a bit - # more Ruby-ish (ie. lowercase and underscored method names). I also have - # used a few helper classes, just to neaten things up. - # - # Feel free to use it wherever. Send bug reports, patches, comments and - # suggestions to pat at freelancing-gods dot com. - # - # Most properties of the client are accessible through attribute accessors, - # and where relevant use symboles instead of the long constants common in - # other clients. 
- # Some examples: - # - # client.sort_mode = :extended - # client.sort_by = "birthday DESC" - # client.match_mode = :extended - # - # To add a filter, you will need to create a Filter object: - # - # client.filters << Riddle::Client::Filter.new("birthday", - # Time.at(1975, 1, 1).to_i..Time.at(1985, 1, 1).to_i, false) - # - class Client - Commands = { - :search => 0, # SEARCHD_COMMAND_SEARCH - :excerpt => 1, # SEARCHD_COMMAND_EXCERPT - :update => 2, # SEARCHD_COMMAND_UPDATE - :keywords => 3 # SEARCHD_COMMAND_KEYWORDS - } - - Versions = { - :search => 0x113, # VER_COMMAND_SEARCH - :excerpt => 0x100, # VER_COMMAND_EXCERPT - :update => 0x101, # VER_COMMAND_UPDATE - :keywords => 0x100 # VER_COMMAND_KEYWORDS - } - - Statuses = { - :ok => 0, # SEARCHD_OK - :error => 1, # SEARCHD_ERROR - :retry => 2, # SEARCHD_RETRY - :warning => 3 # SEARCHD_WARNING - } - - MatchModes = { - :all => 0, # SPH_MATCH_ALL - :any => 1, # SPH_MATCH_ANY - :phrase => 2, # SPH_MATCH_PHRASE - :boolean => 3, # SPH_MATCH_BOOLEAN - :extended => 4, # SPH_MATCH_EXTENDED - :fullscan => 5, # SPH_MATCH_FULLSCAN - :extended2 => 6 # SPH_MATCH_EXTENDED2 - } - - RankModes = { - :proximity_bm25 => 0, # SPH_RANK_PROXIMITY_BM25 - :bm25 => 1, # SPH_RANK_BM25 - :none => 2, # SPH_RANK_NONE - :wordcount => 3 # SPH_RANK_WORDCOUNT - } - - SortModes = { - :relevance => 0, # SPH_SORT_RELEVANCE - :attr_desc => 1, # SPH_SORT_ATTR_DESC - :attr_asc => 2, # SPH_SORT_ATTR_ASC - :time_segments => 3, # SPH_SORT_TIME_SEGMENTS - :extended => 4, # SPH_SORT_EXTENDED - :expr => 5 # SPH_SORT_EXPR - } - - AttributeTypes = { - :integer => 1, # SPH_ATTR_INTEGER - :timestamp => 2, # SPH_ATTR_TIMESTAMP - :ordinal => 3, # SPH_ATTR_ORDINAL - :bool => 4, # SPH_ATTR_BOOL - :float => 5, # SPH_ATTR_FLOAT - :multi => 0x40000000 # SPH_ATTR_MULTI - } - - GroupFunctions = { - :day => 0, # SPH_GROUPBY_DAY - :week => 1, # SPH_GROUPBY_WEEK - :month => 2, # SPH_GROUPBY_MONTH - :year => 3, # SPH_GROUPBY_YEAR - :attr => 4, # SPH_GROUPBY_ATTR - :attrpair => 5 # SPH_GROUPBY_ATTRPAIR - } - - FilterTypes = { - :values => 0, # SPH_FILTER_VALUES - :range => 1, # SPH_FILTER_RANGE - :float_range => 2 # SPH_FILTER_FLOATRANGE - } - - attr_accessor :server, :port, :offset, :limit, :max_matches, - :match_mode, :sort_mode, :sort_by, :weights, :id_range, :filters, - :group_by, :group_function, :group_clause, :group_distinct, :cut_off, - :retry_count, :retry_delay, :anchor, :index_weights, :rank_mode, - :max_query_time, :field_weights, :timeout - attr_reader :queue - - # Can instantiate with a specific server and port - otherwise it assumes - # defaults of localhost and 3312 respectively. All other settings can be - # accessed and changed via the attribute accessors. - def initialize(server=nil, port=nil) - @server = server || "localhost" - @port = port || 3312 - - reset - - @queue = [] - end - - # Reset attributes and settings to defaults. 
- def reset - # defaults - @offset = 0 - @limit = 20 - @max_matches = 1000 - @match_mode = :all - @sort_mode = :relevance - @sort_by = '' - @weights = [] - @id_range = 0..0 - @filters = [] - @group_by = '' - @group_function = :day - @group_clause = '@group desc' - @group_distinct = '' - @cut_off = 0 - @retry_count = 0 - @retry_delay = 0 - @anchor = {} - # string keys are index names, integer values are weightings - @index_weights = {} - @rank_mode = :proximity_bm25 - @max_query_time = 0 - # string keys are field names, integer values are weightings - @field_weights = {} - @timeout = 0 - end - - # Set the geo-anchor point - with the names of the attributes that contain - # the latitude and longitude (in radians), and the reference position. - # Note that for geocoding to work properly, you must also set - # match_mode to :extended. To sort results by distance, you will - # need to set sort_mode to '@geodist asc' for example. Sphinx - # expects latitude and longitude to be returned from you SQL source - # in radians. - # - # Example: - # client.set_anchor('lat', -0.6591741, 'long', 2.530770) - # - def set_anchor(lat_attr, lat, long_attr, long) - @anchor = { - :latitude_attribute => lat_attr, - :latitude => lat, - :longitude_attribute => long_attr, - :longitude => long - } - end - - # Append a query to the queue. This uses the same parameters as the query - # method. - def append_query(search, index = '*', comments = '') - @queue << query_message(search, index, comments) - end - - # Run all the queries currently in the queue. This will return an array of - # results hashes. - def run - response = Response.new request(:search, @queue) - - results = @queue.collect do - result = { - :matches => [], - :fields => [], - :attributes => {}, - :attribute_names => [], - :words => {} - } - - result[:status] = response.next_int - case result[:status] - when Statuses[:warning] - result[:warning] = response.next - when Statuses[:error] - result[:error] = response.next - next result - end - - result[:fields] = response.next_array - - attributes = response.next_int - for i in 0...attributes - attribute_name = response.next - type = response.next_int - - result[:attributes][attribute_name] = type - result[:attribute_names] << attribute_name - end - - matches = response.next_int - is_64_bit = response.next_int - for i in 0...matches - doc = is_64_bit > 0 ? response.next_64bit_int : response.next_int - weight = response.next_int - - result[:matches] << {:doc => doc, :weight => weight, :index => i, :attributes => {}} - result[:attribute_names].each do |attr| - result[:matches].last[:attributes][attr] = attribute_from_type( - result[:attributes][attr], response - ) - end - end - - result[:total] = response.next_int.to_i || 0 - result[:total_found] = response.next_int.to_i || 0 - result[:time] = ('%.3f' % (response.next_int / 1000.0)).to_f || 0.0 - - words = response.next_int - for i in 0...words - word = response.next - docs = response.next_int - hits = response.next_int - result[:words][word] = {:docs => docs, :hits => hits} - end - - result - end - - @queue.clear - results - end - - # Query the Sphinx daemon - defaulting to all indexes, but you can specify - # a specific one if you wish. The search parameter should be a string - # following Sphinx's expectations. 
- # - # The object returned from this method is a hash with the following keys: - # - # * :matches - # * :fields - # * :attributes - # * :attribute_names - # * :words - # * :total - # * :total_found - # * :time - # * :status - # * :warning (if appropriate) - # * :error (if appropriate) - # - # The key :matches returns an array of hashes - the actual search - # results. Each hash has the document id (:doc), the result - # weighting (:weight), and a hash of the attributes for the - # document (:attributes). - # - # The :fields and :attribute_names keys return list of - # fields and attributes for the documents. The key :attributes - # will return a hash of attribute name and type pairs, and :words - # returns a hash of hashes representing the words from the search, with the - # number of documents and hits for each, along the lines of: - # - # results[:words]["Pat"] #=> {:docs => 12, :hits => 15} - # - # :total, :total_found and :time return the - # number of matches available, the total number of matches (which may be - # greater than the maximum available, depending on the number of matches - # and your sphinx configuration), and the time in milliseconds that the - # query took to run. - # - # :status is the error code for the query - and if there was a - # related warning, it will be under the :warning key. Fatal errors - # will be described under :error. - # - def query(search, index = '*', comments = '') - @queue << query_message(search, index, comments) - self.run.first - end - - # Build excerpts from search terms (the +words+) and the text of documents. Excerpts are bodies of text that have the +words+ highlighted. - # They may also be abbreviated to fit within a word limit. - # - # As part of the options hash, you will need to - # define: - # * :docs - # * :words - # * :index - # - # Optional settings include: - # * :before_match (defaults to ) - # * :after_match (defaults to ) - # * :chunk_separator (defaults to ' … ' - which is an HTML ellipsis) - # * :limit (defaults to 256) - # * :around (defaults to 5) - # * :exact_phrase (defaults to false) - # * :single_passage (defaults to false) - # - # The defaults differ from the official PHP client, as I've opted for - # semantic HTML markup. - # - # Example: - # - # client.excerpts(:docs => ["Pat Allan, Pat Cash"], :words => 'Pat', :index => 'pats') - # #=> ["Pat Allan, Pat Cash"] - # - # lorem_lipsum = "Lorem ipsum dolor..." - # - # client.excerpts(:docs => ["Pat Allan, #{lorem_lipsum} Pat Cash"], :words => 'Pat', :index => 'pats') - # #=> ["Pat Allan, Lorem ipsum dolor sit amet, consectetur adipisicing - # elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua … . Excepteur - # sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est - # laborum. Pat Cash"] - # - # Workflow: - # - # Excerpt creation is completely isolated from searching the index. The nominated index is only used to - # discover encoding and charset information. - # - # Therefore, the workflow goes: - # - # 1. Do the sphinx query. - # 2. Fetch the documents found by sphinx from their repositories. - # 3. Pass the documents' text to +excerpts+ for marking up of matched terms. 
- # - def excerpts(options = {}) - options[:index] ||= '*' - options[:before_match] ||= '' - options[:after_match] ||= '' - options[:chunk_separator] ||= ' … ' # ellipsis - options[:limit] ||= 256 - options[:around] ||= 5 - options[:exact_phrase] ||= false - options[:single_passage] ||= false - - response = Response.new request(:excerpt, excerpts_message(options)) - - options[:docs].collect { response.next } - end - - # Update attributes - first parameter is the relevant index, second is an - # array of attributes to be updated, and the third is a hash, where the - # keys are the document ids, and the values are arrays with the attribute - # values - in the same order as the second parameter. - # - # Example: - # - # client.update('people', ['birthday'], {1 => [Time.at(1982, 20, 8).to_i]}) - # - def update(index, attributes, values_by_doc) - response = Response.new request( - :update, - update_message(index, attributes, values_by_doc) - ) - - response.next_int - end - - # Generates a keyword list for a given query. Each keyword is represented - # by a hash, with keys :tokenised and :normalised. If return_hits is set to - # true it will also report on the number of hits and documents for each - # keyword (see :hits and :docs keys respectively). - def keywords(query, index, return_hits = false) - response = Response.new request( - :keywords, - keywords_message(query, index, return_hits) - ) - - (0...response.next_int).collect do - hash = {} - hash[:tokenised] = response.next - hash[:normalised] = response.next - - if return_hits - hash[:docs] = response.next_int - hash[:hits] = response.next_int - end - - hash - end - end - - private - - # Connects to the Sphinx daemon, and yields a socket to use. The socket is - # closed at the end of the block. - def connect(&block) - socket = nil - if @timeout == 0 - socket = initialise_connection - else - begin - Timeout.timeout(@timeout) { socket = initialise_connection } - rescue Timeout::Error - raise Riddle::ConnectionError, - "Connection to #{@server} on #{@port} timed out after #{@timeout} seconds" - end - end - - begin - yield socket - ensure - socket.close - end - end - - def initialise_connection - socket = TCPSocket.new @server, @port - - # Checking version - version = socket.recv(4).unpack('N*').first - if version < 1 - socket.close - raise VersionError, "Can only connect to searchd version 1.0 or better, not version #{version}" - end - - # Send version - socket.send [1].pack('N'), 0 - - socket - end - - # Send a collection of messages, for a command type (eg, search, excerpts, - # update), to the Sphinx daemon. - def request(command, messages) - response = "" - status = -1 - version = 0 - length = 0 - message = Array(messages).join("") - - connect do |socket| - case command - when :search - # Message length is +4 to account for the following count value for - # the number of messages (well, that's what I'm assuming). - socket.send [ - Commands[command], Versions[command], - 4+message.length, messages.length - ].pack("nnNN") + message, 0 - else - socket.send [ - Commands[command], Versions[command], message.length - ].pack("nnN") + message, 0 - end - - header = socket.recv(8) - status, version, length = header.unpack('n2N') - - while response.length < (length || 0) - part = socket.recv(length - response.length) - response << part if part - end - end - - if response.empty? 
|| response.length != length - raise ResponseError, "No response from searchd (status: #{status}, version: #{version})" - end - - case status - when Statuses[:ok] - if version < Versions[command] - puts format("searchd command v.%d.%d older than client (v.%d.%d)", - version >> 8, version & 0xff, - Versions[command] >> 8, Versions[command] & 0xff) - end - response - when Statuses[:warning] - length = response[0, 4].unpack('N*').first - puts response[4, length] - response[4 + length, response.length - 4 - length] - when Statuses[:error], Statuses[:retry] - raise ResponseError, "searchd error (status: #{status}): #{response[4, response.length - 4]}" - else - raise ResponseError, "Unknown searchd error (status: #{status})" - end - end - - # Generation of the message to send to Sphinx for a search. - def query_message(search, index, comments = '') - message = Message.new - - # Mode, Limits, Sort Mode - message.append_ints @offset, @limit, MatchModes[@match_mode], - RankModes[@rank_mode], SortModes[@sort_mode] - message.append_string @sort_by - - # Query - message.append_string search - - # Weights - message.append_int @weights.length - message.append_ints *@weights - - # Index - message.append_string index - - # ID Range - message.append_int 1 - message.append_64bit_ints @id_range.first, @id_range.last - - # Filters - message.append_int @filters.length - @filters.each { |filter| message.append filter.query_message } - - # Grouping - message.append_int GroupFunctions[@group_function] - message.append_string @group_by - message.append_int @max_matches - message.append_string @group_clause - message.append_ints @cut_off, @retry_count, @retry_delay - message.append_string @group_distinct - - # Anchor Point - if @anchor.empty? - message.append_int 0 - else - message.append_int 1 - message.append_string @anchor[:latitude_attribute] - message.append_string @anchor[:longitude_attribute] - message.append_floats @anchor[:latitude], @anchor[:longitude] - end - - # Per Index Weights - message.append_int @index_weights.length - @index_weights.each do |key,val| - message.append_string key.to_s - message.append_int val - end - - # Max Query Time - message.append_int @max_query_time - - # Per Field Weights - message.append_int @field_weights.length - @field_weights.each do |key,val| - message.append_string key.to_s - message.append_int val - end - - message.append_string comments - - message.to_s - end - - # Generation of the message to send to Sphinx for an excerpts request. - def excerpts_message(options) - message = Message.new - - flags = 1 - flags |= 2 if options[:exact_phrase] - flags |= 4 if options[:single_passage] - flags |= 8 if options[:use_boundaries] - flags |= 16 if options[:weight_order] - - message.append [0, flags].pack('N2') # 0 = mode - message.append_string options[:index] - message.append_string options[:words] - - # options - message.append_string options[:before_match] - message.append_string options[:after_match] - message.append_string options[:chunk_separator] - message.append_ints options[:limit], options[:around] - - message.append_array options[:docs] - - message.to_s - end - - # Generation of the message to send to Sphinx to update attributes of a - # document. 
- def update_message(index, attributes, values_by_doc) - message = Message.new - - message.append_string index - message.append_array attributes - - message.append_int values_by_doc.length - values_by_doc.each do |key,values| - message.append_64bit_int key # document ID - message.append_ints *values # array of new values (integers) - end - - message.to_s - end - - # Generates the simple message to send to the daemon for a keywords request. - def keywords_message(query, index, return_hits) - message = Message.new - - message.append_string query - message.append_string index - message.append_int return_hits ? 1 : 0 - - message.to_s - end - - def attribute_from_type(type, response) - type -= AttributeTypes[:multi] if is_multi = type > AttributeTypes[:multi] - - case type - when AttributeTypes[:float] - is_multi ? response.next_float_array : response.next_float - else - is_multi ? response.next_int_array : response.next_int - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client/filter.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client/filter.rb deleted file mode 100644 index 65aa26a..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client/filter.rb +++ /dev/null @@ -1,53 +0,0 @@ -module Riddle - class Client - # Used for querying Sphinx. - class Filter - attr_accessor :attribute, :values, :exclude - - # Attribute name, values (which can be an array or a range), and whether - # the filter should be exclusive. - def initialize(attribute, values, exclude=false) - @attribute, @values, @exclude = attribute, values, exclude - end - - def exclude? - self.exclude - end - - # Returns the message for this filter to send to the Sphinx service - def query_message - message = Message.new - - message.append_string self.attribute.to_s - case self.values - when Range - if self.values.first.is_a?(Float) && self.values.last.is_a?(Float) - message.append_int FilterTypes[:float_range] - message.append_floats self.values.first, self.values.last - else - message.append_int FilterTypes[:range] - message.append_ints self.values.first, self.values.last - end - when Array - message.append_int FilterTypes[:values] - message.append_int self.values.length - # using to_f is a hack from the php client - to workaround 32bit - # signed ints on x32 platforms - message.append_ints *self.values.collect { |val| - case val - when TrueClass - 1.0 - when FalseClass - 0.0 - else - val.to_f - end - } - end - message.append_int self.exclude? ? 1 : 0 - - message.to_s - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client/message.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client/message.rb deleted file mode 100644 index 4b29ad9..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client/message.rb +++ /dev/null @@ -1,65 +0,0 @@ -module Riddle - class Client - # This class takes care of the translation of ints, strings and arrays to - # the format required by the Sphinx service. - class Message - def initialize - @message = "" - @size_method = @message.respond_to?(:bytesize) ? 
:bytesize : :length - end - - # Append raw data (only use if you know what you're doing) - def append(*args) - return if args.length == 0 - - args.each { |arg| @message << arg } - end - - # Append a string's length, then the string itself - def append_string(str) - @message << [str.send(@size_method)].pack('N') + str - end - - # Append an integer - def append_int(int) - @message << [int].pack('N') - end - - def append_64bit_int(int) - @message << [int >> 32, int & 0xFFFFFFFF].pack('NN') - end - - # Append a float - def append_float(float) - @message << [float].pack('f').unpack('L*').pack("N") - end - - # Append multiple integers - def append_ints(*ints) - ints.each { |int| append_int(int) } - end - - def append_64bit_ints(*ints) - ints.each { |int| append_64bit_int(int) } - end - - # Append multiple floats - def append_floats(*floats) - floats.each { |float| append_float(float) } - end - - # Append an array of strings - first appends the length of the array, - # then each item's length and value. - def append_array(array) - append_int(array.length) - - array.each { |item| append_string(item) } - end - - # Returns the entire message - def to_s - @message - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client/response.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client/response.rb deleted file mode 100644 index 18423e6..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/client/response.rb +++ /dev/null @@ -1,84 +0,0 @@ -module Riddle - class Client - # Used to interrogate responses from the Sphinx daemon. Keep in mind none - # of the methods here check whether the data they're grabbing are what the - # user expects - it just assumes the user knows what the data stream is - # made up of. 
- class Response - # Create with the data to interpret - def initialize(str) - @str = str - @marker = 0 - end - - # Return the next string value in the stream - def next - len = next_int - result = @str[@marker, len] - @marker += len - - return result - end - - # Return the next integer value from the stream - def next_int - int = @str[@marker, 4].unpack('N*').first - @marker += 4 - - return int - end - - def next_64bit_int - high, low = @str[@marker, 8].unpack('N*N*')[0..1] - @marker += 8 - - return (high << 32) + low - end - - # Return the next float value from the stream - def next_float - float = @str[@marker, 4].unpack('N*').pack('L').unpack('f*').first - @marker += 4 - - return float - end - - # Returns an array of string items - def next_array - count = next_int - items = [] - for i in 0...count - items << self.next - end - - return items - end - - # Returns an array of int items - def next_int_array - count = next_int - items = [] - for i in 0...count - items << self.next_int - end - - return items - end - - def next_float_array - count = next_int - items = [] - for i in 0...count - items << self.next_float - end - - return items - end - - # Returns the length of the streamed data - def length - @str.length - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration.rb deleted file mode 100644 index 666bca6..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration.rb +++ /dev/null @@ -1,33 +0,0 @@ -require 'riddle/configuration/section' - -require 'riddle/configuration/distributed_index' -require 'riddle/configuration/index' -require 'riddle/configuration/indexer' -require 'riddle/configuration/remote_index' -require 'riddle/configuration/searchd' -require 'riddle/configuration/source' -require 'riddle/configuration/sql_source' -require 'riddle/configuration/xml_source' - -module Riddle - class Configuration - class ConfigurationError < StandardError #:nodoc: - end - - attr_reader :indexes, :searchd - attr_accessor :indexer - - def initialize - @indexer = Riddle::Configuration::Indexer.new - @searchd = Riddle::Configuration::Searchd.new - @indexes = [] - end - - def render - ( - [@indexer.render, @searchd.render] + - @indexes.collect { |index| index.render } - ).join("\n") - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/distributed_index.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/distributed_index.rb deleted file mode 100644 index 9c7ef86..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/distributed_index.rb +++ /dev/null @@ -1,48 +0,0 @@ -module Riddle - class Configuration - class DistributedIndex < Riddle::Configuration::Section - self.settings = [:type, :local, :agent, :agent_connect_timeout, - :agent_query_timeout] - - attr_accessor :name, :local_indexes, :remote_indexes, - :agent_connect_timeout, :agent_query_timeout - - def initialize(name) - @name = name - @local_indexes = [] - @remote_indexes = [] - end - - def type - "distributed" - end - - def local - self.local_indexes - end - - def agent - agents = remote_indexes.collect { |index| index.remote }.uniq - agents.collect { |agent| - agent + ":" + remote_indexes.select { |index| - index.remote == agent - }.collect { |index| index.name }.join(",") - } - end - - def render - raise ConfigurationError unless 
valid? - - ( - ["index #{name}", "{"] + - settings_body + - ["}", ""] - ).join("\n") - end - - def valid? - @local_indexes.length > 0 || @remote_indexes.length > 0 - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/index.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/index.rb deleted file mode 100644 index 4cd8a03..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/index.rb +++ /dev/null @@ -1,142 +0,0 @@ -module Riddle - class Configuration - class Index < Riddle::Configuration::Section - self.settings = [:source, :path, :docinfo, :mlock, :morphology, - :stopwords, :wordforms, :exceptions, :min_word_len, :charset_type, - :charset_table, :ignore_chars, :min_prefix_len, :min_infix_len, - :prefix_fields, :infix_fields, :enable_star, :ngram_len, :ngram_chars, - :phrase_boundary, :phrase_boundary_step, :html_strip, - :html_index_attrs, :html_remove_elements, :preopen] - - attr_accessor :name, :parent, :sources, :path, :docinfo, :mlock, - :morphologies, :stopword_files, :wordform_files, :exception_files, - :min_word_len, :charset_type, :charset_table, :ignore_characters, - :min_prefix_len, :min_infix_len, :prefix_field_names, - :infix_field_names, :enable_star, :ngram_len, :ngram_characters, - :phrase_boundaries, :phrase_boundary_step, :html_strip, - :html_index_attrs, :html_remove_element_tags, :preopen - - def initialize(name, *sources) - @name = name - @sources = sources - @morphologies = [] - @stopword_files = [] - @wordform_files = [] - @exception_files = [] - @ignore_characters = [] - @prefix_field_names = [] - @infix_field_names = [] - @ngram_characters = [] - @phrase_boundaries = [] - @html_remove_element_tags = [] - end - - def source - @sources.collect { |s| s.name } - end - - def morphology - nil_join @morphologies, ", " - end - - def morphology=(morphology) - @morphologies = nil_split morphology, /,\s?/ - end - - def stopwords - nil_join @stopword_files, " " - end - - def stopwords=(stopwords) - @stopword_files = nil_split stopwords, ' ' - end - - def wordforms - nil_join @wordform_files, " " - end - - def wordforms=(wordforms) - @wordform_files = nil_split wordforms, ' ' - end - - def exceptions - nil_join @exception_files, " " - end - - def exceptions=(exceptions) - @exception_files = nil_split exceptions, ' ' - end - - def ignore_chars - nil_join @ignore_characters, ", " - end - - def ignore_chars=(ignore_chars) - @ignore_characters = nil_split ignore_chars, /,\s?/ - end - - def prefix_fields - nil_join @prefix_field_names, ", " - end - - def infix_fields - nil_join @infix_field_names, ", " - end - - def ngram_chars - nil_join @ngram_characters, ", " - end - - def ngram_chars=(ngram_chars) - @ngram_characters = nil_split ngram_chars, /,\s?/ - end - - def phrase_boundary - nil_join @phrase_boundaries, ", " - end - - def phrase_boundary=(phrase_boundary) - @phrase_boundaries = nil_split phrase_boundary, /,\s?/ - end - - def html_remove_elements - nil_join @html_remove_element_tags, ", " - end - - def html_remove_elements=(html_remove_elements) - @html_remove_element_tags = nil_split html_remove_elements, /,\s?/ - end - - def render - raise ConfigurationError, "#{@name} #{@sources.inspect} #{@path} #{@parent}" unless valid? - - inherited_name = "#{name}" - inherited_name << " : #{parent}" if parent - ( - @sources.collect { |s| s.render } + - ["index #{inherited_name}", "{"] + - settings_body + - ["}", ""] - ).join("\n") - end - - def valid? 
- (!@name.nil?) && (!( @sources.length == 0 || @path.nil? ) || !@parent.nil?) - end - - private - - def nil_split(string, pattern) - (string || "").split(pattern) - end - - def nil_join(array, delimiter) - if array.length == 0 - nil - else - array.join(delimiter) - end - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/indexer.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/indexer.rb deleted file mode 100644 index 3a88b2b..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/indexer.rb +++ /dev/null @@ -1,19 +0,0 @@ -module Riddle - class Configuration - class Indexer < Riddle::Configuration::Section - self.settings = [:mem_limit, :max_iops, :max_iosize] - - attr_accessor *self.settings - - def render - raise ConfigurationError unless valid? - - ( - ["indexer", "{"] + - settings_body + - ["}", ""] - ).join("\n") - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/remote_index.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/remote_index.rb deleted file mode 100644 index c9c8323..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/remote_index.rb +++ /dev/null @@ -1,17 +0,0 @@ -module Riddle - class Configuration - class RemoteIndex - attr_accessor :address, :port, :name - - def initialize(address, port, name) - @address = address - @port = port - @name = name - end - - def remote - "#{address}:#{port}" - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/searchd.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/searchd.rb deleted file mode 100644 index d113488..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/searchd.rb +++ /dev/null @@ -1,25 +0,0 @@ -module Riddle - class Configuration - class Searchd < Riddle::Configuration::Section - self.settings = [:address, :port, :log, :query_log, :read_timeout, - :max_children, :pid_file, :max_matches, :seamless_rotate, - :preopen_indexes, :unlink_old] - - attr_accessor *self.settings - - def render - raise ConfigurationError unless valid? - - ( - ["searchd", "{"] + - settings_body + - ["}", ""] - ).join("\n") - end - - def valid? - !( @port.nil? || @pid_file.nil? ) - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/section.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/section.rb deleted file mode 100644 index acd891b..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/section.rb +++ /dev/null @@ -1,37 +0,0 @@ -module Riddle - class Configuration - class Section - class << self - attr_accessor :settings - end - - settings = [] - - def valid? - true - end - - private - - def settings_body - self.class.settings.select { |setting| - !send(setting).nil? - }.collect { |setting| - if send(setting) == "" - conf = " #{setting} = " - else - conf = setting_to_array(setting).collect { |set| - " #{setting} = #{set}" - } - end - conf.length == 0 ? nil : conf - }.flatten.compact - end - - def setting_to_array(setting) - value = send(setting) - value.is_a?(Array) ? 
value : [value] - end - end - end -end diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/source.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/source.rb deleted file mode 100644 index 15e75f9..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/source.rb +++ /dev/null @@ -1,23 +0,0 @@ -module Riddle - class Configuration - class Source < Riddle::Configuration::Section - attr_accessor :name, :parent, :type - - def render - raise ConfigurationError unless valid? - - inherited_name = "#{name}" - inherited_name << " : #{parent}" if parent - ( - ["source #{inherited_name}", "{"] + - settings_body + - ["}", ""] - ).join("\n") - end - - def valid? - !( @name.nil? || @type.nil? ) - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/sql_source.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/sql_source.rb deleted file mode 100644 index 8521c97..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/sql_source.rb +++ /dev/null @@ -1,34 +0,0 @@ -module Riddle - class Configuration - class SQLSource < Riddle::Configuration::Source - self.settings = [:type, :sql_host, :sql_user, :sql_pass, :sql_db, - :sql_port, :sql_sock, :mysql_connect_flags, :sql_query_pre, :sql_query, - :sql_query_range, :sql_range_step, :sql_attr_uint, :sql_attr_bool, - :sql_attr_timestamp, :sql_attr_str2ordinal, :sql_attr_float, - :sql_attr_multi, :sql_query_post, :sql_query_post_index, - :sql_ranged_throttle, :sql_query_info] - - attr_accessor *self.settings - - def initialize(name, type) - @name = name - @type = type - - @sql_query_pre = [] - @sql_attr_uint = [] - @sql_attr_bool = [] - @sql_attr_timestamp = [] - @sql_attr_str2ordinal = [] - @sql_attr_float = [] - @sql_attr_multi = [] - @sql_query_post = [] - @sql_query_post_index = [] - end - - def valid? - super && (!( @sql_host.nil? || @sql_user.nil? || @sql_db.nil? || - @sql_query.nil? ) || !@parent.nil?) - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/xml_source.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/xml_source.rb deleted file mode 100644 index 0c3d3ec..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/configuration/xml_source.rb +++ /dev/null @@ -1,28 +0,0 @@ -module Riddle - class Configuration - class XMLSource < Riddle::Configuration::Source - self.settings = [:type, :xmlpipe_command, :xmlpipe_field, - :xmlpipe_attr_uint, :xmlpipe_attr_bool, :xmlpipe_attr_timestamp, - :xmlpipe_attr_str2ordinal, :xmlpipe_attr_float, :xmlpipe_attr_multi] - - attr_accessor *self.settings - - def initialize(name, type) - @name = name - @type = type - - @xmlpipe_field = [] - @xmlpipe_attr_uint = [] - @xmlpipe_attr_bool = [] - @xmlpipe_attr_timestamp = [] - @xmlpipe_attr_str2ordinal = [] - @xmlpipe_attr_float = [] - @xmlpipe_attr_multi = [] - end - - def valid? - super && ( !@xmlpipe_command.nil? || !parent.nil? 
) - end - end - end -end \ No newline at end of file diff --git a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/controller.rb b/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/controller.rb deleted file mode 100644 index 92709cd..0000000 --- a/vendor/plugins/thinking-sphinx/vendor/riddle/lib/riddle/controller.rb +++ /dev/null @@ -1,44 +0,0 @@ -module Riddle - class Controller - def initialize(configuration, path) - @configuration = configuration - @path = path - end - - def index - cmd = "indexer --config #{@path} --all" - cmd << " --rotate" if running? - `#{cmd}` - end - - def start - return if running? - - cmd = "searchd --pidfile --config #{@path}" - `#{cmd}` - - sleep(1) - - unless running? - puts "Failed to start searchd daemon. Check #{@configuration.searchd.log}." - end - end - - def stop - return unless running? - `kill #{pid}` - end - - def pid - if File.exists?("#{@configuration.searchd.pid_file}") - `cat #{@configuration.searchd.pid_file}`[/\d+/] - else - nil - end - end - - def running? - pid && `ps #{pid} | wc -l`.to_i > 1 - end - end -end \ No newline at end of file
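
Note on the deletions above: the vendored after_commit, delayed_job and riddle trees shipped inside the thinking-sphinx plugin; with the move to the gem-managed thinking-sphinx 2.x stack they are presumably satisfied as ordinary gem dependencies (thinking-sphinx depends on riddle, ts-delayed-delta on delayed_job), and ActiveRecord 3 provides transactional callbacks natively. A minimal, hedged sketch of how the plugin-style callbacks map onto the built-in Rails 3 API follows; the Photo model and its reindex method are hypothetical, used only for illustration and not part of this application.

    require 'active_record'

    class Photo < ActiveRecord::Base
      # Vendored-plugin style (deleted above):
      #   after_commit_on_create  :reindex
      #   after_commit_on_destroy :reindex
      #
      # Rails 3 built-in equivalents: the callback fires only after the
      # enclosing transaction commits, which is what the plugin emulated.
      after_commit :reindex, :on => :create
      after_commit :reindex, :on => :destroy

      private

      # Placeholder: in a Thinking Sphinx setup this would typically enqueue
      # a delta-index job (ts-delayed-delta wires this up for indexed models).
      def reindex
      end
    end
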