}
+ return item
+ end
+
+ def vote_tooltip_widget(post)
+ return %{}
+ end
+
+ def vote_widget(post, user, options = {})
+ html = []
+
+ html << %{}
+
+ if user.is_anonymous?
+ current_user_vote = -100
+ else
+ current_user_vote = PostVotes.find_by_ids(user.id, post.id).score rescue 0
+ end
+
+ #(CONFIG["vote_sum_min"]..CONFIG["vote_sum_max"]).each do |vote|
+ if !user.is_anonymous?
+ html << link_to_function('↶', "Post.vote(#{post.id}, 0)", :class => "star", :onmouseover => "Post.vote_mouse_over('Remove vote', #{post.id}, 0)", :onmouseout => "Post.vote_mouse_out('', #{post.id}, 0)")
+ html << " "
+
+ (1..3).each do |vote|
+ star = '★☆'
+
+ desc = CONFIG["vote_descriptions"][vote]
+
+ html << link_to_function(star, "Post.vote(#{post.id}, #{vote})", :class => "star star-#{vote}", :id => "star-#{vote}-#{post.id}", :onmouseover => "Post.vote_mouse_over('#{desc}', #{post.id}, #{vote})", :onmouseout => "Post.vote_mouse_out('#{desc}', #{post.id}, #{vote})")
+ end
+
+ html << " (" + link_to_function('vote up', "Post.vote(#{post.id}, Post.posts.get(#{post.id}).vote + 1)", :class => "star") + ")"
+ else
+ html << "(" + link_to_function('vote up', "Post.vote(#{post.id}, +1)", :class => "star") + ")"
+ end
+
+ html << %{}
+ return html
+ end
+
+ def get_tag_types(posts)
+ post_tags = []
+ posts.each { |post| post_tags += post.cached_tags.split(/ /) }
+ tag_types = {}
+ post_tags.uniq.each { |tag| tag_types[tag] = Tag.type_name(tag) }
+ return tag_types
+ end
+
+ def get_service_icon(service)
+ ExternalPost.get_service_icon(service)
+ end
+end
diff --git a/app/helpers/post_tag_history_helper.rb b/app/helpers/post_tag_history_helper.rb
new file mode 100644
index 00000000..7191ebe8
--- /dev/null
+++ b/app/helpers/post_tag_history_helper.rb
@@ -0,0 +1,35 @@
+module PostTagHistoryHelper
+ def tag_list(tags, options = {})
+ return "" if tags.blank?
+ prefix = options[:prefix] || ""
+ obsolete = options[:obsolete] || []
+
+ html = ""
+
+ # tags contains versioned metatags; split these out.
+ metatags, tags = tags.partition {|x| x=~ /^(?:rating):/}
+ metatags.each do |name|
+ obsolete_tag = ([name] & obsolete).empty? ? "":" obsolete-tag-change"
+ html << %{}
+
+ html << %{#{prefix}#{h(name)} }
+ html << ''
+ end
+
+ tags = Tag.find(:all, :conditions => ["name in (?)", tags], :select => "name").inject([]) {|all, x| all << x.name; all}.to_a.sort {|a, b| a <=> b}
+
+ tags.each do |name|
+ name ||= "UNKNOWN"
+
+ tag_type = Tag.type_name(name)
+
+ obsolete_tag = ([name] & obsolete).empty? ? "":" obsolete-tag-change"
+ html << %{}
+
+ html << %{#{prefix}#{h(name)} }
+ html << ''
+ end
+
+ return html
+ end
+end
diff --git a/app/helpers/report_helper.rb b/app/helpers/report_helper.rb
new file mode 100644
index 00000000..0847d3bf
--- /dev/null
+++ b/app/helpers/report_helper.rb
@@ -0,0 +1,2 @@
+module ReportHelper
+end
diff --git a/app/helpers/static_helper.rb b/app/helpers/static_helper.rb
new file mode 100644
index 00000000..8cfc9af4
--- /dev/null
+++ b/app/helpers/static_helper.rb
@@ -0,0 +1,2 @@
+module StaticHelper
+end
diff --git a/app/helpers/tag_alias_helper.rb b/app/helpers/tag_alias_helper.rb
new file mode 100644
index 00000000..2940c5af
--- /dev/null
+++ b/app/helpers/tag_alias_helper.rb
@@ -0,0 +1,2 @@
+module TagAliasHelper
+end
diff --git a/app/helpers/tag_helper.rb b/app/helpers/tag_helper.rb
new file mode 100644
index 00000000..c23e5fab
--- /dev/null
+++ b/app/helpers/tag_helper.rb
@@ -0,0 +1,91 @@
+module TagHelper
+ def tag_link(tag)
+ tag_type = Tag.type_name(tag)
+ html = %{}
+ html << link_to(h(tag), :action => "index", :tags => tag)
+ html << %{}
+ end
+
+ def tag_links(tags, options = {})
+ return "" if tags.blank?
+ prefix = options[:prefix] || ""
+
+ html = ""
+
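+ # Normalize the input: tags may be an array of tag-name strings, of hashes with
+ # "name"/"post_count" keys, or of Tag records. Each entry becomes a [name, post_count, id]
+ # triple below (id is nil for hashes, since it isn't selected there).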
+ case tags[0]
+ when String
+ tags = Tag.find(:all, :conditions => ["name in (?)", tags], :select => "name, post_count, id").inject({}) {|all, x| all[x.name] = [x.post_count, x.id]; all}.sort {|a, b| a[0] <=> b[0]}.map { |a| [a[0], a[1][0], a[1][1]] }
+
+ when Hash
+ tags = tags.map {|x| [x["name"], x["post_count"], nil]}
+
+ when Tag
+ tags = tags.map {|x| [x.name, x.post_count, x.id]}
+ end
+
+ tags.each do |name, count, id|
+ name ||= "UNKNOWN"
+
+ tag_type = Tag.type_name(name)
+
+ html << %{}
+
+ if CONFIG["enable_artists"] && tag_type == "artist"
+ html << %{? }
+ else
+ html << %{? }
+ end
+
+ if @current_user.is_privileged_or_higher?
+ html << %{+ }
+ html << %{– }
+ end
+
+ if options[:with_hover_highlight] then
+ mouseover=%{ onmouseover='Post.highlight_posts_with_tag("#{escape_javascript(name).gsub("'", "")}")'}
+ mouseout=%{ onmouseout='Post.highlight_posts_with_tag(null)'}
+ end
+ html << %{#{h(name.tr("_", " "))} }
+ html << %{#{count} }
+ html << ''
+ end
+
+ if options[:with_aliases] then
+ # Also list the alternate names that alias to these tags, so search engines can find
+ # the original tag under its old names.
+ id_list = tags.map { |t| t[2] }
+ alternate_tags = TagAlias.find(:all, :select => :name, :conditions => ["alias_id IN (?)", id_list]).map { |t| t.name }.uniq
+ if not alternate_tags.empty?
+ html << %{#{alternate_tags.map { |t| t.tr("_", " ") }.join(" ")}}
+ end
+ end
+
+ return html
+ end
+
+ def cloud_view(tags, divisor = 6)
+ html = ""
+
+ tags.sort {|a, b| a["name"] <=> b["name"]}.each do |tag|
+ size = Math.log(tag["post_count"].to_i) / divisor
+ size = 0.8 if size < 0.8
+ html << %{#{h(tag["name"])} }
+ end
+
+ return html
+ end
+
+ def related_tags(tags)
+ if tags.blank?
+ return ""
+ end
+
+ all = []
+ pattern, related = tags.split(/\s+/).partition {|i| i.include?("*")}
+ pattern.each {|i| all += Tag.find(:all, :conditions => ["name LIKE ?", i.tr("*", "%")]).map {|j| j.name}}
+ if related.any?
+ Tag.find(:all, :conditions => ["name IN (?)", TagAlias.to_aliased(related)]).each {|i| all += i.related.map {|j| j[0]}}
+ end
+ all.join(" ")
+ end
+end
diff --git a/app/helpers/tag_implication_helper.rb b/app/helpers/tag_implication_helper.rb
new file mode 100644
index 00000000..423c54a8
--- /dev/null
+++ b/app/helpers/tag_implication_helper.rb
@@ -0,0 +1,2 @@
+module TagImplicationHelper
+end
diff --git a/app/helpers/user_helper.rb b/app/helpers/user_helper.rb
new file mode 100644
index 00000000..0147c3fe
--- /dev/null
+++ b/app/helpers/user_helper.rb
@@ -0,0 +1,2 @@
+module UserHelper
+end
diff --git a/app/helpers/wiki_helper.rb b/app/helpers/wiki_helper.rb
new file mode 100644
index 00000000..d97194c2
--- /dev/null
+++ b/app/helpers/wiki_helper.rb
@@ -0,0 +1,9 @@
+module WikiHelper
+ def linked_from(to)
+ links = to.find_pages_that_link_to_this.map do |page|
+ link_to(h(page.pretty_title), :controller => "wiki", :action => "show", :title => page.title)
+ end.join(", ")
+
+ links.empty? ? "None" : links
+ end
+end
diff --git a/app/models/advertisement.rb b/app/models/advertisement.rb
new file mode 100644
index 00000000..48a18789
--- /dev/null
+++ b/app/models/advertisement.rb
@@ -0,0 +1,3 @@
+class Advertisement < ActiveRecord::Base
+ validates_inclusion_of :ad_type, :in => %w(horizontal vertical)
+end
diff --git a/app/models/artist.rb b/app/models/artist.rb
new file mode 100644
index 00000000..ad4ffdfe
--- /dev/null
+++ b/app/models/artist.rb
@@ -0,0 +1,244 @@
+class Artist < ActiveRecord::Base
+ module UrlMethods
+ module ClassMethods
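+ # Look up artists by one of their URLs. If nothing matches the full normalized URL,
+ # walk up the path one component at a time (File.dirname) until something matches or
+ # the URL becomes too short; e.g. a search for .../gallery/artist/123.jpg (hypothetical)
+ # falls back to .../gallery/artist and then .../gallery.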
+ def find_all_by_url(url)
+ url = ArtistUrl.normalize(url)
+ artists = []
+
+ while artists.empty? && url.size > 10
+ u = url.to_escaped_for_sql_like.gsub(/\*/, '%') + '%'
+ artists += Artist.find(:all, :joins => "JOIN artist_urls ON artist_urls.artist_id = artists.id", :conditions => ["artists.alias_id IS NULL AND artist_urls.normalized_url LIKE ? ESCAPE E'\\\\'", u], :order => "artists.name")
+
+ # Remove duplicates based on name
+ artists = artists.inject({}) {|all, artist| all[artist.name] = artist ; all}.values
+ url = File.dirname(url)
+ end
+
+ return artists[0, 20]
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ m.after_save :commit_urls
+ m.has_many :artist_urls, :dependent => :delete_all
+ end
+
+ def commit_urls
+ if @urls
+ artist_urls.clear
+
+ @urls.scan(/\S+/).each do |url|
+ artist_urls.create(:url => url)
+ end
+ end
+ end
+
+ def urls=(urls)
+ @urls = urls
+ end
+
+ def urls
+ artist_urls.map {|x| x.url}.join("\n")
+ end
+ end
+
+ module NoteMethods
+ def self.included(m)
+ m.after_save :commit_notes
+ end
+
+ def wiki_page
+ WikiPage.find_page(name)
+ end
+
+ def notes_locked?
+ wiki_page.is_locked? rescue false
+ end
+
+ def notes
+ wiki_page.body rescue ""
+ end
+
+ def notes=(text)
+ @notes = text
+ end
+
+ def commit_notes
+ unless @notes.blank?
+ if wiki_page.nil?
+ WikiPage.create(:title => name, :body => @notes, :ip_addr => updater_ip_addr, :user_id => updater_id)
+ elsif wiki_page.is_locked?
+ errors.add(:notes, "are locked")
+ else
+ wiki_page.update_attributes(:body => @notes, :ip_addr => updater_ip_addr, :user_id => updater_id)
+ end
+ end
+ end
+ end
+
+ module AliasMethods
+ def self.included(m)
+ m.after_save :commit_aliases
+ end
+
+ def commit_aliases
+ transaction do
+ connection.execute("UPDATE artists SET alias_id = NULL WHERE alias_id = #{id}")
+
+ if @alias_names
+ @alias_names.each do |name|
+ a = Artist.find_or_create_by_name(name)
+ a.update_attributes(:alias_id => id, :updater_id => updater_id)
+ end
+ end
+ end
+ end
+
+ def alias_names=(names)
+ @alias_names = names.split(/\s*,\s*/)
+ end
+
+ def alias_names
+ aliases.map(&:name).join(", ")
+ end
+
+ def aliases
+ if new_record?
+ return []
+ else
+ return Artist.find(:all, :conditions => "alias_id = #{id}", :order => "name")
+ end
+ end
+
+ def alias_name
+ if alias_id
+ begin
+ return Artist.find(alias_id).name
+ rescue ActiveRecord::RecordNotFound
+ end
+ end
+
+ return nil
+ end
+
+ def alias_name=(name)
+ if name.blank?
+ self.alias_id = nil
+ else
+ artist = Artist.find_or_create_by_name(name)
+ self.alias_id = artist.id
+ end
+ end
+ end
+
+ module GroupMethods
+ def self.included(m)
+ m.after_save :commit_members
+ end
+
+ def commit_members
+ transaction do
+ connection.execute("UPDATE artists SET group_id = NULL WHERE group_id = #{id}")
+
+ if @member_names
+ @member_names.each do |name|
+ a = Artist.find_or_create_by_name(name)
+ a.update_attributes(:group_id => id, :updater_id => updater_id)
+ end
+ end
+ end
+ end
+
+ def group_name
+ if group_id
+ return Artist.find(group_id).name
+ else
+ nil
+ end
+ end
+
+ def members
+ if new_record?
+ return []
+ else
+ Artist.find(:all, :conditions => "group_id = #{id}", :order => "name")
+ end
+ end
+
+ def member_names
+ members.map(&:name).join(", ")
+ end
+
+ def member_names=(names)
+ @member_names = names.split(/\s*,\s*/)
+ end
+
+ def group_name=(name)
+ if name.blank?
+ self.group_id = nil
+ else
+ artist = Artist.find_or_create_by_name(name)
+ self.group_id = artist.id
+ end
+ end
+ end
+
+ module ApiMethods
+ def api_attributes
+ return {
+ :id => id,
+ :name => name,
+ :alias_id => alias_id,
+ :group_id => group_id,
+ :urls => artist_urls.map {|x| x.url}
+ }
+ end
+
+ def to_xml(options = {})
+ attribs = api_attributes
+ attribs[:urls] = attribs[:urls].join(" ")
+ attribs.to_xml(options.merge(:root => "artist"))
+ end
+
+ def to_json(*args)
+ return api_attributes.to_json(*args)
+ end
+ end
+
+ include UrlMethods
+ include NoteMethods
+ include AliasMethods
+ include GroupMethods
+ include ApiMethods
+
+ before_validation :normalize
+ validates_uniqueness_of :name
+ belongs_to :updater, :class_name => "User", :foreign_key => "updater_id"
+ attr_accessor :updater_ip_addr
+
+ def normalize
+ self.name = name.downcase.gsub(/^\s+/, "").gsub(/\s+$/, "").gsub(/ /, '_')
+ end
+
+ def to_s
+ return name
+ end
+
+ def self.generate_sql(name)
+ b = Nagato::Builder.new do |builder, cond|
+ case name
+ when /^[a-fA-F0-9]{32,32}$/
+ cond.add "name IN (SELECT t.name FROM tags t JOIN posts_tags pt ON pt.tag_id = t.id JOIN posts p ON p.id = pt.post_id WHERE p.md5 = ?)", name
+
+ when /^http/
+ cond.add "id IN (?)", find_all_by_url(name).map {|x| x.id}
+
+ else
+ cond.add "name LIKE ? ESCAPE E'\\\\'", name.to_escaped_for_sql_like + "%"
+ end
+ end
+
+ return b.to_hash
+ end
+end
diff --git a/app/models/artist_url.rb b/app/models/artist_url.rb
new file mode 100644
index 00000000..bc64fbb7
--- /dev/null
+++ b/app/models/artist_url.rb
@@ -0,0 +1,29 @@
+class ArtistUrl < ActiveRecord::Base
+ before_save :normalize
+ validates_presence_of :url
+
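+ # Collapse numbered mirror hostnames (blog123.fc2, img12.pixiv.net, ...) to a canonical
+ # form, so the same gallery reached through different servers normalizes to one URL.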
+ def self.normalize(url)
+ if url.nil?
+ return nil
+ else
+ url = url.gsub(/^http:\/\/blog\d+\.fc2/, "http://blog.fc2")
+ url = url.gsub(/^http:\/\/blog-imgs-\d+\.fc2/, "http://blog.fc2")
+ url = url.gsub(/^http:\/\/img\d+\.pixiv\.net/, "http://img.pixiv.net")
+ return url
+ end
+ end
+
+ def self.normalize_for_search(url)
+ if url =~ /\.\w+$/ && url =~ /\w\/\w/
+ url = File.dirname(url)
+ end
+
+ url = url.gsub(/^http:\/\/blog\d+\.fc2/, "http://blog*.fc2")
+ url = url.gsub(/^http:\/\/blog-imgs-\d+\.fc2/, "http://blog*.fc2")
+ url = url.gsub(/^http:\/\/img\d+\.pixiv\.net/, "http://img*.pixiv.net")
+ end
+
+ def normalize
+ self.normalized_url = self.class.normalize(self.url)
+ end
+end
diff --git a/app/models/ban.rb b/app/models/ban.rb
new file mode 100644
index 00000000..77407148
--- /dev/null
+++ b/app/models/ban.rb
@@ -0,0 +1,33 @@
+class Ban < ActiveRecord::Base
+ before_create :save_level
+ after_create :save_to_record
+ after_create :update_level
+ after_destroy :restore_level
+
+ def restore_level
+ User.find(user_id).update_attribute(:level, old_level)
+ end
+
+ def save_level
+ self.old_level = User.find(user_id).level
+ end
+
+ def update_level
+ user = User.find(user_id)
+ user.level = CONFIG["user_levels"]["Blocked"]
+ user.save
+ end
+
+ def save_to_record
+ UserRecord.create(:user_id => self.user_id, :reported_by => self.banned_by, :is_positive => false, :body => "Blocked: #{self.reason}")
+ end
+
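+ # Duration is given in days; e.g. a duration of 1.5 sets expires_at to 36 hours from now.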
+ def duration=(dur)
+ self.expires_at = (dur.to_f * 60*60*24).seconds.from_now
+ @duration = dur
+ end
+
+ def duration
+ @duration
+ end
+end
diff --git a/app/models/coefficient.rb b/app/models/coefficient.rb
new file mode 100644
index 00000000..39bd700b
--- /dev/null
+++ b/app/models/coefficient.rb
@@ -0,0 +1,3 @@
+class Coefficient < ActiveRecord::Base
+ belongs_to :post
+end
diff --git a/app/models/comment.rb b/app/models/comment.rb
new file mode 100644
index 00000000..b354f7b4
--- /dev/null
+++ b/app/models/comment.rb
@@ -0,0 +1,59 @@
+class Comment < ActiveRecord::Base
+ validates_format_of :body, :with => /\S/, :message => 'has no content'
+ belongs_to :post
+ belongs_to :user
+ after_save :update_last_commented_at
+ after_destroy :update_last_commented_at
+ attr_accessor :do_not_bump_post
+
+ def self.generate_sql(params)
+ return Nagato::Builder.new do |builder, cond|
+ cond.add_unless_blank "post_id = ?", params[:post_id]
+ end.to_hash
+ end
+
+ def self.updated?(user)
+ conds = []
+ conds += ["user_id <> %d" % [user.id]] unless user.is_anonymous?
+
+ newest_comment = Comment.find(:first, :order => "id desc", :limit => 1, :select => "created_at", :conditions => conds)
+ return false if newest_comment == nil
+ return newest_comment.created_at > user.last_comment_read_at
+ end
+
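+ # Refresh the post's last_commented_at from its newest comment, but only while the post
+ # has no more than CONFIG["comment_threshold"] comments.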
+ def update_last_commented_at
+ # return if self.do_not_bump_post
+
+ comment_count = connection.select_value("SELECT COUNT(*) FROM comments WHERE post_id = #{post_id}").to_i
+ if comment_count <= CONFIG["comment_threshold"]
+ connection.execute("UPDATE posts SET last_commented_at = (SELECT created_at FROM comments WHERE post_id = #{post_id} ORDER BY created_at DESC LIMIT 1) WHERE posts.id = #{post_id}")
+ end
+ end
+
+ def author
+ return User.find_name(self.user_id)
+ end
+
+ def pretty_author
+ author.tr("_", " ")
+ end
+
+ def api_attributes
+ return {
+ :id => id,
+ :created_at => created_at,
+ :post_id => post_id,
+ :creator => author,
+ :creator_id => user_id,
+ :body => body
+ }
+ end
+
+ def to_xml(options = {})
+ return api_attributes.to_xml(options.merge(:root => "comment"))
+ end
+
+ def to_json(*args)
+ return api_attributes.to_json(*args)
+ end
+end
diff --git a/app/models/dmail.rb b/app/models/dmail.rb
new file mode 100644
index 00000000..6bb8be05
--- /dev/null
+++ b/app/models/dmail.rb
@@ -0,0 +1,58 @@
+class Dmail < ActiveRecord::Base
+ validates_presence_of :to_id
+ validates_presence_of :from_id
+ validates_format_of :title, :with => /\S/
+ validates_format_of :body, :with => /\S/
+
+ belongs_to :to, :class_name => "User", :foreign_key => "to_id"
+ belongs_to :from, :class_name => "User", :foreign_key => "from_id"
+
+ after_create :update_recipient
+ after_create :send_dmail
+
+ def send_dmail
+ if to.receive_dmails? && to.email.include?("@")
+ UserMailer.deliver_dmail(to, from, title, body)
+ end
+ end
+
+ def mark_as_read!(current_user)
+ update_attribute(:has_seen, true)
+
+ unless Dmail.exists?(["to_id = ? AND has_seen = false", current_user.id])
+ current_user.update_attribute(:has_mail, false)
+ end
+ end
+
+ def update_recipient
+ to.update_attribute(:has_mail, true)
+ end
+
+ def to_name
+ User.find_name(to_id)
+ end
+
+ def from_name
+ User.find_name(from_id)
+ end
+
+ def to_name=(name)
+ user = User.find_by_name(name)
+ return if user.nil?
+ self.to_id = user.id
+ end
+
+ def from_name=(name)
+ user = User.find_by_name(name)
+ return if user.nil?
+ self.from_id = user.id
+ end
+
+ def title
+ if parent_id
+ return "Re: " + self[:title]
+ else
+ return self[:title]
+ end
+ end
+end
diff --git a/app/models/favorite.rb b/app/models/favorite.rb
new file mode 100644
index 00000000..3cfa23d8
--- /dev/null
+++ b/app/models/favorite.rb
@@ -0,0 +1,2 @@
+class Favorite < ActiveRecord::Base
+end
diff --git a/app/models/favorite_tag.rb b/app/models/favorite_tag.rb
new file mode 100644
index 00000000..6e35aaef
--- /dev/null
+++ b/app/models/favorite_tag.rb
@@ -0,0 +1,55 @@
+class FavoriteTag < ActiveRecord::Base
+ belongs_to :user
+ before_create :initialize_post_ids
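+ # cached_post_ids is a comma-separated list of post ids matching tag_query; add_post!
+ # prepends newly matching ids and prune! caps the list at CONFIG["favorite_tag_limit"] entries.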
+
+ def initialize_post_ids
+ if user.is_privileged_or_higher?
+ self.cached_post_ids = Post.find_by_tags(tag_query, :limit => 60, :select => "p.id").map(&:id).join(",")
+ end
+ end
+
+ def interested?(post_id)
+ Post.find_by_tags(tag_query + " id:#{post_id}").any?
+ end
+
+ def add_post!(post_id)
+ if cached_post_ids.blank?
+ update_attribute :cached_post_ids, post_id.to_s
+ else
+ update_attribute :cached_post_ids, "#{post_id},#{cached_post_ids}"
+ end
+ end
+
+ def prune!
+ hoge = cached_post_ids.split(/,/)
+
+ if hoge.size > CONFIG["favorite_tag_limit"]
+ update_attribute :cached_post_ids, hoge[0, CONFIG["favorite_tag_limit"]].join(",")
+ end
+ end
+
+ def self.find_post_ids(user_id, limit = 60)
+ find(:all, :conditions => ["user_id = ?", user_id], :select => "id, cached_post_ids").map {|x| x.cached_post_ids.split(/,/)}.flatten
+ end
+
+ def self.find_posts(user_id, limit = 60)
+ Post.find(:all, :conditions => ["id in (?)", find_post_ids(user_id, limit)], :order => "id DESC", :limit => limit)
+ end
+
+ def self.process_all(last_processed_post_id)
+ posts = Post.find(:all, :conditions => ["id > ?", last_processed_post_id], :order => "id DESC", :select => "id")
+ fav_tags = FavoriteTag.find(:all)
+
+ fav_tags.each do |fav_tag|
+ if fav_tag.user.is_privileged_or_higher?
+ posts.each do |post|
+ if fav_tag.interested?(post.id)
+ fav_tag.add_post!(post.id)
+ end
+ end
+
+ fav_tag.prune!
+ end
+ end
+ end
+end
diff --git a/app/models/flagged_post_detail.rb b/app/models/flagged_post_detail.rb
new file mode 100644
index 00000000..fef7bb90
--- /dev/null
+++ b/app/models/flagged_post_detail.rb
@@ -0,0 +1,19 @@
+class FlaggedPostDetail < ActiveRecord::Base
+ belongs_to :post
+ belongs_to :user
+
+ def author
+ return User.find_name(self.user_id)
+ end
+
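+ # Count deletions of this user's posts that were flagged by someone else since the user
+ # last checked (last_deleted_post_seen_at); the result is cached per-user for one minute.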
+ def self.new_deleted_posts(user)
+ return 0 if user.is_anonymous?
+
+ return Cache.get("deleted_posts:#{user.id}:#{user.last_deleted_post_seen_at.to_i}", 1.minute) do
+ select_value_sql(
+ "SELECT COUNT(*) FROM flagged_post_details fpd JOIN posts p ON (p.id = fpd.post_id) " +
+ "WHERE p.status = 'deleted' AND p.user_id = ? AND fpd.user_id <> ? AND fpd.created_at > ?",
+ user.id, user.id, user.last_deleted_post_seen_at).to_i
+ end
+ end
+end
diff --git a/app/models/forum_post.rb b/app/models/forum_post.rb
new file mode 100644
index 00000000..3bd29030
--- /dev/null
+++ b/app/models/forum_post.rb
@@ -0,0 +1,160 @@
+class ForumPost < ActiveRecord::Base
+ belongs_to :creator, :class_name => "User", :foreign_key => :creator_id
+ after_create :initialize_last_updated_by
+ before_validation :validate_title
+ validates_length_of :body, :minimum => 1, :message => "You need to enter a body"
+
+ module LockMethods
+ module ClassMethods
+ def lock!(id)
+ # Run raw SQL to skip the lock check
+ execute_sql("UPDATE forum_posts SET is_locked = TRUE WHERE id = ?", id)
+ end
+
+ def unlock!(id)
+ # Run raw SQL to skip the lock check
+ execute_sql("UPDATE forum_posts SET is_locked = FALSE WHERE id = ?", id)
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ m.before_validation :validate_lock
+ end
+
+ def validate_lock
+ if root.is_locked?
+ errors.add_to_base("Thread is locked")
+ return false
+ end
+
+ return true
+ end
+ end
+
+ module StickyMethods
+ module ClassMethods
+ def stick!(id)
+ # Run raw SQL to skip the lock check
+ execute_sql("UPDATE forum_posts SET is_sticky = TRUE WHERE id = ?", id)
+ end
+
+ def unstick!(id)
+ # Run raw SQL to skip the lock check
+ execute_sql("UPDATE forum_posts SET is_sticky = FALSE WHERE id = ?", id)
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ end
+ end
+
+ module ParentMethods
+ def self.included(m)
+ m.after_create :update_parent_on_create
+ m.before_destroy :update_parent_on_destroy
+ m.has_many :children, :class_name => "ForumPost", :foreign_key => :parent_id, :order => "id"
+ m.belongs_to :parent, :class_name => "ForumPost", :foreign_key => :parent_id
+ end
+
+ def update_parent_on_destroy
+ unless is_parent?
+ p = parent
+ p.update_attributes(:response_count => p.response_count - 1)
+ end
+ end
+
+ def update_parent_on_create
+ unless is_parent?
+ p = parent
+ p.update_attributes(:updated_at => updated_at, :response_count => p.response_count + 1, :last_updated_by => creator_id)
+ end
+ end
+
+ def is_parent?
+ return parent_id.nil?
+ end
+
+ def root
+ if is_parent?
+ return self
+ else
+ return ForumPost.find(parent_id)
+ end
+ end
+
+ def root_id
+ if is_parent?
+ return id
+ else
+ return parent_id
+ end
+ end
+ end
+
+ module ApiMethods
+ def api_attributes
+ return {
+ :body => body,
+ :creator => author,
+ :creator_id => creator_id,
+ :id => id,
+ :parent_id => parent_id,
+ :title => title
+ }
+ end
+
+ def to_json(*params)
+ api_attributes.to_json(*params)
+ end
+
+ def to_xml(options = {})
+ api_attributes.to_xml(options.merge(:root => "forum_post"))
+ end
+ end
+
+ include LockMethods
+ include StickyMethods
+ include ParentMethods
+ include ApiMethods
+
+ def self.updated?(user)
+ conds = []
+ conds += ["creator_id <> %d" % [user.id]] unless user.is_anonymous?
+
+ newest_topic = ForumPost.find(:first, :order => "id desc", :limit => 1, :select => "created_at", :conditions => conds)
+ return false if newest_topic == nil
+ return newest_topic.created_at > user.last_forum_topic_read_at
+ end
+
+ def validate_title
+ if is_parent?
+ if title.blank?
+ errors.add :title, "missing"
+ return false
+ end
+
+ if title !~ /\S/
+ errors.add :title, "missing"
+ return false
+ end
+ end
+
+ return true
+ end
+
+ def initialize_last_updated_by
+ if is_parent?
+ update_attribute(:last_updated_by, creator_id)
+ end
+ end
+
+ def last_updater
+ User.find_name(last_updated_by)
+ end
+
+ def author
+ User.find_name(creator_id)
+ end
+end
diff --git a/app/models/history.rb b/app/models/history.rb
new file mode 100644
index 00000000..1fce4ffa
--- /dev/null
+++ b/app/models/history.rb
@@ -0,0 +1,159 @@
+class History < ActiveRecord::Base
+ belongs_to :user
+ has_many :history_changes, :order => "id"
+
+ def group_by_table_class
+ Object.const_get(group_by_table.classify)
+ end
+
+ def get_group_by_controller
+ group_by_table_class.get_versioning_group_by[:controller]
+ end
+
+ def get_group_by_action
+ group_by_table_class.get_versioning_group_by[:action]
+ end
+
+ def group_by_obj
+ group_by_table_class.find(group_by_id)
+ end
+
+ def user
+ User.find(user_id)
+ end
+
+ def author
+ User.find_name(user_id)
+ end
+
+ # Undo all changes in the array changes.
+ def self.undo(changes, user, redo_change=false, errors={})
+ # Save parent objects after child objects, so changes to the children are
+ # committed when we save the parents.
+ objects = {}
+
+ changes.each { |change|
+ # If we have no previous change, this was the first change to this property
+ # and we have no default, so this change can't be undone.
+ previous_change = change.previous
+ if !previous_change && !change.options[:allow_reverting_to_default]
+ next
+ end
+
+ if not user.can_change?(change.obj, change.field.to_sym) then
+ errors[change] = :denied
+ next
+ end
+
+ # Add this node and its parent objects to objects.
+ node = cache_object_recurse(objects, change.table_name, change.remote_id, change.obj)
+ node[:changes] ||= []
+ node[:changes] << change
+ }
+
+ return unless objects[:objects]
+
+ # objects contains one or more trees of objects. Flatten this to an ordered
+ # list, so we can always save child nodes before parent nodes.
+ done = {}
+ stack = []
+ objects[:objects].each { |table_name, rhs|
+ rhs.each { |id, node|
+ # Start adding from the node at the top of the tree.
+ while node[:parent] do
+ node = node[:parent]
+ end
+ self.stack_object_recurse(node, stack, done)
+ }
+ }
+
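+ # stack lists parents before their children; walk it in reverse so child objects are
+ # saved before the parent objects that depend on them.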
+ stack.reverse.each { |node|
+ object = node[:o]
+ changes = node[:changes]
+ if changes
+ changes.each { |change|
+ if redo_change
+ redo_func = ("%s_redo" % change.field).to_sym
+ if object.respond_to?(redo_func) then
+ object.send(redo_func, change)
+ else
+ object.attributes = { change.field.to_sym => change.value }
+ end
+ else
+ undo_func = ("%s_undo" % change.field).to_sym
+ if object.respond_to?(undo_func) then
+ object.send(undo_func, change)
+ else
+ if change.previous
+ previous = change.previous.value
+ else
+ previous = change.options[:default] # when :allow_reverting_to_default
+ end
+ object.attributes = { change.field.to_sym => previous }
+ end
+ end
+ }
+ end
+
+ object.run_callbacks(:after_undo)
+ object.save!
+ }
+ end
+
+ def self.generate_sql(options = {})
+ Nagato::Builder.new do |builder, cond|
+ cond.add_unless_blank "histories.remote_id = ?", options[:remote_id]
+ cond.add_unless_blank "histories.user_id = ?", options[:user_id]
+
+ if options[:user_name]
+ builder.join "users ON users.id = histories.user_id"
+ cond.add "users.name = ?", options[:user_name]
+ end
+ end.to_hash
+ end
+
+private
+ # Find and return the node for table_name/id in objects. If the node doesn't
+ # exist, create it and point it at object.
+ def self.cache_object(objects, table_name, id, object)
+ objects[:objects] ||= {}
+ objects[:objects][table_name] ||= {}
+ objects[:objects][table_name][id] ||= {
+ :o => object
+ }
+ return objects[:objects][table_name][id]
+ end
+
+ # Find and return the node for table_name/id in objects. Recursively create
+ # nodes for parent objects.
+ def self.cache_object_recurse(objects, table_name, id, object)
+ node = self.cache_object(objects, table_name, id, object)
+
+ # If this class has a master class, register the master object for update callbacks too.
+ master = object.versioned_master_object
+ if master
+ master_node = cache_object_recurse(objects, master.class.to_s, master.id, master)
+
+ master_node[:children] ||= []
+ master_node[:children] << node
+ node[:parent] = master_node
+ end
+
+ return node
+ end
+
+ # Recursively add all nodes to stack, parents first.
+ def self.stack_object_recurse(node, stack, done = {})
+ return if done[node]
+ done[node] = true
+
+ stack << node
+
+ if node[:children] then
+ node[:children].each { |child|
+ self.stack_object_recurse(child, stack, done)
+ }
+ end
+ end
+end
+
diff --git a/app/models/history_change.rb b/app/models/history_change.rb
new file mode 100644
index 00000000..668a321c
--- /dev/null
+++ b/app/models/history_change.rb
@@ -0,0 +1,78 @@
+class HistoryChange < ActiveRecord::Base
+ belongs_to :history
+ belongs_to :previous, :class_name => "HistoryChange", :foreign_key => :previous_id
+ after_create :set_previous
+
+ def options
+ master_class.get_versioned_attribute_options(field) or {}
+ end
+
+ def master_class
+ # Hack because Rails is stupid and can't reliably derive class names
+ # from table names:
+ if table_name == "pools_posts"
+ class_name = "PoolPost"
+ else
+ class_name = table_name.classify
+ end
+ Object.const_get(class_name)
+ end
+
+ # Return true if this changes the value to the default value.
+ def changes_to_default?
+ return false if not has_default?
+
+ # Cast our value to the actual type; if this is a boolean value, this
+ # casts "f" to false.
+ column = master_class.columns_hash[field]
+ typecasted_value = column.type_cast(value)
+
+ return typecasted_value == get_default
+ end
+
+ def is_obsolete?
+ latest_change = latest
+ return self.value != latest_change.value
+ end
+
+ def has_default?
+ options.has_key?(:default)
+ end
+
+ def get_default
+ default = options[:default]
+ end
+
+ # Return the default value for the field recorded by this change.
+ def default_history
+ return nil if not has_default?
+
+ History.new :table_name => self.table_name,
+ :remote_id => self.remote_id,
+ :field => self.field,
+ :value => get_default
+ end
+
+ # Return the object this change modifies.
+ def obj
+ @obj ||= master_class.find(self.remote_id)
+ @obj
+ end
+
+ def latest
+ HistoryChange.find(:first, :order => "id DESC",
+ :conditions => ["table_name = ? AND remote_id = ? AND field = ?", table_name, remote_id, field])
+ end
+
+ def next
+ HistoryChange.find(:first, :order => "h.id ASC",
+ :conditions => ["table_name = ? AND remote_id = ? AND id > ? AND field = ?", table_name, remote_id, id, field])
+ end
+
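+ # Link this change to the most recent earlier change of the same field on the same
+ # object, so undo can find the value to revert to.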
+ def set_previous
+ self.previous = HistoryChange.find(:first, :order => "id DESC",
+ :conditions => ["table_name = ? AND remote_id = ? AND id < ? AND field = ?", table_name, remote_id, id, field])
+ self.save!
+ end
+end
+
diff --git a/app/models/inline.rb b/app/models/inline.rb
new file mode 100644
index 00000000..8c802612
--- /dev/null
+++ b/app/models/inline.rb
@@ -0,0 +1,83 @@
+class Inline < ActiveRecord::Base
+ belongs_to :user
+ has_many :inline_images, :dependent => :destroy, :order => "sequence"
+
+ # Sequence numbers must start at 1 and increase monotonically, to keep the UI simple.
+ # If we've been given sequences with gaps or duplicates, sanitize them.
+ def renumber_sequences
+ first = 1
+ for image in inline_images do
+ image.sequence = first
+ image.save!
+ first += 1
+ end
+ end
+
+ def pretty_name
+ "x"
+ end
+
+ def crop(params)
+ if params[:top].to_f < 0 or params[:top].to_f > 1 or
+ params[:bottom].to_f < 0 or params[:bottom].to_f > 1 or
+ params[:left].to_f < 0 or params[:left].to_f > 1 or
+ params[:right].to_f < 0 or params[:right].to_f > 1 or
+ params[:top] >= params[:bottom] or
+ params[:left] >= params[:right]
+ then
+ errors.add(:parameter, "error")
+ return false
+ end
+
+ def reduce_and_crop(image_width, image_height, params)
+ cropped_image_width = image_width * (params[:right].to_f - params[:left].to_f)
+ cropped_image_height = image_height * (params[:bottom].to_f - params[:top].to_f)
+
+ size = {}
+ size[:width] = cropped_image_width
+ size[:height] = cropped_image_height
+ size[:crop_top] = image_height * params[:top].to_f
+ size[:crop_bottom] = image_height * params[:bottom].to_f
+ size[:crop_left] = image_width * params[:left].to_f
+ size[:crop_right] = image_width * params[:right].to_f
+ size
+ end
+
+ images = self.inline_images
+ for image in images do
+ # Create a new image with the same properties, crop this image into the new one,
+ # and delete the old one.
+ new_image = InlineImage.new(:description => image.description, :sequence => image.sequence, :inline_id => self.id, :file_ext => "jpg")
+ size = reduce_and_crop(image.width, image.height, params)
+
+ begin
+ # Create one crop for the image, and InlineImage will create the sample and preview from that.
+ Danbooru.resize(image.file_ext, image.file_path, new_image.tempfile_image_path, size, 95)
+ FileUtils.chmod(0775, new_image.tempfile_image_path)
+ rescue Exception => x
+ FileUtils.rm_f(new_image.tempfile_image_path)
+
+ errors.add "crop", "couldn't be generated (#{x})"
+ return false
+ end
+
+ new_image.got_file
+ new_image.save!
+ image.destroy
+ end
+ end
+
+ def api_attributes
+ return {
+ :id => id,
+ :description => description,
+ :user_id => user_id,
+ :images => inline_images
+ }
+ end
+
+ def to_json(*params)
+ api_attributes.to_json(*params)
+ end
+end
+
diff --git a/app/models/inline_image.rb b/app/models/inline_image.rb
new file mode 100644
index 00000000..c3da931c
--- /dev/null
+++ b/app/models/inline_image.rb
@@ -0,0 +1,327 @@
+require "fileutils"
+
+# InlineImages can be uploaded, copied directly from posts, or cropped from other InlineImages.
+# To create an image by cropping a post, the post must be copied to an InlineImage of its own,
+# and cropped from there; the only UI for cropping is InlineImage->InlineImage.
+#
+# InlineImages can be posted directly in the forum and wiki (and possibly comments).
+#
+# An inline image can have three versions, like a post. For consistency, they use the
+# same names: image, sample, preview. As with posts, sample and previews are always JPEG,
+# and the dimensions of preview is derived from image rather than stored.
+#
+# Image files are effectively garbage collected: InlineImages can share files, and the file
+# is deleted when the last one using it is deleted. This allows any user to copy another user's
+# InlineImage, to crop it or to include it in an Inline.
+#
+# Example use cases:
+#
+# - Plain inlining, eg. for tutorials. Thumbs and larger images can be shown inline, allowing
+# a click to expand.
+# - Showing edits. Each user can upload his edit as an InlineImage and post it directly
+# into the forum.
+# - Comparing edits. A user can upload his own edit, pair it with another version (using
+# Inline), crop to a region of interest, and post that inline. The images can then be
+# compared in-place. This can be used to clearly show editing problems and differences.
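+#
+# Files are stored under public/data/inline/, keyed by MD5 (see file_path, sample_path
+# and preview_path below); e.g. for a hypothetical PNG upload:
+#   image/d41d8cd98f00b204e9800998ecf8427e.png
+#   sample/d41d8cd98f00b204e9800998ecf8427e.jpg
+#   preview/d41d8cd98f00b204e9800998ecf8427e.jpg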
+
+class InlineImage < ActiveRecord::Base
+ belongs_to :inline
+ before_validation_on_create :download_source
+ before_validation_on_create :determine_content_type
+ before_validation_on_create :set_image_dimensions
+ before_validation_on_create :generate_sample
+ before_validation_on_create :generate_preview
+ before_validation_on_create :move_file
+ before_validation_on_create :set_default_sequence
+ after_destroy :delete_file
+ before_create :validate_uniqueness
+
+ def tempfile_image_path
+ "#{RAILS_ROOT}/public/data/#{$PROCESS_ID}.upload"
+ end
+
+ def tempfile_sample_path
+ "#{RAILS_ROOT}/public/data/#{$PROCESS_ID}-sample.upload"
+ end
+
+ def tempfile_preview_path
+ "#{RAILS_ROOT}/public/data/#{$PROCESS_ID}-preview.upload"
+ end
+
+ attr_accessor :source
+ attr_accessor :received_file
+ attr_accessor :file_needs_move
+ def post_id=(id)
+ post = Post.find_by_id(id)
+ file = post.file_path
+
+ FileUtils.ln_s(file.local_path, tempfile_image_path)
+
+ self.received_file = true
+ self.md5 = post.md5
+ end
+
+ # Call once a file is available in tempfile_image_path.
+ def got_file
+ generate_hash(tempfile_image_path)
+ FileUtils.chmod(0775, self.tempfile_image_path)
+ self.file_needs_move = true
+ self.received_file = true
+ end
+
+ def file=(f)
+ return if f.nil? || f.size == 0
+
+ if f.local_path
+ FileUtils.cp(f.local_path, tempfile_image_path)
+ else
+ File.open(tempfile_image_path, 'wb') {|nf| nf.write(f.read)}
+ end
+
+ got_file
+ end
+
+ def download_source
+ return if source !~ /^http:\/\// || !file_ext.blank?
+ return if received_file
+
+ begin
+ Danbooru.http_get_streaming(source) do |response|
+ File.open(tempfile_image_path, "wb") do |out|
+ response.read_body do |block|
+ out.write(block)
+ end
+ end
+ end
+ got_file
+
+ return true
+ rescue SocketError, URI::Error, Timeout::Error, SystemCallError => x
+ delete_tempfile
+ errors.add "source", "couldn't be opened: #{x}"
+ return false
+ end
+ end
+
+ def determine_content_type
+ return true if self.file_ext
+
+ if not File.exists?(tempfile_image_path)
+ errors.add_to_base("No file received")
+ return false
+ end
+
+ imgsize = ImageSize.new(File.open(tempfile_image_path, "rb"))
+
+ unless imgsize.get_width.nil?
+ self.file_ext = imgsize.get_type.gsub(/JPEG/, "JPG").downcase
+ end
+
+ unless %w(jpg png gif).include?(file_ext.downcase)
+ errors.add(:file, "is an invalid content type: " + (file_ext.downcase or "unknown"))
+ return false
+ end
+
+ return true
+ end
+
+ def set_image_dimensions
+ return true if self.width and self.height
+ imgsize = ImageSize.new(File.open(tempfile_image_path, "rb"))
+ self.width = imgsize.get_width
+ self.height = imgsize.get_height
+
+ return true
+ end
+
+ def preview_dimensions
+ return Danbooru.reduce_to({:width => width, :height => height}, {:width => 150, :height => 150})
+ end
+
+ def thumb_size
+ size = Danbooru.reduce_to({:width => width, :height => height}, {:width => 400, :height => 400})
+ end
+
+ def generate_sample
+ return true if File.exists?(sample_path)
+
+ # We can generate the sample image during upload or offline. Use tempfile_image_path
+ # if it exists, otherwise use file_path.
+ path = tempfile_image_path
+ path = file_path unless File.exists?(path)
+ unless File.exists?(path)
+ errors.add(:file, "not found")
+ return false
+ end
+
+ # If we're not reducing the resolution for the sample image, only reencode if the
+ # source image is above the reencode threshold. Anything smaller won't be reduced
+ # enough by the reencode to bother, so don't reencode it and save disk space.
+ sample_size = Danbooru.reduce_to({:width => width, :height => height}, {:width => CONFIG["inline_sample_width"], :height => CONFIG["inline_sample_height"]})
+ if sample_size[:width] == width && sample_size[:height] == height && File.size?(path) < CONFIG["sample_always_generate_size"]
+ return true
+ end
+
+ # If we already have a sample image, and the parameters haven't changed,
+ # don't regenerate it.
+ if sample_size[:width] == sample_width && sample_size[:height] == sample_height
+ return true
+ end
+
+ begin
+ Danbooru.resize(file_ext, path, tempfile_sample_path, sample_size, 95)
+ rescue Exception => x
+ errors.add "sample", "couldn't be created: #{x}"
+ return false
+ end
+
+ self.sample_width = sample_size[:width]
+ self.sample_height = sample_size[:height]
+ return true
+ end
+
+ def generate_preview
+ return true if File.exists?(preview_path)
+
+ unless File.exists?(tempfile_image_path)
+ errors.add(:file, "not found")
+ return false
+ end
+
+ # Generate the preview from the new sample if we have one to save CPU, otherwise from the image.
+ if File.exists?(tempfile_sample_path)
+ path, ext = tempfile_sample_path, "jpg"
+ else
+ path, ext = tempfile_image_path, file_ext
+ end
+
+ begin
+ Danbooru.resize(ext, path, tempfile_preview_path, preview_dimensions, 95)
+ rescue Exception => x
+ errors.add "preview", "couldn't be generated (#{x})"
+ return false
+ end
+ return true
+ end
+
+ def move_file
+ return true if not file_needs_move
+ FileUtils.mv(tempfile_image_path, file_path)
+
+ if File.exists?(tempfile_preview_path)
+ FileUtils.mv(tempfile_preview_path, preview_path)
+ end
+ if File.exists?(tempfile_sample_path)
+ FileUtils.mv(tempfile_sample_path, sample_path)
+ end
+ self.file_needs_move = false
+ return true
+ end
+
+ def set_default_sequence
+ return if not self.sequence.nil?
+ siblings = self.inline.inline_images
+ max_sequence = siblings.map { |image| image.sequence }.max
+ max_sequence ||= 0
+ self.sequence = max_sequence + 1
+ end
+
+ def generate_hash(path)
+ md5_obj = Digest::MD5.new
+ File.open(path, 'rb') { |fp|
+ buf = ""
+ while fp.read(1024*64, buf) do md5_obj << buf end
+ }
+
+ self.md5 = md5_obj.hexdigest
+ end
+
+ def has_sample?
+ return (not self.sample_height.nil?)
+ end
+
+ def file_name
+ "#{md5}.#{file_ext}"
+ end
+
+ def file_name_jpg
+ "#{md5}.jpg"
+ end
+
+ def file_path
+ "#{RAILS_ROOT}/public/data/inline/image/#{file_name}"
+ end
+
+ def preview_path
+ "#{RAILS_ROOT}/public/data/inline/preview/#{file_name_jpg}"
+ end
+
+ def sample_path
+ "#{RAILS_ROOT}/public/data/inline/sample/#{file_name_jpg}"
+ end
+
+ def file_url
+ CONFIG["url_base"] + "/data/inline/image/#{file_name}"
+ end
+
+ def sample_url
+ if self.has_sample?
+ return CONFIG["url_base"] + "/data/inline/sample/#{file_name_jpg}"
+ else
+ return file_url
+ end
+ end
+
+ def preview_url
+ CONFIG["url_base"] + "/data/inline/preview/#{file_name_jpg}"
+ end
+
+ def delete_file
+ # If several inlines use the same image, they'll share the same file via the MD5. Only
+ # delete the file if this is the last one using it.
+ exists = InlineImage.find(:first, :conditions => ["id <> ? AND md5 = ?", self.id, self.md5])
+ return if not exists.nil?
+
+ FileUtils.rm_f(file_path)
+ FileUtils.rm_f(preview_path)
+ FileUtils.rm_f(sample_path)
+ end
+
+ # We should be able to use validates_uniqueness_of for this, but Rails is completely
+ # brain-damaged: it only lets you specify an error message that starts with the name
+ # of the column, capitalized, so if we say "foo", the message is "Md5 foo". This is
+ # useless.
+ def validate_uniqueness
+ siblings = self.inline.inline_images
+ for s in siblings do
+ next if s.id == self.id
+ if s.md5 == self.md5
+ errors.add_to_base("##{s.sequence} already exists.")
+ return false
+ end
+ end
+ return true
+ end
+
+ def api_attributes
+ return {
+ :id => id,
+ :sequence => sequence,
+ :md5 => md5,
+ :width => width,
+ :height => height,
+ :sample_width => sample_width,
+ :sample_height => sample_height,
+ :preview_width => preview_dimensions[:width],
+ :preview_height => preview_dimensions[:height],
+ :description => description,
+ :file_url => file_url,
+ :sample_url => sample_url,
+ :preview_url => preview_url,
+ }
+ end
+
+ def to_json(*params)
+ api_attributes.to_json(*params)
+ end
+end
diff --git a/app/models/ip_bans.rb b/app/models/ip_bans.rb
new file mode 100644
index 00000000..c6dfd626
--- /dev/null
+++ b/app/models/ip_bans.rb
@@ -0,0 +1,18 @@
+class IpBans < ActiveRecord::Base
+ belongs_to :user, :foreign_key => :banned_by
+
+ def duration=(dur)
+ if not dur or dur == "" then
+ self.expires_at = nil
+ @duration = nil
+ else
+ self.expires_at = (dur.to_f * 60*60*24).seconds.from_now
+ @duration = dur
+ end
+ end
+
+ def duration
+ @duration
+ end
+end
+
diff --git a/app/models/job_task.rb b/app/models/job_task.rb
new file mode 100644
index 00000000..5293986c
--- /dev/null
+++ b/app/models/job_task.rb
@@ -0,0 +1,190 @@
+class JobTask < ActiveRecord::Base
+ TASK_TYPES = %w(mass_tag_edit approve_tag_alias approve_tag_implication calculate_favorite_tags upload_posts_to_mirrors periodic_maintenance)
+ STATUSES = %w(pending processing finished error)
+
+ validates_inclusion_of :task_type, :in => TASK_TYPES
+ validates_inclusion_of :status, :in => STATUSES
+
+ def data
+ JSON.parse(data_as_json)
+ end
+
+ def data=(hoge)
+ self.data_as_json = hoge.to_json
+ end
+
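+ # Run the task once. A repeat_count of N > 0 re-queues the task as "pending" until it has
+ # run N times; zero runs it once, and a negative count repeats indefinitely. Exceptions
+ # mark the task "error", while SystemExit re-queues it as "pending" and re-raises.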
+ def execute!
+ if repeat_count > 0
+ count = repeat_count - 1
+ else
+ count = repeat_count
+ end
+
+ begin
+ execute_sql("SET statement_timeout = 0")
+ update_attributes(:status => "processing")
+ __send__("execute_#{task_type}")
+
+ if count == 0
+ update_attributes(:status => "finished")
+ else
+ update_attributes(:status => "pending", :repeat_count => count)
+ end
+ rescue SystemExit => x
+ update_attributes(:status => "pending")
+ raise x
+ rescue Exception => x
+ update_attributes(:status => "error", :status_message => "#{x.class}: #{x}")
+ end
+ end
+
+ def execute_mass_tag_edit
+ start_tags = data["start_tags"]
+ result_tags = data["result_tags"]
+ updater_id = data["updater_id"]
+ updater_ip_addr = data["updater_ip_addr"]
+ Tag.mass_edit(start_tags, result_tags, updater_id, updater_ip_addr)
+ end
+
+ def execute_approve_tag_alias
+ ta = TagAlias.find(data["id"])
+ updater_id = data["updater_id"]
+ updater_ip_addr = data["updater_ip_addr"]
+ ta.approve(updater_id, updater_ip_addr)
+ end
+
+ def execute_approve_tag_implication
+ ti = TagImplication.find(data["id"])
+ updater_id = data["updater_id"]
+ updater_ip_addr = data["updater_ip_addr"]
+ ti.approve(updater_id, updater_ip_addr)
+ end
+
+ def execute_calculate_favorite_tags
+ return if Cache.get("delay-favtags-calc")
+
+ last_processed_post_id = data["last_processed_post_id"].to_i
+
+ if last_processed_post_id == 0
+ last_processed_post_id = Post.maximum("id").to_i
+ end
+
+ Cache.put("delay-favtags-calc", "1", 10.minutes)
+ FavoriteTag.process_all(last_processed_post_id)
+ update_attributes(:data => {"last_processed_post_id" => Post.maximum("id")})
+ end
+
+ def update_data(*args)
+ hash = data.merge(args[0])
+ update_attributes(:data => hash)
+ end
+
+ def execute_periodic_maintenance
+ return if data["next_run"] && data["next_run"] > Time.now.to_i
+
+ update_data("step" => "recalculating post count")
+ Post.recalculate_row_count
+ update_data("step" => "recalculating tag post counts")
+ Tag.recalculate_post_count
+ update_data("step" => "purging old tags")
+ Tag.purge_tags
+
+ update_data("next_run" => Time.now.to_i + 60*60*6, "step" => nil)
+ end
+
+ def execute_upload_posts_to_mirrors
+ # This is a little counterintuitive: if we're backlogged, mirror newer posts first,
+ # since they're the ones that receive the most attention. Mirror held posts after
+ # unheld posts.
+ #
+ # Apply a limit, so if we're backlogged heavily, we'll only upload a few posts and
+ # then give other jobs a chance to run.
+ data = {}
+ (1..10).each do
+ post = Post.find(:first, :conditions => ["NOT is_warehoused AND status <> 'deleted'"], :order => "is_held ASC, index_timestamp DESC")
+ break if not post
+
+ data["left"] = Post.count(:conditions => ["NOT is_warehoused AND status <> 'deleted'"])
+ data["post_id"] = post.id
+ update_attributes(:data => data)
+
+ begin
+ post.upload_to_mirrors
+ ensure
+ data["post_id"] = nil
+ update_attributes(:data => data)
+ end
+
+ data["left"] = Post.count(:conditions => ["NOT is_warehoused AND status <> 'deleted'"])
+ update_attributes(:data => data)
+ end
+ end
+
+ def pretty_data
+ case task_type
+ when "mass_tag_edit"
+ start = data["start_tags"]
+ result = data["result_tags"]
+ user = User.find_name(data["updater_id"])
+
+ "start:#{start} result:#{result} user:#{user}"
+
+ when "approve_tag_alias"
+ ta = TagAlias.find(data["id"])
+ "start:#{ta.name} result:#{ta.alias_name}"
+
+ when "approve_tag_implication"
+ ti = TagImplication.find(data["id"])
+ "start:#{ti.predicate.name} result:#{ti.consequent.name}"
+
+ when "calculate_favorite_tags"
+ "post_id:#{data['last_processed_post_id']}"
+
+ when "upload_posts_to_mirrors"
+ ret = ""
+ if data["post_id"]
+ ret << "uploading post_id #{data["post_id"]}"
+ elsif data["left"]
+ ret << "sleeping"
+ else
+ ret << "idle"
+ end
+ ret << (" (%i left) " % data["left"]) if data["left"]
+ ret
+
+ when "periodic_maintenance"
+ if status == "processing" then
+ data["step"]
+ elsif status != "error" then
+ next_run = (data["next_run"] or 0) - Time.now.to_i
+ next_run_in_minutes = next_run.to_i / 60
+ if next_run_in_minutes > 0
+ eta = "next run in #{(next_run_in_minutes.to_f / 60.0).round} hours"
+ else
+ eta = "next run imminent"
+ end
+ "sleeping (#{eta})"
+ end
+ end
+ end
+
+ def self.execute_once
+ find(:all, :conditions => ["status = ?", "pending"], :order => "id desc").each do |task|
+ task.execute!
+ sleep 1
+ end
+ end
+
+ def self.execute_all
+ # If we were interrupted without finishing a task, it may be left in processing; reset
+ # those tasks to pending.
+ find(:all, :conditions => ["status = ?", "processing"]).each do |task|
+ task.update_attributes(:status => "pending")
+ end
+
+ while true
+ execute_once
+ sleep 10
+ end
+ end
+end
diff --git a/app/models/note.rb b/app/models/note.rb
new file mode 100644
index 00000000..6811e469
--- /dev/null
+++ b/app/models/note.rb
@@ -0,0 +1,82 @@
+class Note < ActiveRecord::Base
+ include ActiveRecord::Acts::Versioned
+
+ belongs_to :post
+ before_save :blank_body
+ acts_as_versioned :order => "updated_at DESC"
+ after_save :update_post
+
+ module LockMethods
+ def self.included(m)
+ m.validate :post_must_not_be_note_locked
+ end
+
+ def post_must_not_be_note_locked
+ if is_locked?
+ errors.add_to_base "Post is note locked"
+ return false
+ end
+ end
+
+ def is_locked?
+ if select_value_sql("SELECT 1 FROM posts WHERE id = ? AND is_note_locked = ?", post_id, true)
+ return true
+ else
+ return false
+ end
+ end
+ end
+
+ module ApiMethods
+ def api_attributes
+ return {
+ :id => id,
+ :created_at => created_at,
+ :updated_at => updated_at,
+ :creator_id => user_id,
+ :x => x,
+ :y => y,
+ :width => width,
+ :height => height,
+ :is_active => is_active,
+ :post_id => post_id,
+ :body => body,
+ :version => version
+ }
+ end
+
+ def to_xml(options = {})
+ api_attributes.to_xml(options.merge(:root => "note"))
+ end
+
+ def to_json(*args)
+ return api_attributes.to_json(*args)
+ end
+ end
+
+ include LockMethods
+ include ApiMethods
+
+ def blank_body
+ self.body = "(empty)" if body.blank?
+ end
+
+ # TODO: move this to a helper
+ def formatted_body
+ body.gsub(/<tn>(.+?)<\/tn>/m, '\1').gsub(/\n/, ' ')
+ end
+
+ def update_post
+ active_notes = select_value_sql("SELECT 1 FROM notes WHERE is_active = ? AND post_id = ? LIMIT 1", true, post_id)
+
+ if active_notes
+ execute_sql("UPDATE posts SET last_noted_at = ? WHERE id = ?", updated_at, post_id)
+ else
+ execute_sql("UPDATE posts SET last_noted_at = ? WHERE id = ?", nil, post_id)
+ end
+ end
+
+ def author
+ User.find_name(user_id)
+ end
+end
diff --git a/app/models/note_version.rb b/app/models/note_version.rb
new file mode 100644
index 00000000..ac3af6de
--- /dev/null
+++ b/app/models/note_version.rb
@@ -0,0 +1,13 @@
+class NoteVersion < ActiveRecord::Base
+ def to_xml(options = {})
+ {:created_at => created_at, :updated_at => updated_at, :creator_id => user_id, :x => x, :y => y, :width => width, :height => height, :is_active => is_active, :post_id => post_id, :body => body, :version => version}.to_xml(options.merge(:root => "note_version"))
+ end
+
+ def to_json(*args)
+ {:created_at => created_at, :updated_at => updated_at, :creator_id => user_id, :x => x, :y => y, :width => width, :height => height, :is_active => is_active, :post_id => post_id, :body => body, :version => version}.to_json(*args)
+ end
+
+ def author
+ User.find_name(user_id)
+ end
+end
diff --git a/app/models/pool.rb b/app/models/pool.rb
new file mode 100644
index 00000000..01d76c6e
--- /dev/null
+++ b/app/models/pool.rb
@@ -0,0 +1,391 @@
+require 'mirror'
+require "erb"
+include ERB::Util
+
+class Pool < ActiveRecord::Base
+ belongs_to :user
+
+ class PostAlreadyExistsError < Exception
+ end
+
+ class AccessDeniedError < Exception
+ end
+
+ module PostMethods
+ def self.included(m)
+ # Prefer child posts (the posts that were actually added to the pool). This is what's displayed
+ # when editing the pool.
+ m.has_many :pool_posts, :class_name => "PoolPost", :order => "nat_sort(sequence), post_id", :conditions => "pools_posts.active = true"
+
+ # Prefer parent posts (the parents of posts that were added to the pool). This is what's displayed by
+ # default in post/show.
+ m.has_many :pool_parent_posts, :class_name => "PoolPost", :order => "nat_sort(sequence), post_id",
+ :conditions => "(pools_posts.active = true AND pools_posts.slave_id IS NULL) OR pools_posts.master_id IS NOT NULL"
+ m.has_many :all_pool_posts, :class_name => "PoolPost", :order => "nat_sort(sequence), post_id"
+ m.versioned :name
+ m.versioned :description, :default => ""
+ m.versioned :is_public, :default => true
+ m.versioned :is_active, :default => true
+ m.after_undo :update_pool_links
+ end
+
+ def can_be_updated_by?(user)
+ is_public? || user.has_permission?(self)
+ end
+
+ def add_post(post_id, options = {})
+ transaction do
+ if options[:user] && !can_be_updated_by?(options[:user])
+ raise AccessDeniedError
+ end
+
+ seq = options[:sequence] || next_sequence
+
+ pool_post = all_pool_posts.find(:first, :conditions => ["post_id = ?", post_id])
+ if pool_post
+ raise PostAlreadyExistsError if pool_post.active
+ pool_post.active = true
+ pool_post.sequence = seq
+ pool_post.save!
+ else
+ PoolPost.create(:pool_id => id, :post_id => post_id, :sequence => seq)
+ end
+
+ increment!(:post_count)
+
+ unless options[:skip_update_pool_links]
+ self.reload
+ update_pool_links
+ end
+ end
+ end
+
+ def remove_post(post_id, options = {})
+ transaction do
+ if options[:user] && !can_be_updated_by?(options[:user])
+ raise AccessDeniedError
+ end
+
+ pool_post = pool_posts.find(:first, :conditions => ["post_id = ?", post_id])
+ if pool_post then
+ pool_post.active = false
+ pool_post.save!
+ self.reload
+ decrement!(:post_count)
+ update_pool_links
+ end
+ end
+ end
+
+ def get_sample
+ # By preference, pick the first post (by sequence) in the pool that isn't hidden from
+ # the index.
+ PoolPost.find(:all, :order => "posts.is_shown_in_index DESC, nat_sort(pools_posts.sequence), pools_posts.post_id",
+ :joins => "JOIN posts ON posts.id = pools_posts.post_id",
+ :conditions => ["pool_id = ? AND posts.status = 'active' AND pools_posts.active", self.id]).each { |pool_post|
+ return pool_post.post if pool_post.post.can_be_seen_by?(Thread.current["danbooru-user"])
+ }
+ return rescue nil
+ end
+
+ def can_change_is_public?(user)
+ user.has_permission?(self)
+ end
+
+ def has_originals?
+ pool_posts.each { |pp| return true if pp.slave_id }
+ return false
+ end
+
+ def can_change?(user, attribute)
+ return false if not user.is_member_or_higher?
+ return is_public? || user.has_permission?(self)
+ end
+
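+ # Rebuild the prev/next links between the pool's parent posts (in sequence order); the
+ # first post gets no prev_post_id and the last no next_post_id.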
+ def update_pool_links
+ transaction do
+ pp = pool_parent_posts(true) # force reload
+ pp.each_index do |i|
+ pp[i].next_post_id = nil
+ pp[i].prev_post_id = nil
+ pp[i].next_post_id = pp[i + 1].post_id unless i == pp.size - 1
+ pp[i].prev_post_id = pp[i - 1].post_id unless i == 0
+ pp[i].save
+ end
+ end
+ end
+
+ def next_sequence
+ seq = 0
+ pool_posts.find(:all, :select => "sequence", :order => "sequence DESC").each { |pp|
+ seq = [seq, pp.sequence.to_i].max
+ }
+
+ return seq + 1
+ end
+ end
+
+ module ApiMethods
+ def api_attributes
+ return {
+ :id => id,
+ :name => name,
+ :created_at => created_at,
+ :updated_at => updated_at,
+ :user_id => user_id,
+ :is_public => is_public,
+ :post_count => post_count,
+ }
+ end
+
+ def to_json(*params)
+ api_attributes.to_json(*params)
+ end
+
+ def to_xml(options = {})
+ options[:indent] ||= 2
+ xml = options[:builder] ||= Builder::XmlMarkup.new(:indent => options[:indent])
+ xml.pool(api_attributes) do
+ xml.description(description)
+ yield options[:builder] if block_given?
+ end
+ end
+ end
+
+ module NameMethods
+ module ClassMethods
+ def find_by_name(name)
+ if name =~ /^\d+$/
+ find_by_id(name)
+ else
+ find(:first, :conditions => ["lower(name) = lower(?)", name])
+ end
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ m.validates_uniqueness_of :name
+ m.before_validation :normalize_name
+ end
+
+ def normalize_name
+ self.name = name.gsub(/\s/, "_")
+ end
+
+ def pretty_name
+ name.tr("_", " ")
+ end
+ end
+
+ module ZipMethods
+ def get_zip_filename(options={})
+ filename = pretty_name.gsub(/\?/, "")
+ filename += " (JPG)" if options[:jpeg]
+ filename += " (orig)" if options[:originals]
+ "#{filename}.zip"
+ end
+
+ # Return true if any posts in this pool have a generated JPEG version.
+ def has_jpeg_zip?(options={})
+ posts = options[:originals] ? pool_posts : pool_parent_posts
+ posts.each do |pool_post|
+ post = pool_post.post
+ return true if post.has_jpeg?
+ end
+ return false
+ end
+
+ def get_zip_url(control_path, options={})
+ url = Mirrors.select_image_server(self.zip_is_warehoused, self.zip_created_at.to_i, :zipfile => true)
+ url += "/data/zips/#{File.basename(control_path)}"
+
+ # Adds the pretty filename to the end. This is ignored by lighttpd.
+ url += "/#{url_encode(get_zip_filename(options))}"
+ return url
+ end
+
+ # Estimate the size of the ZIP.
+ def get_zip_size(options={})
+ sum = 0
+ posts = options[:originals] ? pool_posts : pool_parent_posts
+ posts.each do |pool_post|
+ post = pool_post.post
+ next if post.status == 'deleted'
+ sum += options[:jpeg] && post.has_jpeg? ? post.jpeg_size : post.file_size
+ end
+
+ return sum
+ end
+
+ def get_zip_control_file_path_for_time(time, options={})
+ jpeg = options[:jpeg] || false
+ originals = options[:originals] || false
+
+ # If this pool has a JPEG version, name the normal version "png". Otherwise, name it
+ # "normal". This only affects the URL used to access the file, so the frontend can
+ # match it for QOS purposes; it doesn't affect the downloaded pool's filename.
+ if jpeg
+ type = "jpeg"
+ elsif has_jpeg_zip?(options) then
+ type = "png"
+ else
+ type = "normal"
+ end
+
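+      # e.g. "#{RAILS_ROOT}/public/data/zips/jpeg-pool-00000123-1275000000-orig" for a
+      # JPEG ZIP of originals (illustrative pool id and timestamp).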
+ "#{RAILS_ROOT}/public/data/zips/%s-pool-%08i-%i%s" % [type, self.id, time.to_i, originals ? "-orig":""]
+ end
+
+ def all_posts_in_zip_are_warehoused?(options={})
+ posts = options[:originals] ? pool_posts : pool_parent_posts
+ posts.each do |pool_post|
+ post = pool_post.post
+ next if post.status == 'deleted'
+ return false if not post.is_warehoused?
+ end
+ return true
+ end
+
+ # Generate a mod_zipfile control file for this pool.
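+    # For each post it emits four lines: the filename to use inside the ZIP, the file's
+    # path relative to document-root, its size in bytes, and its CRC32.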
+ def get_zip_control_file(options={})
+ return "" if pool_posts.empty?
+
+ jpeg = options[:jpeg] || false
+ originals = options[:originals] || false
+
+ buf = ""
+
+ # Pad sequence numbers in filenames to the longest sequence number. Ignore any text
+ # after the sequence for padding; for example, if we have 1, 5, 10a and 12, then pad
+    # to 2 digits (subject to the 3-digit minimum below).
+
+ # Always pad to at least 3 digits.
+ max_sequence_digits = 3
+ pool_posts.each do |pool_post|
+ filtered_sequence = pool_post.sequence.gsub(/^([0-9]+(-[0-9]+)?)?.*/, '\1') # 45a -> 45
+ filtered_sequence.split(/-/).each { |p|
+ max_sequence_digits = [p.length, max_sequence_digits].max
+ }
+ end
+
+ filename_count = {}
+ posts = originals ? pool_posts : pool_parent_posts
+ posts.each do |pool_post|
+ post = pool_post.post
+ next if post.status == 'deleted'
+
+ # Strip RAILS_ROOT/public off the file path, so the paths are relative to document-root.
+ if jpeg && post.has_jpeg?
+ path = post.jpeg_path
+ file_ext = "jpg"
+ else
+ path = post.file_path
+ file_ext = post.file_ext
+ end
+ path = path[(RAILS_ROOT + "/public").length .. path.length]
+
+ # For padding filenames, break numbers apart on hyphens and pad each part. For
+ # example, if max_sequence_digits is 3, and we have "88-89", pad it to "088-089".
+ filename = pool_post.sequence.gsub(/^([0-9]+(-[0-9]+)*)(.*)$/) { |m|
+ if $1 != ""
+ suffix = $3
+ numbers = $1.split(/-/).map { |p|
+ "%0*i" % [max_sequence_digits, p.to_i]
+ }.join("-")
+ "%s%s" % [numbers, suffix]
+ else
+ "%s" % [$3]
+ end
+ }
+
+ #filename = "%0*i" % [max_sequence_digits, pool_post.sequence]
+
+ # Avoid duplicate filenames.
+ filename_count[filename] ||= 0
+ filename_count[filename] = filename_count[filename] + 1
+ if filename_count[filename] > 1
+ filename << " (%i)" % [filename_count[filename]]
+ end
+ filename << ".%s" % [file_ext]
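+        # e.g. two posts that both pad to "012" become "012.jpg" and "012 (2).jpg".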
+
+ buf << "#{filename}\n"
+ buf << "#{path}\n"
+ if jpeg && post.has_jpeg?
+ buf << "#{post.jpeg_size}\n"
+ buf << "#{post.jpeg_crc32}\n"
+ else
+ buf << "#{post.file_size}\n"
+ buf << "#{post.crc32}\n"
+ end
+ end
+
+ return buf
+ end
+
+ def get_zip_control_file_path(options = {})
+ control_file = self.get_zip_control_file(options)
+
+      # The creation time of the latest pool ZIP we generated is stored in pool.zip_created_at.
+      # If that ZIP's control file still exists, compare it against the control file we just
+      # generated, and reuse it if it hasn't changed.
+ control_path_time = Time.now
+ control_path = self.get_zip_control_file_path_for_time(control_path_time, options)
+ reuse_old_control_file = false
+ if self.zip_created_at then
+ old_path = self.get_zip_control_file_path_for_time(self.zip_created_at, options)
+ begin
+ old_control_file = File.open(old_path).read
+
+ if control_file == old_control_file
+ reuse_old_control_file = true
+ control_path = old_path
+ control_path_time = self.zip_created_at
+ end
+ rescue SystemCallError => e
+ end
+ end
+
+ if not reuse_old_control_file then
+ control_path_temp = control_path + ".temp"
+ File.open(control_path_temp, 'w+') do |fp|
+ fp.write(control_file)
+ end
+
+ FileUtils.mv(control_path_temp, control_path)
+
+      # Record the new control file's creation time.  zip_is_warehoused is set to true
+      # below, once the control file has been mirrored to the image servers.
+ self.update_attributes(:zip_created_at => control_path_time, :zip_is_warehoused => false)
+ end
+
+ if !self.zip_is_warehoused && all_posts_in_zip_are_warehoused?(options)
+ delay = ServerKey.find(:first, :conditions => ["name = 'delay-mirrors-down'"])
+ if delay.nil?
+ delay = ServerKey.create(:name => "delay-mirrors-down", :value => 0)
+ end
+ if delay.value.to_i < Time.now.to_i
+ # Send the control file to all mirrors, if we have any.
+ begin
+ # This is being done interactively, so use a low timeout.
+ Mirrors.copy_file_to_mirrors(control_path, :timeout => 5)
+ self.update_attributes(:zip_is_warehoused => true)
+ rescue Mirrors::MirrorError => e
+ # If mirroring is failing, disable it for a while. It might be timing out, and this
+ # will make the UI unresponsive.
+ delay.update_attributes!(:value => Time.now.to_i + 60*60)
+ ActiveRecord::Base.logger.error("Error warehousing ZIP control file: #{e}")
+ end
+ end
+ end
+
+ return control_path
+ end
+ end
+
+ include PostMethods
+ include ApiMethods
+ include NameMethods
+ if CONFIG["pool_zips"]
+ include ZipMethods
+ end
+end
+
diff --git a/app/models/pool_post.rb b/app/models/pool_post.rb
new file mode 100644
index 00000000..208d5478
--- /dev/null
+++ b/app/models/pool_post.rb
@@ -0,0 +1,172 @@
+class PoolPost < ActiveRecord::Base
+ set_table_name "pools_posts"
+ belongs_to :post
+ belongs_to :pool
+ versioned_parent :pool
+ versioning_display :class => :pool
+ versioned :active, :default => 'f', :allow_reverting_to_default => true
+ versioned :sequence
+ before_save :update_pool
+
+ def can_change_is_public?(user)
+ return user.has_permission?(pool) # only the owner can change is_public
+ end
+
+ def can_change?(user, attribute)
+ return false if not user.is_member_or_higher?
+ return pool.is_public? || user.has_permission?(pool)
+ end
+
+ def pretty_sequence
+ if sequence =~ /^[0-9]+.*/
+ return "##{sequence}"
+ else
+ return "\"#{sequence}\""
+ end
+ end
+
+ def update_pool
+ # Implicit posts never affect the post count, because we always show either the
+ # parent or the child posts in the index, but not both.
+ return if master_id
+
+ if active_changed? then
+ if active then
+ pool.increment!(:post_count)
+ else
+ pool.decrement!(:post_count)
+ end
+
+ pool.save!
+ end
+ end
+
+  # A master pool_post is one that was added to the pool explicitly and whose post has
+  # a parent.  A slave pool_post is one that was added to the pool implicitly, because
+  # one of its post's children was added to the pool.  (Master/slave terminology is used
+  # because calling these parent and child becomes confusing, given their close relationship
+  # to post parents.)
+ #
+ # The active flag is always false for an implicit slave post. Setting the active flag
+ # to true on a slave post means you're adding it explicitly, which will cause it to no
+ # longer be a slave post. This behavior cooperates well with history: simply setting
+ # and unsetting active are converse operations, regardless of whether a post is a slave
+ # or not. For example, if you have a parent and a child that are both explicitly in the
+ # pool, and you remove the parent (causing it to be added as a slave), this will register
+ # as a removal in history; undoing that history action will cause the active flag to be
+ # set to true again, which will undo as expected.
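+  #
+  # For example: post B is a child of post A, and B is added to the pool explicitly.  An
+  # inactive pool_post is created implicitly for A; B's pool_post becomes its master
+  # (B.slave_id points at A's pool_post, A.master_id points at B's pool_post), and A's
+  # sequence is kept in sync with B's.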
+ belongs_to :master, :class_name => "PoolPost", :foreign_key => "master_id"
+ belongs_to :slave, :class_name => "PoolPost", :foreign_key => "slave_id"
+
+protected
+  # Find a pool_post that can act as a master for pp: one that is active, explicitly in
+  # this pool (not itself a slave), has neither a master nor a slave already, and whose
+  # post has pp.post as its parent.
+ def self.find_master_pool_post(pp)
+ sql = <<-SQL
+ SELECT pp.* FROM posts p JOIN pools_posts pp ON (p.id = pp.post_id)
+ WHERE p.parent_id = #{pp.post_id}
+ AND pp.active
+ AND pp.pool_id = #{pp.pool_id}
+ AND pp.master_id IS NULL
+ AND pp.slave_id IS NULL
+ ORDER BY pp.id ASC
+ LIMIT 1
+ SQL
+ new_master = PoolPost.find_by_sql([sql])
+
+ return nil if new_master.empty?
+ return new_master[0]
+ end
+
+  # If our master post is no longer valid, because it was deactivated or its post's
+  # parent changed, unlink us from it.
+ def detach_stale_master
+ # If we already have no master, we have nothing to do.
+ return if not self.master
+
+    # Keep the master only while it is still active, we are still inactive, and its post
+    # is still a child of our post; otherwise it's no longer a valid master.
+ return if self.master.active && !self.active && self.master.post.parent_id == self.post_id
+
+ self.master.slave_id = nil
+ self.master.save!
+
+ self.master_id = nil
+ self.master = nil
+ self.save!
+ end
+
+ def find_master_and_propagate
+ # If we have a master post, verify that it's still valid; if not, detach us from it.
+ detach_stale_master
+
+ need_save = false
+
+    # Don't look for a master if we already have a master or a slave, or if we're already active.
+ if !self.slave_id && !self.master_id && !self.active
+ new_master = PoolPost.find_master_pool_post(self)
+ if new_master
+ self.master_id = new_master.id
+ new_master.slave_id = self.id
+ new_master.save!
+ need_save = true
+ end
+ end
+
+ # If we have a master, propagate changes from it to us.
+ if self.master
+ self.sequence = master.sequence
+ need_save = true if self.sequence_changed?
+ end
+
+ self.save! if need_save
+ end
+
+public
+ # The specified post has had its parent changed.
+ def self.post_parent_changed(post)
+ PoolPost.find(:all, :conditions => ["post_id = ?", post.id]).each { |pp|
+ pp.need_slave_update = true
+ pp.copy_changes_to_slave
+ }
+ end
+
+ # Since copy_changes_to_slave may call self.save, it needs to be run from
+ # post_save and not after_save. We need to know whether attributes have changed
+ # (so we don't run this unnecessarily), so that check needs to be done in after_save,
+ # while dirty flags are still set.
+ after_save :check_if_need_slave_update
+ post_save :copy_changes_to_slave
+
+ attr_accessor :need_slave_update
+ def check_if_need_slave_update
+ self.need_slave_update = true if sequence_changed? || active_changed?
+ return true
+ end
+
+  # After a PoolPost or its post changes, propagate changes to the related master/slave PoolPosts.
+ def copy_changes_to_slave
+ return true if !self.need_slave_update
+ self.need_slave_update = false
+
+ # If our sequence changed, we need to copy that to our slave (if any), and if our
+ # active flag was turned off we need to detach from our slave.
+ post_to_update = self.slave
+
+ if !post_to_update && self.active && self.post.parent_id
+ # We have no slave, but we have a parent post and we're active, so we might need to
+ # assign it. Make sure that a PoolPost exists for the parent.
+ post_to_update = PoolPost.find(:first, :conditions => {:pool_id => self.pool_id, :post_id => post.parent_id})
+ if not post_to_update
+ post_to_update = PoolPost.create(:pool_id => self.pool_id, :post_id => post.parent_id, :active => false)
+ end
+ end
+
+ post_to_update.find_master_and_propagate if post_to_update
+
+ self.find_master_and_propagate
+
+ return true
+ end
+end
+
diff --git a/app/models/post.rb b/app/models/post.rb
new file mode 100644
index 00000000..3b02b8ca
--- /dev/null
+++ b/app/models/post.rb
@@ -0,0 +1,168 @@
+Dir["#{RAILS_ROOT}/app/models/post/**/*.rb"].each {|x| require_dependency x}
+
+class Post < ActiveRecord::Base
+ STATUSES = %w(active pending flagged deleted)
+
+ define_callbacks :after_delete
+ define_callbacks :after_undelete
+ has_many :notes, :order => "id desc"
+ has_one :flag_detail, :class_name => "FlaggedPostDetail"
+ belongs_to :user
+ before_validation_on_create :set_random!
+ before_create :set_index_timestamp!
+ belongs_to :approver, :class_name => "User"
+ attr_accessor :updater_ip_addr, :updater_user_id
+ attr_accessor :metatag_flagged
+ has_many :avatars, :class_name => "User", :foreign_key => "avatar_post_id"
+ after_delete :clear_avatars
+ after_save :commit_flag
+
+ include PostSqlMethods
+ include PostCommentMethods
+ include PostImageStoreMethods
+ include PostVoteMethods
+ include PostTagMethods
+ include PostCountMethods
+ include PostCacheMethods if CONFIG["enable_caching"]
+ include PostParentMethods if CONFIG["enable_parent_posts"]
+ include PostFileMethods
+ include PostChangeSequenceMethods
+ include PostRatingMethods
+ include PostStatusMethods
+ include PostApiMethods
+ include PostMirrorMethods
+
+ def self.destroy_with_reason(id, reason, current_user)
+ post = Post.find(id)
+ post.flag!(reason, current_user.id)
+ if post.flag_detail
+ post.flag_detail.update_attributes(:is_resolved => true)
+ end
+
+ post.delete
+ end
+
+ def delete
+ self.update_attributes(:status => "deleted")
+ self.run_callbacks(:after_delete)
+ end
+
+ def undelete
+ return if self.status == "active"
+ self.update_attributes(:status => "active")
+ self.run_callbacks(:after_undelete)
+ end
+
+ def can_user_delete?(user)
+ if not user.has_permission?(self)
+ return false
+ end
+
+ if not user.is_mod_or_higher? and Time.now - self.created_at > 1.day
+ return false
+ end
+
+ return true
+ end
+
+ def clear_avatars
+ User.clear_avatars(self.id)
+ end
+
+ def set_random!
+    self.random = rand
+ end
+
+ def set_index_timestamp!
+ self.index_timestamp = self.created_at
+ end
+
+ def flag!(reason, creator_id)
+ transaction do
+ update_attributes(:status => "flagged")
+
+ if flag_detail
+ flag_detail.update_attributes(:reason => reason, :user_id => creator_id, :created_at => Time.now)
+ else
+ FlaggedPostDetail.create!(:post_id => id, :reason => reason, :user_id => creator_id, :is_resolved => false)
+ end
+ end
+ end
+
+ # If the flag_post metatag was used and the current user has access, flag the post.
+ def commit_flag
+ return if self.metatag_flagged.nil?
+ return if not Thread.current["danbooru-user"].is_mod_or_higher?
+ return if self.status != "active"
+
+ self.flag!(self.metatag_flagged, Thread.current["danbooru-user"].id)
+ end
+
+ def approve!(approver_id)
+ if flag_detail
+ flag_detail.update_attributes(:is_resolved => true)
+ end
+
+ update_attributes(:status => "active", :approver_id => approver_id)
+ end
+
+ def voted_by
+ # Cache results
+ if @voted_by.nil?
+ @voted_by = {}
+ (1..3).each { |v|
+ @voted_by[v] = User.find(:all, :joins => "JOIN post_votes v ON v.user_id = users.id", :select => "users.name, users.id", :conditions => ["v.post_id = ? and v.score = ?", self.id, v], :order => "v.updated_at DESC") || []
+ }
+ end
+
+ return @voted_by
+ end
+
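+  # A vote score of 3 doubles as a favorite.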
+ def favorited_by
+ return voted_by[3]
+ end
+
+ def author
+ return User.find_name(user_id)
+ end
+
+ def delete_from_database
+ delete_file
+ execute_sql("DELETE FROM posts WHERE id = ?", id)
+ end
+
+ def active_notes
+ notes.select {|x| x.is_active?}
+ end
+
+ STATUSES.each do |x|
+ define_method("is_#{x}?") do
+ return status == x
+ end
+ end
+
+ def can_be_seen_by?(user, options={})
+ if not options[:show_deleted] and self.status == 'deleted'
+ return false
+ end
+ CONFIG["can_see_post"].call(user, self)
+ end
+
+ def self.new_deleted?(user)
+ conds = []
+ conds += ["creator_id <> %d" % [user.id]] unless user.is_anonymous?
+
+ newest_topic = ForumPost.find(:first, :order => "id desc", :limit => 1, :select => "created_at", :conditions => conds)
+ return false if newest_topic == nil
+ return newest_topic.created_at > user.last_forum_topic_read_at
+ end
+
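+  # e.g. "http://img05.pixiv.net/img/someartist/1234567.jpg" is rewritten to the illust
+  # page "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=1234567".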
+ def normalized_source
+ if source =~ /pixiv\.net\/img\//
+ img_id = source[/(\d+)\.\w+$/, 1]
+ "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=#{img_id}"
+ else
+ source
+ end
+ end
+end
diff --git a/app/models/post/api_methods.rb b/app/models/post/api_methods.rb
new file mode 100644
index 00000000..a1006f22
--- /dev/null
+++ b/app/models/post/api_methods.rb
@@ -0,0 +1,43 @@
+module PostApiMethods
+ def api_attributes
+ return {
+ :id => id,
+ :tags => cached_tags,
+ :created_at => created_at,
+ :creator_id => user_id,
+ :author => author,
+ :change => change_seq,
+ :source => source,
+ :score => score,
+ :md5 => md5,
+ :file_size => file_size,
+ :file_url => file_url,
+ :is_shown_in_index => is_shown_in_index,
+ :preview_url => preview_url,
+ :preview_width => preview_dimensions[0],
+ :preview_height => preview_dimensions[1],
+ :sample_url => sample_url,
+ :sample_width => sample_width || width,
+ :sample_height => sample_height || height,
+ :sample_file_size => sample_size,
+ :jpeg_url => jpeg_url,
+ :jpeg_width => jpeg_width || width,
+ :jpeg_height => jpeg_height || height,
+ :jpeg_file_size => jpeg_size,
+ :rating => rating,
+ :has_children => has_children,
+ :parent_id => parent_id,
+ :status => status,
+ :width => width,
+ :height => height
+ }
+ end
+
+ def to_json(*args)
+ return api_attributes.to_json(*args)
+ end
+
+ def to_xml(options = {})
+ return api_attributes.to_xml(options.merge(:root => "post"))
+ end
+end
diff --git a/app/models/post/cache_methods.rb b/app/models/post/cache_methods.rb
new file mode 100644
index 00000000..f85a0031
--- /dev/null
+++ b/app/models/post/cache_methods.rb
@@ -0,0 +1,12 @@
+module PostCacheMethods
+ def self.included(m)
+ m.after_save :expire_cache
+ m.after_destroy :expire_cache
+ end
+
+ def expire_cache
+    # Expire twice: once for the old tags (to catch tags that were removed) and once for the current tags.
+ Cache.expire(:tags => old_cached_tags) if old_cached_tags
+ Cache.expire(:tags => cached_tags)
+ end
+end
diff --git a/app/models/post/change_sequence_methods.rb b/app/models/post/change_sequence_methods.rb
new file mode 100644
index 00000000..60d10a8c
--- /dev/null
+++ b/app/models/post/change_sequence_methods.rb
@@ -0,0 +1,18 @@
+module PostChangeSequenceMethods
+ attr_accessor :increment_change_seq
+
+ def self.included(m)
+ m.before_create :touch_change_seq!
+ m.after_save :update_change_seq
+ end
+
+ def touch_change_seq!
+ self.increment_change_seq = true
+ end
+
+ def update_change_seq
+ return if increment_change_seq.nil?
+ execute_sql("UPDATE posts SET change_seq = nextval('post_change_seq') WHERE id = ?", id)
+ self.change_seq = select_value_sql("SELECT change_seq FROM posts WHERE id = ?", id)
+ end
+end
diff --git a/app/models/post/comment_methods.rb b/app/models/post/comment_methods.rb
new file mode 100644
index 00000000..d58101cc
--- /dev/null
+++ b/app/models/post/comment_methods.rb
@@ -0,0 +1,9 @@
+module PostCommentMethods
+ def self.included(m)
+ m.has_many :comments, :order => "id"
+ end
+
+ def recent_comments
+ Comment.find(:all, :conditions => ["post_id = ?", id], :order => "id desc", :limit => 6).reverse
+ end
+end
diff --git a/app/models/post/count_methods.rb b/app/models/post/count_methods.rb
new file mode 100644
index 00000000..bd910427
--- /dev/null
+++ b/app/models/post/count_methods.rb
@@ -0,0 +1,49 @@
+module PostCountMethods
+ module ClassMethods
+ def fast_count(tags = nil)
+ cache_version = Cache.get("$cache_version").to_i
+ key = "post-count/v=#{cache_version}/#{tags}"
+
+      # The memcached protocol doesn't allow spaces in keys, so escape them.
+ key = key.gsub(/-/, "--").gsub(/ /, "-_")
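+      # e.g. "post-count/v=12/long hair" becomes "post--count/v=12/long-_hair".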
+
+ count = Cache.get(key) {
+ Post.count_by_sql(Post.generate_sql(tags, :count => true))
+ }.to_i
+
+ return count
+
+ # This is just too brittle, and hard to make work with other features that may
+ # hide posts from the index.
+# if tags.blank?
+# return select_value_sql("SELECT row_count FROM table_data WHERE name = 'posts'").to_i
+# else
+# c = select_value_sql("SELECT post_count FROM tags WHERE name = ?", tags).to_i
+# if c == 0
+# return Post.count_by_sql(Post.generate_sql(tags, :count => true))
+# else
+# return c
+# end
+# end
+ end
+
+ def recalculate_row_count
+ execute_sql("UPDATE table_data SET row_count = (SELECT COUNT(*) FROM posts WHERE parent_id IS NULL AND status <> 'deleted') WHERE name = 'posts'")
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ m.after_create :increment_count
+ m.after_delete :decrement_count
+ m.after_undelete :increment_count
+ end
+
+ def increment_count
+ execute_sql("UPDATE table_data SET row_count = row_count + 1 WHERE name = 'posts'")
+ end
+
+ def decrement_count
+ execute_sql("UPDATE table_data SET row_count = row_count - 1 WHERE name = 'posts'")
+ end
+end
diff --git a/app/models/post/file_methods.rb b/app/models/post/file_methods.rb
new file mode 100644
index 00000000..c389ccee
--- /dev/null
+++ b/app/models/post/file_methods.rb
@@ -0,0 +1,470 @@
+require "download"
+require "zlib"
+
+# These methods deal with fetching the image and generating the thumbnail.
+# They work in conjunction with the image_store methods.  Since these methods have
+# to be called in a specific order, they've been bundled into one module.
+module PostFileMethods
+ def self.included(m)
+ m.before_validation_on_create :download_source
+ m.before_validation_on_create :ensure_tempfile_exists
+ m.before_validation_on_create :determine_content_type
+ m.before_validation_on_create :validate_content_type
+ m.before_validation_on_create :generate_hash
+ m.before_validation_on_create :set_image_dimensions
+ m.before_validation_on_create :generate_sample
+ m.before_validation_on_create :generate_jpeg
+ m.before_validation_on_create :generate_preview
+ m.before_validation_on_create :move_file
+ end
+
+ def ensure_tempfile_exists
+ unless File.exists?(tempfile_path)
+ errors.add :file, "not found, try uploading again"
+ return false
+ end
+ end
+
+ def validate_content_type
+ unless %w(jpg png gif swf).include?(file_ext.downcase)
+ errors.add(:file, "is an invalid content type: " + file_ext.downcase)
+ return false
+ end
+ end
+
+ def pretty_file_name(options={})
+ # Include the post number and tags. Don't include too many tags for posts that have too
+ # many of them.
+ options[:type] ||= :image
+
+ # If the filename is too long, it might fail to save or lose the extension when saving.
+    # Cut it down as needed.  Most posts on moe with lots of tags have lots of character tags,
+    # and those tags are the least important (compared to tags like artists, circles, "fixme",
+    # etc).
+ #
+ # Prioritize tags:
+ # - remove artist and circle tags last; these are the most important
+ # - general tags can either be important ("fixme") or useless ("red hair")
+ # - remove character tags first;
+
+ tags = Tag.compact_tags(self.cached_tags, 150)
+ if options[:type] == :sample then
+ tags = "sample"
+ end
+
+ # Filter characters.
+ tags = tags.gsub(/[\/]/, "_")
+
+ name = "#{self.id} #{tags}"
+ if CONFIG["download_filename_prefix"] != ""
+ name = CONFIG["download_filename_prefix"] + " " + name
+ end
+
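+    # e.g. "12345 some_artist long_hair" (or "moe 12345 some_artist long_hair" when a
+    # download filename prefix such as "moe" is configured).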
+ name
+ end
+
+ def file_name
+ md5 + "." + file_ext
+ end
+
+ def delete_tempfile
+ FileUtils.rm_f(tempfile_path)
+ FileUtils.rm_f(tempfile_preview_path)
+ FileUtils.rm_f(tempfile_sample_path)
+ FileUtils.rm_f(tempfile_jpeg_path)
+ end
+
+ def tempfile_path
+ "#{RAILS_ROOT}/public/data/#{$PROCESS_ID}.upload"
+ end
+
+ def tempfile_preview_path
+ "#{RAILS_ROOT}/public/data/#{$PROCESS_ID}-preview.jpg"
+ end
+
+ # Generate MD5 and CRC32 hashes for the file. Do this before generating samples, so if this
+ # is a duplicate we'll notice before we spend time resizing the image.
+ def regenerate_hash
+ path = tempfile_path
+ if not File.exists?(path)
+ path = file_path
+ end
+
+ if not File.exists?(path)
+ errors.add(:file, "not found")
+ return false
+ end
+
+ # Compute both hashes in one pass.
+ md5_obj = Digest::MD5.new
+ crc32_accum = 0
+ File.open(path, 'rb') { |fp|
+ buf = ""
+ while fp.read(1024*64, buf) do
+ md5_obj << buf
+ crc32_accum = Zlib.crc32(buf, crc32_accum)
+ end
+ }
+
+ self.md5 = md5_obj.hexdigest
+ self.crc32 = crc32_accum
+ end
+
+ def generate_hash
+ if not regenerate_hash
+ return false
+ end
+
+ if Post.exists?(["md5 = ?", md5])
+ delete_tempfile
+ errors.add "md5", "already exists"
+ return false
+ else
+ return true
+ end
+ end
+
+ # Generate the specified image type. If options[:force_regen] is set, generate the file even
+ # if it already exists.
+ def regenerate_images(type, options = {})
+ return true unless image?
+
+ if type == :sample then
+ return false if not generate_sample(options[:force_regen])
+ temp_path = tempfile_sample_path
+ dest_path = sample_path
+ elsif type == :jpeg then
+ return false if not generate_jpeg(options[:force_regen])
+ temp_path = tempfile_jpeg_path
+ dest_path = jpeg_path
+ elsif type == :preview then
+ return false if not generate_preview
+ temp_path = tempfile_preview_path
+ dest_path = preview_path
+ else
+ raise Exception, "unknown type: %s" % type
+ end
+
+ # Only move in the changed files on success. When we return false, the caller won't
+ # save us to the database; we need to only move the new files in if we're going to be
+ # saved. This is normally handled by move_file.
+ if File.exists?(temp_path)
+ FileUtils.mkdir_p(File.dirname(dest_path), :mode => 0775)
+ FileUtils.mv(temp_path, dest_path)
+ FileUtils.chmod(0775, dest_path)
+ end
+
+ return true
+ end
+
+ def generate_preview
+ return true unless image? && width && height
+
+ size = Danbooru.reduce_to({:width=>width, :height=>height}, {:width=>150, :height=>150})
+
+    # To save CPU, generate the preview from the new sample if we have one; otherwise generate it from the image.
+ if File.exists?(tempfile_sample_path)
+ path, ext = tempfile_sample_path, "jpg"
+ elsif File.exists?(sample_path)
+ path, ext = sample_path, "jpg"
+ elsif File.exists?(tempfile_path)
+ path, ext = tempfile_path, file_ext
+ elsif File.exists?(file_path)
+ path, ext = file_path, file_ext
+ else
+ errors.add(:file, "not found")
+ return false
+ end
+
+ begin
+ Danbooru.resize(ext, path, tempfile_preview_path, size, 95)
+ rescue Exception => x
+ errors.add "preview", "couldn't be generated (#{x})"
+ return false
+ end
+
+ return true
+ end
+
+ # Automatically download from the source if it's a URL.
+ attr_accessor :received_file
+ def download_source
+ return if source !~ /^http:\/\// || !file_ext.blank?
+ return if received_file
+
+ begin
+ Danbooru.http_get_streaming(source) do |response|
+ File.open(tempfile_path, "wb") do |out|
+ response.read_body do |block|
+ out.write(block)
+ end
+ end
+ end
+
+ if self.source.to_s =~ /^http/
+ #self.source = "Image board"
+ self.source = ""
+ end
+
+ return true
+ rescue SocketError, URI::Error, Timeout::Error, SystemCallError => x
+ delete_tempfile
+ errors.add "source", "couldn't be opened: #{x}"
+ return false
+ end
+ end
+
+ def determine_content_type
+ if not File.exists?(tempfile_path)
+ errors.add_to_base("No file received")
+ return false
+ end
+
+ imgsize = ImageSize.new(File.open(tempfile_path, "rb"))
+
+ unless imgsize.get_width.nil?
+ self.file_ext = imgsize.get_type.gsub(/JPEG/, "JPG").downcase
+ end
+ end
+
+ # Assigns a CGI file to the post. This writes the file to disk and generates a unique file name.
+ def file=(f)
+ return if f.nil? || f.size == 0
+
+ if f.local_path
+ # Large files are stored in the temp directory, so instead of
+ # reading/rewriting through Ruby, just rely on system calls to
+ # copy the file to danbooru's directory.
+ FileUtils.cp(f.local_path, tempfile_path)
+ else
+ File.open(tempfile_path, 'wb') {|nf| nf.write(f.read)}
+ end
+
+ self.received_file = true
+ end
+
+ def set_image_dimensions
+ if image? or flash?
+ imgsize = ImageSize.new(File.open(tempfile_path, "rb"))
+ self.width = imgsize.get_width
+ self.height = imgsize.get_height
+ end
+ self.file_size = File.size(tempfile_path) rescue 0
+ end
+
+ # Returns true if the post is an image format that GD can handle.
+ def image?
+ %w(jpg jpeg gif png).include?(file_ext.downcase)
+ end
+
+ # Returns true if the post is a Flash movie.
+ def flash?
+ file_ext == "swf"
+ end
+
+ def find_ext(file_path)
+ ext = File.extname(file_path)
+ if ext.blank?
+ return "txt"
+ else
+ ext = ext[1..-1].downcase
+ ext = "jpg" if ext == "jpeg"
+ return ext
+ end
+ end
+
+ def content_type_to_file_ext(content_type)
+ case content_type.chomp
+ when "image/jpeg"
+ return "jpg"
+
+ when "image/gif"
+ return "gif"
+
+ when "image/png"
+ return "png"
+
+ when "application/x-shockwave-flash"
+ return "swf"
+
+ else
+ nil
+ end
+ end
+
+ def preview_dimensions
+ if image?
+ dim = Danbooru.reduce_to({:width => width, :height => height}, {:width => 150, :height => 150})
+ return [dim[:width], dim[:height]]
+ else
+ return [150, 150]
+ end
+ end
+
+ def tempfile_sample_path
+ "#{RAILS_ROOT}/public/data/#{$PROCESS_ID}-sample.jpg"
+ end
+
+ def generate_sample(force_regen = false)
+ return true unless image?
+ return true unless CONFIG["image_samples"]
+ return true unless (width && height)
+ return true if (file_ext.downcase == "gif")
+
+ size = Danbooru.reduce_to({:width => width, :height => height}, {:width => CONFIG["sample_width"], :height => CONFIG["sample_height"]}, CONFIG["sample_ratio"])
+
+ # We can generate the sample image during upload or offline. Use tempfile_path
+ # if it exists, otherwise use file_path.
+ path = tempfile_path
+ path = file_path unless File.exists?(path)
+ unless File.exists?(path)
+ errors.add(:file, "not found")
+ return false
+ end
+
+ # If we're not reducing the resolution for the sample image, only reencode if the
+ # source image is above the reencode threshold. Anything smaller won't be reduced
+ # enough by the reencode to bother, so don't reencode it and save disk space.
+ if size[:width] == width && size[:height] == height && File.size?(path) < CONFIG["sample_always_generate_size"]
+ self.sample_width = nil
+ self.sample_height = nil
+ return true
+ end
+
+    # If we already have a sample image, and the parameters haven't changed,
+    # don't regenerate it.
+ if !force_regen && (size[:width] == sample_width && size[:height] == sample_height)
+ return true
+ end
+
+ size = Danbooru.reduce_to({:width => width, :height => height}, {:width => CONFIG["sample_width"], :height => CONFIG["sample_height"]})
+ begin
+ Danbooru.resize(file_ext, path, tempfile_sample_path, size, CONFIG["sample_quality"])
+ rescue Exception => x
+ errors.add "sample", "couldn't be created: #{x}"
+ return false
+ end
+
+ self.sample_width = size[:width]
+ self.sample_height = size[:height]
+ self.sample_size = File.size(tempfile_sample_path)
+
+ crc32_accum = 0
+ File.open(tempfile_sample_path, 'rb') { |fp|
+ buf = ""
+ while fp.read(1024*64, buf) do
+ crc32_accum = Zlib.crc32(buf, crc32_accum)
+ end
+ }
+ self.sample_crc32 = crc32_accum
+
+ return true
+ end
+
+ # Returns true if the post has a sample image.
+ def has_sample?
+ sample_width.is_a?(Integer)
+ end
+
+ # Returns true if the post has a sample image, and we're going to use it.
+ def use_sample?(user = nil)
+ if user && !user.show_samples?
+ false
+ else
+ CONFIG["image_samples"] && has_sample?
+ end
+ end
+
+ def sample_url(user = nil)
+ if status != "deleted" && use_sample?(user)
+ store_sample_url
+ else
+ file_url
+ end
+ end
+
+ def get_sample_width(user = nil)
+ if use_sample?(user)
+ sample_width
+ else
+ width
+ end
+ end
+
+ def get_sample_height(user = nil)
+ if use_sample?(user)
+ sample_height
+ else
+ height
+ end
+ end
+
+ def tempfile_jpeg_path
+ "#{RAILS_ROOT}/public/data/#{$PROCESS_ID}-jpeg.jpg"
+ end
+
+ # If the JPEG version needs to be generated (or regenerated), output it to tempfile_jpeg_path. On
+ # error, return false; on success or no-op, return true.
+ def generate_jpeg(force_regen = false)
+ return true unless image?
+ return true unless CONFIG["jpeg_enable"]
+ return true unless (width && height)
+ # Only generate JPEGs for PNGs. Don't do it for files that are already JPEGs; we'll just add
+ # artifacts and/or make the file bigger. Don't do it for GIFs; they're usually animated.
+ return true if (file_ext.downcase != "png")
+
+ # We can generate the image during upload or offline. Use tempfile_path
+ # if it exists, otherwise use file_path.
+ path = tempfile_path
+ path = file_path unless File.exists?(path)
+ unless File.exists?(path)
+ errors.add(:file, "not found")
+ return false
+ end
+
+ # If we already have the image, don't regenerate it.
+ if !force_regen && jpeg_width.is_a?(Integer)
+ return true
+ end
+
+ size = Danbooru.reduce_to({:width => width, :height => height}, {:width => CONFIG["jpeg_width"], :height => CONFIG["jpeg_height"]}, CONFIG["jpeg_ratio"])
+ begin
+ Danbooru.resize(file_ext, path, tempfile_jpeg_path, size, CONFIG["jpeg_quality"])
+ rescue Exception => x
+ errors.add "jpeg", "couldn't be created: #{x}"
+ return false
+ end
+
+ self.jpeg_width = size[:width]
+ self.jpeg_height = size[:height]
+ self.jpeg_size = File.size(tempfile_jpeg_path)
+
+ crc32_accum = 0
+ File.open(tempfile_jpeg_path, 'rb') { |fp|
+ buf = ""
+ while fp.read(1024*64, buf) do
+ crc32_accum = Zlib.crc32(buf, crc32_accum)
+ end
+ }
+ self.jpeg_crc32 = crc32_accum
+
+ return true
+ end
+
+ def has_jpeg?
+ jpeg_width.is_a?(Integer)
+ end
+
+ # Returns true if the post has a JPEG version, and we're going to use it.
+ def use_jpeg?(user = nil)
+ CONFIG["jpeg_enable"] && has_jpeg?
+ end
+
+ def jpeg_url(user = nil)
+ if status != "deleted" && use_jpeg?(user)
+ store_jpeg_url
+ else
+ file_url
+ end
+ end
+end
diff --git a/app/models/post/g b/app/models/post/g
new file mode 100644
index 00000000..20a7a166
--- /dev/null
+++ b/app/models/post/g
@@ -0,0 +1,312 @@
+require "download"
+
+# These methods deal with fetching the image and generating the thumbnail.
+# They work in conjunction with the image_store methods.  Since these methods have
+# to be called in a specific order, they've been bundled into one module.
+module PostFileMethods
+ def self.included(m)
+ m.before_validation_on_create :download_source
+ m.before_validation_on_create :determine_content_type
+ m.before_validation_on_create :validate_content_type
+ m.before_validation_on_create :generate_hash
+ m.before_validation_on_create :set_image_dimensions
+ m.before_validation_on_create :generate_sample
+ m.before_validation_on_create :generate_preview
+ m.before_validation_on_create :move_file
+ end
+
+ def validate_content_type
+ if file_ext.empty?
+ errors.add_to_base("No file received")
+ return false
+ end
+
+ unless %w(jpg png gif swf).include?(file_ext.downcase)
+ errors.add(:file, "is an invalid content type: " + file_ext.downcase)
+ return false
+ end
+ end
+
+ def file_name
+ md5 + "." + file_ext
+ end
+
+ def delete_tempfile
+ FileUtils.rm_f(tempfile_path)
+ FileUtils.rm_f(tempfile_preview_path)
+ FileUtils.rm_f(tempfile_sample_path)
+ end
+
+ def tempfile_path
+ "#{RAILS_ROOT}/public/data/#{$PROCESS_ID}.upload"
+ end
+
+ def tempfile_preview_path
+ "#{RAILS_ROOT}/public/data/#{$PROCESS_ID}-preview.jpg"
+ end
+
+ def file_size
+ File.size(file_path) rescue 0
+ end
+
+ # Generate an MD5 hash for the file.
+ def generate_hash
+ unless File.exists?(tempfile_path)
+ errors.add(:file, "not found")
+ return false
+ end
+
+ self.md5 = File.open(tempfile_path, 'rb') {|fp| Digest::MD5.hexdigest(fp.read)}
+
+ if Post.exists?(["md5 = ?", md5])
+ delete_tempfile
+ errors.add "md5", "already exists"
+ return false
+ else
+ return true
+ end
+ end
+
+ def generate_preview
+ return true unless image? && width && height
+
+ unless File.exists?(tempfile_path)
+ errors.add(:file, "not found")
+ return false
+ end
+
+ size = Danbooru.reduce_to({:width=>width, :height=>height}, {:width=>150, :height=>150})
+
+    # To save CPU, generate the preview from the new sample if we have one; otherwise generate it from the image.
+ if File.exists?(tempfile_sample_path)
+ path, ext = tempfile_sample_path, "jpg"
+ else
+ path, ext = tempfile_path, file_ext
+ end
+
+ begin
+ Danbooru.resize(ext, path, tempfile_preview_path, size, 95)
+ rescue Exception => x
+ errors.add "preview", "couldn't be generated (#{x})"
+ return false
+ end
+
+ return true
+ end
+
+ # Automatically download from the source if it's a URL.
+ def download_source
+ return if source !~ /^http:\/\// || !file_ext.blank?
+
+ begin
+ Download.download(:url => source) do |res|
+ File.open(tempfile_path, 'wb') do |out|
+ res.read_body do |block|
+ out.write(block)
+ end
+ end
+ end
+
+ if self.source.to_s =~ /^http/
+ self.source = "Image board"
+ end
+
+ return true
+ rescue SocketError, URI::Error, SystemCallError => x
+ delete_tempfile
+ errors.add "source", "couldn't be opened: #{x}"
+ return false
+ end
+ end
+
+ def determine_content_type
+ if not File.exists?(tempfile_path)
+ errors.add_to_base("No file received")
+ return false
+ end
+
+ imgsize = ImageSize.new(File.open(tempfile_path, 'rb'))
+ if !imgsize.get_width.nil?
+ self.file_ext = imgsize.get_type.gsub(/JPEG/, "JPG").downcase
+ end
+ end
+
+ # Assigns a CGI file to the post. This writes the file to disk and generates a unique file name.
+ def file=(f)
+ return if f.nil? || f.size == 0
+a = File.new("/tmp/templog", "a+")
+
+ if f.local_path
+ # Large files are stored in the temp directory, so instead of
+ # reading/rewriting through Ruby, just rely on system calls to
+ # copy the file to danbooru's directory.
+
+a.write("%s to %s\n" % [f.local_path, tempfile_path])
+ FileUtils.cp(f.local_path, tempfile_path)
+ else
+
+a.write("... to %s\n" % tempfile_path)
+
+ File.open(tempfile_path, 'wb') {|nf| nf.write(f.read)}
+ end
+
+a.close
+
+ end
+
+ def set_image_dimensions
+ if image? or flash?
+ imgsize = ImageSize.new(File.open(tempfile_path, "rb"))
+ self.width = imgsize.get_width
+ self.height = imgsize.get_height
+ end
+ end
+
+ # Returns true if the post is an image format that GD can handle.
+ def image?
+ %w(jpg jpeg gif png).include?(file_ext.downcase)
+ end
+
+ # Returns true if the post is a Flash movie.
+ def flash?
+ file_ext == "swf"
+ end
+
+ def find_ext(file_path)
+ ext = File.extname(file_path)
+ if ext.blank?
+ return "txt"
+ else
+ ext = ext[1..-1].downcase
+ ext = "jpg" if ext == "jpeg"
+ return ext
+ end
+ end
+
+ def content_type_to_file_ext(content_type)
+ case content_type.chomp
+ when "image/jpeg"
+ return "jpg"
+
+ when "image/gif"
+ return "gif"
+
+ when "image/png"
+ return "png"
+
+ when "application/x-shockwave-flash"
+ return "swf"
+
+ else
+ nil
+ end
+ end
+
+ def preview_dimensions
+ if image? && !is_deleted?
+ dim = Danbooru.reduce_to({:width => width, :height => height}, {:width => 150, :height => 150})
+ return [dim[:width], dim[:height]]
+ else
+ return [150, 150]
+ end
+ end
+
+ def tempfile_sample_path
+ "#{RAILS_ROOT}/public/data/#{$PROCESS_ID}-sample.jpg"
+ end
+
+ def regenerate_sample
+ return false unless image?
+
+ if generate_sample && File.exists?(tempfile_sample_path)
+ FileUtils.mkdir_p(File.dirname(sample_path), :mode => 0775)
+ FileUtils.mv(tempfile_sample_path, sample_path)
+ FileUtils.chmod(0775, sample_path)
+ return true
+ else
+ return false
+ end
+ end
+
+ def generate_sample
+ return true unless image?
+ return true unless CONFIG["image_samples"]
+ return true unless (width && height)
+ return true if (file_ext.downcase == "gif")
+
+ size = Danbooru.reduce_to({:width => width, :height => height}, {:width => CONFIG["sample_width"], :height => CONFIG["sample_height"]}, CONFIG["sample_ratio"])
+
+ # We can generate the sample image during upload or offline. Use tempfile_path
+ # if it exists, otherwise use file_path.
+ path = tempfile_path
+ path = file_path unless File.exists?(path)
+ unless File.exists?(path)
+ errors.add(:file, "not found")
+ return false
+ end
+
+ # If we're not reducing the resolution for the sample image, only reencode if the
+ # source image is above the reencode threshold. Anything smaller won't be reduced
+ # enough by the reencode to bother, so don't reencode it and save disk space.
+ if size[:width] == width && size[:height] == height &&
+ File.size?(path) < CONFIG["sample_always_generate_size"]
+ return true
+ end
+
+    # If we already have a sample image, and the parameters haven't changed,
+    # don't regenerate it.
+ if size[:width] == sample_width && size[:height] == sample_height
+ return true
+ end
+
+ size = Danbooru.reduce_to({:width => width, :height => height}, {:width => CONFIG["sample_width"], :height => CONFIG["sample_height"]})
+ begin
+ Danbooru.resize(file_ext, path, tempfile_sample_path, size, 95)
+ rescue Exception => x
+ errors.add "sample", "couldn't be created: #{x}"
+ return false
+ end
+
+ self.sample_width = size[:width]
+ self.sample_height = size[:height]
+ return true
+ end
+
+ # Returns true if the post has a sample image.
+ def has_sample?
+ sample_width.is_a?(Integer)
+ end
+
+ # Returns true if the post has a sample image, and we're going to use it.
+ def use_sample?(user = nil)
+ if user && !user.show_samples?
+ false
+ else
+ CONFIG["image_samples"] && has_sample?
+ end
+ end
+
+ def sample_url(user = nil)
+ if status != "deleted" && use_sample?(user)
+ store_sample_url
+ else
+ file_url
+ end
+ end
+
+ def get_sample_width(user = nil)
+ if use_sample?(user)
+ sample_width
+ else
+ width
+ end
+ end
+
+ def get_sample_height(user = nil)
+ if use_sample?(user)
+ sample_height
+ else
+ height
+ end
+ end
+end
diff --git a/app/models/post/image_store/amazon_s3.rb b/app/models/post/image_store/amazon_s3.rb
new file mode 100644
index 00000000..f05fbb8a
--- /dev/null
+++ b/app/models/post/image_store/amazon_s3.rb
@@ -0,0 +1,56 @@
+module PostImageStoreMethods
+ module AmazonS3
+ def move_file
+ begin
+ base64_md5 = Base64.encode64(self.md5.unpack("a2" * (self.md5.size / 2)).map {|x| x.hex.chr}.join)
+
+ AWS::S3::Base.establish_connection!(:access_key_id => CONFIG["amazon_s3_access_key_id"], :secret_access_key => CONFIG["amazon_s3_secret_access_key"])
+ AWS::S3::S3Object.store(file_name, open(self.tempfile_path, "rb"), CONFIG["amazon_s3_bucket_name"], :access => :public_read, "Content-MD5" => base64_md5, "Cache-Control" => "max-age=315360000")
+
+ if image?
+ AWS::S3::S3Object.store("preview/#{md5}.jpg", open(self.tempfile_preview_path, "rb"), CONFIG["amazon_s3_bucket_name"], :access => :public_read, "Cache-Control" => "max-age=315360000")
+ end
+
+ if File.exists?(tempfile_sample_path)
+ AWS::S3::S3Object.store("sample/" + CONFIG["sample_filename_prefix"] + "#{md5}.jpg", open(self.tempfile_sample_path, "rb"), CONFIG["amazon_s3_bucket_name"], :access => :public_read, "Cache-Control" => "max-age=315360000")
+ end
+
+ if File.exists?(tempfile_jpeg_path)
+ AWS::S3::S3Object.store("jpeg/#{md5}.jpg", open(self.tempfile_jpeg_path, "rb"), CONFIG["amazon_s3_bucket_name"], :access => :public_read, "Cache-Control" => "max-age=315360000")
+ end
+
+ return true
+ ensure
+ self.delete_tempfile()
+ end
+ end
+
+ def file_url
+ "http://s3.amazonaws.com/" + CONFIG["amazon_s3_bucket_name"] + "/#{file_name}"
+ end
+
+ def preview_url
+ if self.image?
+ "http://s3.amazonaws.com/" + CONFIG["amazon_s3_bucket_name"] + "/preview/#{md5}.jpg"
+ else
+ "http://s3.amazonaws.com/" + CONFIG["amazon_s3_bucket_name"] + "/preview/download.png"
+ end
+ end
+
+ def store_sample_url
+ "http://s3.amazonaws.com/" + CONFIG["amazon_s3_bucket_name"] + "/sample/deleted.png"
+ end
+
+ def store_jpeg_url
+ "http://s3.amazonaws.com/" + CONFIG["amazon_s3_bucket_name"] + "/jpeg/deleted.png"
+ end
+
+ def delete_file
+ AWS::S3::Base.establish_connection!(:access_key_id => CONFIG["amazon_s3_access_key_id"], :secret_access_key => CONFIG["amazon_s3_secret_access_key"])
+ AWS::S3::S3Object.delete(file_name, CONFIG["amazon_s3_bucket_name"])
+ AWS::S3::S3Object.delete("preview/#{md5}.jpg", CONFIG["amazon_s3_bucket_name"])
+ AWS::S3::S3Object.delete("sample/#{md5}.jpg", CONFIG["amazon_s3_bucket_name"])
+ AWS::S3::S3Object.delete("jpeg/#{md5}.jpg", CONFIG["amazon_s3_bucket_name"])
+ end
+ end
+end
diff --git a/app/models/post/image_store/local_flat.rb b/app/models/post/image_store/local_flat.rb
new file mode 100644
index 00000000..c8251c43
--- /dev/null
+++ b/app/models/post/image_store/local_flat.rb
@@ -0,0 +1,88 @@
+module PostImageStoreMethods
+ module LocalFlat
+ def file_path
+ "#{RAILS_ROOT}/public/data/#{file_name}"
+ end
+
+ def file_url
+ if CONFIG["use_pretty_image_urls"] then
+ CONFIG["url_base"] + "/image/#{md5}/#{url_encode(pretty_file_name)}.#{file_ext}"
+ else
+ CONFIG["url_base"] + "/data/#{file_name}"
+ end
+ end
+
+ def preview_path
+ if status == "deleted"
+ "#{RAILS_ROOT}/public/deleted-preview.png"
+ elsif image?
+ "#{RAILS_ROOT}/public/data/preview/#{md5}.jpg"
+ else
+ "#{RAILS_ROOT}/public/download-preview.png"
+ end
+ end
+
+ def sample_path
+ "#{RAILS_ROOT}/public/data/sample/" + CONFIG["sample_filename_prefix"] + "#{md5}.jpg"
+ end
+
+ def preview_url
+ if image?
+ CONFIG["url_base"] + "/data/preview/#{md5}.jpg"
+ else
+ CONFIG["url_base"] + "/download-preview.png"
+ end
+ end
+
+ def jpeg_path
+      "#{RAILS_ROOT}/public/data/jpeg/#{md5}.jpg"
+ end
+
+ def store_jpeg_url
+ if CONFIG["use_pretty_image_urls"] then
+ CONFIG["url_base"] + "/jpeg/#{md5}/#{url_encode(pretty_file_name({:type => :jpeg}))}.jpg"
+ else
+ CONFIG["url_base"] + "/data/jpeg/#{md5}.jpg"
+ end
+ end
+
+ def store_sample_url
+ if CONFIG["use_pretty_image_urls"] then
+ path = "/sample/#{md5}/#{url_encode(pretty_file_name({:type => :sample}))}.jpg"
+ else
+ path = "/data/sample/" + CONFIG["sample_filename_prefix"] + "#{md5}.jpg"
+ end
+
+ CONFIG["url_base"] + path
+ end
+
+ def delete_file
+ FileUtils.rm_f(file_path)
+ FileUtils.rm_f(preview_path) if image?
+ FileUtils.rm_f(sample_path) if image?
+ FileUtils.rm_f(jpeg_path) if image?
+ end
+
+ def move_file
+ FileUtils.mv(tempfile_path, file_path)
+ FileUtils.chmod(0664, file_path)
+
+ if image?
+ FileUtils.mv(tempfile_preview_path, preview_path)
+ FileUtils.chmod(0664, preview_path)
+ end
+
+ if File.exists?(tempfile_sample_path)
+ FileUtils.mv(tempfile_sample_path, sample_path)
+ FileUtils.chmod(0664, sample_path)
+ end
+
+ if File.exists?(tempfile_jpeg_path)
+ FileUtils.mv(tempfile_jpeg_path, jpeg_path)
+ FileUtils.chmod(0664, jpeg_path)
+ end
+
+ delete_tempfile
+ end
+ end
+end
diff --git a/app/models/post/image_store/local_flat_with_amazon_s3_backup.rb b/app/models/post/image_store/local_flat_with_amazon_s3_backup.rb
new file mode 100644
index 00000000..4affb61a
--- /dev/null
+++ b/app/models/post/image_store/local_flat_with_amazon_s3_backup.rb
@@ -0,0 +1,105 @@
+module PostImageStoreMethods
+ module LocalFlatWithAmazonS3Backup
+ def move_file
+ FileUtils.mv(tempfile_path, file_path)
+ FileUtils.chmod(0664, file_path)
+
+ if image?
+ FileUtils.mv(tempfile_preview_path, preview_path)
+ FileUtils.chmod(0664, preview_path)
+ end
+
+ if File.exists?(tempfile_sample_path)
+ FileUtils.mv(tempfile_sample_path, sample_path)
+ FileUtils.chmod(0664, sample_path)
+ end
+
+ if File.exists?(tempfile_jpeg_path)
+ FileUtils.mv(tempfile_jpeg_path, jpeg_path)
+ FileUtils.chmod(0664, jpeg_path)
+ end
+
+ self.delete_tempfile()
+
+ base64_md5 = Base64.encode64(self.md5.unpack("a2" * (self.md5.size / 2)).map {|x| x.hex.chr}.join)
+
+ AWS::S3::Base.establish_connection!(:access_key_id => CONFIG["amazon_s3_access_key_id"], :secret_access_key => CONFIG["amazon_s3_secret_access_key"])
+ AWS::S3::S3Object.store(file_name, open(self.file_path, "rb"), CONFIG["amazon_s3_bucket_name"], :access => :public_read, "Content-MD5" => base64_md5)
+
+ if image?
+ AWS::S3::S3Object.store("preview/#{md5}.jpg", open(self.preview_path, "rb"), CONFIG["amazon_s3_bucket_name"], :access => :public_read)
+ end
+
+ if File.exists?(tempfile_sample_path)
+ AWS::S3::S3Object.store("sample/" + CONFIG["sample_filename_prefix"] + "#{md5}.jpg", open(self.sample_path, "rb"), CONFIG["amazon_s3_bucket_name"], :access => :public_read)
+ end
+
+ if File.exists?(tempfile_jpeg_path)
+ AWS::S3::S3Object.store("jpeg/#{md5}.jpg", open(self.jpeg_path, "rb"), CONFIG["amazon_s3_bucket_name"], :access => :public_read)
+ end
+
+ return true
+ end
+
+ def file_path
+ "#{RAILS_ROOT}/public/data/#{file_name}"
+ end
+
+ def file_url
+ #"http://s3.amazonaws.com/" + CONFIG["amazon_s3_bucket_name"] + "/#{file_name}"
+ CONFIG["url_base"] + "/data/#{file_name}"
+ end
+
+ def preview_path
+ if status == "deleted"
+ "#{RAILS_ROOT}/public/deleted-preview.png"
+ elsif image?
+ "#{RAILS_ROOT}/public/data/preview/#{md5}.jpg"
+ else
+ "#{RAILS_ROOT}/public/download-preview.png"
+ end
+ end
+
+ def sample_path
+ "#{RAILS_ROOT}/public/data/sample/" + CONFIG["sample_filename_prefix"] + "#{md5}.jpg"
+ end
+
+ def preview_url
+# if self.image?
+# "http://s3.amazonaws.com/" + CONFIG["amazon_s3_bucket_name"] + "/preview/#{md5}.jpg"
+# else
+# "http://s3.amazonaws.com/" + CONFIG["amazon_s3_bucket_name"] + "/preview/download.png"
+# end
+
+ if self.image?
+ CONFIG["url_base"] + "/data/preview/#{md5}.jpg"
+ else
+ CONFIG["url_base"] + "/download-preview.png"
+ end
+ end
+
+ def jpeg_path
+ "#{RAILS_ROOT}/public/data/jpeg/#{md5}.jpg"
+ end
+
+ def store_jpeg_url
+ CONFIG["url_base"] + "/data/jpeg/#{md5}.jpg"
+ end
+
+ def store_sample_url
+ CONFIG["url_base"] + "/data/sample/" + CONFIG["sample_filename_prefix"] + "#{md5}.jpg"
+ end
+
+ def delete_file
+ AWS::S3::Base.establish_connection!(:access_key_id => CONFIG["amazon_s3_access_key_id"], :secret_access_key => CONFIG["amazon_s3_secret_access_key"])
+ AWS::S3::S3Object.delete(file_name, CONFIG["amazon_s3_bucket_name"])
+ AWS::S3::S3Object.delete("preview/#{md5}.jpg", CONFIG["amazon_s3_bucket_name"])
+ AWS::S3::S3Object.delete("sample/#{md5}.jpg", CONFIG["amazon_s3_bucket_name"])
+ AWS::S3::S3Object.delete("jpeg/#{md5}.jpg", CONFIG["amazon_s3_bucket_name"])
+ FileUtils.rm_f(file_path)
+ FileUtils.rm_f(preview_path) if image?
+ FileUtils.rm_f(sample_path) if image?
+ FileUtils.rm_f(jpeg_path) if image?
+ end
+ end
+end
diff --git a/app/models/post/image_store/local_hierarchy.rb b/app/models/post/image_store/local_hierarchy.rb
new file mode 100644
index 00000000..43d18b1d
--- /dev/null
+++ b/app/models/post/image_store/local_hierarchy.rb
@@ -0,0 +1,94 @@
+module PostImageStoreMethods
+ module LocalHierarchy
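+    # Files are sharded into directories by the first two byte pairs of the md5,
+    # e.g. an md5 of "d41d8cd9..." maps to "d4/1d".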
+ def file_hierarchy
+ "%s/%s" % [md5[0,2], md5[2,2]]
+ end
+
+ def file_path
+ "#{RAILS_ROOT}/public/data/image/#{file_hierarchy}/#{file_name}"
+ end
+
+ def file_url
+ if CONFIG["use_pretty_image_urls"] then
+ CONFIG["url_base"] + "/image/#{md5}/#{url_encode(pretty_file_name)}.#{file_ext}"
+ else
+ CONFIG["url_base"] + "/data/image/#{file_hierarchy}/#{file_name}"
+ end
+ end
+
+ def preview_path
+ if status == "deleted"
+ "#{RAILS_ROOT}/public/deleted-preview.png"
+ elsif image?
+ "#{RAILS_ROOT}/public/data/preview/#{file_hierarchy}/#{md5}.jpg"
+ else
+ "#{RAILS_ROOT}/public/download-preview.png"
+ end
+ end
+
+ def sample_path
+ "#{RAILS_ROOT}/public/data/sample/#{file_hierarchy}/" + CONFIG["sample_filename_prefix"] + "#{md5}.jpg"
+ end
+
+ def preview_url
+ if image?
+ CONFIG["url_base"] + "/data/preview/#{file_hierarchy}/#{md5}.jpg"
+ else
+ CONFIG["url_base"] + "/download-preview.png"
+ end
+ end
+
+ def jpeg_path
+ "#{RAILS_ROOT}/public/data/jpeg/#{file_hierarchy}/#{md5}.jpg"
+ end
+
+ def store_jpeg_url
+ if CONFIG["use_pretty_image_urls"] then
+ CONFIG["url_base"] + "/jpeg/#{md5}/#{url_encode(pretty_file_name({:type => :jpeg}))}.jpg"
+ else
+ CONFIG["url_base"] + "/data/jpeg/#{file_hierarchy}/#{md5}.jpg"
+ end
+ end
+
+ def store_sample_url
+ if CONFIG["use_pretty_image_urls"] then
+ CONFIG["url_base"] + "/sample/#{md5}/#{url_encode(pretty_file_name({:type => :sample}))}.jpg"
+ else
+ CONFIG["url_base"] + "/data/sample/#{file_hierarchy}/" + CONFIG["sample_filename_prefix"] + "#{md5}.jpg"
+ end
+ end
+
+ def delete_file
+ FileUtils.rm_f(file_path)
+ FileUtils.rm_f(preview_path) if image?
+ FileUtils.rm_f(sample_path) if image?
+ FileUtils.rm_f(jpeg_path) if image?
+ end
+
+ def move_file
+ FileUtils.mkdir_p(File.dirname(file_path), :mode => 0775)
+ FileUtils.mv(tempfile_path, file_path)
+ FileUtils.chmod(0664, file_path)
+
+ if image?
+ FileUtils.mkdir_p(File.dirname(preview_path), :mode => 0775)
+ FileUtils.mv(tempfile_preview_path, preview_path)
+ FileUtils.chmod(0664, preview_path)
+ end
+
+ if File.exists?(tempfile_sample_path)
+ FileUtils.mkdir_p(File.dirname(sample_path), :mode => 0775)
+ FileUtils.mv(tempfile_sample_path, sample_path)
+ FileUtils.chmod(0664, sample_path)
+ end
+
+ if File.exists?(tempfile_jpeg_path)
+ FileUtils.mkdir_p(File.dirname(jpeg_path), :mode => 0775)
+ FileUtils.mv(tempfile_jpeg_path, jpeg_path)
+ FileUtils.chmod(0664, jpeg_path)
+ end
+
+ delete_tempfile
+ end
+ end
+end
diff --git a/app/models/post/image_store/remote_hierarchy.rb b/app/models/post/image_store/remote_hierarchy.rb
new file mode 100644
index 00000000..0965ce85
--- /dev/null
+++ b/app/models/post/image_store/remote_hierarchy.rb
@@ -0,0 +1,117 @@
+require "mirror"
+require "erb"
+include ERB::Util
+
+module PostImageStoreMethods
+ module RemoteHierarchy
+ def file_hierarchy
+ "%s/%s" % [md5[0,2], md5[2,2]]
+ end
+
+ def select_random_image_server(*options)
+ Mirrors.select_image_server(self.is_warehoused?, self.created_at.to_i, *options)
+ end
+
+ def file_path
+ "#{RAILS_ROOT}/public/data/image/#{file_hierarchy}/#{file_name}"
+ end
+
+ def file_url
+ if CONFIG["use_pretty_image_urls"] then
+ select_random_image_server + "/image/#{md5}/#{url_encode(pretty_file_name)}.#{file_ext}"
+ else
+ select_random_image_server + "/data/image/#{file_hierarchy}/#{file_name}"
+ end
+ end
+
+ def preview_path
+ if image?
+ "#{RAILS_ROOT}/public/data/preview/#{file_hierarchy}/#{md5}.jpg"
+ else
+ "#{RAILS_ROOT}/public/download-preview.png"
+ end
+ end
+
+ def sample_path
+ "#{RAILS_ROOT}/public/data/sample/#{file_hierarchy}/" + CONFIG["sample_filename_prefix"] + "#{md5}.jpg"
+ end
+
+ def preview_url
+ if self.is_warehoused?
+ if status == "deleted"
+ CONFIG["url_base"] + "/deleted-preview.png"
+
+ elsif image?
+ select_random_image_server(:preview => true) + "/data/preview/#{file_hierarchy}/#{md5}.jpg"
+ else
+ CONFIG["url_base"] + "/download-preview.png"
+ end
+ else
+ if status == "deleted"
+ CONFIG["url_base"] + "/deleted-preview.png"
+ elsif image?
+ Mirrors.select_main_image_server + "/data/preview/#{file_hierarchy}/#{md5}.jpg"
+ else
+ CONFIG["url_base"] + "/download-preview.png"
+ end
+ end
+ end
+
+ def jpeg_path
+ "#{RAILS_ROOT}/public/data/jpeg/#{file_hierarchy}/#{md5}.jpg"
+ end
+
+ def store_jpeg_url
+ if CONFIG["use_pretty_image_urls"] then
+        path = "/jpeg/#{md5}/#{url_encode(pretty_file_name({:type => :jpeg}))}.jpg"
+      else
+        path = "/data/jpeg/#{file_hierarchy}/#{md5}.jpg"
+ end
+
+ return select_random_image_server + path
+ end
+
+ def store_sample_url
+ if CONFIG["use_pretty_image_urls"] then
+ path = "/sample/#{md5}/#{url_encode(pretty_file_name({:type => :sample}))}.jpg"
+ else
+ path = "/data/sample/#{file_hierarchy}/" + CONFIG["sample_filename_prefix"] + "#{md5}.jpg"
+ end
+
+ return select_random_image_server + path
+ end
+
+ def delete_file
+ FileUtils.rm_f(file_path)
+ FileUtils.rm_f(preview_path) if image?
+ FileUtils.rm_f(sample_path) if image?
+ FileUtils.rm_f(jpeg_path) if image?
+ end
+
+ def move_file
+ FileUtils.mkdir_p(File.dirname(file_path), :mode => 0775)
+ FileUtils.mv(tempfile_path, file_path)
+ FileUtils.chmod(0664, file_path)
+
+ if image?
+ FileUtils.mkdir_p(File.dirname(preview_path), :mode => 0775)
+ FileUtils.mv(tempfile_preview_path, preview_path)
+ FileUtils.chmod(0664, preview_path)
+ end
+
+ if File.exists?(tempfile_sample_path)
+ FileUtils.mkdir_p(File.dirname(sample_path), :mode => 0775)
+ FileUtils.mv(tempfile_sample_path, sample_path)
+ FileUtils.chmod(0664, sample_path)
+ end
+
+ if File.exists?(tempfile_jpeg_path)
+ FileUtils.mkdir_p(File.dirname(jpeg_path), :mode => 0775)
+ FileUtils.mv(tempfile_jpeg_path, jpeg_path)
+ FileUtils.chmod(0664, jpeg_path)
+ end
+
+ delete_tempfile
+ end
+ end
+end
diff --git a/app/models/post/image_store_methods.rb b/app/models/post/image_store_methods.rb
new file mode 100644
index 00000000..37d7fd4c
--- /dev/null
+++ b/app/models/post/image_store_methods.rb
@@ -0,0 +1,20 @@
+module PostImageStoreMethods
+ def self.included(m)
+ case CONFIG["image_store"]
+ when :local_flat
+ m.__send__(:include, PostImageStoreMethods::LocalFlat)
+
+ when :local_flat_with_amazon_s3_backup
+ m.__send__(:include, PostImageStoreMethods::LocalFlatWithAmazonS3Backup)
+
+ when :local_hierarchy
+ m.__send__(:include, PostImageStoreMethods::LocalHierarchy)
+
+ when :remote_hierarchy
+ m.__send__(:include, PostImageStoreMethods::RemoteHierarchy)
+
+ when :amazon_s3
+ m.__send__(:include, PostImageStoreMethods::AmazonS3)
+ end
+ end
+end
diff --git a/app/models/post/mirror_methods.rb b/app/models/post/mirror_methods.rb
new file mode 100644
index 00000000..9b6b01b8
--- /dev/null
+++ b/app/models/post/mirror_methods.rb
@@ -0,0 +1,49 @@
+require "mirror"
+
+class MirrorError < Exception ; end
+
+module PostMirrorMethods
+ def upload_to_mirrors
+ return if is_warehoused
+ return if self.status == "deleted"
+
+ files_to_copy = [self.file_path]
+ files_to_copy << self.preview_path if self.image?
+ files_to_copy << self.sample_path if self.has_sample?
+ files_to_copy << self.jpeg_path if self.has_jpeg?
+ files_to_copy = files_to_copy.uniq
+
+    # Each mirror's :data_dir is the remote equivalent of our local_base.
+ local_base = "#{RAILS_ROOT}/public/data/"
+
+ CONFIG["mirrors"].each { |mirror|
+ remote_user_host = "#{mirror[:user]}@#{mirror[:host]}"
+ remote_dirs = []
+ files_to_copy.each { |file|
+ remote_filename = file[local_base.length, file.length]
+        remote_dir = File.dirname(remote_filename)
+        remote_dirs << mirror[:data_dir] + "/" + remote_dir
+ }
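+
+      # e.g. with the hierarchy image store, a local ".../public/data/preview/d4/1d/<md5>.jpg"
+      # contributes "<mirror data_dir>/preview/d4/1d" to remote_dirs.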
+
+ # Create all directories in one go.
+ system("/usr/bin/ssh", "-o", "Compression=no", "-o", "BatchMode=yes",
+ remote_user_host, "mkdir -p #{remote_dirs.uniq.join(" ")}")
+ }
+
+ begin
+ files_to_copy.each { |file|
+ Mirrors.copy_file_to_mirrors(file)
+ }
+ rescue MirrorError => e
+ # The post might be deleted while it's uploading. Check the post status after
+ # an error.
+ self.reload
+ raise if self.status != "deleted"
+ end
+
+ # This might take a while. Rather than hold a transaction, just reload the post
+ # after uploading.
+ self.reload
+ self.update_attributes(:is_warehoused => true)
+ end
+end
diff --git a/app/models/post/parent_methods.rb b/app/models/post/parent_methods.rb
new file mode 100644
index 00000000..2e819d22
--- /dev/null
+++ b/app/models/post/parent_methods.rb
@@ -0,0 +1,70 @@
+module PostParentMethods
+ module ClassMethods
+ def update_has_children(post_id)
+ has_children = Post.exists?(["parent_id = ? AND status <> 'deleted'", post_id])
+ execute_sql("UPDATE posts SET has_children = ? WHERE id = ?", has_children, post_id)
+ end
+
+ def recalculate_has_children
+ transaction do
+ execute_sql("UPDATE posts SET has_children = false WHERE has_children = true")
+ execute_sql("UPDATE posts SET has_children = true WHERE id IN (SELECT parent_id FROM posts WHERE parent_id IS NOT NULL AND status <> 'deleted')")
+ end
+ end
+
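+ # Reassigns a post's parent and refreshes has_children on both the old and the
+ # new parent. A minimal sketch with hypothetical ids:
+ #   Post.set_parent(101, 100)  # post 101 becomes a child of post 100
+ #   Post.set_parent(101, nil)  # detach post 101 from its parent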
+ def set_parent(post_id, parent_id, old_parent_id = nil)
+ if old_parent_id.nil?
+ old_parent_id = select_value_sql("SELECT parent_id FROM posts WHERE id = ?", post_id)
+ end
+
+ if parent_id.to_i == post_id.to_i || parent_id.to_i == 0
+ parent_id = nil
+ end
+
+ execute_sql("UPDATE posts SET parent_id = ? WHERE id = ?", parent_id, post_id)
+
+ update_has_children(old_parent_id)
+ update_has_children(parent_id)
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ m.after_save :update_parent
+ m.after_save :update_pool_children
+ m.validate :validate_parent
+ m.after_delete :give_favorites_to_parent
+ m.versioned :parent_id, :default => nil
+ end
+
+ def validate_parent
+ errors.add("parent_id") unless parent_id.nil? or Post.exists?(parent_id)
+ end
+
+ def update_parent
+ return if !parent_id_changed? && !status_changed?
+ self.class.set_parent(id, parent_id, parent_id_was)
+ end
+
+ def update_pool_children
+ # If the parent didn't change, we don't need to update any pool posts. (Don't use
+ # parent_id_changed?; we want to know if the id changed, not if it was just overwritten
+ # with the same value.)
+ return if self.parent_id == self.parent_id_was
+
+ # Give PoolPost a chance to update parenting when post parents change.
+ PoolPost.post_parent_changed(self)
+ end
+
+ def give_favorites_to_parent
+ return if parent_id.nil?
+ parent = Post.find(parent_id)
+
+ transaction do
+ for vote in PostVotes.find(:all, :conditions => ["post_id = ?", self.id], :include => :user)
+ parent.vote!(vote.score, vote.user, nil)
+ self.vote!(0, vote.user, nil)
+ end
+ end
+ end
+end
diff --git a/app/models/post/rating_methods.rb b/app/models/post/rating_methods.rb
new file mode 100644
index 00000000..c32adaa8
--- /dev/null
+++ b/app/models/post/rating_methods.rb
@@ -0,0 +1,55 @@
+module PostRatingMethods
+ attr_accessor :old_rating
+
+ def self.included(m)
+ m.versioned :rating
+ m.versioned :is_rating_locked, :default => false
+ m.versioned :is_note_locked, :default => false
+ end
+
+ def rating=(r)
+ if r == nil && !new_record?
+ return
+ end
+
+ if is_rating_locked?
+ return
+ end
+
+ r = r.to_s.downcase[0, 1]
+
+ if %w(q e s).include?(r)
+ new_rating = r
+ else
+ new_rating = 'q'
+ end
+
+ return if rating == new_rating
+ self.old_rating = rating
+ write_attribute(:rating, new_rating)
+ touch_change_seq!
+ end
+
+ def pretty_rating
+ case rating
+ when "q"
+ return "Questionable"
+
+ when "e"
+ return "Explicit"
+
+ when "s"
+ return "Safe"
+ end
+ end
+
+ def can_change_is_note_locked?(user)
+ return user.has_permission?(self)
+ end
+
+ def can_change_rating_locked?(user)
+ return user.has_permission?(self)
+ end
+
+ def can_change_rating?(user)
+ return user.is_member_or_higher? && (!is_rating_locked? || user.has_permission?(self))
+ end
+end
diff --git a/app/models/post/sql_methods.rb b/app/models/post/sql_methods.rb
new file mode 100644
index 00000000..5633ef7d
--- /dev/null
+++ b/app/models/post/sql_methods.rb
@@ -0,0 +1,336 @@
+module PostSqlMethods
+ module ClassMethods
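+ # arr is a range tuple produced by Tag.parse_helper (e.g. [:gte, 5] or
+ # [:between, 10, 20]); the matching SQL fragment is appended to c and its bind
+ # values to p, so [:between, 10, 20] on "p.score" yields
+ # "p.score BETWEEN ? AND ?" with params 10 and 20.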
+ def generate_sql_range_helper(arr, field, c, p)
+ case arr[0]
+ when :eq
+ c << "#{field} = ?"
+ p << arr[1]
+
+ when :gt
+ c << "#{field} > ?"
+ p << arr[1]
+
+ when :gte
+ c << "#{field} >= ?"
+ p << arr[1]
+
+ when :lt
+ c << "#{field} < ?"
+ p << arr[1]
+
+ when :lte
+ c << "#{field} <= ?"
+ p << arr[1]
+
+ when :between
+ c << "#{field} BETWEEN ? AND ?"
+ p << arr[1]
+ p << arr[2]
+
+ else
+ # do nothing
+ end
+ end
+
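+ # Builds the post-search SQL for a tag query; q is either a raw query string
+ # or a hash already produced by Tag.parse_query. A hedged usage sketch
+ # (tag names hypothetical):
+ #   Post.find_by_sql(Post.generate_sql("touhou rating:s order:score", :limit => 20))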
+ def generate_sql(q, options = {})
+ if q.is_a?(Hash)
+ original_query = options[:original_query]
+ else
+ original_query = q
+ q = Tag.parse_query(q)
+ end
+
+ conds = ["true"]
+ joins = ["posts p"]
+ join_params = []
+ cond_params = []
+
+ if q.has_key?(:error)
+ conds << "FALSE"
+ end
+
+ generate_sql_range_helper(q[:post_id], "p.id", conds, cond_params)
+ generate_sql_range_helper(q[:mpixels], "p.width*p.height/1000000.0", conds, cond_params)
+ generate_sql_range_helper(q[:width], "p.width", conds, cond_params)
+ generate_sql_range_helper(q[:height], "p.height", conds, cond_params)
+ generate_sql_range_helper(q[:score], "p.score", conds, cond_params)
+ generate_sql_range_helper(q[:date], "p.created_at::date", conds, cond_params)
+ generate_sql_range_helper(q[:change], "p.change_seq", conds, cond_params)
+
+ if q[:md5].is_a?(String)
+ conds << "p.md5 IN (?)"
+ cond_params << q[:md5].split(/,/)
+ end
+
+ if q[:deleted_only] == true
+ conds << "p.status = 'deleted'"
+ else
+ conds << "p.status <> 'deleted'"
+ end
+
+ if q.has_key?(:parent_id) && q[:parent_id].is_a?(Integer)
+ conds << "(p.parent_id = ? or p.id = ?)"
+ cond_params << q[:parent_id]
+ cond_params << q[:parent_id]
+ elsif q.has_key?(:parent_id) && q[:parent_id] == false
+ conds << "p.parent_id is null"
+ end
+
+ if q[:source].is_a?(String)
+ conds << "p.source LIKE ? ESCAPE E'\\\\'"
+ cond_params << q[:source]
+ end
+
+ if q[:favtag].is_a?(String)
+ user = User.find_by_name(q[:favtag])
+
+ if user
+ post_ids = FavoriteTag.find_post_ids(user.id)
+ conds << "p.id IN (?)"
+ cond_params << post_ids
+ end
+ end
+
+ if q[:fav].is_a?(String)
+ joins << "JOIN favorites f ON f.post_id = p.id JOIN users fu ON f.user_id = fu.id"
+ conds << "lower(fu.name) = lower(?)"
+ cond_params << q[:fav]
+ end
+
+ if q.has_key?(:vote_negated)
+ joins << "LEFT JOIN post_votes v ON p.id = v.post_id AND v.user_id = ?"
+ join_params << q[:vote_negated]
+ conds << "v.score IS NULL"
+ end
+
+ if q.has_key?(:vote)
+ joins << "JOIN post_votes v ON p.id = v.post_id"
+ conds << "v.user_id = ?"
+ cond_params << q[:vote][1]
+
+ generate_sql_range_helper(q[:vote][0], "v.score", conds, cond_params)
+ end
+
+ if q[:user].is_a?(String)
+ joins << "JOIN users u ON p.user_id = u.id"
+ conds << "lower(u.name) = lower(?)"
+ cond_params << q[:user]
+ end
+
+ if q.has_key?(:exclude_pools)
+ q[:exclude_pools].each_index do |i|
+ if q[:exclude_pools][i].is_a?(Integer)
+ joins << "LEFT JOIN pools_posts ep#{i} ON (ep#{i}.post_id = p.id AND ep#{i}.pool_id = ?)"
+ join_params << q[:exclude_pools][i]
+ conds << "ep#{i} IS NULL"
+ end
+
+ if q[:exclude_pools][i].is_a?(String)
+ joins << "LEFT JOIN pools_posts ep#{i} ON ep#{i}.post_id = p.id LEFT JOIN pools epp#{i} ON (ep#{i}.pool_id = epp#{i}.id AND epp#{i}.name ILIKE ? ESCAPE E'\\\\')"
+ join_params << ("%" + q[:exclude_pools][i].to_escaped_for_sql_like + "%")
+ conds << "ep#{i} IS NULL"
+ end
+ end
+ end
+
+ if q.has_key?(:pool)
+ if q.has_key?(:pool_posts) && q[:pool_posts] == "all"
+ conds << "(pools_posts.active OR pools_posts.master_id IS NOT NULL)"
+ elsif q.has_key?(:pool_posts) && q[:pool_posts] == "orig"
+ conds << "pools_posts.active = true"
+ else
+ conds << "((pools_posts.active = true AND pools_posts.slave_id IS NULL) OR pools_posts.master_id IS NOT NULL)"
+ end
+
+ if not q.has_key?(:order)
+ pool_ordering = " ORDER BY pools_posts.pool_id ASC, nat_sort(pools_posts.sequence), pools_posts.post_id"
+ end
+
+ if q[:pool].is_a?(Integer)
+ joins << "JOIN pools_posts ON pools_posts.post_id = p.id JOIN pools ON pools_posts.pool_id = pools.id"
+ conds << "pools.id = ?"
+ cond_params << q[:pool]
+ end
+
+ if q[:pool].is_a?(String)
+ joins << "JOIN pools_posts ON pools_posts.post_id = p.id JOIN pools ON pools_posts.pool_id = pools.id"
+ conds << "pools.name ILIKE ? ESCAPE E'\\\\'"
+ cond_params << ("%" + q[:pool].to_escaped_for_sql_like + "%")
+ end
+ end
+
+ if q.has_key?(:include)
+ joins << "JOIN posts_tags ipt ON ipt.post_id = p.id"
+ conds << "ipt.tag_id IN (SELECT id FROM tags WHERE name IN (?))"
+ cond_params << (q[:include] + q[:related])
+ elsif q[:related].any?
+ raise "You cannot search for more than #{CONFIG['tag_query_limit']} tags at a time" if q[:related].size > CONFIG["tag_query_limit"]
+
+ q[:related].each_with_index do |rtag, i|
+ joins << "JOIN posts_tags rpt#{i} ON rpt#{i}.post_id = p.id AND rpt#{i}.tag_id = (SELECT id FROM tags WHERE name = ?)"
+ join_params << rtag
+ end
+ end
+
+ if q[:exclude].any?
+ raise "You cannot search for more than #{CONFIG['tag_query_limit']} tags at a time" if q[:exclude].size > CONFIG["tag_query_limit"]
+ q[:exclude].each_with_index do |etag, i|
+ joins << "LEFT JOIN posts_tags ept#{i} ON p.id = ept#{i}.post_id AND ept#{i}.tag_id = (SELECT id FROM tags WHERE name = ?)"
+ conds << "ept#{i}.tag_id IS NULL"
+ join_params << etag
+ end
+ end
+
+ if q[:rating].is_a?(String)
+ case q[:rating][0, 1].downcase
+ when "s"
+ conds << "p.rating = 's'"
+
+ when "q"
+ conds << "p.rating = 'q'"
+
+ when "e"
+ conds << "p.rating = 'e'"
+ end
+ end
+
+ if q[:rating_negated].is_a?(String)
+ case q[:rating_negated][0, 1].downcase
+ when "s"
+ conds << "p.rating <> 's'"
+
+ when "q"
+ conds << "p.rating <> 'q'"
+
+ when "e"
+ conds << "p.rating <> 'e'"
+ end
+ end
+
+ if q[:unlocked_rating] == true
+ conds << "p.is_rating_locked = FALSE"
+ end
+
+ if options[:pending]
+ conds << "p.status = 'pending'"
+ end
+
+ if options[:flagged]
+ conds << "p.status = 'flagged'"
+ end
+
+ if q.has_key?(:show_holds_only)
+ if q[:show_holds_only]
+ conds << "p.is_held"
+ end
+ else
+ # Hide held posts by default only when not using the API.
+ if not options[:from_api] then
+ conds << "NOT p.is_held"
+ end
+ end
+
+ if q.has_key?(:shown_in_index)
+ if q[:shown_in_index]
+ conds << "p.is_shown_in_index"
+ else
+ conds << "NOT p.is_shown_in_index"
+ end
+ elsif original_query.blank? and not options[:from_api]
+ # Hide not shown posts by default only when not using the API.
+ conds << "p.is_shown_in_index"
+ end
+
+ sql = "SELECT "
+
+ if options[:count]
+ sql << "COUNT(*)"
+ elsif options[:select]
+ sql << options[:select]
+ else
+ sql << "p.*"
+ end
+
+ sql << " FROM " + joins.join(" ")
+ sql << " WHERE " + conds.join(" AND ")
+
+ if q[:order] && !options[:count]
+ case q[:order]
+ when "id"
+ sql << " ORDER BY p.id"
+
+ when "id_desc"
+ sql << " ORDER BY p.id DESC"
+
+ when "score"
+ sql << " ORDER BY p.score DESC"
+
+ when "score_asc"
+ sql << " ORDER BY p.score"
+
+ when "mpixels"
+ # Use "w*h/1000000", even though "w*h" would give the same result, so this can use
+ # the posts_mpixels index.
+ sql << " ORDER BY width*height/1000000.0 DESC"
+
+ when "mpixels_asc"
+ sql << " ORDER BY width*height/1000000.0"
+
+ when "portrait"
+ sql << " ORDER BY 1.0*width/GREATEST(1, height)"
+
+ when "landscape"
+ sql << " ORDER BY 1.0*width/GREATEST(1, height) DESC"
+
+ when "change", "change_asc"
+ sql << " ORDER BY change_seq"
+
+ when "change_desc"
+ sql << " ORDER BY change_seq DESC"
+
+ when "vote"
+ if q.has_key?(:vote)
+ sql << " ORDER BY v.updated_at DESC"
+ end
+
+ when "fav"
+ if q[:fav].is_a?(String)
+ sql << " ORDER BY f.id DESC"
+ end
+
+ when "random"
+ sql << " ORDER BY random"
+
+ else
+ if pool_ordering
+ sql << pool_ordering
+ else
+ if options[:from_api] then
+ # When using the API, default to sorting by ID.
+ sql << " ORDER BY p.id DESC"
+ else
+ sql << " ORDER BY p.index_timestamp DESC"
+ end
+ end
+ end
+ elsif options[:order]
+ sql << " ORDER BY " + options[:order]
+ end
+
+ if options[:limit]
+ sql << " LIMIT " + options[:limit].to_s
+ end
+
+ if options[:offset]
+ sql << " OFFSET " + options[:offset].to_s
+ end
+
+ params = join_params + cond_params
+ return Post.sanitize_sql([sql, *params])
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ end
+end
diff --git a/app/models/post/status_methods.rb b/app/models/post/status_methods.rb
new file mode 100644
index 00000000..320dcde3
--- /dev/null
+++ b/app/models/post/status_methods.rb
@@ -0,0 +1,115 @@
+module PostStatusMethods
+ def status=(s)
+ return if s == status
+ write_attribute(:status, s)
+ touch_change_seq!
+ end
+
+ def reset_index_timestamp
+ self.index_timestamp = self.created_at
+ end
+
+ # Bump the post to the front of the index.
+ def touch_index_timestamp
+ self.index_timestamp = Time.now
+ end
+
+ module ClassMethods
+ # If user_id is nil, allow activating any user's posts.
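+ #
+ # Usage sketch (ids hypothetical): Post.batch_activate(user.id, [101, 102, 103])
+ # releases those held posts, bumps them into the index in posting order, and
+ # returns how many were actually activated.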
+ def batch_activate(user_id, post_ids)
+ conds = []
+ cond_params = []
+
+ conds << "is_held = true"
+ conds << "id IN (?)"
+ cond_params << post_ids
+
+ if user_id
+ conds << "user_id = ?"
+ cond_params << user_id
+ end
+
+ # Tricky: we want posts to show up in the index in the same order they were posted.
+ # If we just bump the posts, the index_timestamps will all be the same, and they'll
+ # show up in an undefined order. We don't want to do this in the ORDER BY when
+ # searching, because that's too expensive. Instead, tweak the timestamps slightly:
+ # for each post updated, set the index_timestamps 1ms newer than the previous.
+ #
+ # Returns the number of posts actually activated.
+ count = nil
+ transaction do
+ # result_id gives us an index for each result row; multiplying this by 1ms
+ # gives us an increasing counter. There should be an easier way to do this.
+ sql = <<-EOS
+ CREATE TEMP SEQUENCE result_id;
+
+ UPDATE posts
+ SET index_timestamp = now() + (interval '1 ms' * idx)
+ FROM
+ (SELECT nextval('result_id') AS idx, * FROM (
+ SELECT id, index_timestamp FROM posts
+ WHERE #{conds.join(" AND ")}
+ ORDER BY created_at DESC
+ ) AS n) AS nn
+ WHERE posts.id IN (nn.id);
+
+ DROP SEQUENCE result_id;
+ EOS
+ execute_sql(sql, *cond_params)
+
+ count = select_value_sql("SELECT COUNT(*) FROM posts WHERE #{conds.join(" AND ")}", *cond_params).to_i
+
+ sql = "UPDATE posts SET is_held = false WHERE #{conds.join(" AND ")}"
+ execute_sql(sql, *cond_params)
+ end
+
+ Cache.expire if count > 0
+
+ return count
+ end
+ end
+
+ def update_status_on_destroy
+ # Can't use update_attributes here since this method is wrapped inside of a destroy call
+ execute_sql("UPDATE posts SET status = ? WHERE id = ?", "deleted", id)
+ Post.update_has_children(parent_id) if parent_id
+ flag_detail.update_attributes(:is_resolved => true) if flag_detail
+ return false
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ m.before_create :reset_index_timestamp
+ m.versioned :is_shown_in_index, :default => true
+ end
+
+ def is_held=(hold)
+ # Hack because the data comes in as a string:
+ hold = false if hold == "false"
+
+ user = Thread.current["danbooru-user"]
+
+ # Only the original poster can hold or unhold a post.
+ return if user && !user.has_permission?(self)
+
+ if hold
+ # A post can only be held within one minute of posting (except by a moderator);
+ # this is intended to be used on initial posting, before it shows up in the index.
+ return if self.created_at && self.created_at < 1.minute.ago
+ end
+
+ was_held = self.is_held
+
+ write_attribute(:is_held, hold)
+
+ # When a post is unheld, bump it.
+ if was_held && !hold
+ touch_index_timestamp
+ end
+ end
+
+ def undelete!
+ execute_sql("UPDATE posts SET status = ? WHERE id = ?", "active", id)
+ Post.update_has_children(parent_id) if parent_id
+ end
+end
diff --git a/app/models/post/t b/app/models/post/t
new file mode 100644
index 00000000..f4825cda
--- /dev/null
+++ b/app/models/post/t
@@ -0,0 +1,62 @@
+Index: image_store/remote_hierarchy.rb
+===================================================================
+--- image_store/remote_hierarchy.rb (revision 755)
++++ image_store/remote_hierarchy.rb (working copy)
+@@ -8,9 +8,14 @@
+ end
+
+ def select_random_image_server
++ if not self.is_warehoused?
++ # return CONFIG["url_base"]
++ return CONFIG["image_servers"][0]
++ end
++
+ # age = Time.now - self.created_at
+ i = 0
+-# if age > (60*60*3) then
++# if age > (60*60*24) then
+ # i = 2 # Ascaroth
+ # elsif age > (60*60*24)*2 then
+ # i = 1 # saki
+@@ -28,17 +33,9 @@
+
+ def file_url
+ if CONFIG["use_pretty_image_urls"] then
+- if self.is_warehoused?
+- select_random_image_server() + "/image/#{md5}/#{url_encode(pretty_file_name)}.#{file_ext}"
+- else
+- CONFIG["url_base"] + "/image/#{md5}/#{url_encode(pretty_file_name)}.#{file_ext}"
+- end
++ select_random_image_server + "/image/#{md5}/#{url_encode(pretty_file_name)}.#{file_ext}"
+ else
+- if self.is_warehoused?
+- select_random_image_server() + "/data/#{file_hierarchy}/#{file_name}"
+- else
+- CONFIG["url_base"] + "/data/#{file_hierarchy}/#{file_name}"
+- end
++ select_random_image_server + "/data/#{file_hierarchy}/#{file_name}"
+ end
+ end
+
+@@ -68,7 +65,7 @@
+ if status == "deleted"
+ CONFIG["url_base"] + "/deleted-preview.png"
+ elsif image?
+- CONFIG["url_base"] + "/data/preview/#{file_hierarchy}/#{md5}.jpg"
++ CONFIG["image_servers"][0] + "/data/preview/#{file_hierarchy}/#{md5}.jpg"
+ else
+ CONFIG["url_base"] + "/download-preview.png"
+ end
+@@ -82,11 +79,7 @@
+ path = "/data/sample/#{file_hierarchy}/" + CONFIG["sample_filename_prefix"] + "#{md5}.jpg"
+ end
+
+- if self.is_warehoused?
+- return select_random_image_server() + path
+- else
+- return CONFIG["url_base"] + path
+- end
++ return select_random_image_server + path
+ end
+
+ def delete_file
diff --git a/app/models/post/tag_methods.rb b/app/models/post/tag_methods.rb
new file mode 100644
index 00000000..980509ad
--- /dev/null
+++ b/app/models/post/tag_methods.rb
@@ -0,0 +1,246 @@
+module PostTagMethods
+ attr_accessor :tags, :new_tags, :old_tags, :old_cached_tags
+
+ module ClassMethods
+ def find_by_tags(tags, options = {})
+ return find_by_sql(Post.generate_sql(tags, options))
+ end
+
+ def recalculate_cached_tags(id = nil)
+ conds = []
+ cond_params = []
+
+ sql = %{
+ UPDATE posts p SET cached_tags = (
+ SELECT array_to_string(coalesce(array(
+ SELECT t.name
+ FROM tags t, posts_tags pt
+ WHERE t.id = pt.tag_id AND pt.post_id = p.id
+ ORDER BY t.name
+ ), '{}'::text[]), ' ')
+ )
+ }
+
+ if id
+ conds << "WHERE p.id = ?"
+ cond_params << id
+ end
+
+ sql = [sql, conds].join(" ")
+ execute_sql sql, *cond_params
+ end
+
+ # new, previous and latest are History objects for cached_tags. Split
+ # the tag changes apart.
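+ #
+ # For example, if previous held "cat dog" and new holds "cat fox" (and the
+ # latest version still has "fox" but not "dog"), the result is
+ # :added_tags => ["fox"], :removed_tags => ["dog"], :unchanged_tags => ["cat"],
+ # with both obsolete lists empty.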
+ def tag_changes(new, previous, latest)
+ new_tags = new.value.scan(/\S+/)
+ old_tags = (previous.value rescue "").scan(/\S+/)
+ latest_tags = latest.value.scan(/\S+/)
+
+ {
+ :added_tags => new_tags - old_tags,
+ :removed_tags => old_tags - new_tags,
+ :unchanged_tags => new_tags & old_tags,
+ :obsolete_added_tags => (new_tags - old_tags) - latest_tags,
+ :obsolete_removed_tags => (old_tags - new_tags) & latest_tags,
+ }
+ end
+ end
+
+ def self.included(m)
+ m.extend ClassMethods
+ m.before_save :commit_metatags
+ m.after_save :commit_tags
+ m.after_save :save_post_history
+ m.has_many :tag_history, :class_name => "PostTagHistory", :table_name => "post_tag_histories", :order => "id desc"
+ m.versioned :source, :default => ""
+ m.versioned :cached_tags
+ end
+
+ def cached_tags_undo(change, redo_changes=false)
+ current_tags = self.cached_tags.scan(/\S+/)
+ prev = change.previous
+
+ change, prev = prev, change if redo_changes
+ changes = Post.tag_changes(change, prev, change.latest)
+ new_tags = (current_tags - changes[:added_tags]) | changes[:removed_tags]
+ self.attributes = {:tags => new_tags.join(" ")}
+ end
+
+ def cached_tags_redo(change)
+ cached_tags_undo(change, true)
+ end
+
+ # === Parameters
+ # * :tag:: the tag to search for
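+ #
+ # Matches only whole, space-delimited tags: has_tag?("long_hair") is true for
+ # cached_tags of "blonde long_hair solo" but false for "very_long_hair"
+ # (tag names hypothetical).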
+ def has_tag?(tag)
+ return cached_tags =~ /(^|\s)#{tag}($|\s)/
+ end
+
+ # Returns the tags in a URL suitable string
+ def tag_title
+ return title_tags.gsub(/\W+/, "-")[0, 50]
+ end
+
+ # Return the tags we display in URLs, page titles, etc.
+ def title_tags
+ ret = ""
+ ret << "hentai " if self.rating == "e"
+ ret << cached_tags
+ ret
+ end
+
+ def tags
+ cached_tags
+ end
+
+ # Sets the tags for the post. Does not actually save anything to the database when called.
+ #
+ # === Parameters
+ # * :tags:: a whitespace delimited list of tags
+ def tags=(tags)
+ self.new_tags = Tag.scan_tags(tags)
+
+ current_tags = cached_tags.scan(/\S+/)
+ self.touch_change_seq! if new_tags != current_tags
+ end
+
+ # Returns all versioned tags and metatags.
+ def cached_tags_versioned
+ ["rating:" + self.rating, cached_tags].map.join(" ")
+ end
+
+ # Commit metatags; this is done before save, so any changes are stored normally.
+ def commit_metatags
+ return if new_tags.nil?
+
+ transaction do
+ metatags, self.new_tags = new_tags.partition {|x| x=~ /^(hold|unhold|show|hide|\+flag)$/}
+ metatags.each do |metatag|
+ case metatag
+ when /^hold$/
+ self.is_held = true
+
+ when /^unhold$/
+ self.is_held = false
+
+ when /^show$/
+ self.is_shown_in_index = true
+
+ when /^hide$/
+ self.is_shown_in_index = false
+
+ when /^\+flag$/
+ # Permissions for this are checked on commit.
+ self.metatag_flagged = "moderator flagged"
+ end
+ end
+ end
+ end
+
+ # Commit any tag changes to the database. This is done after save, so any changes
+ # must be made directly to the database.
+ def commit_tags
+ return if new_tags.nil?
+
+ if old_tags
+ # If someone else committed changes to this post before we did,
+ # then try to merge the tag changes together.
+ current_tags = cached_tags.scan(/\S+/)
+ self.old_tags = Tag.scan_tags(old_tags)
+ self.new_tags = (current_tags + new_tags) - old_tags + (current_tags & new_tags)
+ end
+
+ metatags, self.new_tags = new_tags.partition {|x| x=~ /^(?:-pool|pool|rating|parent):/}
+
+ transaction do
+ metatags.each do |metatag|
+ case metatag
+ when /^pool:(.+)/
+ begin
+ name, seq = $1.split(":")
+
+ pool = Pool.find_by_name(name)
+ options = {:user => User.find(updater_user_id)}
+ if !seq.nil? then
+ options[:sequence] = seq
+ end
+
+ if pool.nil? and name !~ /^\d+$/
+ pool = Pool.create(:name => name, :is_public => false, :user_id => updater_user_id)
+ end
+
+ next if Thread.current["danbooru-user"] && !pool.can_change?(Thread.current["danbooru-user"], nil)
+ pool.add_post(id, options) if pool
+ rescue Pool::PostAlreadyExistsError
+ rescue Pool::AccessDeniedError
+ end
+
+
+ when /^-pool:(.+)/
+ name = $1
+ pool = Pool.find_by_name(name)
+ next if Thread.current["danbooru-user"] && !pool.can_change?(Thread.current["danbooru-user"], nil)
+
+ pool.remove_post(id) if pool
+
+ when /^rating:([qse])/
+ self.rating = $1 # so we don't have to reload for history_tag_string below
+ execute_sql("UPDATE posts SET rating = ? WHERE id = ?", $1, id)
+
+
+ when /^parent:(\d*)/
+ self.parent_id = $1
+
+ if CONFIG["enable_parent_posts"] && (Post.exists?(parent_id) or parent_id == 0)
+ Post.set_parent(id, parent_id)
+ end
+ end
+ end
+
+ self.new_tags << "tagme" if new_tags.empty?
+ self.new_tags = TagAlias.to_aliased(new_tags)
+ self.new_tags = TagImplication.with_implied(new_tags).uniq
+
+ # TODO: be more selective in deleting from the join table
+ execute_sql("DELETE FROM posts_tags WHERE post_id = ?", id)
+ self.new_tags = new_tags.map {|x| Tag.find_or_create_by_name(x)}.uniq
+
+ # Tricky: Postgresql's locking won't serialize this DELETE/INSERT, so it's
+ # possible for two simultaneous updates to both delete all tags, then insert
+ # them, duplicating them all.
+ #
+ # Work around this by selecting the existing tags within the INSERT and removing
+ # any that already exist. Normally, the inner SELECT will return no rows; if
+ # another process inserts rows before our INSERT, it'll return the rows that it
+ # inserted and we'll avoid duplicating them.
+ tag_set = new_tags.map {|x| ("(#{id}, #{x.id})")}.join(", ")
+ #execute_sql("INSERT INTO posts_tags (post_id, tag_id) VALUES " + tag_set)
+ sql = <<-EOS
+ INSERT INTO posts_tags (post_id, tag_id)
+ SELECT t.post_id, t.tag_id
+ FROM (VALUES #{tag_set}) AS t(post_id, tag_id)
+ WHERE t.tag_id NOT IN (SELECT tag_id FROM posts_tags pt WHERE pt.post_id = #{self.id})
+ EOS
+
+ execute_sql(sql)
+
+ Post.recalculate_cached_tags(self.id)
+
+ # Store the old cached_tags, so we can expire them.
+ self.old_cached_tags = self.cached_tags
+ self.cached_tags = select_value_sql("SELECT cached_tags FROM posts WHERE id = #{id}")
+
+ self.new_tags = nil
+ end
+ end
+
+ def save_post_history
+ new_cached_tags = cached_tags_versioned
+ if tag_history.empty? or tag_history.first.tags != new_cached_tags
+ PostTagHistory.create(:post_id => id, :tags => new_cached_tags,
+ :user_id => Thread.current["danbooru-user_id"],
+ :ip_addr => Thread.current["danbooru-ip_addr"] || "127.0.0.1")
+ end
+ end
+end
diff --git a/app/models/post/vote_methods.rb b/app/models/post/vote_methods.rb
new file mode 100644
index 00000000..4d5b7d72
--- /dev/null
+++ b/app/models/post/vote_methods.rb
@@ -0,0 +1,71 @@
+module PostVoteMethods
+ module ClassMethods
+ def recalculate_score(id=nil)
+ conds = []
+ cond_params = []
+
+ sql = "UPDATE posts AS p SET score = " +
+ "(SELECT COALESCE(SUM(GREATEST(?, LEAST(?, score))), 0) FROM post_votes v WHERE v.post_id = p.id) " +
+ "+ p.anonymous_votes"
+ cond_params << CONFIG["vote_sum_min"]
+ cond_params << CONFIG["vote_sum_max"]
+
+ if id
+ conds << "WHERE p.id = ?"
+ cond_params << id
+ end
+
+ sql = [sql, conds].join(" ")
+ execute_sql sql, *cond_params
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ end
+
+ def recalculate_score!()
+ save!
+ Post.recalculate_score(self.id)
+ connection.clear_query_cache
+ reload
+ end
+
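+ # Records a vote and recalculates the post's score. A hedged usage sketch
+ # (caller names hypothetical): post.vote!(3, @current_user, request.remote_ip).
+ # Anonymous votes are clamped to 0..1 and repeat anonymous votes from the same
+ # IP as the last one are ignored; logged-in votes are clamped to
+ # CONFIG["vote_record_min"]..CONFIG["vote_record_max"].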
+ def vote!(score, user, ip_addr, options={})
+ score = CONFIG["vote_record_min"] if score < CONFIG["vote_record_min"]
+ score = CONFIG["vote_record_max"] if score > CONFIG["vote_record_max"]
+
+ if user.is_anonymous?
+ score = 0 if score < 0
+ score = 1 if score > 1
+
+ if last_voter_ip == ip_addr
+ return false
+ end
+
+ self.anonymous_votes += score
+ self.last_voter_ip = ip_addr
+ self.last_vote = score
+ else
+ vote = PostVotes.find_by_ids(user.id, self.id)
+
+ if ip_addr and last_voter_ip == ip_addr and not vote
+ # The user voted anonymously, then logged in and tried to vote again. A user
+ # may be browsing anonymously, decide to make an account, then once he has access
+ # to full voting, decide to set his permanent vote. Just undo the anonymous vote.
+ self.anonymous_votes -= self.last_vote
+ self.last_vote = 0
+ end
+
+ if not vote
+ vote = PostVotes.find_or_create_by_id(user.id, self.id)
+ end
+
+ vote.update_attributes(:score => score, :updated_at => Time.now)
+ end
+
+ recalculate_score!
+
+ return true
+ end
+end
diff --git a/app/models/post_tag_history.rb b/app/models/post_tag_history.rb
new file mode 100644
index 00000000..85b7ab06
--- /dev/null
+++ b/app/models/post_tag_history.rb
@@ -0,0 +1,102 @@
+class PostTagHistory < ActiveRecord::Base
+ belongs_to :user
+ belongs_to :post
+
+ def self.undo_user_changes(user_id)
+ posts = Post.find(:all, :joins => "join post_tag_histories pth on pth.post_id = posts.id", :select => "distinct posts.id", :conditions => ["pth.user_id = ?", user_id])
+ puts posts.size
+ p posts.map {|x| x.id}
+# destroy_all(["user_id = ?", user_id])
+# posts.each do |post|
+# post.tags = post.tag_history.first.tags
+# post.updater_user_id = post.tag_history.first.user_id
+# post.updater_ip_addr = post.tag_history.first.ip_addr
+# post.save!
+# end
+ end
+
+ def self.generate_sql(options = {})
+ Nagato::Builder.new do |builder, cond|
+ cond.add_unless_blank "post_tag_histories.post_id = ?", options[:post_id]
+ cond.add_unless_blank "post_tag_histories.user_id = ?", options[:user_id]
+
+ if options[:user_name]
+ builder.join "users ON users.id = post_tag_histories.user_id"
+ cond.add "users.name = ?", options[:user_name]
+ end
+ end.to_hash
+ end
+
+ def self.undo_changes_by_user(user_id)
+ transaction do
+ posts = Post.find(:all, :joins => "join post_tag_histories pth on pth.post_id = posts.id", :select => "distinct posts.*", :conditions => ["pth.user_id = ?", user_id])
+
+ PostTagHistory.destroy_all(["user_id = ?", user_id])
+ posts.each do |post|
+ first = post.tag_history.first
+ if first
+ post.tags = first.tags
+ post.updater_ip_addr = first.ip_addr
+ post.updater_user_id = first.user_id
+ post.save!
+ end
+ end
+ end
+ end
+
+ # The contents of options[:posts] must be saved by the caller. This allows
+ # undoing many tag changes across many posts; all changes to a particular
+ # post will be condensed into one change.
+ def undo(options={})
+ # TODO: refactor. modifying parameters is a bad habit.
+ options[:posts] ||= {}
+ options[:posts][post_id] ||= options[:post] = Post.find(post_id)
+ post = options[:posts][post_id]
+
+ current_tags = post.cached_tags.scan(/\S+/)
+
+ prev = previous
+ return if not prev
+
+ changes = tag_changes(prev)
+
+ new_tags = (current_tags - changes[:added_tags]) | changes[:removed_tags]
+ options[:update_options] ||= {}
+ post.attributes = {:tags => new_tags.join(" ")}.merge(options[:update_options])
+ end
+
+ def author
+ User.find_name(user_id)
+ end
+
+ def tag_changes(prev)
+ new_tags = tags.scan(/\S+/)
+ old_tags = (prev.tags rescue "").scan(/\S+/)
+ latest = Post.find(post_id).cached_tags_versioned
+ latest_tags = latest.scan(/\S+/)
+
+ {
+ :added_tags => new_tags - old_tags,
+ :removed_tags => old_tags - new_tags,
+ :unchanged_tags => new_tags & old_tags,
+ :obsolete_added_tags => (new_tags - old_tags) - latest_tags,
+ :obsolete_removed_tags => (old_tags - new_tags) & latest_tags,
+ }
+ end
+
+ def next
+ return PostTagHistory.find(:first, :order => "id ASC", :conditions => ["post_id = ? AND id > ?", post_id, id])
+ end
+
+ def previous
+ PostTagHistory.find(:first, :order => "id DESC", :conditions => ["post_id = ? AND id < ?", post_id, id])
+ end
+
+ def to_xml(options = {})
+ {:id => id, :post_id => post_id, :tags => tags}.to_xml(options.merge(:root => "tag_history"))
+ end
+
+ def to_json(*args)
+ {:id => id, :post_id => post_id, :tags => tags}.to_json(*args)
+ end
+end
diff --git a/app/models/post_votes.rb b/app/models/post_votes.rb
new file mode 100644
index 00000000..b7233812
--- /dev/null
+++ b/app/models/post_votes.rb
@@ -0,0 +1,18 @@
+class PostVotes < ActiveRecord::Base
+ belongs_to :post, :class_name => "Post", :foreign_key => :post_id
+ belongs_to :user, :class_name => "User", :foreign_key => :user_id
+
+ def self.find_by_ids(user_id, post_id)
+ self.find(:first, :conditions => ["user_id = ? AND post_id = ?", user_id, post_id])
+ end
+
+ def self.find_or_create_by_id(user_id, post_id)
+ entry = self.find_by_ids(user_id, post_id)
+
+ if entry
+ return entry
+ else
+ return create(:user_id => user_id, :post_id => post_id)
+ end
+ end
+end
diff --git a/app/models/report_mailer.rb b/app/models/report_mailer.rb
new file mode 100644
index 00000000..b751d9f5
--- /dev/null
+++ b/app/models/report_mailer.rb
@@ -0,0 +1,10 @@
+class ReportMailer < ActionMailer::Base
+ default_url_options["host"] = CONFIG["server_host"]
+
+ def moderator_report(email)
+ recipients email
+ from CONFIG["email_from"]
+ subject "#{CONFIG['app_name']} - Moderator Report"
+ content_type "text/html"
+ end
+end
diff --git a/app/models/server_key.rb b/app/models/server_key.rb
new file mode 100644
index 00000000..57b7956d
--- /dev/null
+++ b/app/models/server_key.rb
@@ -0,0 +1,9 @@
+class ServerKey < ActiveRecord::Base
+ def self.[](key)
+ begin
+ ActiveRecord::Base.connection.select_value("SELECT value FROM server_keys WHERE name = '#{key}'")
+ rescue Exception
+ nil
+ end
+ end
+end
diff --git a/app/models/tag.rb b/app/models/tag.rb
new file mode 100644
index 00000000..2d8e8121
--- /dev/null
+++ b/app/models/tag.rb
@@ -0,0 +1,111 @@
+Dir["#{RAILS_ROOT}/app/models/tag/**/*.rb"].each {|x| require_dependency x}
+
+class Tag < ActiveRecord::Base
+ include TagTypeMethods
+ include TagCacheMethods if CONFIG["enable_caching"]
+ include TagRelatedTagMethods
+ include TagParseMethods
+ include TagApiMethods
+
+ def self.count_by_period(start, stop, options = {})
+ options[:limit] ||= 50
+ options[:exclude_types] ||= []
+ sql = <<-SQL
+ SELECT
+ COUNT(pt.tag_id) AS post_count,
+ (SELECT name FROM tags WHERE id = pt.tag_id) AS name
+ FROM posts p, posts_tags pt, tags t
+ WHERE p.created_at BETWEEN ? AND ? AND
+ p.id = pt.post_id AND
+ pt.tag_id = t.id AND
+ t.tag_type IN (?)
+ GROUP BY pt.tag_id
+ ORDER BY post_count DESC
+ LIMIT ?
+ SQL
+
+ tag_types_to_show = Tag.tag_type_indexes - options[:exclude_types]
+ counts = select_all_sql(sql, start, stop, tag_types_to_show, options[:limit])
+ end
+
+ def self.tag_type_indexes
+ CONFIG["tag_types"].keys.select { |x| x =~ /^[A-Z]/ }.inject([]) { |all, x|
+ all << CONFIG["tag_types"][x]
+ }.sort
+ end
+
+ def pretty_name
+ name
+ end
+
+ def self.find_or_create_by_name(name)
+ name = name.downcase.tr(" ", "_").gsub(/^[-~]+/, "")
+
+ ambiguous = false
+ tag_type = nil
+
+ if name =~ /^ambiguous:(.+)/
+ ambiguous = true
+ name = $1
+ end
+
+ if name =~ /^(.+?):(.+)$/ && CONFIG["tag_types"][$1]
+ tag_type = CONFIG["tag_types"][$1]
+ name = $2
+ end
+
+ tag = find_by_name(name)
+
+ if tag
+ if tag_type
+ tag.update_attributes(:tag_type => tag_type)
+ end
+
+ if ambiguous
+ tag.update_attributes(:is_ambiguous => ambiguous)
+ end
+
+ return tag
+ else
+ create(:name => name, :tag_type => tag_type || CONFIG["tag_types"]["General"], :cached_related_expires_on => Time.now, :is_ambiguous => ambiguous)
+ end
+ end
+
+ def self.select_ambiguous(tags)
+ return [] if tags.blank?
+ return select_values_sql("SELECT name FROM tags WHERE name IN (?) AND is_ambiguous = TRUE ORDER BY name", tags)
+ end
+
+ def self.purge_tags
+ sql =
+ "DELETE FROM tags " +
+ "WHERE post_count = 0 AND " +
+ "id NOT IN (SELECT alias_id FROM tag_aliases UNION SELECT predicate_id FROM tag_implications UNION SELECT consequent_id FROM tag_implications)"
+ execute_sql sql
+ end
+
+ def self.recalculate_post_count
+ sql = "UPDATE tags SET post_count = (SELECT COUNT(*) FROM posts_tags pt, posts p WHERE pt.tag_id = tags.id AND pt.post_id = p.id AND p.status <> 'deleted')"
+ execute_sql sql
+ end
+
+ def self.mass_edit(start_tags, result_tags, updater_id, updater_ip_addr)
+ Post.find_by_tags(start_tags).each do |p|
+ start = TagAlias.to_aliased(Tag.scan_tags(start_tags))
+ result = TagAlias.to_aliased(Tag.scan_tags(result_tags))
+ tags = (p.cached_tags.scan(/\S+/) - start + result).join(" ")
+ p.update_attributes(:updater_user_id => updater_id, :updater_ip_addr => updater_ip_addr, :tags => tags)
+ end
+ end
+
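+ # Suggests up to six existing tags related to the query. A query containing
+ # exactly one underscore has its halves swapped (so "miku_hatsune" looks up
+ # "hatsune_miku"; tag names hypothetical); anything else falls back to a
+ # substring LIKE search ordered by post count.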
+ def self.find_suggestions(query)
+ if query.include?("_") && query.index("_") == query.rindex("_")
+ # Contains only one underscore
+ search_for = query.split(/_/).reverse.join("_").to_escaped_for_sql_like
+ else
+ search_for = "%" + query.to_escaped_for_sql_like + "%"
+ end
+
+ Tag.find(:all, :conditions => ["name LIKE ? ESCAPE E'\\\\' AND name <> ?", search_for, query], :order => "post_count DESC", :limit => 6, :select => "name").map(&:name).sort
+ end
+end
diff --git a/app/models/tag/api_methods.rb b/app/models/tag/api_methods.rb
new file mode 100644
index 00000000..f1b16d16
--- /dev/null
+++ b/app/models/tag/api_methods.rb
@@ -0,0 +1,19 @@
+module TagApiMethods
+ def api_attributes
+ return {
+ :id => id,
+ :name => name,
+ :count => post_count,
+ :type => tag_type,
+ :ambiguous => is_ambiguous
+ }
+ end
+
+ def to_xml(options = {})
+ api_attributes.to_xml(options.merge(:root => "tag"))
+ end
+
+ def to_json(*args)
+ api_attributes.to_json(*args)
+ end
+end
diff --git a/app/models/tag/cache_methods.rb b/app/models/tag/cache_methods.rb
new file mode 100644
index 00000000..dd11f76f
--- /dev/null
+++ b/app/models/tag/cache_methods.rb
@@ -0,0 +1,10 @@
+module TagCacheMethods
+ def self.included(m)
+ m.after_save :update_cache
+ end
+
+ def update_cache
+ Cache.put("tag_type:#{name}", self.class.type_name_from_value(tag_type))
+ end
+end
+
diff --git a/app/models/tag/parse_methods.rb b/app/models/tag/parse_methods.rb
new file mode 100644
index 00000000..bbdcc4c6
--- /dev/null
+++ b/app/models/tag/parse_methods.rb
@@ -0,0 +1,158 @@
+module TagParseMethods
+ module ClassMethods
+ def scan_query(query)
+ query.to_s.downcase.scan(/\S+/).uniq
+ end
+
+ def scan_tags(tags)
+ tags.to_s.gsub(/[*%,]/, "").downcase.scan(/\S+/).uniq
+ end
+
+ def parse_cast(x, type)
+ if type == :integer
+ x.to_i
+ elsif type == :float
+ x.to_f
+ elsif type == :date
+ begin
+ x.to_date
+ rescue Exception
+ nil
+ end
+ end
+ end
+
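+ # Turns a range expression into a tuple consumed by Post.generate_sql_range_helper:
+ # parse_helper("10..20") => [:between, 10, 20], parse_helper(">=5") => [:gte, 5],
+ # parse_helper("3") => [:eq, 3].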
+ def parse_helper(range, type = :integer)
+ # "1", "0.5", "5.", ".5":
+ # (-?(\d+(\.\d*)?|\d*\.\d+))
+ case range
+ when /^(.+?)\.\.(.+)/
+ return [:between, parse_cast($1, type), parse_cast($2, type)]
+
+ when /^<=(.+)/, /^\.\.(.+)/
+ return [:lte, parse_cast($1, type)]
+
+ when /^<(.+)/
+ return [:lt, parse_cast($1, type)]
+
+ when /^>=(.+)/, /^(.+)\.\.$/
+ return [:gte, parse_cast($1, type)]
+
+ when /^>(.+)/
+ return [:gt, parse_cast($1, type)]
+
+ else
+ return [:eq, parse_cast(range, type)]
+
+ end
+ end
+
+ # Parses a query into a hash of search terms: excluded tags (leading "-"), unioned
+ # tags (leading "~"), plain intersected tags, and metatag parameters (rating:, pool:, etc.).
+ #
+ # === Parameters
+ # * +query+: String, array, or nil. The query to parse.
+ # * +options+: A hash of options.
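+ #
+ # A hedged example (tag names hypothetical, assuming no aliases apply):
+ #   parse_query("cat ~dog -rating:e width:>=1000")
+ #   # => { :related => ["cat"], :include => ["dog"],
+ #   #      :rating_negated => "e", :width => [:gte, 1000] }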
+ def parse_query(query, options = {})
+ q = Hash.new {|h, k| h[k] = []}
+
+ scan_query(query).each do |token|
+ if token =~ /^(unlocked|deleted|user|favtag|vote|-vote|fav|md5|-rating|rating|width|height|mpixels|score|source|id|date|pool|-pool|pool_posts|parent|order|change|holds|shown|limit):(.+)$/
+ if $1 == "user"
+ q[:user] = $2
+ elsif $1 == "vote"
+ vote, user = $2.split(":")
+ user_id = User.find_by_name_nocase(user).id rescue nil
+ q[:vote] = [parse_helper(vote), user_id]
+ elsif $1 == "-vote"
+ q[:vote_negated] = User.find_by_name_nocase($2).id rescue nil
+ q[:error] = "no user named %s" % user if q[:vote_negated].nil?
+ elsif $1 == "fav"
+ q[:fav] = $2
+ elsif $1 == "favtag"
+ q[:favtag] = $2
+ elsif $1 == "md5"
+ q[:md5] = $2
+ elsif $1 == "-rating"
+ q[:rating_negated] = $2
+ elsif $1 == "rating"
+ q[:rating] = $2
+ elsif $1 == "id"
+ q[:post_id] = parse_helper($2)
+ elsif $1 == "width"
+ q[:width] = parse_helper($2)
+ elsif $1 == "height"
+ q[:height] = parse_helper($2)
+ elsif $1 == "mpixels"
+ q[:mpixels] = parse_helper($2, :float)
+ elsif $1 == "score"
+ q[:score] = parse_helper($2)
+ elsif $1 == "source"
+ q[:source] = $2.to_escaped_for_sql_like + "%"
+ elsif $1 == "date"
+ q[:date] = parse_helper($2, :date)
+ elsif $1 == "pool"
+ q[:pool] = $2
+ if q[:pool] =~ /^(\d+)$/
+ q[:pool] = q[:pool].to_i
+ end
+ elsif $1 == "-pool"
+ pool = $2
+ if pool =~ /^(\d+)$/
+ pool = pool.to_i
+ end
+ q[:exclude_pools] ||= []
+ q[:exclude_pools] << pool
+ elsif $1 == "pool_posts"
+ q[:pool_posts] = $2
+ elsif $1 == "parent"
+ if $2 == "none"
+ q[:parent_id] = false
+ else
+ q[:parent_id] = $2.to_i
+ end
+ elsif $1 == "order"
+ q[:order] = $2
+ elsif $1 == "unlocked"
+ if $2 == "rating"
+ q[:unlocked_rating] = true
+ end
+ elsif $1 == "deleted" && $2 == "true"
+ q[:deleted_only] = true
+ elsif $1 == "change"
+ q[:change] = parse_helper($2)
+ elsif $1 == "shown"
+ q[:shown_in_index] = ($2 == "true")
+ elsif $1 == "holds"
+ if $2 == "only"
+ q[:show_holds_only] = true
+ elsif $2 == "true"
+ q[:show_holds_only] = false # all posts, held or not
+ end
+ elsif $1 == "limit"
+ q[:limit] = $2
+ end
+ elsif token[0] == ?-
+ q[:exclude] << token[1..-1]
+ elsif token[0] == ?~
+ q[:include] << token[1..-1]
+ elsif token.include?("*")
+ q[:include] += find(:all, :conditions => ["name LIKE ? ESCAPE E'\\\\'", token.to_escaped_for_sql_like], :select => "name, post_count", :limit => 25, :order => "post_count DESC").map {|i| i.name}
+ else
+ q[:related] << token
+ end
+ end
+
+ unless options[:skip_aliasing]
+ q[:exclude] = TagAlias.to_aliased(q[:exclude]) if q.has_key?(:exclude)
+ q[:include] = TagAlias.to_aliased(q[:include]) if q.has_key?(:include)
+ q[:related] = TagAlias.to_aliased(q[:related]) if q.has_key?(:related)
+ end
+
+ return q
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ end
+end
diff --git a/app/models/tag/related_tag_methods.rb b/app/models/tag/related_tag_methods.rb
new file mode 100644
index 00000000..93607437
--- /dev/null
+++ b/app/models/tag/related_tag_methods.rb
@@ -0,0 +1,93 @@
+module TagRelatedTagMethods
+ module ClassMethods
+ def calculate_related_by_type(tag, type, limit = 25)
+ if CONFIG["enable_caching"] && tag.size < 230
+ results = Cache.get("reltagsbytype/#{type}/#{tag}")
+
+ if results
+ return JSON.parse(results)
+ end
+ end
+
+ sql = <<-EOS
+ SELECT (SELECT name FROM tags WHERE id = pt0.tag_id) AS name,
+ COUNT(pt0.tag_id) AS post_count
+ FROM posts_tags pt0, posts_tags pt1
+ WHERE pt0.post_id = pt1.post_id
+ AND (SELECT TRUE FROM POSTS p0 WHERE p0.id = pt0.post_id AND p0.status <> 'deleted')
+ AND pt1.tag_id = (SELECT id FROM tags WHERE name = ?)
+ AND pt0.tag_id IN (SELECT id FROM tags WHERE tag_type = ?)
+ GROUP BY pt0.tag_id
+ ORDER BY post_count DESC
+ LIMIT ?
+ EOS
+
+ results = select_all_sql(sql, tag, type, limit)
+
+ if CONFIG["enable_caching"] && tag.size < 230
+ post_count = (Tag.find_by_name(tag).post_count rescue 0) / 3
+ post_count = 12 if post_count < 12
+ post_count = 200 if post_count > 200
+
+ Cache.put("reltagsbytype/#{type}/#{tag}", results.map {|x| {"name" => x["name"], "post_count" => x["post_count"]}}.to_json, post_count.hours)
+ end
+
+ return results
+ end
+
+ def calculate_related(tags)
+ tags = Array(tags)
+ return [] if tags.empty?
+
+ from = ["posts_tags pt0"]
+ cond = ["pt0.post_id = pt1.post_id"]
+ sql = ""
+
+ # Ignore deleted posts in pt0, so the count excludes them.
+ cond << "(SELECT TRUE FROM POSTS p0 WHERE p0.id = pt0.post_id AND p0.status <> 'deleted')"
+
+ (1..tags.size).each {|i| from << "posts_tags pt#{i}"}
+ (2..tags.size).each {|i| cond << "pt1.post_id = pt#{i}.post_id"}
+ (1..tags.size).each {|i| cond << "pt#{i}.tag_id = (SELECT id FROM tags WHERE name = ?)"}
+
+ sql << "SELECT (SELECT name FROM tags WHERE id = pt0.tag_id) AS tag, COUNT(pt0.*) AS tag_count"
+ sql << " FROM " << from.join(", ")
+ sql << " WHERE " << cond.join(" AND ")
+ sql << " GROUP BY pt0.tag_id"
+ sql << " ORDER BY tag_count DESC LIMIT 25"
+
+ return select_all_sql(sql, *tags).map {|x| [x["tag"], x["tag_count"]]}
+ end
+
+ def find_related(tags)
+ if tags.is_a?(Array) && tags.size > 1
+ return calculate_related(tags)
+ elsif tags.to_s != ""
+ t = find_by_name(tags.to_s)
+ if t
+ return t.related
+ end
+ end
+
+ return []
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ end
+
+ def related
+ if Time.now > cached_related_expires_on
+ length = post_count / 3
+ length = 12 if length < 12
+ length = 8760 if length > 8760
+
+ execute_sql("UPDATE tags SET cached_related = ?, cached_related_expires_on = ? WHERE id = ?", self.class.calculate_related(name).flatten.join(","), length.hours.from_now, id)
+ reload
+ end
+
+ return cached_related.split(/,/).in_groups_of(2)
+ end
+end
+
diff --git a/app/models/tag/type_methods.rb b/app/models/tag/type_methods.rb
new file mode 100644
index 00000000..5a0c8ab4
--- /dev/null
+++ b/app/models/tag/type_methods.rb
@@ -0,0 +1,92 @@
+module TagTypeMethods
+ module ClassMethods
+ attr_accessor :type_map
+
+ # Find the type name for a type value.
+ #
+ # === Parameters
+ # * :type_value:: The tag type value to search for
+ def type_name_from_value(type_value)
+ type_map[type_value]
+ end
+
+ def type_name_helper(tag_name) # :nodoc:
+ tag = Tag.find(:first, :conditions => ["name = ?", tag_name], :select => "tag_type")
+
+ if tag == nil
+ "general"
+ else
+ type_map[tag.tag_type]
+ end
+ end
+
+ # Find the tag type name of a tag.
+ #
+ # === Parameters
+ # * :tag_name:: The tag name to search for
+ def type_name(tag_name)
+ tag_name = tag_name.gsub(/\s/, "_")
+
+ if CONFIG["enable_caching"]
+ return Cache.get("tag_type:#{tag_name}", 1.day) do
+ type_name_helper(tag_name)
+ end
+ else
+ type_name_helper(tag_name)
+ end
+ end
+
+ # Given a space-delimited tag string, remove tags until the joined length is <= max_len.
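+ #
+ # For example, compact_tags("blue_sky cat dog", 12) drops the longest tag and
+ # returns "cat dog" (tag names hypothetical).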
+ def compact_tags(tags, max_len)
+ return tags if tags.length < max_len
+
+ split_tags = tags.split(/ /)
+
+ # Put long tags first, so we don't remove every tag because of one very long one.
+ split_tags.sort! do |a,b| b.length <=> a.length end
+
+ # Drop tags, longest first, until the remaining length fits:
+ length = tags.length
+ split_tags.each_index do |i|
+ length -= split_tags[i].length + 1
+ split_tags[i] = nil
+ break if length <= max_len
+ end
+
+ split_tags.compact!
+ split_tags.sort!
+ return split_tags.join(" ")
+ end
+
+ def tag_list_order(tag_type)
+ case tag_type
+ when "artist": 0
+ when "circle": 1
+ when "copyright": 2
+ when "character": 3
+ when "general": 5
+ when "faults": 6
+ else 4
+ end
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ m.versioned :tag_type
+ m.versioned :is_ambiguous, :default => false
+
+ m.versioning_display :action => "edit"
+
+ # This maps ids to names
+ m.type_map = CONFIG["tag_types"].keys.select {|x| x =~ /^[A-Z]/}.inject({}) {|all, x| all[CONFIG["tag_types"][x]] = x.downcase; all}
+ end
+
+ def type_name
+ self.class.type_name_from_value(tag_type)
+ end
+
+ def pretty_type_name
+ type_name.capitalize
+ end
+end
diff --git a/app/models/tag_alias.rb b/app/models/tag_alias.rb
new file mode 100644
index 00000000..4c9d5d9e
--- /dev/null
+++ b/app/models/tag_alias.rb
@@ -0,0 +1,88 @@
+class TagAlias < ActiveRecord::Base
+ before_create :normalize
+ before_create :validate_uniqueness
+
+ # Maps tags to their preferred names. Returns an array of strings.
+ #
+ # === Parameters
+ # * :tags:: list of tags to transform.
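+ #
+ # Example (alias hypothetical): TagAlias.to_aliased(["grey_hair", "cat"])
+ # => ["gray_hair", "cat"] when an approved alias maps grey_hair to gray_hair.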
+ def self.to_aliased(tags)
+ Array(tags).inject([]) do |aliased_tags, tag_name|
+ aliased_tags << to_aliased_helper(tag_name)
+ end
+ end
+
+ def self.to_aliased_helper(tag_name)
+ # TODO: add memcached support
+ tag = find(:first, :select => "tags.name AS name", :joins => "JOIN tags ON tags.id = tag_aliases.alias_id", :conditions => ["tag_aliases.name = ? AND tag_aliases.is_pending = FALSE", tag_name])
+ tag ? tag.name : tag_name
+ end
+
+ # Destroys the alias and sends a message to the alias's creator.
+ def destroy_and_notify(current_user, reason)
+ if creator_id && creator_id != current_user.id
+ msg = "A tag alias you submitted (#{name} → #{alias_name}) was deleted for the following reason: #{reason}."
+ Dmail.create(:from_id => current_user.id, :to_id => creator_id, :title => "One of your tag aliases was deleted", :body => msg)
+ end
+
+ destroy
+ end
+
+ # Strips out any illegal characters and makes sure the name is lowercase.
+ def normalize
+ self.name = name.downcase.gsub(/ /, "_").gsub(/^[-~]+/, "")
+ end
+
+ # Makes sure the alias does not conflict with any other aliases.
+ def validate_uniqueness
+ if self.class.exists?(["name = ?", name])
+ errors.add_to_base("#{name} is already aliased to something")
+ return false
+ end
+
+ if self.class.exists?(["alias_id = (select id from tags where name = ?)", name])
+ errors.add_to_base("#{name} is already aliased to something")
+ return false
+ end
+
+ if self.class.exists?(["name = ?", alias_name])
+ errors.add_to_base("#{alias_name} is already aliased to something")
+ return false
+ end
+ end
+
+ def alias=(name)
+ tag = Tag.find_or_create_by_name(name)
+ self.alias_id = tag.id
+ end
+
+ def alias_name
+ Tag.find(alias_id).name
+ end
+
+ def approve(user_id, ip_addr)
+ execute_sql("UPDATE tag_aliases SET is_pending = FALSE WHERE id = ?", id)
+
+ Post.find(:all, :conditions => ["id IN (SELECT pt.post_id FROM posts_tags pt WHERE pt.tag_id = (SELECT id FROM tags WHERE name = ?))", name]).each do |post|
+ post.reload
+ post.update_attributes(:tags => post.cached_tags, :updater_user_id => user_id, :updater_ip_addr => ip_addr)
+ end
+ end
+
+ def api_attributes
+ return {
+ :id => id,
+ :name => name,
+ :alias_id => alias_id,
+ :pending => is_pending
+ }
+ end
+
+ def to_xml(options = {})
+ api_attributes.to_xml(options.merge(:root => "tag_alias"))
+ end
+
+ def to_json(*args)
+ return api_attributes.to_json(*args)
+ end
+end
diff --git a/app/models/tag_implication.rb b/app/models/tag_implication.rb
new file mode 100644
index 00000000..842e210d
--- /dev/null
+++ b/app/models/tag_implication.rb
@@ -0,0 +1,87 @@
+class TagImplication < ActiveRecord::Base
+ before_create :validate_uniqueness
+
+ def validate_uniqueness
+ if self.class.find(:first, :conditions => ["(predicate_id = ? AND consequent_id = ?) OR (predicate_id = ? AND consequent_id = ?)", predicate_id, consequent_id, consequent_id, predicate_id])
+ self.errors.add_to_base("Tag implication already exists")
+ return false
+ end
+ end
+
+ # Destroys the implication and sends a message to the implication's creator.
+ def destroy_and_notify(current_user, reason)
+ if creator_id && creator_id != current_user.id
+ msg = "A tag implication you submitted (#{predicate.name} → #{consequent.name}) was deleted for the following reason: #{reason}."
+
+ Dmail.create(:from_id => current_user.id, :to_id => creator_id, :title => "One of your tag implications was deleted", :body => msg)
+ end
+
+ destroy
+ end
+
+ def predicate
+ return Tag.find(self.predicate_id)
+ end
+
+ def consequent
+ return Tag.find(self.consequent_id)
+ end
+
+ def predicate=(name)
+ t = Tag.find_or_create_by_name(name)
+ self.predicate_id = t.id
+ end
+
+ def consequent=(name)
+ t = Tag.find_or_create_by_name(name)
+ self.consequent_id = t.id
+ end
+
+ def approve(user_id, ip_addr)
+ connection.execute("UPDATE tag_implications SET is_pending = FALSE WHERE id = #{self.id}")
+
+ p = Tag.find(self.predicate_id)
+ implied_tags = self.class.with_implied(p.name).join(" ")
+ Post.find(:all, :conditions => Tag.sanitize_sql(["id IN (SELECT pt.post_id FROM posts_tags pt WHERE pt.tag_id = ?)", p.id])).each do |post|
+ post.reload
+ post.update_attributes(:tags => post.cached_tags + " " + implied_tags, :updater_user_id => user_id, :updater_ip_addr => ip_addr)
+ end
+ end
+
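+ # Expands a tag list with everything it implies, following chains of approved
+ # implications up to 10 levels deep. Hypothetical example: given an implication
+ # hatsune_miku -> vocaloid, with_implied(["hatsune_miku"]) returns
+ # ["hatsune_miku", "vocaloid"].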
+ def self.with_implied(tags)
+ return [] if tags.blank?
+ all = []
+
+ tags.each do |tag|
+ all << tag
+ results = [tag]
+
+ 10.times do
+ results = connection.select_values(sanitize_sql([<<-SQL, results]))
+ SELECT t1.name
+ FROM tags t1, tags t2, tag_implications ti
+ WHERE ti.predicate_id = t2.id
+ AND ti.consequent_id = t1.id
+ AND t2.name IN (?)
+ AND ti.is_pending = FALSE
+ SQL
+
+ if results.any?
+ all += results
+ else
+ break
+ end
+ end
+ end
+
+ return all
+ end
+
+ def to_xml(options = {})
+ {:id => id, :consequent_id => consequent_id, :predicate_id => predicate_id, :pending => is_pending}.to_xml(options.merge(:root => "tag_implication"))
+ end
+
+ def to_json(*args)
+ {:id => id, :consequent_id => consequent_id, :predicate_id => predicate_id, :pending => is_pending}.to_json(*args)
+ end
+end
diff --git a/app/models/user.rb b/app/models/user.rb
new file mode 100644
index 00000000..1cfea790
--- /dev/null
+++ b/app/models/user.rb
@@ -0,0 +1,668 @@
+require 'digest/sha1'
+
+class User < ActiveRecord::Base
+ class AlreadyFavoritedError < Exception; end
+
+ module UserBlacklistMethods
+ # TODO: I don't see the advantage of normalizing these. Since commas are illegal
+ # characters in tags, they can be used to separate lines (with whitespace separating
+ # tags). Denormalizing this into a field in users would save a SQL query.
+ def self.included(m)
+ m.after_save :commit_blacklists
+ m.after_create :set_default_blacklisted_tags
+ m.has_many :user_blacklisted_tags, :dependent => :delete_all, :order => :id
+ end
+
+ def blacklisted_tags=(blacklists)
+ @blacklisted_tags = blacklists
+ end
+
+ def blacklisted_tags
+ blacklisted_tags_array.join("\n") + "\n"
+ end
+
+ def blacklisted_tags_array
+ user_blacklisted_tags.map {|x| x.tags}
+ end
+
+ def commit_blacklists
+ if @blacklisted_tags
+ user_blacklisted_tags.clear
+
+ @blacklisted_tags.scan(/[^\r\n]+/).each do |tags|
+ user_blacklisted_tags.create(:tags => tags)
+ end
+ end
+ end
+
+ def set_default_blacklisted_tags
+ CONFIG["default_blacklists"].each do |b|
+ UserBlacklistedTag.create(:user_id => self.id, :tags => b)
+ end
+ end
+ end
+
+ module UserAuthenticationMethods
+ module ClassMethods
+ def authenticate(name, pass)
+ authenticate_hash(name, sha1(pass))
+ end
+
+ def authenticate_hash(name, pass)
+ find(:first, :conditions => ["lower(name) = lower(?) AND password_hash = ?", name, pass])
+ end
+
+ if CONFIG["enable_account_email_activation"]
+ def confirmation_hash(name)
+ Digest::SHA256.hexdigest("~-#{name}-~#{salt}")
+ end
+ end
+
+ def sha1(pass)
+ Digest::SHA1.hexdigest("#{salt}--#{pass}--")
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ end
+ end
+
+ module UserPasswordMethods
+ attr_accessor :password
+
+ def self.included(m)
+ m.before_save :encrypt_password
+ m.validates_length_of :password, :minimum => 5, :if => lambda {|rec| rec.password}
+ m.validates_confirmation_of :password
+ end
+
+ def encrypt_password
+ self.password_hash = User.sha1(password) if password
+ end
+
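+ # Generates a pronounceable temporary password (four consonant-vowel pairs
+ # followed by a number from 0-99, e.g. "bakodipu42"), writes its hash straight
+ # to the database and returns the plaintext to the caller.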
+ def reset_password
+ consonants = "bcdfghjklmnpqrstvqxyz"
+ vowels = "aeiou"
+ pass = ""
+
+ 4.times do
+ pass << consonants[rand(21), 1]
+ pass << vowels[rand(5), 1]
+ end
+
+ pass << rand(100).to_s
+ execute_sql("UPDATE users SET password_hash = ? WHERE id = ?", User.sha1(pass), self.id)
+ return pass
+ end
+ end
+
+ module UserCountMethods
+ module ClassMethods
+ def fast_count
+ return select_value_sql("SELECT row_count FROM table_data WHERE name = 'users'").to_i
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ m.after_create :increment_count
+ m.after_destroy :decrement_count
+ end
+
+ def increment_count
+ connection.execute("update table_data set row_count = row_count + 1 where name = 'users'")
+ end
+
+ def decrement_count
+ connection.execute("update table_data set row_count = row_count - 1 where name = 'users'")
+ end
+ end
+
+ module UserNameMethods
+ module ClassMethods
+ def find_name_helper(user_id)
+ if user_id.nil?
+ return CONFIG["default_guest_name"]
+ end
+
+ user = find(:first, :conditions => ["id = ?", user_id], :select => "name")
+
+ if user
+ return user.name
+ else
+ return CONFIG["default_guest_name"]
+ end
+ end
+
+ def find_name(user_id)
+ if CONFIG["enable_caching"]
+ return Cache.get("user_name:#{user_id}") do
+ find_name_helper(user_id)
+ end
+ else
+ find_name_helper(user_id)
+ end
+ end
+
+ def find_by_name(name)
+ find(:first, :conditions => ["lower(name) = lower(?)", name])
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ m.validates_length_of :name, :within => 2..20, :on => :create
+ m.validates_format_of :name, :with => /\A[^\s;,]+\Z/, :on => :create, :message => "cannot have whitespace, commas, or semicolons"
+# validates_format_of :name, :with => /^(Anonymous|[Aa]dministrator)/, :on => :create, :message => "this is a disallowed username"
+ m.validates_uniqueness_of :name, :case_sensitive => false, :on => :create
+ m.after_save :update_cached_name if CONFIG["enable_caching"]
+ end
+
+ def pretty_name
+ name.tr("_", " ")
+ end
+
+ def update_cached_name
+ Cache.put("user_name:#{id}", name)
+ end
+ end
+
+ module UserApiMethods
+ def to_xml(options = {})
+ options[:indent] ||= 2
+ xml = options[:builder] ||= Builder::XmlMarkup.new(:indent => options[:indent])
+ xml.post(:name => name, :id => id) do
+ blacklisted_tags_array.each do |t|
+ xml.blacklisted_tag(:tag => t)
+ end
+
+ yield options[:builder] if block_given?
+ end
+ end
+
+ def to_json(*args)
+ {:name => name, :blacklisted_tags => blacklisted_tags_array, :id => id}.to_json(*args)
+ end
+ end
+
+ def self.find_by_name_nocase(name)
+ return User.find(:first, :conditions => ["lower(name) = lower(?)", name])
+ end
+
+ module UserTagMethods
+ def uploaded_tags(options = {})
+ type = options[:type]
+
+ if CONFIG["enable_caching"]
+ uploaded_tags = Cache.get("uploaded_tags/#{id}/#{type}")
+ return uploaded_tags unless uploaded_tags == nil
+ end
+
+ if RAILS_ENV == "test"
+ # disable filtering in test mode to simplify tests
+ popular_tags = ""
+ else
+ popular_tags = select_values_sql("SELECT id FROM tags WHERE tag_type = #{CONFIG['tag_types']['General']} ORDER BY post_count DESC LIMIT 8").join(", ")
+ popular_tags = "AND pt.tag_id NOT IN (#{popular_tags})" unless popular_tags.blank?
+ end
+
+ if type
+ sql = <<-EOS
+ SELECT (SELECT name FROM tags WHERE id = pt.tag_id) AS tag, COUNT(*) AS count
+ FROM posts_tags pt, tags t, posts p
+ WHERE p.user_id = #{id}
+ AND p.id = pt.post_id
+ AND pt.tag_id = t.id
+ #{popular_tags}
+ AND t.tag_type = #{type.to_i}
+ GROUP BY pt.tag_id
+ ORDER BY count DESC
+ LIMIT 6
+ EOS
+ else
+ sql = <<-EOS
+ SELECT (SELECT name FROM tags WHERE id = pt.tag_id) AS tag, COUNT(*) AS count
+ FROM posts_tags pt, posts p
+ WHERE p.user_id = #{id}
+ AND p.id = pt.post_id
+ #{popular_tags}
+ GROUP BY pt.tag_id
+ ORDER BY count DESC
+ LIMIT 6
+ EOS
+ end
+
+ uploaded_tags = select_all_sql(sql)
+
+ if CONFIG["enable_caching"]
+ Cache.put("uploaded_tags/#{id}/#{type}", uploaded_tags, 1.day)
+ end
+
+ return uploaded_tags
+ end
+
+ def voted_tags(options = {})
+ type = options[:type]
+
+ if CONFIG["enable_caching"]
+ favorite_tags = Cache.get("favorite_tags/#{id}/#{type}")
+ return favorite_tags unless favorite_tags == nil
+ end
+
+ if RAILS_ENV == "test"
+ # disable filtering in test mode to simplify tests
+ popular_tags = ""
+ else
+ popular_tags = select_values_sql("SELECT id FROM tags WHERE tag_type = #{CONFIG['tag_types']['General']} ORDER BY post_count DESC LIMIT 8").join(", ")
+ popular_tags = "AND pt.tag_id NOT IN (#{popular_tags})" unless popular_tags.blank?
+ end
+
+ if type
+ sql = <<-EOS
+ SELECT (SELECT name FROM tags WHERE id = pt.tag_id) AS tag, SUM(v.score) AS sum
+ FROM posts_tags pt, tags t, post_votes v
+ WHERE v.user_id = #{id}
+ AND v.post_id = pt.post_id
+ AND pt.tag_id = t.id
+ #{popular_tags}
+ AND t.tag_type = #{type.to_i}
+ GROUP BY pt.tag_id
+ ORDER BY sum DESC
+ LIMIT 6
+ EOS
+ else
+ sql = <<-EOS
+ SELECT (SELECT name FROM tags WHERE id = pt.tag_id) AS tag, SUM(v.score) AS sum
+ FROM posts_tags pt, post_votes v
+ WHERE v.user_id = #{id}
+ AND v.post_id = pt.post_id
+ #{popular_tags}
+ GROUP BY pt.tag_id
+ ORDER BY sum DESC
+ LIMIT 6
+ EOS
+ end
+
+ favorite_tags = select_all_sql(sql)
+
+ if CONFIG["enable_caching"]
+ Cache.put("favorite_tags/#{id}/#{type}", favorite_tags, 1.day)
+ end
+
+ return favorite_tags
+ end
+ end
+
+ module UserPostMethods
+ def recent_uploaded_posts
+ Post.find_by_sql("SELECT p.* FROM posts p WHERE p.user_id = #{id} AND p.status <> 'deleted' ORDER BY p.id DESC LIMIT 6")
+ end
+
+ def recent_favorite_posts
+ Post.find_by_sql("SELECT p.* FROM posts p, post_votes v WHERE p.id = v.post_id AND v.user_id = #{id} AND v.score = 3 AND p.status <> 'deleted' ORDER BY v.id DESC LIMIT 6")
+ end
+
+ def favorite_post_count(options = {})
+ PostVotes.count_by_sql("SELECT COUNT(*) FROM post_votes v WHERE v.user_id = #{id} AND v.score = 3")
+ end
+
+ def post_count
+ @post_count ||= Post.count(:conditions => ["user_id = ? AND status = 'active'", id])
+ end
+
+ def held_post_count
+ version = Cache.get("$cache_version").to_i
+ key = "held-post-count/v=#{version}/u=#{self.id}"
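+ # The key embeds $cache_version, so bumping that counter invalidates every
+ # cached held-post count at once.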
+
+ return Cache.get(key) {
+ Post.count(:conditions => ["user_id = ? AND is_held AND status <> 'deleted'", self.id])
+ }.to_i
+ end
+ end
+
+ module UserLevelMethods
+ def self.included(m)
+ m.extend(ClassMethods)
+ m.attr_protected :level
+ m.before_create :set_role
+ end
+
+ def pretty_level
+ return CONFIG["user_levels"].invert[self.level]
+ end
+
+ def set_role
+ if User.fast_count == 0
+ self.level = CONFIG["user_levels"]["Admin"]
+ elsif CONFIG["enable_account_email_activation"]
+ self.level = CONFIG["user_levels"]["Unactivated"]
+ else
+ self.level = CONFIG["starting_level"]
+ end
+
+ self.last_logged_in_at = Time.now
+ end
+
+ def has_permission?(record, foreign_key = :user_id)
+ if is_mod_or_higher?
+ true
+ elsif record.respond_to?(foreign_key)
+ record.__send__(foreign_key) == id
+ else
+ false
+ end
+ end
+
+ # Return true if this user can change the specified attribute.
+ #
+ # If record is an ActiveRecord object, returns true if the change is allowed to complete.
+ #
+ # If record is an ActiveRecord class (eg. Pool rather than an actual pool), returns
+ # false if the user would never be allowed to make this change for any instance of the
+ # object, and so the option should not be presented.
+ #
+ # For example, can_change(Pool, :description) returns true (unless the user level
+ # is too low to change any pools), but can_change(Pool.find(1), :description) returns
+ # false if that specific pool is locked.
+ #
+ # attribute usually corresponds with an actual attribute in the class, but any value
+ # can be used.
+ def can_change?(record, attribute)
+ method = "can_change_#{attribute.to_s}?"
+ if is_mod_or_higher?
+ true
+ elsif record.respond_to?(method)
+ record.__send__(method, self)
+ elsif record.respond_to?(:can_change?)
+ record.can_change?(self, attribute)
+ else
+ true
+ end
+ end
+
+ # Defines various convenience methods for finding out the user's level
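+ # For example, a "Mod" level generates is_mod?, is_mod_or_higher?, and is_mod_or_lower?.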
+ CONFIG["user_levels"].each do |name, value|
+ normalized_name = name.downcase.gsub(/ /, "_")
+ define_method("is_#{normalized_name}?") do
+ self.level == value
+ end
+
+ define_method("is_#{normalized_name}_or_higher?") do
+ self.level >= value
+ end
+
+ define_method("is_#{normalized_name}_or_lower?") do
+ self.level <= value
+ end
+ end
+
+
+ module ClassMethods
+ def get_user_level(level)
+ if not @user_level then
+ @user_level = {}
+ CONFIG["user_levels"].each do |name, value|
+ normalized_name = name.downcase.gsub(/ /, "_").to_sym
+ @user_level[normalized_name] = value
+ end
+ end
+ @user_level[level]
+ end
+ end
+ end
+
+ module UserInviteMethods
+ class NoInvites < Exception ; end
+ class HasNegativeRecord < Exception ; end
+
+ def invite!(name, level)
+ if invite_count <= 0
+ raise NoInvites
+ end
+
+ if level.to_i >= CONFIG["user_levels"]["Contributor"]
+ level = CONFIG["user_levels"]["Contributor"]
+ end
+
+ invitee = User.find_by_name(name)
+
+ if invitee.nil?
+ raise ActiveRecord::RecordNotFound
+ end
+
+ if UserRecord.exists?(["user_id = ? AND is_positive = false AND reported_by IN (SELECT id FROM users WHERE level >= ?)", invitee.id, CONFIG["user_levels"]["Mod"]]) && !is_admin?
+ raise HasNegativeRecord
+ end
+
+ transaction do
+ invitee.level = level
+ invitee.invited_by = id
+ invitee.save
+ decrement! :invite_count
+ end
+ end
+
+ def self.included(m)
+ m.attr_protected :invite_count
+ end
+ end
+
+ module UserAvatarMethods
+ module ClassMethods
+ # post_id is being destroyed. Clear avatar_post_ids for this post, so we won't use
+ # avatars from this post. We don't need to actually delete the image.
+ def clear_avatars(post_id)
+ execute_sql("UPDATE users SET avatar_post_id = NULL WHERE avatar_post_id = ?", post_id)
+ end
+ end
+
+ def self.included(m)
+ m.extend(ClassMethods)
+ m.belongs_to :avatar_post, :class_name => "Post"
+ end
+
+ def avatar_url
+ CONFIG["url_base"] + "/data/avatars/#{self.id}.jpg"
+ end
+
+ def has_avatar?
+ return (not self.avatar_post_id.nil?)
+ end
+
+ def avatar_path
+ "#{RAILS_ROOT}/public/data/avatars/#{self.id}.jpg"
+ end
+
+ def set_avatar(params)
+ post = Post.find(params[:post_id])
+ if not post.can_be_seen_by?(self)
+ errors.add(:access, "denied")
+ return false
+ end
+
+ if params[:top].to_f < 0 or params[:top].to_f > 1 or
+ params[:bottom].to_f < 0 or params[:bottom].to_f > 1 or
+ params[:left].to_f < 0 or params[:left].to_f > 1 or
+ params[:right].to_f < 0 or params[:right].to_f > 1 or
+ params[:top] >= params[:bottom] or
+ params[:left] >= params[:right]
+ then
+ errors.add(:parameter, "error")
+ return false
+ end
+
+ tempfile_path = "#{RAILS_ROOT}/public/data/#{$PROCESS_ID}.avatar.jpg"
+
+ def reduce_and_crop(image_width, image_height, params)
+ cropped_image_width = image_width * (params[:right].to_f - params[:left].to_f)
+ cropped_image_height = image_height * (params[:bottom].to_f - params[:top].to_f)
+
+ size = Danbooru.reduce_to({:width=>cropped_image_width, :height=>cropped_image_height}, {:width=>CONFIG["avatar_max_width"], :height=>CONFIG["avatar_max_height"]}, 1, true)
+ size[:crop_top] = image_height * params[:top].to_f
+ size[:crop_bottom] = image_height * params[:bottom].to_f
+ size[:crop_left] = image_width * params[:left].to_f
+ size[:crop_right] = image_width * params[:right].to_f
+ size
+ end
+
+ use_sample = post.has_sample?
+ if use_sample
+ image_path = post.sample_path
+ image_ext = "jpg"
+ size = reduce_and_crop(post.sample_width, post.sample_height, params)
+
+ # If we're cropping from a very small region in the sample, use the full
+ # image instead, to get a higher quality image.
+ if size[:crop_bottom] - size[:crop_top] < CONFIG["avatar_max_height"] or
+ size[:crop_right] - size[:crop_left] < CONFIG["avatar_max_width"] then
+ use_sample = false
+ end
+ end
+
+ if not use_sample
+ image_path = post.file_path
+ image_ext = post.file_ext
+ size = reduce_and_crop(post.width, post.height, params)
+ end
+
+ begin
+ Danbooru.resize(image_ext, image_path, tempfile_path, size, 95)
+ rescue Exception => x
+ FileUtils.rm_f(tempfile_path)
+
+ errors.add "avatar", "couldn't be generated (#{x})"
+ return false
+ end
+
+ FileUtils.mv(tempfile_path, avatar_path)
+ FileUtils.chmod(0775, avatar_path)
+
+ self.update_attributes(
+ :avatar_post_id => params[:post_id],
+ :avatar_top => params[:top],
+ :avatar_bottom => params[:bottom],
+ :avatar_left => params[:left],
+ :avatar_right => params[:right],
+ :avatar_width => size[:width],
+ :avatar_height => size[:height],
+ :avatar_timestamp => Time.now)
+ end
+ end
+
+
+ module UserFavoriteTagMethods
+ def self.included(m)
+ m.has_many :favorite_tags, :dependent => :delete_all
+ end
+
+ def favorite_tags_text=(text)
+ favorite_tags.clear
+
+ text.scan(/\S+/).slice(0, 20).each do |new_fav_tag|
+ favorite_tags.create(:tag_query => new_fav_tag)
+ end
+ end
+
+ def favorite_tags_text
+ favorite_tags.map(&:tag_query).sort.join(" ")
+ end
+
+ def favorite_tag_posts(limit)
+ FavoriteTag.find_posts(id, limit)
+ end
+ end
+
+ validates_presence_of :email, :on => :create if CONFIG["enable_account_email_activation"]
+ validates_uniqueness_of :email, :case_sensitive => false, :on => :create, :if => lambda {|rec| not rec.email.empty?}
+ before_create :set_show_samples if CONFIG["show_samples"]
+ has_one :ban
+
+ include UserBlacklistMethods
+ include UserAuthenticationMethods
+ include UserPasswordMethods
+ include UserCountMethods
+ include UserNameMethods
+ include UserApiMethods
+ include UserTagMethods
+ include UserPostMethods
+ include UserLevelMethods
+ include UserInviteMethods
+ include UserAvatarMethods
+ include UserFavoriteTagMethods
+
+ @salt = CONFIG["user_password_salt"]
+
+ class << self
+ attr_accessor :salt
+ end
+
+ # For compatibility with AnonymousUser class
+ def is_anonymous?
+ false
+ end
+
+ def invited_by_name
+ self.class.find_name(invited_by)
+ end
+
+ def similar_users
+ # This uses a naive cosine distance formula that is very expensive to calculate.
+ # TODO: look into alternatives, like SVD.
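+ # similarity(a, b) = |posts voted on by both a and b| / sqrt(|votes by a| * |votes by b|)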
+ sql = <<-EOS
+ SELECT
+ v0.user_id as user_id,
+ COUNT(*) / (SELECT sqrt((SELECT COUNT(*) FROM post_votes WHERE user_id = v0.user_id) * (SELECT COUNT(*) FROM post_votes WHERE user_id = #{id}))) AS similarity
+ FROM
+ post_votes v0,
+ post_votes v1,
+ users u
+ WHERE
+ v0.post_id = v1.post_id
+ AND v1.user_id = #{id}
+ AND v0.user_id <> #{id}
+ AND u.id = v0.user_id
+ GROUP BY v0.user_id
+ ORDER BY similarity DESC
+ LIMIT 6
+ EOS
+
+ return select_all_sql(sql)
+ end
+
+ def set_show_samples
+ self.show_samples = true
+ end
+
+ def self.generate_sql(params)
+ return Nagato::Builder.new do |builder, cond|
+ if params[:name]
+ cond.add "name ILIKE ? ESCAPE E'\\\\'", "%" + params[:name].to_escaped_for_sql_like + "%"
+ end
+
+ if params[:level] && params[:level] != "any"
+ cond.add "level = ?", params[:level]
+ end
+
+ cond.add_unless_blank "id = ?", params[:id]
+
+ case params[:order]
+ when "name"
+ builder.order "lower(name)"
+
+ when "posts"
+ builder.order "(SELECT count(*) FROM posts WHERE user_id = users.id) DESC"
+
+ when "favorites"
+ builder.order "(SELECT count(*) FROM favorites WHERE user_id = users.id) DESC"
+
+ when "notes"
+ builder.order "(SELECT count(*) FROM note_versions WHERE user_id = users.id) DESC"
+
+ else
+ builder.order "id DESC"
+ end
+ end.to_hash
+ end
+end
+
diff --git a/app/models/user_blacklisted_tag.rb b/app/models/user_blacklisted_tag.rb
new file mode 100644
index 00000000..8e7bfc1f
--- /dev/null
+++ b/app/models/user_blacklisted_tag.rb
@@ -0,0 +1,2 @@
+class UserBlacklistedTag < ActiveRecord::Base
+end
diff --git a/app/models/user_log.rb b/app/models/user_log.rb
new file mode 100644
index 00000000..3f7aabc8
--- /dev/null
+++ b/app/models/user_log.rb
@@ -0,0 +1,17 @@
+class UserLog < ActiveRecord::Base
+ def self.access(user, request)
+ return if user.is_anonymous?
+
+ # Only do this periodically, so we don't do extra work for every request.
+ old_ip = Cache.get("userip:#{user.id}") if CONFIG["enable_caching"]
+
+ return if !old_ip.nil? && old_ip == request.remote_ip
+
+ execute_sql("SELECT * FROM user_logs_touch(?, ?)", user.id, request.remote_ip)
+
+ # Clean up old records.
+ execute_sql("DELETE FROM user_logs WHERE created_at < now() - interval '3 days'")
+
+ Cache.put("userip:#{user.id}", request.remote_ip, 8.seconds) if CONFIG["enable_caching"]
+ end
+end
diff --git a/app/models/user_mailer.rb b/app/models/user_mailer.rb
new file mode 100644
index 00000000..6e9e95a2
--- /dev/null
+++ b/app/models/user_mailer.rb
@@ -0,0 +1,45 @@
+begin
+ require 'idn'
+rescue LoadError
+end
+
+class UserMailer < ActionMailer::Base
+ include ActionController::UrlWriter
+ helper :application
+ default_url_options["host"] = CONFIG["server_host"]
+
+ def confirmation_email(user)
+ recipients UserMailer.normalize_address(user.email)
+ from CONFIG["email_from"]
+ subject "#{CONFIG["app_name"]} - Confirm email address"
+ body :user => user
+ content_type "text/html"
+ end
+
+ def new_password(user, password)
+ recipients UserMailer.normalize_address(user.email)
+ subject "#{CONFIG["app_name"]} - Password Reset"
+ from CONFIG["email_from"]
+ body :user => user, :password => password
+ content_type "text/html"
+ end
+
+ def dmail(recipient, sender, msg_title, msg_body)
+ recipients UserMailer.normalize_address(recipient.email)
+ subject "#{CONFIG["app_name"]} - Message received from #{sender.name}"
+ from CONFIG["email_from"]
+ body :recipient => recipient, :sender => sender, :title => msg_title, :body => msg_body
+ content_type "text/html"
+ end
+
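+ # When the idn gem is available, convert an internationalized domain in the
+ # address to its ASCII (punycode) form; otherwise return the address unchanged.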
+ def self.normalize_address(address)
+ if defined?(IDN)
+ address =~ /\A([^@]+)@(.+)\Z/
+ mailbox = $1
+ domain = IDN::Idna.toASCII($2)
+ "#{mailbox}@#{domain}"
+ else
+ address
+ end
+ end
+end
diff --git a/app/models/user_record.rb b/app/models/user_record.rb
new file mode 100644
index 00000000..bf48e39e
--- /dev/null
+++ b/app/models/user_record.rb
@@ -0,0 +1,17 @@
+class UserRecord < ActiveRecord::Base
+ belongs_to :user
+ belongs_to :reporter, :foreign_key => "reported_by", :class_name => "User"
+ validates_presence_of :user_id
+ validates_presence_of :reported_by
+ after_save :generate_dmail
+
+ def user=(name)
+ self.user_id = User.find_by_name(name).id rescue nil
+ end
+
+ def generate_dmail
+ body = "#{reporter.name} created a #{is_positive? ? 'positive' : 'negative'} record for your account. View your record."
+
+ Dmail.create(:from_id => reported_by, :to_id => user_id, :title => "Your user record has been updated", :body => body)
+ end
+end
diff --git a/app/models/wiki_page.rb b/app/models/wiki_page.rb
new file mode 100644
index 00000000..5f30a6fd
--- /dev/null
+++ b/app/models/wiki_page.rb
@@ -0,0 +1,103 @@
+require 'diff'
+
+class WikiPage < ActiveRecord::Base
+ acts_as_versioned :table_name => "wiki_page_versions", :foreign_key => "wiki_page_id", :order => "updated_at DESC"
+ before_save :normalize_title
+ belongs_to :user
+ validates_uniqueness_of :title, :case_sensitive => false
+ validates_presence_of :body
+
+ class << self
+ def generate_sql(options)
+ joins = []
+ conds = []
+ params = []
+
+ if options[:title]
+ conds << "wiki_pages.title = ?"
+ params << options[:title]
+ end
+
+ if options[:user_id]
+ conds << "wiki_pages.user_id = ?"
+ params << options[:user_id]
+ end
+
+ joins = joins.join(" ")
+ conds = [conds.join(" AND "), *params]
+
+ return joins, conds
+ end
+ end
+
+ def normalize_title
+ self.title = title.tr(" ", "_").downcase
+ end
+
+ def last_version?
+ self.version == next_version.to_i - 1
+ end
+
+ def first_version?
+ self.version == 1
+ end
+
+ def author
+ return User.find_name(user_id)
+ end
+
+ def pretty_title
+ title.tr("_", " ")
+ end
+
+ def diff(version)
+ otherpage = WikiPage.find_page(title, version)
+ Danbooru.diff(self.body, otherpage.body)
+ end
+
+ def self.find_page(title, version = nil)
+ return nil if title.blank?
+
+ page = find_by_title(title)
+ page.revert_to(version) if version && page
+
+ return page
+ end
+
+ def self.find_by_title(title)
+ find(:first, :conditions => ["lower(title) = lower(?)", title.tr(" ", "_")])
+ end
+
+ def lock!
+ self.is_locked = true
+
+ transaction do
+ execute_sql("UPDATE wiki_pages SET is_locked = TRUE WHERE id = ?", id)
+ execute_sql("UPDATE wiki_page_versions SET is_locked = TRUE WHERE wiki_page_id = ?", id)
+ end
+ end
+
+ def unlock!
+ self.is_locked = false
+
+ transaction do
+ execute_sql("UPDATE wiki_pages SET is_locked = FALSE WHERE id = ?", id)
+ execute_sql("UPDATE wiki_page_versions SET is_locked = FALSE WHERE wiki_page_id = ?", id)
+ end
+ end
+
+ def rename!(new_title)
+ transaction do
+ execute_sql("UPDATE wiki_pages SET title = ? WHERE id = ?", new_title, self.id)
+ execute_sql("UPDATE wiki_page_versions SET title = ? WHERE wiki_page_id = ?", new_title, self.id)
+ end
+ end
+
+ def to_xml(options = {})
+ {:id => id, :created_at => created_at, :updated_at => updated_at, :title => title, :body => body, :updater_id => user_id, :locked => is_locked, :version => version}.to_xml(options.merge(:root => "wiki_page"))
+ end
+
+ def to_json(*args)
+ {:id => id, :created_at => created_at, :updated_at => updated_at, :title => title, :body => body, :updater_id => user_id, :locked => is_locked, :version => version}.to_json(*args)
+ end
+end
diff --git a/app/models/wiki_page_version.rb b/app/models/wiki_page_version.rb
new file mode 100644
index 00000000..5b8193eb
--- /dev/null
+++ b/app/models/wiki_page_version.rb
@@ -0,0 +1,17 @@
+class WikiPageVersion < ActiveRecord::Base
+ def author
+ return User.find_name(self.user_id)
+ end
+
+ def pretty_title
+ self.title.tr("_", " ")
+ end
+
+ def to_xml(options = {})
+ {:id => id, :created_at => created_at, :updated_at => updated_at, :title => title, :body => body, :updater_id => user_id, :locked => is_locked, :version => version, :post_id => post_id}.to_xml(options.merge(:root => "wiki_page_version"))
+ end
+
+ def to_json(*args)
+ {:id => id, :created_at => created_at, :updated_at => updated_at, :title => title, :body => body, :updater_id => user_id, :locked => is_locked, :version => version, :post_id => post_id}.to_json(*args)
+ end
+end
diff --git a/app/views/admin/edit_user.html.erb b/app/views/admin/edit_user.html.erb
new file mode 100644
index 00000000..0a43178b
--- /dev/null
+++ b/app/views/admin/edit_user.html.erb
@@ -0,0 +1,21 @@
+
+<% end %>
\ No newline at end of file
diff --git a/app/views/comment/edit.html.erb b/app/views/comment/edit.html.erb
new file mode 100644
index 00000000..fc8d8a11
--- /dev/null
+++ b/app/views/comment/edit.html.erb
@@ -0,0 +1,9 @@
+
+<% end %>
diff --git a/app/views/help/about.html.erb b/app/views/help/about.html.erb
new file mode 100644
index 00000000..4ac9001a
--- /dev/null
+++ b/app/views/help/about.html.erb
@@ -0,0 +1,23 @@
+
+
Help: About
+
+
+
Danbooru is a web application that allows you to upload, share, and tag images. Much of it is inspired by both Moeboard and Flickr. It was specifically designed to be of maximum utility to seasoned imageboard hunters. Some of these features include:
+
+
Posts never expire
+
Tag and comment on posts
+
Search for tags via intersection, union, negation, or pattern
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/api.html.erb b/app/views/help/api.html.erb
new file mode 100644
index 00000000..9ab93882
--- /dev/null
+++ b/app/views/help/api.html.erb
@@ -0,0 +1,595 @@
+
+
Help: API v1.13.0
+
+
+
Danbooru offers a simple API to make scripting easy. All you need is a way to GET and POST to URLs. The ability to parse XML or JSON responses is nice, but not critical. The simplicity of the API means you can write scripts using JavaScript, Perl, Python, Ruby, even shell languages like bash or tcsh.
You'll be using two HTTP request methods to interact with the Danbooru API: GET and POST. Most API calls that change the state of the database (like creating, updating, or deleting something) require an HTTP POST call. API calls that only retrieve data can typically be done with an HTTP GET call.
+
In the Danbooru API, a URL is analogous to a function name. You pass in the function parameters as a query string. Here's an extremely simple example: /post/index.xml?limit=1.
+
The post part indicates the controller we're working with. In this case it's posts. index describes the action. Here we're retrieving a list of posts. Finally, the xml part describes what format we want the response in. You can specify .xml for XML responses, .json for JSON responses, and nothing at all for HTML responses.
+
+
+
+
Responses
+
All API calls that change state will return a single element response (for XML calls). They are formatted like this:
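+
+ A typical pair of responses looks roughly like this (an illustrative sketch; the reason values are the ones listed in the sections below):
+
+   <response success="true"/>
+   <response success="false" reason="duplicate"/>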
While you can usually determine success or failure based on the response object, you can also figure out what happened based on the HTTP status code. In addition to the standard ones, Danbooru uses some custom status codes in the 4xx and 5xx range.
+
+
+
+
Status Code
+
Meaning
+
+
+
+
+
200 OK
+
Request was successful
+
+
+
403 Forbidden
+
Access denied
+
+
+
404 Not Found
+
Not found
+
+
+
420 Invalid Record
+
Record could not be saved
+
+
+
421 User Throttled
+
User is throttled, try again later
+
+
+
422 Locked
+
The resource is locked and cannot be modified
+
+
+
423 Already Exists
+
Resource already exists
+
+
+
424 Invalid Parameters
+
The given parameters were invalid
+
+
+
500 Internal Server Error
+
Some unknown error occurred on the server
+
+
+
503 Service Unavailable
+
Server cannot currently handle the request, try again later
+
+
+
+
+
+
+
JSON Responses
+
While you will probably want to work with XML in the majority of cases, if you're writing something in Javascript then the JSON responses may be preferable. They are much faster to parse and there's less code to write to get your data structure:
+
+ var data = eval("(" + responseText + ")")
+ alert(data.response)
+
+
+
+
+
Logging In
+
Some actions may require you to log in. For any action you can always specify two parameters to identify yourself:
+
+
login Your login name.
+
password_hash Your SHA1 hashed password. Simply hashing your plain password will NOT work since Danbooru salts its passwords. The actual string that is hashed is "<%= CONFIG["password_salt"] %>--your-password--".
+
+
Please be aware of the security risks involved in sending your password through an unencrypted channel. Although your password will be hashed, it is still theoretically possible for someone to steal your account by creating a fake cookie based on your hashed password.
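+
+ For example, the hash can be computed in Ruby like this (a sketch; "the-salt" stands in for the site's password salt shown above):
+
+   require "digest/sha1"
+
+   password_hash = Digest::SHA1.hexdigest("the-salt--my-password--")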
+
+
+
+
+
+
+
+
Posts
+
+
+
List
+
The base URL is /post/index.xml.
+
+
limit How many posts you want to retrieve. There is a hard limit of 100 posts per request.
+
page The page number.
+
tags The tags to search for. Any tag combination that works on the web site will work here. This includes all the meta-tags.
+
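+
+ A minimal Ruby sketch of a list call (standard library only; the host name is just an example):
+
+   require "net/http"
+   require "uri"
+
+   url = URI.parse("http://danbooru.donmai.us/post/index.xml?limit=1&tags=rating:safe")
+   puts Net::HTTP.get(url)   # prints the raw XML listing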
+
+
+
+
Create
+
The base URL is /post/create.xml. There are only two mandatory fields: you need to supply the tags, and you need to supply the file, either through a multipart form or through a source URL.
+
+
post[tags] A space delimited list of tags.
+
post[file] The file data encoded as a multipart form.
+
post[rating] The rating for the post. Can be: safe, questionable, or explicit.
+
post[source] If this is a URL, Danbooru will download the file.
+
post[is_rating_locked] Set to true to prevent others from changing the rating.
+
post[is_note_locked] Set to true to prevent others from adding notes.
+
post[parent_id] The ID of the parent post.
+
md5 Supply an MD5 if you want Danbooru to verify the file after uploading. If the MD5 doesn't match, the post is destroyed.
+
+
If the call fails, the following response reasons are possible:
+
+
MD5 mismatch This means you supplied an MD5 parameter and what Danbooru got doesn't match. Try uploading the file again.
+
duplicate This post already exists in Danbooru (based on the MD5 hash). An additional attribute called location will be set, pointing to the (relative) URL of the original post.
+
other Any other error will have its error message printed.
+
+
If the post upload succeeded, you'll get an attribute called location in the response pointing to the relative URL of your newly uploaded post.
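+
+ A hedged Ruby sketch of a source-URL upload (parameter names as documented above; login and password_hash as described under Logging In):
+
+   require "net/http"
+   require "uri"
+
+   res = Net::HTTP.post_form(URI.parse("http://danbooru.donmai.us/post/create.xml"),
+     "login"         => "bob",
+     "password_hash" => password_hash,
+     "post[source]"  => "http://example.com/image.jpg",   # Danbooru downloads the file
+     "post[tags]"    => "original panties",
+     "post[rating]"  => "safe")
+   puts res.body   # on success the response carries a location attribute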
+
+
+
Update
+
The base URL is /post/update.xml. Only the id parameter is required. Leave the other parameters blank if you don't want to change them.
+
+
id The id number of the post to update.
+
post[tags] A space delimited list of tags.
+
post[file] The file data encoded as a multipart form.
+
post[rating] The rating for the post. Can be: safe, questionable, or explicit.
+
post[source] If this is a URL, Danbooru will download the file.
+
post[is_rating_locked] Set to true to prevent others from changing the rating.
+
post[is_note_locked] Set to true to prevent others from adding notes.
+
post[parent_id] The ID of the parent post.
+
+
+
+
Destroy
+
You must be logged in to use this action. You must also be the user who uploaded the post (or you must be a moderator).
+
+
id The id number of the post to delete.
+
+
+
+
Tag History
+
This action retrieves the history of tag changes for a post (or all posts). The base URL is /post/tag_history.xml.
+
+
post_id Specify if you only want the tag histories for a single post.
+
limit How many histories you want to retrieve.
+
page The page number.
+
+
+
+
Revert Tags
+
This action reverts a post to a previous set of tags. The base URL is /post/revert_tags.xml.
+
+
id The post id number to update.
+
history_id The id number of the tag history.
+
+
+
+
Favorites
+
This action finds all the users who have favorited a post. The base URL is /post/favorites.xml.
+
+
id The post id number to query.
+
+
+
+
Vote
+
This action lets you vote for a post. You can only vote once per post per IP address. The base URL is /post/vote.xml.
+
+
id The post id number to update.
+
score Set to 1 to vote up and -1 to vote down. All other values will be ignored.
+
+
If the call did not succeed, the following reasons are possible:
+
+
already voted You have already voted for this post.
+
invalid score You have supplied an invalid score.
+
+
+
+
+
+
+
+
+
Tags
+
+
+
List
+
The base URL is /tag/index.xml.
+
+
limit How many tags to retrieve. Setting this to 0 will return every tag.
+
page The page number.
+
order Can be date, count, or name.
+
id The id number of the tag.
+
after_id Return all tags that have an id number greater than this.
+
name The exact name of the tag.
+
name_pattern Search for any tag that has this parameter in its name.
+
+
+
+
Update
+
The base URL is /tag/update.xml.
+
+
name The name of the tag to update.
+
tag[tag_type] The tag type. General: 0, artist: 1, copyright: 3, character: 4.
+
tag[is_ambiguous] Whether or not this tag is ambiguous. Use 1 for true and 0 for false.
+
+
+
+
Related
+
The base URL is /tag/related.xml.
+
+
tags The tag names to query.
+
type Restrict results to this tag type (can be general, artist, copyright, or character).
+
+
+
+
+
+
+
+
+
Artists
+
+
+
List
+
The base URL is /artist/index.xml.
+
+
name The name (or a fragment of the name) of the artist.
+
order Can be date or name.
+
page The page number.
+
+
+
+
Create
+
The base URL is /artist/create.xml.
+
+
artist[name] The artist's name.
+
artist[urls] A list of URLs associated with the artist, whitespace delimited.
+
artist[alias] The artist that this artist is an alias for. Simply enter the alias artist's name.
+
artist[group] The group or circle that this artist is a member of. Simply enter the group's name.
+
+
+
+
Update
+
The base URL is /artist/update.xml. Only the id parameter is required. The other parameters are optional.
+
+
id The id of the artist to update.
+
artist[name] The artist's name.
+
artist[urls] A list of URLs associated with the artist, whitespace delimited.
+
artist[alias] The artist that this artist is an alias for. Simply enter the alias artist's name.
+
artist[group] The group or circle that this artist is a member of. Simply enter the group's name.
+
+
+
+
Destroy
+
The base URL is /artist/destroy.xml. You must be logged in to delete artists.
+
+
id The id of the artist to destroy.
+
+
+
+
+
+
+
+
+
Comments
+
+
+
Show
+
The base URL is /comment/show.xml. This retrieves a single comment.
+
+
id The id number of the comment to retrieve.
+
+
+
+
+
Create
+
The base URL is /comment/create.xml.
+
+
comment[anonymous] Set to 1 if you want to post this comment anonymously.
+
comment[post_id] The post id number to which you are responding.
+
comment[body] The body of the comment.
+
+
+
+
+
Destroy
+
The base url is /comment/destroy.xml. You must be logged in to use this action. You must also be the owner of the comment, or you must be a moderator.
+
+
id The id number of the comment to delete.
+
+
+
+
+
+
+
+
+
Wiki
+
All titles must be exact (but case and whitespace don't matter).
+
+
+
List
+
The base URL is /wiki/index.xml. This retrieves a list of every wiki page.
+
+
order How you want the pages ordered. Can be: title, date.
+
limit The number of pages to retrieve.
+
page The page number.
+
query A word or phrase to search for.
+
+
+
+
+
Create
+
The base URL is /wiki/create.xml.
+
+
wiki_page[title] The title of the wiki page.
+
wiki_page[body] The body of the wiki page.
+
+
+
+
+
Update
+
The base URL is /wiki/update.xml. Potential error reasons: "Page is locked"
+
+
title The title of the wiki page to update.
+
wiki_page[title] The new title of the wiki page.
+
wiki_page[body] The new body of the wiki page.
+
+
+
+
+
Show
+
The base URL is /wiki/show.xml. Potential error reasons: "artist type"
+
+
title The title of the wiki page to retrieve.
+
version The version of the page to retrieve.
+
+
+
+
+
Destroy
+
The base URL is /wiki/destroy.xml. You must be logged in as a moderator to use this action.
+
+
title The title of the page to delete.
+
+
+
+
+
Lock
+
The base URL is /wiki/lock.xml. You must be logged in as a moderator to use this action.
+
+
title The title of the page to lock.
+
+
+
+
+
Unlock
+
The base URL is /wiki/unlock.xml. You must be logged in as a moderator to use this action.
+
+
title The title of the page to unlock.
+
+
+
+
+
Revert
+
The base URL is /wiki/revert.xml. Potential error reasons: "Page is locked"
+
+
title The title of the wiki page to update.
+
version The version to revert to.
+
+
+
+
+
History
+
The base URL is /wiki/history.xml.
+
+
title The title of the wiki page to retrieve versions for.
+
+
+
+
+
+
+
+
+
Notes
+
+
+
List
+
The base URL is /note/index.xml.
+
+
post_id The post id number to retrieve notes for.
+
+
+
+
+
Search
+
The base URL is /note/search.xml.
+
+
query A word or phrase to search for.
+
+
+
+
+
History
+
The base URL is /note/history.xml. You can specify id, post_id, or nothing. Specifying nothing will give you a list of every note version.
+
+
limit How many versions to retrieve.
+
page The offset.
+
post_id The post id number to retrieve note versions for.
+
id The note id number to retrieve versions for.
+
+
+
+
+
Revert
+
The base URL is /note/revert.xml. Potential error reasons: "Post is locked"
+
+
id The note id to update.
+
version The version to revert to.
+
+
+
+
+
Create/Update
+
The base URL is /note/update.xml. Notes differ from the other controllers in that the interface for creation and updates is the same. If you supply an id parameter, then Danbooru will assume you're updating an existing note. Otherwise, it will create a new note. Potential error reasons: "Post is locked"
+
+
id If you are updating a note, this is the note id number to update.
+
note[post_id] The post id number this note belongs to.
+
note[x] The x coordinate of the note.
+
note[y] The y coordinate of the note.
+
note[width] The width of the note.
+
note[height] The height of the note.
+
note[is_active] Whether or not the note is visible. Set to 1 for active, 0 for inactive.
+
note[body] The note message.
+
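+
+ A short Ruby sketch of creating a new note (no id supplied, so a new note is created; authentication as under Logging In):
+
+   require "net/http"
+   require "uri"
+
+   Net::HTTP.post_form(URI.parse("http://danbooru.donmai.us/note/update.xml"),
+     "login"          => "bob",
+     "password_hash"  => password_hash,
+     "note[post_id]"  => "1234",
+     "note[x]"        => "10",  "note[y]"      => "10",
+     "note[width]"    => "100", "note[height]" => "50",
+     "note[body]"     => "translation note")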
+
+
+
+
+
+
+
+
Users
+
+
+
Search
+
The base URL is /user/index.xml. If you don't specify any parameters you'll get a listing of all users.
+
+
id The id number of the user.
+
name The name of the user.
+
+
+
+
+
+
+
+
+
Forum
+
+
+
List
+
The base URL is /forum/index.xml. If you don't specify any parameters you'll get a list of all the parent topics.
+
+
parent_id The parent ID number. This will return all the responses to that forum post.
+
+
+
+
+
+
+
+
+
Pools
+
+
+
List Pools
+
The base URL is /pool/index.xml. If you don't specify any parameters you'll get a list of all pools.
+
+
query The title.
+
page The page.
+
+
+
+
+
List Posts
+
The base URL is /pool/show.xml. If you don't specify any parameters you'll get a list of all pools.
+
+
id The pool id number.
+
page The page.
+
+
+
+
+
Update
+
The base URL is /pool/update.xml.
+
+
id The pool id number.
+
pool[name] The name.
+
pool[is_public] 1 or 0, whether or not the pool is public.
+
pool[description] A description of the pool.
+
+
+
+
+
Create
+
The base URL is /pool/create.xml.
+
+
pool[name] The name.
+
pool[is_public] 1 or 0, whether or not the pool is public.
+
pool[description] A description of the pool.
+
+
+
+
+
Destroy
+
The base URL is /pool/destroy.xml.
+
+
id The pool id number.
+
+
+
+
+
Add Post
+
The base URL is /pool/add_post.xml. Potential error reasons: "Post already exists", "access denied"
+
+
pool_id The pool to add the post to.
+
post_id The post to add.
+
+
+
+
+
Remove Post
+
The base URL is /pool/remove_post.xml. Potential error reasons: "access denied"
+
+
pool_id The pool to remove the post from.
+
post_id The post to remove.
+
+
+
+
+
+
+
+
+
Change Log
+
+
1.15.0
+
+
Added documentation for pools
+
+
+
1.13.0
+
+
Changed interface for artists to use new URL system
+
JSON requests now end in a .json suffix
+
Renamed some error reason messages
+
Removed comment/index from API
+
Removed url and md5 parameters from artist search (can just pass the URL or MD5 hash to the name parameter)
+
+
+
+
+
1.8.1
+
+
Removed post[is_flagged] attribute
+
+
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/artists.html.erb b/app/views/help/artists.html.erb
new file mode 100644
index 00000000..cdd936b7
--- /dev/null
+++ b/app/views/help/artists.html.erb
@@ -0,0 +1,65 @@
+
+
Help: Artists
+
+
+
What are artists?
+
Artists in Danbooru represent the people who created a piece of art. Originally tags were used to describe artists (and they still are), but in many ways tags are insufficient. You can't tie a URL to a tag, for example. You can fake a hierarchy using tag implications, but in most cases this is excessive and leads to an explosion of redundant tags. For these reasons, artists were elevated to first-class status in Danbooru.
+
+
+
+
How do artists differ from tags?
+
For starters, artists can have URLs associated with them. These come in handy when you're uploading a post from the artist's site and want to auto-identify it; Danbooru will query the artist database for the URL and automatically figure out who it is. It isn't foolproof, but the more artists the database has, the more reliable it becomes.
+
You can also organize artists more. Doujin circles can be represented as artists and can have group members. Artists can also have aliases and notes for extraneous details.
+
+
+
+
How do I search for artists?
+
Start at the <%= link_to "index", :controller => "artist", :action => "index" %>. In addition to browsing through the entire artist list, you can also search for artists.
+
By default, if you just enter a name in the search box Danbooru will return any artist that has your query in their name. This is probably the behavior you want in most cases.
+
Suppose you know the artist's homepage, but can't figure out their name. Simply search for the URL (beginning with http) and Danbooru will return any associated artists.
+
If you have an image, you can query the MD5 hash and Danbooru will try to deduce the artist that way. Simply enter a 32 character hex encoded hash and Danbooru will figure out what you mean.
+
+
How do I create an artist?
+
You'll see five fields. Name is self-explanatory. Jap Name/Aliases is for any aliases the artist has. For example, you would place the artist's name in kanji or kana in this field. If you have more than one alias to enter, you can separate them with commas. Notes are for any extra tidbits of information you want to mention (this field is actually saved to the artist's matching wiki page on Danbooru).
+
The URLs field is a list of URLs associated with the artist, like their home page, their blog, and any servers that store the artist's images. You can separate multiple URLs with newlines or spaces.
+
+
+
+
How do I update an artist?
+
The interface for updating an artist is nearly identical to the interface for creating artists, except for one additional field: members. Members is for artists who are a member of this circle. If there are more than one, you can separate them with commas.
+
+
+
+
What are aliases?
+
Artists often have more than one name. In particular, they can have a Japanese name and a romanized name. Ideally, users should be able to search for either and get the same artist.
+
Danbooru allows you to alias artists to one reference artist, typically one that you can search posts on.
+
+
+
+
Are artists in any way tied to posts or tags?
+
No. If you create an artist, a corresponding tag is not automatically created. If you create an artist-typed tag, a corresponding artist is not automatically created. If you create an artist but no corresponding tag, searching for posts by that artist won't return any results.
+
You can think of the artist database as separate from the tags/posts database.
+
This is an intentional design decision. By keeping the two separated, users have far more freedom when it comes to creating aliases, groups, and edits.
+
+
+
+
When I search for a URL, I get a bunch of unrelated results. What's going on?
+
Short answer: this is just a side-effect of the way Danbooru searches URLs. Multiple results typically mean Danbooru couldn't find the artist.
+
Long answer: when you're searching for a URL, typically it's a URL to an image on the artist's site. If this is a new image, querying this will obviously return no results.
+
So what Danbooru does is progressively chop off directories from the URL. http://site.com/a/b/c.jpg becomes http://site.com/a/b becomes http://site.com/a becomes http://site.com. It keeps doing this until a match is found. Danbooru does this more than once because there are cases where the URL is nested by date, like in http://site.com/2007/06/05/image.jpg. Usually this algorithm works very well, provided the artist has an entry in the database.
+
If he doesn't, then the algorithm is probably going to cut the URL down to just the domain, i.e. http://geocities.co.jp. When this happens, you'll sometimes get every artist hosted on that domain.
+
Why not just dump all the results if you get more than one? Well, there are a few cases when multiple artists validly map to the same domain. Usually the domain is just being used to host files or something.
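+
+ As a rough sketch of the truncation described above (a hypothetical helper, not code taken from Danbooru itself):
+
+   def candidate_urls(url)
+     urls = [url]
+     while url.count("/") > 2                # keep at least http://host
+       url = url.sub(%r{/[^/]*$}, "")        # drop the last path component
+       urls << url
+     end
+     urls
+   end
+
+   candidate_urls("http://site.com/a/b/c.jpg")
+   # => ["http://site.com/a/b/c.jpg", "http://site.com/a/b",
+   #     "http://site.com/a", "http://site.com"]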
+
+
+
+
Is there an API?
+
Yes. The artist controller uses the same interface as the rest of Danbooru. See the <%= link_to "API documentation", :controller => "help", :action => "api" %> for details.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/bookmarklet.html.erb b/app/views/help/bookmarklet.html.erb
new file mode 100644
index 00000000..77c5e781
--- /dev/null
+++ b/app/views/help/bookmarklet.html.erb
@@ -0,0 +1,21 @@
+
All images that can be uploaded to <%= CONFIG['app_name'] %> will get a thick dashed blue border.
+
Click on an image to upload it to <%= CONFIG['app_name'] %>.
+
You'll be redirected to the upload page where you can fill out the tags, the title, and set the rating.
+
+
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/cheatsheet.html.erb b/app/views/help/cheatsheet.html.erb
new file mode 100644
index 00000000..61abaaaf
--- /dev/null
+++ b/app/views/help/cheatsheet.html.erb
@@ -0,0 +1,230 @@
+
+
Help: Cheat Sheet
+
+
+
Searching
+
+
tag1 tag2
+
Search for posts that have tag1 and tag2.
+
+
~tag1 ~tag2
+
Search for posts that have tag1 or tag2.
+
+
-tag1
+
Search for posts that don't have tag1.
+
+
tag1*
+
Search for posts with tags that start with tag1.
+
+
user:bob
+
Search for posts uploaded by the user Bob.
+
+
vote:3:bob
+
Search for posts favorited by the user Bob.
+
+
md5:foo
+
Search for posts with the MD5 hash foo.
+
+
rating:questionable
+
Search for posts that are rated questionable.
+
+
-rating:questionable
+
Search for posts that are not rated questionable.
+
+
source:http://site.com
+
Search for posts with a source that starts with http://site.com.
+
+
id:100
+
Search for posts with an ID number of 100.
+
+
id:100..
+
Search for posts with an ID number of 100 or greater.
+
+
id:>=100
+
Same as above.
+
+
id:>100
+
Search for posts with an ID number greater than 100.
+
+
id:..100
+
Search for posts with an ID number of 100 or less.
+
+
id:<=100
+
Same as above.
+
+
id:<100
+
Search for posts with an ID number less than 100.
+
+
id:100..200
+
Search for posts with an ID number between 100 and 200.
+
+
width:100
+
Search for posts with a width of 100 pixels (uses same syntax as id search).
+
+
height:100
+
Search for posts with a height of 100 pixels (uses same syntax as id search).
+
+
score:100
+
Search for posts with a score of 100 (uses same syntax as id search).
+
+
mpixels:2.5..
+
Search for posts with 2.5 million pixels or greater (uses same syntax as id search).
+
+
date:2007-01-01
+
Search for posts uploaded on a certain date (uses same syntax as id search).
+
+
order:id
+
Order search results in ascending order based on post ID.
+
+
order:id_desc
+
Order search results in descending order based on post ID.
+
+
order:score
+
Order search results in descending order based on post score.
+
+
order:score_asc
+
Order search results in ascending order based on post score.
+
+
order:mpixels
+
Order search results in descending order based on resolution.
+
+
order:mpixels_asc
+
Order search results in ascending order based on resolution.
+
+
order:landscape
+
Order landscape images first.
+
+
order:portrait
+
Order portrait images first.
+
+
order:vote
+
Order by when the post was voted (only valid when doing a vote search)
+
+ <% if CONFIG["enable_parent_posts"] %>
+
parent:1234
+
Search for posts that have 1234 as a parent (and include post 1234).
+
+
parent:none
+
Search for posts that have no parent.
+ <% end %>
+
+
gun* dress
+
Pattern searches do not work well with other tags.
+
+
~gun dress
+
Or searches do not work well with other tags.
+
+
rating:questionable rating:safe
+
In general, combining the same metatags (the ones that have
+ colons in them) will not work.
+
+
rating:questionable score:100.. id:..1000
+
You can combine different metatags, however.
+
+
+
+
+
Tagging
+
+
tag1 tag2
+
Tags a post with tag1 and tag2.
+
+
maria-sama_ga_miteru
+
Replace spaces in tags with underscores.
+
+
tanaka_rie soryu_asuka_langley
+
Use LastName FirstName order for characters with Japanese last names, or characters with full Chinese or Korean names. Middle names always follow the first name.
+
+
john_smith akira_ferrari tony_leung
+
Use FirstName LastName order for characters with non-Asian names, or characters with Japanese first names but non-Asian last names, or characters with non-Asian first names but Chinese last names.
+
+
general:food
+
Prefix a tag with general to remove any type. The prefix will be dropped when the tag is saved.
+
+
artist:wakatsuki_sana
+
Prefix a tag with artist: to type it as an artist. The prefix will be dropped when the tag is saved.
+
+
character:gasai_yuno
+
Prefix a tag with character: (or char:) to type it as a character.
+
+
copyright:mirai_nikki
+
Prefix a tag with copyright: (or copy:) to type it as a copyright. Copyright tags include things like anime, manga, games, novels, or original doujinshi works.
+
+
ambiguous:sakura
+
Prefix a tag with ambiguous: (or amb:) to make it ambiguous. Ambiguous tags are indicated as such to users, and they are pointed to the wiki for disambiguation.
+
+
rating:questionable
+
Rates a post as questionable. This tag is discarded after the rating is changed.
+
+ <% if CONFIG["enable_parent_posts"] %>
+
parent:1234
+
Sets the post's parent id to 1234. This tag is discarded after the parent id is changed. If the parent id is the same as the post id, then the parent id will be cleared.
+ <% end %>
+
+
pool:maria-sama_ga_miteru_manga
+
Adds the post to the "Maria-sama ga Miteru Manga" pool. This tag is discarded after the post is added to the pool. Make sure to replace spaces with underscores. If the pool with the given name doesn't exist, it will be automatically created.
+
+
pool:10
+
Adds the post to pool #10.
+
+
-pool:10
+
Removes the post from pool #10.
+
+
+
+
+
Comments & Forum
+
+
post #1000
+
Creates a link to post #1000.
+
+
comment #1000
+
Creates a link to comment #1000.
+
+
forum #1000
+
Creates a link to forum post #1000.
+
+
pool #1000
+
Creates a link to pool #1000.
+
+
[spoiler]spoiler text[/spoiler]
+
Marks "spoiler text" as a spoiler.
+
+
[[link to this page]]
+
Creates an internal link to the wiki page with title "link to this page".
+
+
[[my wiki page|click this]]
+
Creates an internal link to the wiki page with title "my wiki page", using "click this" for the link text.
+
+
h2. Major Header
+
Major headers should use h2.
+
+
h4. Minor Header
+
Minor headers should use h4.
+
+
+
+
+
Notes
+
+
<tn>translation note</tn>
+
Styles "translation note" as a translation note.
+
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/comments.html.erb b/app/views/help/comments.html.erb
new file mode 100644
index 00000000..c02c348f
--- /dev/null
+++ b/app/views/help/comments.html.erb
@@ -0,0 +1,11 @@
+
+
Help: Comments
+
+
+
All comments are formatted using <%= link_to "DText", :action => "dtext" %>.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/dtext.html.erb b/app/views/help/dtext.html.erb
new file mode 100644
index 00000000..07aaeb0f
--- /dev/null
+++ b/app/views/help/dtext.html.erb
@@ -0,0 +1,80 @@
+
+
Help: DText
+
+
DText is the name for Danbooru's custom text formatting language. It's a mishmash of several markdown languages including Textile, MediaWiki, and BBCode.
+
+
+
+
Inline
+
+
http://danbooru.donmai.us
+
URLs are automatically linked.
+
+
[b]strong text[/b]
+
Makes text bold.
+
+
[i]emphasized text[/i]
+
Makes text italicized.
+
+
[[wiki page]]
+
Links to the wiki.
+
+
{{touhou monochrome}}
+
Links to a post search.
+
+
post #1234
+
Links to post #1234.
+
+
forum #1234
+
Links to forum #1234.
+
+
comment #1234
+
Links to comment #1234.
+
+
pool #1234
+
Links to pool #1234.
+
+
[spoiler]Some spoiler text[/spoiler]
+
Marks a section of text as spoilers.
+
+
+
+
+
Block
+
+ A paragraph.
+
+ Another paragraph
+ that continues on multiple lines.
+
+
+ h1. An Important Header
+
+ h2. A Less Important Header
+
+ h6. The Smallest Header
+
+
+ [quote]
+ bob said:
+
+ When you are quoting someone.
+ [/quote]
+
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/extension.html.erb b/app/views/help/extension.html.erb
new file mode 100644
index 00000000..402aae8c
--- /dev/null
+++ b/app/views/help/extension.html.erb
@@ -0,0 +1,13 @@
+
+
Help: Firefox Extension
+
+
+
There is a Firefox extension available to upload files from sites that have some sort of referrer or cookie access restriction. It is an alternative to the bookmarklet. The extension provides autocomplete for tags when adding a post or using the site.
+
Note that you need Firefox 2.0.x for version 0.2.7 and above (which is also compatible with Lolifox); version 0.2.6 is still available for Firefox 1.5.x users. On upgrading from Firefox 1.5 to 2.0 you should be automatically prompted to update to the latest compatible version.
+
As of version 0.2.8 the autocomplete function extends to the input fields on Danbooru itself.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/faq.html.erb b/app/views/help/faq.html.erb
new file mode 100644
index 00000000..5aff79a9
--- /dev/null
+++ b/app/views/help/faq.html.erb
@@ -0,0 +1,15 @@
+
+
Help: Frequently Asked Questions
+
+
+
How can I get a contributor account?
+
A moderator or janitor has to invite you.
+
+
How do I delete a tag?
+
If you are asking how to delete a tag that has no posts associated with it, you don't have to. A nightly batch is run that cleans up any unused tag.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
diff --git a/app/views/help/favorites.html.erb b/app/views/help/favorites.html.erb
new file mode 100644
index 00000000..7e7bf86a
--- /dev/null
+++ b/app/views/help/favorites.html.erb
@@ -0,0 +1,13 @@
+
+
Help: Favorites
+
+
+
You can save individual posts to a personal list of favorites. You need an account in order to use this feature, and you must have Javascript enabled.
+
To add a post to your favorites, simply click on the Add to Favorites link. Alternatively, you can use the Add to Favorites mode from the main listing.
+
You can view your favorites by clicking on My Favorites from the main listing, or going to My Account, then My Favorites.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
diff --git a/app/views/help/forum.html.erb b/app/views/help/forum.html.erb
new file mode 100644
index 00000000..424efa41
--- /dev/null
+++ b/app/views/help/forum.html.erb
@@ -0,0 +1,11 @@
+
+
Help: Forum
+
+
+
All forum posts are formatted using <%= link_to "DText", :action => "dtext" %>.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
diff --git a/app/views/help/image_sampling.html.erb b/app/views/help/image_sampling.html.erb
new file mode 100644
index 00000000..1858e86c
--- /dev/null
+++ b/app/views/help/image_sampling.html.erb
@@ -0,0 +1,15 @@
+
+
Help: Image Sampling
+
+
+
While high resolution images are nice for archival purposes, beyond a certain resolution they become impractical to view and time consuming to download.
+
Danbooru will automatically resize any image larger than <%= CONFIG["sample_width"] %>x<%= CONFIG["sample_height"] %> to a more manageable size, in addition to the thumbnail. It will also store the original, unresized image.
+ <% unless CONFIG["force_image_samples"] %>
+
You can toggle this behavior by changing the Show Image Samples setting in your <%= link_to "user settings", :controller => "user", :action => "edit" %>.
+ <% end %>
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
diff --git a/app/views/help/index.html.erb b/app/views/help/index.html.erb
new file mode 100644
index 00000000..5420d97c
--- /dev/null
+++ b/app/views/help/index.html.erb
@@ -0,0 +1,43 @@
+
+<% end %>
diff --git a/app/views/help/irc.html.erb b/app/views/help/irc.html.erb
new file mode 100644
index 00000000..87c83b6c
--- /dev/null
+++ b/app/views/help/irc.html.erb
@@ -0,0 +1,11 @@
+
+
Help: IRC
+
+
+
IRC is the best way to contact the creator. The official Danbooru IRC channel is at irc.synirc.net/miezaru-illuminati. Ask for rq. However, Moe's IRC channel is located at irc.rizon.net/moe-imouto.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/mass_tag_edit.html.erb b/app/views/help/mass_tag_edit.html.erb
new file mode 100644
index 00000000..531e14af
--- /dev/null
+++ b/app/views/help/mass_tag_edit.html.erb
@@ -0,0 +1,44 @@
+
+
Help: Mass Tag Edit
+
Note: this function is only available to moderators.
+
Mass tag edit allows you to make sweeping changes to posts. It allows you to add tags, remove tags, or change tags to potentially thousands of posts at once. It is an extremely powerful feature that should be used with great caution.
+
There are two text fields and two buttons. The first text field is where you enter your tag query. The tag parser is identical to the one used for the main listing so any tag query that works there will work here. This includes all the meta-tags like source, id, user, and date. The second text field is where you enter the tags you want to tag the matching posts with.
+
Click on the Preview button to see what posts will be affected. This is based solely on the first text field. When you click on Save, this is what happens: Danbooru finds all the posts that match the query you entered in the first text field. Then, for each post, it removes any tag from the first text field, and adds all the tags from the second text field.
+
Here is a table explaining some of the things that you can do:
+
+
+
Tag Query
+
Add Tags
+
Effect
+
+
+
apple
+
banana
+
Change every instance of the apple tag to banana.
+
+
+
apple
+
+
Delete every instance of the apple tag.
+
+
+
apple orange
+
apple
+
Find every post that has both the apple tag and the orange tag and delete the orange tag.
+
+
+
source:orchard
+
apple
+
Find every post with orchard as the source and add the apple tag.
+
+
+
id:10..20 -apple
+
apple
+
Find posts with id numbers between 10 and 20 that don't have the apple tag, and tag them with apple.
+
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/notes.html.erb b/app/views/help/notes.html.erb
new file mode 100644
index 00000000..a809c5ff
--- /dev/null
+++ b/app/views/help/notes.html.erb
@@ -0,0 +1,22 @@
+
+
Help: Notes
+
+
+
You can annotate images with notes. This is primarily used to translate text. Please do not use a note when a comment would suffice.
+
Because this feature makes heavy usage of DHTML and Ajax, it probably won't work on many browsers. Currently it's been tested with Firefox 2, IE6, and IE7.
+
If you have an issue with an existing note or have a comment about it, instead of replacing the note, post a comment. Comments are more visible to other users, and chances are someone will respond to your inquiry.
+
You can create a new note via the Add Translation link in the sidebar. The note will appear in the middle of the image. You can drag this note inside the image. You can resize the note by dragging the little black box on the bottom-right corner of the note.
+
When you mouse over the note box, the note body will appear. You can click on the body and another box will appear where you can edit the text. This box will also contain four links:
+
+
Save This saves the note to the database.
+
Cancel This reverts the note to the last saved copy. The note position, dimensions, and text will all be restored.
+
History This will redirect you to the history of the note. Whenever you save a note the old data isn't destroyed. You can always revert to an older version. You can even undelete a note.
+
Remove This doesn't actually remove the note from the database; it only hides it from view. You can undelete a note by reverting to a previous version.
+
+
All HTML code will be sanitized. You can place small translation notes by surrounding a block of text with <tn>...</tn> tags.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/pools.html.erb b/app/views/help/pools.html.erb
new file mode 100644
index 00000000..0f660654
--- /dev/null
+++ b/app/views/help/pools.html.erb
@@ -0,0 +1,15 @@
+
+
Help: Pools
+
+
+
Pools are groups of posts with a common theme. They are similar to <%= link_to "favorites", :action => "favorites" %> with three important differences: public pools allow anyone to add or remove from them, you can create multiple pools, and posts in a pool can be ordered. This makes pools ideal for subjective tags, or for posts that are part of a series (as is the case in manga).
+
The interface for adding and removing pools resembles the interface for favorites. You can click on Add to Pool from the post's page. You'll be redirected to a page where you can select the pool.
+
If you're importing several posts into a pool, this process can become tedious. You can instead click on the Import link at the bottom of the pool's page. This allows you to execute a post search using any <%= link_to "tag combination", :action => "cheatsheet" %> you would normally use. Remove any posts that are irrelevant to the pool, then finish the import process.
+
Pools can be private or public. A private pool means you are the only person who can add or remove from it. In contrast, public pools can be updated by anyone, even anonymous users.
+
To remove a post from a pool, go to the pool's page and select the Delete Mode checkbox. Then click on the posts you want to delete. This works similarly to how posts are deleted from favorites.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/post_relationships.html.erb b/app/views/help/post_relationships.html.erb
new file mode 100644
index 00000000..f86f6881
--- /dev/null
+++ b/app/views/help/post_relationships.html.erb
@@ -0,0 +1,11 @@
+
+
Help: Post Relationships
+
+
Every post can have a parent. Any post that has a parent will not show up in the <%= link_to "main listing", :controller => "post", :action => "index" %>. However, the post will appear again if a user does any kind of tag search. This makes it useful for things like duplicates.
+
Please do not use parent/children for pages of a manga or doujinshi. It's better to use a <%= link_to "pool", :action => "pools" %> for these.
+
To use this field, simply enter the id number of the parent post when you upload or edit a post. To search for all the children of a parent post, you can do a tag search for parent:nnnn, where nnnn is the id number of the parent post.
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/posts.html.erb b/app/views/help/posts.html.erb
new file mode 100644
index 00000000..826ea30b
--- /dev/null
+++ b/app/views/help/posts.html.erb
@@ -0,0 +1,102 @@
+
+
Help: Posts
+
A post represents a single file that's been uploaded. Each post can have several tags, comments, and notes. If you have an account, you can also add a post to your favorites.
+
+
+
Search
+
Searching for posts is straightforward. Simply enter the tags you want to search for, separated by spaces. For example, searching for original panties will return every post that has both the original tag AND the panties tag.
+
That's not all. Danbooru offers several meta-tags that let you further refine your search, allowing you to query on things like width, height, score, uploader, date, and more. Consult the cheat sheet for a complete list of what you can do.
+
+
+
+
Tag List
+
In both the listing page and the show page you'll notice a list of tag links with characters next to them. Here's an explanation of what the links are:
+
+
?
+
This links to the wiki page for the tag. If the tag is an artist type, then you'll be redirected to the artist page.
+
+
+
+
This adds the tag to the current search.
+
+
–
+
This adds the negated tag to the current search.
+
+
950
+
The number next to the tag represents how many posts there are. This isn't always the total number of posts for that tag. If you're searching for a combination of tags, this will be the number of posts that have the tag AND the current tag query. If you're not searching for anything, this will be the number of posts found within the last twenty-four hours.
+
+
Color
+
Some tag links may be colored green, purple, or red. Green means the tag represents a character. Purple means the tag represents a copyright (things like anime, manga, games, or novels). Red means the tag represents an artist.
+
+
When you're not searching for a tag, by default the tag list will show the most popular tags within the last three days. When you are searching for tags, the tag list will show related tags, sorted by relevancy.
+
+
+
+
+
Mode Menu
+
In the main listing page, you'll notice a menu labeled "Mode" in the sidebar. This menu lets you make several changes without ever leaving the listing page. Simply select an option and whenever you click on a thumbnail, the action will be performed in the background.
+
+
+
View Posts
+
This is the default mode. Whenever you click on a thumbnail, you'll go to that post.
+
+
Edit Posts
+
Whenever you click on a thumbnail, you'll get a JavaScript prompt. Here you can easily change the post's tags, and the site will update the post for you in the background.
+
+
Add to Favorites
+
Whenever you click on a thumbnail, that post will be added to your list of favorites.
+
+
Vote Up
+
Whenever you click on a thumbnail, that post will be voted up.
+
+
Vote Down
+
Whenever you click on a thumbnail, that post will be voted down.
+
+
Rate Safe
+
Whenever you click on a thumbnail, that post will be rated safe.
+
+
Rate Questionable
+
Whenever you click on a thumbnail, that post will be rated questionable.
+
+
Rate Explicit
+
Whenever you click on a thumbnail, that post will be rated explicit.
+
+
Flag Post
+
Whenever you click on a thumbnail, that post will be flagged for deletion.
+
+
Lock Rating
+
Whenever you click on a thumbnail, that post will be rating locked (no one will be able to change the rating).
+
+
Lock Notes
+
Whenever you click on a thumbnail, that post will be note locked (no one will be able to edit notes for that post).
Whenever you click on a thumbnail, the current tag script will be applied to the post.
+
+
+
+
+
Borders
+
In the listing page, you will notice that some thumbnails have a border. The meaning of this border depends on the color.
+
+
+
Red
+
The post was flagged for deletion.
+
+
Blue
+
The post is pending moderator approval.
+
+
Green
+
The post has child posts.
+
+
Yellow
+
The post has a parent.
+
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
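The Search section above describes a multi-tag query as an AND of the individual tags. A toy illustration of that intersection follows; the method name and the in-memory index are hypothetical, not the site's query code.

    # Illustrative only: a query matches the posts that carry every listed tag.
    def matching_post_ids(query, index)
      sets = query.split(/\s+/).map { |tag| index.fetch(tag, []) }
      sets.reduce { |a, b| a & b } || []
    end

    index = { "original" => [1, 2, 3], "panties" => [2, 3, 4] }
    matching_post_ids("original panties", index)   # => [2, 3]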
diff --git a/app/views/help/ratings.html.erb b/app/views/help/ratings.html.erb
new file mode 100644
index 00000000..301a49d9
--- /dev/null
+++ b/app/views/help/ratings.html.erb
@@ -0,0 +1,32 @@
+
+
Help: Ratings
+
+
+
All posts on Danbooru have one of three ratings: Safe, Questionable, and Explicit. Questionable is the default if you don't specify one. Please note that this system is not foolproof: from time to time explicit images will be rated safe, and vice versa. Therefore you should not depend on ratings unless you can tolerate the occasional exception.
+
+
+
Explicit
+
Any image where the vagina or penis is exposed and easily visible. This includes depictions of sex, masturbation, or any sort of penetration.
+
+
+
+
Safe
+
Safe posts are images that you would not feel guilty looking at openly in public. Pictures of nudes, exposed nipples or pubic hair, cameltoe, or any sort of sexually suggestive pose are NOT safe and belong in questionable. Swimsuits and lingerie are borderline cases; some are safe, some are questionable.
+
+
+
+
Questionable
+
Basically anything that isn't safe or explicit. This is the great middle area, and since it includes unrated posts, you shouldn't really expect anything one way or the other when browsing questionable posts.
+
+
+
+
Search
+
You can filter search results by querying for rating:s, rating:q, or rating:e for safe, questionable, and explicit posts, respectively. You can also combine them with other tags and they work as expected.
+
If you want to remove a rating from your search results, use -rating:s, -rating:q, and -rating:e.
+
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
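A loose sketch of how the rating metatags above could be turned into SQL conditions; it illustrates the semantics only and is not the parser the site uses.

    # rating:X narrows results to that rating, -rating:X excludes it.
    def rating_conditions(query)
      query.split(/\s+/).inject([]) do |conditions, token|
        case token
        when /\A-rating:([sqe])/ then conditions << "posts.rating <> '#{$1}'"
        when /\Arating:([sqe])/  then conditions << "posts.rating = '#{$1}'"
        else conditions
        end
      end
    end

    rating_conditions("touhou rating:s -rating:e")
    # => ["posts.rating = 's'", "posts.rating <> 'e'"]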
diff --git a/app/views/help/source_code.html.erb b/app/views/help/source_code.html.erb
new file mode 100644
index 00000000..f8eaa641
--- /dev/null
+++ b/app/views/help/source_code.html.erb
@@ -0,0 +1,12 @@
+
+
Help: Source Code
+
+
+
You can get the Danbooru source code using Subversion. Run svn co svn://donmai.us/danbooru/trunk for the latest copy.
+
All Danbooru code is released under a FreeBSD license.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/start.html.erb b/app/views/help/start.html.erb
new file mode 100644
index 00000000..d7bb8587
--- /dev/null
+++ b/app/views/help/start.html.erb
@@ -0,0 +1,9 @@
+
+
Help: Getting Started
+
If you are already familiar with Danbooru, you may want to consult the cheat sheet for a quick overview of the site.
+
The core of Danbooru is represented by posts and tags. Posts are the content, and tags are how you find the posts.
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/tag_aliases.html.erb b/app/views/help/tag_aliases.html.erb
new file mode 100644
index 00000000..5b06ca6b
--- /dev/null
+++ b/app/views/help/tag_aliases.html.erb
@@ -0,0 +1,11 @@
+
+
Help: Tag Aliases
+
Sometimes, two tags can mean the same thing. For example, pantsu and panties have identical meanings. It makes sense that if you search for one, you should also get the results for the other.
+
Danbooru tries to fix this issue by using tag aliases. You can alias one or more tags to one reference tag. For example, if we aliased pantsu to panties, then whenever someone searched for pantsu or tagged a post with pantsu, it would be internally replaced with panties. Tags are normalized before they are saved to the database. This means that the pantsu tag only exists in the aliases table.
+
When a tag is aliased to another tag, that means that the two tags are equivalent. You would not generally alias rectangle to square, for example, because while all squares are rectangles, not all rectangles are squares. To model this sort of relationship, you would need to use <%= link_to "implications", :action => "tag_implications" %>.
+
While anyone can <%= link_to "suggest", :controller => "tag_alias", :action => "index" %> an alias, only an administrator can approve it.
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
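A small sketch of the normalization step described above, with a plain hash standing in for the tag_aliases table:

    # Before a tag is saved or searched, replace it with its alias target, if any.
    def normalize_tags(tags, aliases)
      tags.map { |tag| aliases.fetch(tag, tag) }.uniq
    end

    aliases = { "pantsu" => "panties" }
    normalize_tags(%w(pantsu skirt), aliases)   # => ["panties", "skirt"]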
diff --git a/app/views/help/tag_implications.html.erb b/app/views/help/tag_implications.html.erb
new file mode 100644
index 00000000..599ea9b9
--- /dev/null
+++ b/app/views/help/tag_implications.html.erb
@@ -0,0 +1,15 @@
+
+
Help: Tag Implications
+
Suppose you tag a post with miniskirt. Miniskirts are simply a type of skirt, so ideally, you would like people who search for skirt to see your miniskirt post. You could tag your post with both skirt and miniskirt, but this starts to get tedious after a while.
+
Tag implications can be used to describe is-a relationships. A miniskirt is a type of skirt. When a miniskirt → skirt implication is created, then whenever someone tags a post with miniskirt, Danbooru will also tag it with skirt. The tag is normalized before it is saved to the database.
+
Tag implications have a predicate and a consequent. The predicate is what is matched against. In the previous example, it would be miniskirt. The consequent is the tag that is added. In the example, it would be skirt.
+
You can have multiple implications for the same predicate. Danbooru will just add all the matching consequent tags. For example, if we created a miniskirt → female_clothes implication, then anytime someone tagged a post with miniskirt it would be expanded to miniskirt skirt female_clothes.
+
Implications can also be chained together. Instead of miniskirt → female_clothes we could create a skirt → female_clothes implication. The end result would be the same.
+
This implication process occurs AFTER the alias process.
+
It's easy to go overboard with implications. It's important not to create implications for frivolous things; for example, we could theoretically implicate everything to an object tag, but this is pointless and only adds bloat to the database. For cases where the predicate and the consequent are synonymous, aliases are a much better idea as they have lower overhead.
+<% end %>
\ No newline at end of file
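A sketch of the expansion described above, including chained implications; the hash stands in for the tag_implications table, and, as noted, this step would run after alias normalization.

    # Each predicate adds its consequents; chains are followed until no new tag appears.
    def expand_implications(tags, implications)
      result = tags.dup
      queue  = tags.dup
      until queue.empty?
        tag = queue.shift
        Array(implications[tag]).each do |consequent|
          next if result.include?(consequent)
          result << consequent
          queue  << consequent
        end
      end
      result
    end

    implications = { "miniskirt" => ["skirt"], "skirt" => ["female_clothes"] }
    expand_implications(%w(miniskirt), implications)
    # => ["miniskirt", "skirt", "female_clothes"]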
diff --git a/app/views/help/tag_scripts.html.erb b/app/views/help/tag_scripts.html.erb
new file mode 100644
index 00000000..4128ab2e
--- /dev/null
+++ b/app/views/help/tag_scripts.html.erb
@@ -0,0 +1,55 @@
+
+
Help: Tag Scripts
+
+
+
Tag scripts allow you to batch together several tag changes. With a single script you can add tags, remove tags, conditionally add tags, conditionally remove tags, or any combination of the above. Simply create one, select it, and click on a post thumbnail to apply the tag script in the background. The best way to illustrate how they work is through examples.
+
You can combine commands, but you cannot nest them. For example, [if cat, dog] [if dog, cat] works, but [if cat, [reset]] does not.
+
+
+
Add
+
+
cat dog would add the cat and dog tags.
+
+
+
+
+
Remove
+
+
-cat -dog would remove the cat and dog tags.
+
cat -dog would add the cat tag and remove the dog tag.
+
+
+
+
+
Conditional
+
+
[if cat, dog] would add the dog tag if and only if the post had the cat tag.
+
[if -cat, dog] would add the dog tag if and only if the post did not have the cat tag.
+
[if cat, -dog] would remove the dog tag if and only if the post had the cat tag.
+
[if -cat, -dog] would remove the dog tag if and only if the post did not have the cat tag.
+
[if cat -animal, animal] would add the animal tag if and only if the post had the cat tag but did not have the animal tag.
+
+
+
+
+
Reset
+
+
[reset] would remove every tag from the post.
+
[reset] cat would remove every tag from the post, then add the cat tag.
+
cat [reset] would add the cat tag, then remove every tag from the post (this is a pointless script).
+
+
+
+
+
Rating Changes
+
+
rating:e would change the post's rating to explicit.
+
[if sex, rating:e] would change the post's rating to explicit if and only if it had the sex tag.
+
+
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
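A rough Ruby interpreter for the script syntax described above (add, -remove, [if ...], [reset]). It is only an illustration of the semantics, not the JavaScript the site uses, and it ignores rating: changes.

    def apply_tag_script(script, tags)
      tags = tags.dup
      script.scan(/\[[^\]]+\]|\S+/).each do |command|
        case command
        when "[reset]"
          tags.clear                                   # [reset] removes every tag
        when /\A\[if\s+([^,]+),\s*([^\]]+)\]\z/
          conditions, changes = $1.split, $2.split
          met = conditions.all? do |c|
            c.start_with?("-") ? !tags.include?(c[1..-1]) : tags.include?(c)
          end
          if met
            changes.each { |c| c.start_with?("-") ? tags.delete(c[1..-1]) : tags << c }
          end
        when /\A-(.+)/
          tags.delete($1)                              # -tag removes the tag
        else
          tags << command                              # bare tag adds it
        end
      end
      tags.uniq
    end

    apply_tag_script("[if cat -animal, animal]", %w(cat))   # => ["cat", "animal"]
    apply_tag_script("-cat dog", %w(cat))                    # => ["dog"]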
diff --git a/app/views/help/tags.html.erb b/app/views/help/tags.html.erb
new file mode 100644
index 00000000..72b78ef5
--- /dev/null
+++ b/app/views/help/tags.html.erb
@@ -0,0 +1,70 @@
+
+
Help: Tags
+
+
+
Tags are basically keywords you can use to describe posts, allowing you to easily search and explore posts based on their content. Consult the cheat sheet for a full list of what you can search on.
+
+
+
Guidelines
+
When you're tagging a post, use the following guidelines:
+
+
+
Replace spaces with underscores
+
For example, maria-sama ga miteru becomes maria-sama_ga_miteru. This small concession makes other features much easier to implement.
+
+
+
+
Forbidden characters
+
The following characters are stripped from tags: commas and semicolons.
+
+
+
+
Name order
+
This is somewhat complicated. In general, use whatever order the anime uses. Failing this, use the ordering the character's nationality suggests. This typically means LastName FirstName order for Asian names, and FirstName LastName order for Western names.
+
But there are exceptions. Some characters use FirstName LastName order despite having Asian-sounding names. Subaru Nakajima is a good example of this (in all official promotional artwork FirstName LastName order is used). There is nothing we can do but shake our heads.
+
Some characters have a mixture of Asian and Western names. Refer to the source material for these cases. Failing that, the general rule is, use whatever ordering the character's last name suggests. Asuka Langley Soryuu has a Japanese last name, so it would become soryuu_asuka_langley. Akira Ferrari has an Italian last name, so it becomes akira_ferrari. But again, there are exceptions to this like setsuna_f_seiei. You can go ahead and curse the site for not standardizing on FirstName LastName ordering earlier on. It's too late to change the system now.
+
+
+
+
Use full names
+
Using full names reduces the chances of collisions. The definitive resource for character names is Anime News Network (note that all their character names use FirstName LastName order).
+
+
+
+
Ask
+
If you're not sure whether a tag is right or wrong, then post a comment asking for some opinions. There are plenty of obsessive Danbooru fans who will gladly weigh in.
+
+
+
+
+
Types
+
Tags can be typed. Currently there are only three types: artist, character, and copyright.
+
+
+
Artist
+
Artist tags identify the artist who drew the post, not the artist of the original copyrighted work (for example, you wouldn't use the barasui tag on a picture of Miu drawn by hanaharu_naruko).
+
When tagging something, you can tell Danbooru that a tag is an artist tag by prefixing it with artist:. For example, tagging something artist:mark tree will tag a post with mark and tree. If the mark tag doesn't already exist, it'll be created with the tag type set to artist.
+
+
+
+
Character
+
Character tags identify the tag as a character. They work exactly like artist tags, only you prefix with "character:" (or "char:").
+
+
+
+
Copyright
+
The copyright type indicates the tag represents an anime, a game, a novel, or some sort of copyrighted setting. Otherwise they work identically to character and artist tags, only you prefix with "copyright:" instead (or "copy:").
+
+
+
+
Ambiguous
+
Tag ambiguity is handled much in the same way that Wikipedia handles ambiguity. Users who search for an ambiguous tag are directed to a disambiguation page on the wiki, where they can clarify what they want to search for.
+
The flag for marking a tag as ambiguous is separate from the type. This means that an artist tag could be marked as ambiguous, for example. To mark a tag as ambiguous, prefix it with "ambiguous:".
+
+
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
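A sketch of how a typed prefix such as artist:mark might be split into a tag type and a plain tag name; the prefix table mirrors the shortcuts mentioned above, but the parsing code itself is illustrative, not the site's.

    TAG_TYPE_PREFIXES = {
      "general" => 0,
      "artist" => 1, "art" => 1,
      "copyright" => 3, "copy" => 3,
      "character" => 4, "char" => 4,
    }

    # Returns [type_id_or_nil, tag_name].
    def parse_typed_tag(tag)
      prefix, name = tag.split(":", 2)
      return [nil, tag] unless name && TAG_TYPE_PREFIXES.key?(prefix)
      [TAG_TYPE_PREFIXES[prefix], name]
    end

    parse_typed_tag("artist:mark")   # => [1, "mark"]
    parse_typed_tag("tree")          # => [nil, "tree"]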
diff --git a/app/views/help/trac.html.erb b/app/views/help/trac.html.erb
new file mode 100644
index 00000000..f9588831
--- /dev/null
+++ b/app/views/help/trac.html.erb
@@ -0,0 +1,9 @@
+
+
Help: Trac
+
+
The best way to submit new bugs and feature requests is to create a ticket in Trac. Simply click the New Ticket button on the Trac site and enter a short title and description. For bug reports, try and enter some steps that reproduce the bug.
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/users.html.erb b/app/views/help/users.html.erb
new file mode 100644
index 00000000..0e7b8442
--- /dev/null
+++ b/app/views/help/users.html.erb
@@ -0,0 +1,12 @@
+
+
Help: Accounts
+
+
+
There are three types of accounts: basic, privileged, and blocked.
+
See the <%= link_to "signup", :controller => "user", :action => "signup" %> page for more details.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/voting.html.erb b/app/views/help/voting.html.erb
new file mode 100644
index 00000000..5081b88d
--- /dev/null
+++ b/app/views/help/voting.html.erb
@@ -0,0 +1,12 @@
+
+
Help: Voting
+
+
+
You can vote on posts. When you click on the vote up or vote down link, your browser queries Danbooru in the background and records your vote. You can change your vote only if you are logged in.
+
In order to vote, you must have JavaScript enabled. You DO NOT need an account to vote on posts.
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
\ No newline at end of file
diff --git a/app/views/help/wiki.html.erb b/app/views/help/wiki.html.erb
new file mode 100644
index 00000000..392c2139
--- /dev/null
+++ b/app/views/help/wiki.html.erb
@@ -0,0 +1,24 @@
+
+
Help: Wiki
+
Danbooru uses <%= link_to "DText", :action => "dtext" %> for all formatting.
+
To create an internal wiki link, wrap the title in two sets of square brackets. [[Like this]].
+
+
+
Search
+
By default when you search for a keyword Danbooru will search both the title and the body. If you want to search only the title, prefix your query with title:. For example: "title:tag group".
+
+
+
+
Style Guideline
+
The Danbooru wiki has no specific style guide, but here's some general advice for creating wiki pages for tags:
+
+
Use h4 for all headers, h6 for subheaders.
+
Bundle any relevant links at the end under a See also section.
+
For artists, include the artist's home page under the See also section.
+
+
+
+
+<% content_for("subnavbar") do %>
+
<%= link_to "Help", :action => "index" %>
+<% end %>
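A small sketch of the search rule described above: a title: prefix limits matching to the title, otherwise both title and body are searched. The field names are hypothetical.

    def wiki_search_scope(query)
      if query =~ /\Atitle:(.*)\z/m
        { :fields => [:title], :keyword => $1.strip }
      else
        { :fields => [:title, :body], :keyword => query }
      end
    end

    wiki_search_scope("title:tag group")
    # => {:fields=>[:title], :keyword=>"tag group"}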
diff --git a/app/views/history/index.html.erb b/app/views/history/index.html.erb
new file mode 100644
index 00000000..a67b69d2
--- /dev/null
+++ b/app/views/history/index.html.erb
@@ -0,0 +1,130 @@
+
+ <% end %>
+
+ <%# If we're searching for a specific object, omit the id/name column and
+ show it once at the top. %>
+ <% if @options[:specific_table] && !@changes.empty? %>
+
+
+ This user name doesn't exist. If you want to create a new account, just
+ verify your password and log in.
+
+
+ This user name exists. If you want to create a new account, please choose
+ a different name.
+
+
+ Enter the email address you have registered in your profile. You'll get an email containing your new password.
+
+
+ You have no email address in your profile, so you can't have your password reset.
+
+
+ Password reset. Check your email in a few minutes.
+
+
+ That account does not exist.
+
+
+
+
+ The email address specified is not registered with this account.
+
+
+ Delivery to this email address has failed.
+
+
+
+
+ <%= render :partial => "layouts/login" %>
+
+ <% if CONFIG["server_host"] == "moe.imouto.org" %>
+ <%# This is actually just to keyword "image board" and "danbooru" for searches like
+ "tinkle danbooru". Be careful, this looks very stupid in search results if taken
+ too far: "Sex hentai porn danbooru image board"; this should hint search engines,
+ not spam them. %>
+
Danbooru-based image board with a specialization in high-quality images.
+<% end %>
\ No newline at end of file
diff --git a/app/views/pool/add_post.html.erb b/app/views/pool/add_post.html.erb
new file mode 100644
index 00000000..d8f260d3
--- /dev/null
+++ b/app/views/pool/add_post.html.erb
@@ -0,0 +1,29 @@
+
+
+ <%# Make sure this is done early, as lots of other scripts depend on this registration. %>
+ <% content_for_prefix("post_cookie_javascripts") do %>
+
+ <% end %>
+ <% end %>
+
+<% content_for("post_cookie_javascripts") do %>
+
+<% end %>
+
diff --git a/app/views/post/_preload.html.erb b/app/views/post/_preload.html.erb
new file mode 100644
index 00000000..0acb1e7a
--- /dev/null
+++ b/app/views/post/_preload.html.erb
@@ -0,0 +1,3 @@
+<% posts.each do |post| %>
+
+<% end %>
diff --git a/app/views/post/_search.html.erb b/app/views/post/_search.html.erb
new file mode 100644
index 00000000..b7eef335
--- /dev/null
+++ b/app/views/post/_search.html.erb
@@ -0,0 +1,9 @@
+
+ <% else %>
+ (parent post hidden due to access restrictions)
+ <% end %>
+
+ <% else %>
+ This post has no parent. If this post has been replaced, reparent this post before deleting, and votes will be transferred.
+ <% end %>
+<% end %>
+
+
+<%= render :partial => "footer" %>
\ No newline at end of file
diff --git a/app/views/post/deleted_index.html.erb b/app/views/post/deleted_index.html.erb
new file mode 100644
index 00000000..740498fa
--- /dev/null
+++ b/app/views/post/deleted_index.html.erb
@@ -0,0 +1,36 @@
+
<%= link_to h(tag), :action => "index", :tags => tag %>
+
+ <% end %>
+
+
+
\ No newline at end of file
diff --git a/app/views/post/show.html.erb b/app/views/post/show.html.erb
new file mode 100644
index 00000000..ecd62f04
--- /dev/null
+++ b/app/views/post/show.html.erb
@@ -0,0 +1,57 @@
+
+
+ <% content_for("post_cookie_javascripts") do %>
+
+ <% end %>
+
+
Favorited by: <%= favorite_list(@post) %>
+
+
diff --git a/app/views/post/show_partials/_status_notices.html.erb b/app/views/post/show_partials/_status_notices.html.erb
new file mode 100644
index 00000000..7c3d9453
--- /dev/null
+++ b/app/views/post/show_partials/_status_notices.html.erb
@@ -0,0 +1,65 @@
+<% if @post.is_flagged? %>
+
+ This post was flagged for deletion by <%= h @post.flag_detail.author %>. Reason: <%= h @post.flag_detail.reason %>
+
+<% elsif @post.is_pending? %>
+
+ This post is pending moderator approval.
+
+<% elsif @post.is_deleted? %>
+
+ This post was deleted.
+ <% if @post.flag_detail %>
+ <% if @current_user.is_mod_or_higher? %>
+ By: <%= link_to h(@post.flag_detail.author), :controller => "user", :action => "show", :id => @post.flag_detail.user_id %>
+ <% end %>
+
+ Reason: <%= h @post.flag_detail.reason %>. MD5: <%= @post.md5 %>
+ <% end %>
+
+<% end %>
+
+<% if @post.is_held %>
+
+ This post has been temporarily held from the index by the poster.
+ <% if @current_user.has_permission?(@post) %>
+ (<%= link_to_function "activate this post", "Post.activate_post(#{ @post.id });" %>)
+ <% end %>
+
+<% end %>
+
+<% if !@post.is_deleted? && @post.use_sample?(@current_user) && @post.can_be_seen_by?(@current_user)%>
+
+ This image has been resized. Click on the <%= link_to_function "Original image", "Post.highres()" %> link in the sidebar to view the original image.
+ <% unless @current_user.is_anonymous? || CONFIG["force_image_samples"] %>
+ <%= link_to_function "Always view original", :onclick => "User.disable_samples()" %>.
+ <% end %>
+ <%= link_to_function "Don't show this message", "$('resized_notice').hide(); Cookie.put('hide_resized_notice', '1')" %>.
+
+
+
+ Image samples have been disabled. If you find this to be too slow, you can turn samples back on in your profile settings.
+
+<% end %>
+
+<% if CONFIG["enable_parent_posts"] %>
+ <% if @post.parent_id %>
+
+ This post belongs to a <%= link_to "parent post", :action => "show", :id => @post.parent_id %>. Child posts are often minor variations of the parent post (<%= link_to "learn more", :controller => "help", :action => "post_relationships" %>).
+
+ <% end %>
+
+ <% if @post.has_children? %>
+
+ This post has <%= link_to "child posts", :action => "index", :tags => "parent:#{@post.id}" %>. Child posts are often minor variations of the parent post (<%= link_to "learn more", :controller => "help", :action => "post_relationships" %>).
+
+ <% end %>
+<% end %>
+
+<% @pools.each do |pool| %>
+ <%= render :partial => "post/show_partials/pool", :locals => {:pool => pool, :pool_post => PoolPost.find(:first, :conditions => ["pool_id = ? AND post_id = ?", pool.id, @post.id])} %>
+<% end %>
diff --git a/app/views/post/similar.html.erb b/app/views/post/similar.html.erb
new file mode 100644
index 00000000..d39d437b
--- /dev/null
+++ b/app/views/post/similar.html.erb
@@ -0,0 +1,191 @@
+
+ Your post may be a duplicate.
+ Please read the <%= link_to "duplicate post guidelines", :controller => "wiki", :action => "show", :title => "duplicate post_guidelines" %>.
+
+
+ If your post is a better version of an existing one, but the old post should remain,
+ <%= link_to_function( "reparent", "$('mode').value = 'reparent'; PostModeMenu.change();"); %>
+ the old post.
+
+ If your post is a better version of an existing one, and the old post should be deleted,
+ <%= link_to_function( "mark the old post as a duplicate", "$('mode').value = 'dupe'; PostModeMenu.change();"); %>.
+
+
+
+
+ Your tag blacklist has hidden some potential duplicates. Use Hidden Posts in
+ the sidebar to view hidden posts.
+
+ <% else %>
+ (parent post hidden due to access restrictions)
+ <% end %>
+
+ <% else %>
+ This post has no parent. If this post has been replaced, reparent this post before deleting, and votes will be transferred.
+ <% end %>
+<% end %>
+
+
+<%= render :partial => "footer" %>
\ No newline at end of file
diff --git a/app/views/post2/deleted_index.html.erb b/app/views/post2/deleted_index.html.erb
new file mode 100644
index 00000000..58fa7821
--- /dev/null
+++ b/app/views/post2/deleted_index.html.erb
@@ -0,0 +1,36 @@
+
<%= link_to h(tag), :action => "index", :tags => tag %>
+
+ <% end %>
+
+
+
\ No newline at end of file
diff --git a/app/views/post2/show.html.erb b/app/views/post2/show.html.erb
new file mode 100644
index 00000000..c0c373d1
--- /dev/null
+++ b/app/views/post2/show.html.erb
@@ -0,0 +1,55 @@
+
+
+ <% content_for("post_cookie_javascripts") do %>
+
+ <% end %>
+
+
Favorited by: <%= favorite_list(@post) %>
+
+
diff --git a/app/views/post2/show_partials/_status_notices.html.erb b/app/views/post2/show_partials/_status_notices.html.erb
new file mode 100644
index 00000000..8203c8f8
--- /dev/null
+++ b/app/views/post2/show_partials/_status_notices.html.erb
@@ -0,0 +1,56 @@
+<% if @post.is_flagged? %>
+
+ This post was flagged for deletion by <%= h @post.flag_detail.author %>. Reason: <%= format_text(@post.flag_detail.reason, :skip_simple_format => true) %>
+
+<% elsif @post.is_pending? %>
+
+ This post is pending moderator approval.
+
+<% elsif @post.is_deleted? %>
+
+ This post was deleted.
+ <% if @post.flag_detail %>
+ <% if @current_user.is_mod_or_higher? %>
+ By: <%= link_to h(@post.flag_detail.author), :controller => "user", :action => "show", :id => @post.flag_detail.user_id %>
+ <% end %>
+
+ Reason: <%= format_text(@post.flag_detail.reason, :skip_simple_format => true) %>. MD5: <%= @post.md5 %>
+ <% end %>
+
+<% end %>
+
+<% if !@post.is_deleted? && @post.use_sample?(@current_user) && @post.can_be_seen_by?(@current_user)%>
+
+ This image has been resized. Click on the <%= link_to_function "Original image", "Post.highres()" %> link in the sidebar to view the original image.
+ <% unless @current_user.is_anonymous? || CONFIG["force_image_samples"] %>
+ <%= link_to_function "Always view original", :onclick => "User.disable_samples()" %>.
+ <% end %>
+ <%= link_to_function "Don't show this message", "$('resized_notice').hide(); Cookie.put('hide_resized_notice', '1')" %>.
+
+
+
+ Image samples have been disabled. If you find this to be too slow, you can turn samples back on in your profile settings.
+
+<% end %>
+
+<% if CONFIG["enable_parent_posts"] %>
+ <% if @post.parent_id %>
+
+ This post belongs to a <%= link_to "parent post", :action => "show", :id => @post.parent_id %>. Child posts are often minor variations of the parent post (<%= link_to "learn more", :controller => "help", :action => "post_relationships" %>).
+
+ <% end %>
+
+ <% if @post.has_children? %>
+
+ This post has <%= link_to "child posts", :action => "index", :tags => "parent:#{@post.id}" %>. Child posts are often minor variations of the parent post (<%= link_to "learn more", :controller => "help", :action => "post_relationships" %>).
+
+ <% end %>
+<% end %>
+
+<% @pools.each do |pool| %>
+ <%= render :partial => "post/show_partials/pool", :locals => {:pool => pool, :pool_post => PoolPost.find(:first, :conditions => ["pool_id = ? AND post_id = ?", pool.id, @post.id])} %>
+<% end %>
diff --git a/app/views/post2/similar.html.erb b/app/views/post2/similar.html.erb
new file mode 100644
index 00000000..5f36ea19
--- /dev/null
+++ b/app/views/post2/similar.html.erb
@@ -0,0 +1,202 @@
+
+ Your post may be a duplicate.
+ Please read the <%= link_to "duplicate post guidelines", :controller => "wiki", :action => "show", :title => "duplicate post_guidelines" %>.
+
+
+ If your post is a better version of an existing one, but the old post should remain,
+ <%= link_to_function( "reparent", "$('mode').value = 'reparent'; PostModeMenu.change();"); %>
+ the old post.
+
+ If your post is a better version of an existing one, and the old post should be deleted,
+ <%= link_to_function( "mark the old post as a duplicate", "$('mode').value = 'dupe'; PostModeMenu.change();"); %>.
+
+
+
+
+ Your tag blacklist has hidden some potential duplicates. Use Hidden in
+ the sidebar to view hidden posts.
+
<%= Post.count(:conditions => ["created_at >= ? AND approver_id = ?", 7.days.ago, user.id]) %>/<%= Post.count(:conditions => ["created_at >= ? AND (approver_id IS NOT NULL OR status = 'pending')", 7.days.ago]) %>
+
<%= Post.count(:conditions => ["created_at >= ? AND approver_id = ?", 14.days.ago, user.id]) %>/<%= Post.count(:conditions => ["created_at >= ? AND (approver_id IS NOT NULL OR status = 'pending')", 14.days.ago]) %>
diff --git a/app/views/static/terms_of_service.html.erb b/app/views/static/terms_of_service.html.erb
new file mode 100644
index 00000000..df362a4f
--- /dev/null
+++ b/app/views/static/terms_of_service.html.erb
@@ -0,0 +1,54 @@
+
+
+
Terms of Service
+
By accessing the "<%= CONFIG["app_name"] %>" website ("Site") you agree to the following terms of service. If you do not agree to these terms, then please do not access the Site.
+
+
+
The Site reserves the right to change these terms at any time.
+
If you are a minor, then you will not use the Site.
+
The Site is presented to you AS IS, without any warranty, express or implied. You will not hold the Site or its staff members liable for damages caused by the use of the site.
+
The Site reserves the right to delete or modify your account, or any content you have posted to the site.
+
You will make a good faith effort to upload only high quality anime-related images.
+
You have read the <%= link_to "tagging guidelines", :controller => "help", :action => "tags" %>.
+
+
+
+
Prohibited Content
+
In addition, you may not use the Site to upload any of the following:
+
+
Child pornography: Any photograph or photorealistic drawing or movie that depicts children in a sexual manner. This includes nudity, explicit sex, and implied sex.
+
Bestiality: Any photograph or photorealistic drawing or movie that depicts humans having sex (either explicit or implied) with other non-human animals.
+
Furry: Any image or movie where a person's skin is made of fur or scales.
+
Watermarked: Any image where a person who is not the original copyright owner has placed a watermark on the image.
+
Poorly compressed: Any image where compression artifacts are easily visible.
+
Grotesque: Any depiction of extreme mutilation, extreme bodily distension, feces, or bodies that are far outside the realm of normal human proportion (for example, breasts that are as large as the body).
+
+
+
+
+
+
Copyright Infringement
+
+
If you believe a post infringes upon your copyright, please send an email to the <%= mail_to CONFIG["admin_contact"], "webmaster", :encode => "hex" %> with the following pieces of information:
+
Keep in mind we only respect requests from original artists or copyright owners, not from creators of derivative works.
+
+
The URL of the infringing post.
+
Proof that you own the copyright.
+
An email address that will be provided to the person who uploaded the infringing post to facilitate communication.
+
+
+
+
+
Privacy Policy
+
+
The Site will not disclose the IP address or email address of any user except to the staff.
+
The Site is allowed to make public everything else, including but not limited to: uploaded posts, favorited posts, comments, forum posts, wiki edits, and note edits.
+
+
+
+
Agreement
+
By clicking on the "I Agree" link, you acknowledge that you have read all the terms and agree to them.
You can suggest a new implication, but it must be approved by a moderator before it is activated.
+
The predicate tag is the tag that is matched against, and the consequent tag is the tag that is added. For example, a tag implication with predicate=square consequent=rectangle would mean any post tagged with square would also be tagged with rectangle.
You have not yet activated your account. Click <%= link_to "here", :action => "resend_confirmation" %> to resend your confirmation email to <%= h @current_user.email %>.
+ <% else %>
+
+ You need an account to access some parts of <%= h CONFIG["app_name"] %>.
+ <% unless @current_user.is_anonymous? %>
+ Click <%= link_to "here", :action => "reset_password" %> to reset your password.
+ <% end %>
+ <% if @current_user.is_anonymous? %>
+ <% if CONFIG["enable_signups"] %>
+ You can register for an account <%= link_to "here", :action => "signup" %>.
+ <% else %>
+ Registration is currently disabled.
+ <% end %>
+ <% end %>
+
+ <% end %>
+
+ <% form_tag({:action => "authenticate"}) do %>
+ <%= hidden_field_tag "url", params[:url] %>
+
+ <% end %>
+ <% form_tag(:controller => "blocks", :action => "block_ip") do %>
+
+
+
+
+
<%= submit_tag "Submit" %>
+
+
+
+
+
+
IP masks may be used, such as 127.0.0.1/24
+
<%= text_field "ban", "ip_addr", :size => "40" %>
+
+
+
+
<%= text_area "ban", "reason", :size => "40x5" %>
+
+
+
+
+
+
<%= text_field "ban", "duration", :size => 10 %>
+
+
+
+ <% end %>
+
+
+<%= render :partial => "footer" %>
\ No newline at end of file
diff --git a/app/views/user/signup.html.erb b/app/views/user/signup.html.erb
new file mode 100644
index 00000000..5bfb690d
--- /dev/null
+++ b/app/views/user/signup.html.erb
@@ -0,0 +1,59 @@
+
+
Signup
+
+<% if !CONFIG["enable_signups"] %>
+
Signups are currently disabled.
+<% else %>
+
By creating an account, you are agreeing to the terms of service. Remember that this site is open to web crawlers, so people will be able to easily search your name.
Hello, <%= h(@user.pretty_name) %>. You need to activate your account by visiting <%= link_to "this link", :controller => "user", :action => "activate_user", :hash => User.confirmation_hash(@user.name), :only_path => false, :host => CONFIG["server_host"] %>.
diff --git a/app/views/user_mailer/confirmation_email.text.plain.erb b/app/views/user_mailer/confirmation_email.text.plain.erb
new file mode 100644
index 00000000..c03ca255
--- /dev/null
+++ b/app/views/user_mailer/confirmation_email.text.plain.erb
@@ -0,0 +1,3 @@
+Hello, <%= @user.pretty_name %>. You need to activate your account by visiting:
+
+ <%= url_for :controller => "user", :action => "activate_user", :hash => User.confirmation_hash(@user.name), :only_path => false, :host => CONFIG["server_host"] %>
diff --git a/app/views/user_mailer/dmail.html.erb b/app/views/user_mailer/dmail.html.erb
new file mode 100644
index 00000000..a3a239f1
--- /dev/null
+++ b/app/views/user_mailer/dmail.html.erb
@@ -0,0 +1,5 @@
+
<%= @sender.name %> said:
+
+
+ <%= format_text(@body) %>
+
diff --git a/app/views/user_mailer/new_password.text.html.erb b/app/views/user_mailer/new_password.text.html.erb
new file mode 100644
index 00000000..43254db3
--- /dev/null
+++ b/app/views/user_mailer/new_password.text.html.erb
@@ -0,0 +1,3 @@
+
Hello, <%= h(@user.pretty_name) %>. Your password has been reset to <%= @password %>.
+
+
You can login to <%= link_to(CONFIG["app_name"], :controller => "user", :action => "login", :only_path => false, :host => CONFIG["server_host"]) %> and change your password to something else.
diff --git a/app/views/user_mailer/new_password.text.plain.erb b/app/views/user_mailer/new_password.text.plain.erb
new file mode 100644
index 00000000..bc78667f
--- /dev/null
+++ b/app/views/user_mailer/new_password.text.plain.erb
@@ -0,0 +1,5 @@
+Hello, <%= @user.pretty_name %>. Your password has been reset to:
+
+ <%= @password %>
+
+You can login to <%= url_for(:controller => "user", :action => "login", :only_path => false, :host => CONFIG["server_host"]) %> and change your password to something else.
diff --git a/app/views/user_record/_footer.html.erb b/app/views/user_record/_footer.html.erb
new file mode 100644
index 00000000..38ecc277
--- /dev/null
+++ b/app/views/user_record/_footer.html.erb
@@ -0,0 +1,7 @@
+<% if @user %>
+ <% content_for("subnavbar") do %>
+
+A paragraph.
+
+Followed by another.
+
+h4. A header
+
+* List item 1
+* List item 2
+* List item 3
+
+Linebreaks are important between lists,
+headers, and paragraphs.
+
+A "conventional link":http://www.google.com
+
+A [[wiki link]] (underscores are not needed)
+
+An aliased [[real page|wiki link]]
+
+Read more.
+
+A paragraph.
+
+Followed by another.
+
+h4. A header
+
+* List item 1
+* List item 2
+* List item 3
+
+Linebreaks are important between lists,
+headers, and paragraphs.
+
+URLs are automatically linked: http://www.google.com
+
+A [[wiki link]] (underscores are not needed).
+
+A {{post link}}.
+
+Read more.
+
+ <% @posts.each do |p| %>
+ <%= print_preview(p) %>
+ <% end %>
+
+ <% end %>
+
+ <% unless @page.nil? %>
+
Updated by <%= link_to h(@page.author), :controller => "user", :action => "show", :id => @page.user_id %> <%= time_ago_in_words(@page.updated_at) %> ago
+ <% end %>
+
+
+
+
+
+
diff --git a/config/.local_config.rb.swp b/config/.local_config.rb.swp
new file mode 100644
index 00000000..b0bc2a87
Binary files /dev/null and b/config/.local_config.rb.swp differ
diff --git a/config/boot.rb b/config/boot.rb
new file mode 100644
index 00000000..cd21fb9e
--- /dev/null
+++ b/config/boot.rb
@@ -0,0 +1,109 @@
+# Don't change this file!
+# Configure your app in config/environment.rb and config/environments/*.rb
+
+RAILS_ROOT = "#{File.dirname(__FILE__)}/.." unless defined?(RAILS_ROOT)
+
+module Rails
+ class << self
+ def boot!
+ unless booted?
+ preinitialize
+ pick_boot.run
+ end
+ end
+
+ def booted?
+ defined? Rails::Initializer
+ end
+
+ def pick_boot
+ (vendor_rails? ? VendorBoot : GemBoot).new
+ end
+
+ def vendor_rails?
+ File.exist?("#{RAILS_ROOT}/vendor/rails")
+ end
+
+ def preinitialize
+ load(preinitializer_path) if File.exist?(preinitializer_path)
+ end
+
+ def preinitializer_path
+ "#{RAILS_ROOT}/config/preinitializer.rb"
+ end
+ end
+
+ class Boot
+ def run
+ load_initializer
+ Rails::Initializer.run(:set_load_path)
+ end
+ end
+
+ class VendorBoot < Boot
+ def load_initializer
+ require "#{RAILS_ROOT}/vendor/rails/railties/lib/initializer"
+ Rails::Initializer.run(:install_gem_spec_stubs)
+ end
+ end
+
+ class GemBoot < Boot
+ def load_initializer
+ self.class.load_rubygems
+ load_rails_gem
+ require 'initializer'
+ end
+
+ def load_rails_gem
+ if version = self.class.gem_version
+ gem 'rails', version
+ else
+ gem 'rails'
+ end
+ rescue Gem::LoadError => load_error
+ $stderr.puts %(Missing the Rails #{version} gem. Please `gem install -v=#{version} rails`, update your RAILS_GEM_VERSION setting in config/environment.rb for the Rails version you do have installed, or comment out RAILS_GEM_VERSION to use the latest version installed.)
+ exit 1
+ end
+
+ class << self
+ def rubygems_version
+ Gem::RubyGemsVersion if defined? Gem::RubyGemsVersion
+ end
+
+ def gem_version
+ if defined? RAILS_GEM_VERSION
+ RAILS_GEM_VERSION
+ elsif ENV.include?('RAILS_GEM_VERSION')
+ ENV['RAILS_GEM_VERSION']
+ else
+ parse_gem_version(read_environment_rb)
+ end
+ end
+
+ def load_rubygems
+ require 'rubygems'
+
+ unless rubygems_version >= '0.9.4'
+ $stderr.puts %(Rails requires RubyGems >= 0.9.4 (you have #{rubygems_version}). Please `gem update --system` and try again.)
+ exit 1
+ end
+
+ rescue LoadError
+ $stderr.puts %(Rails requires RubyGems >= 0.9.4. Please install RubyGems and try again: http://rubygems.rubyforge.org)
+ exit 1
+ end
+
+ def parse_gem_version(text)
+ $1 if text =~ /^[^#]*RAILS_GEM_VERSION\s*=\s*["']([!~<>=]*\s*[\d.]+)["']/
+ end
+
+ private
+ def read_environment_rb
+ File.read("#{RAILS_ROOT}/config/environment.rb")
+ end
+ end
+ end
+end
+
+# All that for this:
+Rails.boot!
diff --git a/config/core_extensions.rb b/config/core_extensions.rb
new file mode 100644
index 00000000..53d05c66
--- /dev/null
+++ b/config/core_extensions.rb
@@ -0,0 +1,54 @@
+class ActiveRecord::Base
+ class << self
+ public :sanitize_sql
+ end
+
+ %w(execute select_value select_values select_all).each do |method_name|
+ define_method("#{method_name}_sql") do |sql, *params|
+ ActiveRecord::Base.connection.__send__(method_name, self.class.sanitize_sql([sql, *params]))
+ end
+
+ self.class.__send__(:define_method, "#{method_name}_sql") do |sql, *params|
+ ActiveRecord::Base.connection.__send__(method_name, ActiveRecord::Base.sanitize_sql([sql, *params]))
+ end
+ end
+end
+
+class NilClass
+ def id
+ raise NoMethodError
+ end
+end
+
+class String
+ def to_escaped_for_sql_like
+ # NOTE: gsub(/\\/, '\\\\') is a NOP, you need gsub(/\\/, '\\\\\\') if you want to turn \ into \\; or you can duplicate the matched text
+ return self.gsub(/\\/, '\0\0').gsub(/%/, '\\%').gsub(/_/, '\\_').gsub(/\*/, '%')
+ end
+
+ def to_escaped_js
+ return self.gsub(/\\/, '\0\0').gsub(/['"]/) {|m| "\\#{m}"}.gsub(/\r\n|\r|\n/, '\\n')
+ end
+end
+
+class Hash
+ def included(m)
+ m.alias_method :to_xml_orig, :to_xml
+ end
+
+ def to_xml(options = {})
+ if false == options.delete(:no_children)
+ to_xml_orig(options)
+ else
+ options[:indent] ||= 2
+ options[:no_children] ||= true
+ options[:root] ||= "hash"
+ dasherize = !options.has_key?(:dasherize) || options[:dasherize]
+ root = dasherize ? options[:root].dasherize : options[:root]
+ options.reverse_merge!({:builder => Builder::XmlMarkup.new(:indent => options[:indent]), :root => root})
+ options[:builder].instruct! unless options.delete(:skip_instruct)
+ options[:builder].tag!(root, self)
+ end
+ end
+end
+
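A usage note for String#to_escaped_for_sql_like above: literal %, _ and \ are escaped and the user-facing * wildcard becomes the SQL % wildcard, so the result can be dropped into a LIKE pattern. The condition below is only an illustration.

    "50%_off*".to_escaped_for_sql_like
    # => "50\\%\\_off%"   (i.e. the characters 5 0 \ % \ _ o f f %)

    # pattern = "pixiv*".to_escaped_for_sql_like   # => "pixiv%"
    # Post.find(:all, :conditions => ["source LIKE ?", pattern])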
diff --git a/config/database.yml b/config/database.yml
new file mode 100644
index 00000000..33d93e51
--- /dev/null
+++ b/config/database.yml
@@ -0,0 +1,34 @@
+# Copy this to "database.yml" and adjust
+# the fields accordingly.
+
+development:
+ adapter: postgresql
+ database: moe
+ #username: devdanbooru
+ username: moe
+ host: 127.0.0.1
+
+test:
+ adapter: postgresql
+ database: moe
+ username: moe
+ host: 127.0.0.1
+
+production:
+ adapter: postgresql
+ database: moe
+ #username: devdanbooru
+ username: moe
+ host: 127.0.0.1
+
+production_with_logging:
+ adapter: postgresql
+ database: moe
+ username: moe
+ host: 127.0.0.1
+
+job_task:
+ adapter: postgresql
+ database: moe
+ username: moe
+ host: 127.0.0.1
diff --git a/config/database.yml.example b/config/database.yml.example
new file mode 100644
index 00000000..109a6a73
--- /dev/null
+++ b/config/database.yml.example
@@ -0,0 +1,20 @@
+# Copy this to "database.yml" and adjust
+# the fields accordingly.
+
+development:
+ adapter: postgresql
+ database: danbooru
+ username: albert
+ host: 127.0.0.1
+
+test:
+ adapter: postgresql
+ database: danbooru
+ username: albert
+ host: 127.0.0.1
+
+production:
+ adapter: postgresql
+ database: danbooru
+ username: albert
+ host: 127.0.0.1
diff --git a/config/default_config.rb b/config/default_config.rb
new file mode 100644
index 00000000..4e9c28bb
--- /dev/null
+++ b/config/default_config.rb
@@ -0,0 +1,269 @@
+CONFIG = {}
+
+# The version of this Danbooru.
+CONFIG["version"] = "1.15.0"
+
+# The default name to use for anyone who isn't logged in.
+CONFIG["default_guest_name"] = "Anonymous"
+
+# Set to true to require an e-mail address to register.
+CONFIG["enable_account_email_activation"] = false
+
+# This is a salt used to make dictionary attacks on account passwords harder.
+CONFIG["password_salt"] = "choujin-steiner"
+
+# Set to true to allow new account signups.
+CONFIG["enable_signups"] = true
+
+# Newly created users start at this level. Set this to 30 if you want everyone
+# to start out as a privileged member.
+CONFIG["starting_level"] = 20
+
+# What method to use to store images.
+# local_flat: Store every image in one directory.
+# local_hierarchy: Store every image in a hierarchical directory, based on the post's MD5 hash. On some file systems this may be faster.
+# local_flat_with_amazon_s3_backup: Store every image in a flat directory, but also save to an Amazon S3 account for backup.
+# amazon_s3: Save files to an Amazon S3 account.
+# remote_hierarchy: Some images will be stored on separate image servers using a hierarchical directory.
+CONFIG["image_store"] = :local_flat
+
+# Only used when image_store == :remote_hierarchy. An array of image servers (use http://domain.com format).
+#
+# If nozipfile is set, the mirror won't be used for ZIP mirroring.
+CONFIG["image_servers"] = [
+# { :server => "http://domain.com", :traffic => 0.5 },
+# { :server => "http://domain.com", :traffic => 0.5, :nozipfile => true },
+]
+
+# Set to true to enable downloading whole pools as ZIPs. This requires mod_zipfile
+# for lighttpd.
+CONFIG["pool_zips"] = false
+
+# List of servers to mirror image data to. This is run from the task processor.
+# An unpassworded SSH key must be set up to allow direct ssh/scp commands to be
+# run on the remote host. data_dir should point to the equivalent of public/data,
+# and should usually be listed in CONFIG["image_servers"] unless this is a backup-
+# only host.
+CONFIG["mirrors"] = [
+ # { :user => "danbooru", :host => "example.com", :data_dir => "/home/danbooru/public/data" },
+]
+
+# Enables image samples for large images. NOTE: if you enable this, you must manually create a public/data/sample directory.
+CONFIG["image_samples"] = true
+
+# The maximum dimensions and JPEG quality of sample images.
+CONFIG["sample_width"] = 1400
+CONFIG["sample_height"] = 1000 # Set to nil if you never want to scale an image to fit on the screen vertically
+CONFIG["sample_quality"] = 95
+
+# The maximum dimensions of inline images for the forums and wiki.
+CONFIG["inline_sample_width"] = 800
+CONFIG["inline_sample_height"] = 600
+
+# Resample the image only if the image is larger than sample_ratio * sample_dimensions.
+CONFIG["sample_ratio"] = 1.25
+
+# A prefix to prepend to sample files
+CONFIG["sample_filename_prefix"] = ""
+
+# Enables creating JPEGs for PNGs.
+CONFIG["jpeg_enable"] = false
+
+# Scale JPEGs to fit in these dimensions.
+CONFIG["jpeg_width"] = 3500
+CONFIG["jpeg_height"] = 3500
+
+# Resample the image only if the image is larger than jpeg_ratio * jpeg_dimensions. If
+# not, PNGs can still have a JPEG generated, but no resampling will be done.
+CONFIG["jpeg_ratio"] = 1.25
+CONFIG["jpeg_quality"] = { :min => 94, :max => 97, :filesize => 1024*1024*4 }
+
+# If enabled, URLs will be of the form:
+# http://host/image/00112233445566778899aabbccddeeff/12345 tag tag2 tag3.jpg
+#
+# This allows images to be saved with a useful filename, and hides the MD5 hierarchy (if
+# any). This does not break old links; links to the old URLs are still valid. This
+# requires URL rewriting (not redirection!) in your webserver. The rules for lighttpd are:
+#
+# url.rewrite = (
+# "^/image/([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{28})(/.*)?(\.[a-z]*)" => "/data/$1/$2/$1$2$3$5",
+# "^/sample/([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{28})(/.*)?(\.[a-z]*)" => "/data/sample/$1/$2/$1$2$3$5"
+# )
+#
+CONFIG["use_pretty_image_urls"] = false
+
+# If use_pretty_image_urls is true, sets a prefix to prepend to all filenames. This
+# is only present in the generated URL, and is useful to allow your downloaded files
+# to be distinguished from other sites; for example, "moe 12345 tags.jpg" vs.
+# "kc 54321 tags.jpg". If set, this should end with a space.
+CONFIG["download_filename_prefix"] = ""
+
+# Files over this size will always generate a sample, even if already within
+# the above dimensions.
+CONFIG["sample_always_generate_size"] = 512*1024
+
+# These three configs are only relevant if you're using the Amazon S3 image store.
+CONFIG["amazon_s3_access_key_id"] = ""
+CONFIG["amazon_s3_secret_access_key"] = ""
+CONFIG["amazon_s3_bucket_name"] = ""
+
+# This enables various caching mechanisms. You must have memcache (and the memcache-client ruby gem) installed in order for caching to work.
+CONFIG["enable_caching"] = false
+
+# Enabling this will cause Danbooru to cache things longer:
+# - On post/index, any page after the first 10 will be cached for 3-7 days.
+# - post/show is cached
+CONFIG["enable_aggressive_caching"] = false
+
+# The server and port where the memcache client can be accessed. Only relevant if you enable caching.
+CONFIG["memcache_servers"] = ["localhost:4000"]
+
+# Any post rated safe or questionable that has one of the following tags will automatically be rated explicit.
+CONFIG["explicit_tags"] = %w(pussy penis cum anal vibrator dildo masturbation oral_sex sex paizuri penetration guro rape asshole footjob handjob blowjob cunnilingus anal_sex)
+
+# After a post receives this many posts, new comments will no longer bump the post in comment/index.
+CONFIG["comment_threshold"] = 40
+
+# Members cannot post more than X posts in a day.
+CONFIG["member_post_limit"] = 16
+
+# Members cannot post more than X comments in an hour.
+CONFIG["member_comment_limit"] = 2
+
+# This sets the minimum and maximum value a user can record as a vote.
+CONFIG["vote_record_min"] = 0
+CONFIG["vote_record_max"] = 3
+
+# Descriptions for the various vote levels.
+CONFIG["vote_descriptions"] = {
+ 3 => "Favorite",
+ 2 => "Great",
+ 1 => "Good",
+ 0 => "Neutral",
+ -1 => "Bad"
+}
+
+# The maximum image size that will be downloaded by a URL.
+CONFIG["max_image_size"] = 1024*1024*256
+
+# This allows posts to have parent-child relationships. However, this requires manually updating the post counts stored in table_data by periodically running the script/maintenance script.
+CONFIG["enable_parent_posts"] = false
+
+# Show only the first page of post/index to visitors.
+CONFIG["show_only_first_page"] = false
+
+CONFIG["enable_reporting"] = false
+
+# Enable some web server specific optimizations. Possible values include: apache, nginx, lighttpd.
+CONFIG["web_server"] = "apache"
+
+# Show a link to Trac.
+CONFIG["enable_trac"] = true
+
+# The image service name of this host, if any.
+CONFIG["local_image_service"] = ""
+
+# List of image services available for similar image searching.
+CONFIG["image_service_list"] = {
+ "danbooru.donmai.us" => "http://haruhidoujins.yi.org/multi-search.xml",
+ "moe.imouto.org" => "http://haruhidoujins.yi.org/multi-search.xml",
+ "konachan.com" => "http://haruhidoujins.yi.org/multi-search.xml",
+}
+
+# If true, image services receive a URL to the thumbnail for searching, which
+# is faster. If false, the file is sent directly. Set to false if using image
+# services that don't have access to your image URLs.
+CONFIG["image_service_local_searches_use_urls"] = true
+
+# If true, run a dupe check on new uploads using the image search
+# for local_image_service.
+CONFIG["dupe_check_on_upload"] = false
+
+# Defines the various user levels. You should not remove any of the default ones. When Danbooru starts up, the User model will have several methods automatically defined based on what this config contains. For this reason you should only use letters, numbers, and spaces (spaces will be replaced with underscores). Example: is_member?, is_member_or_lower?, is_member_or_higher?
+CONFIG["user_levels"] = {
+ "Unactivated" => 0,
+ "Blocked" => 10,
+ "Member" => 20,
+ "Privileged" => 30,
+ "Contributor" => 33,
+ "Janitor" => 35,
+ "Mod" => 40,
+ "Admin" => 50
+}
+
+# Defines the various tag types. You can also define shortcuts.
+CONFIG["tag_types"] = {
+ "General" => 0,
+ "Artist" => 1,
+ "Copyright" => 3,
+ "Character" => 4,
+
+ "general" => 0,
+ "artist" => 1,
+ "copyright" => 3,
+ "character" => 4,
+ "art" => 1,
+ "copy" => 3,
+ "char" => 4
+}
+
+# Tag type IDs to not list in recent tag summaries, such as on the side of post/index:
+CONFIG["exclude_from_tag_sidebar"] = [0]
+
+# If set, email_from is the address the site sends emails as. If left alone, emails
+# are sent from CONFIG["admin_contact"].
+CONFIG["email_from"] = lambda do CONFIG["admin_contact"] end
+
+# Determine who can see a post. Note that since this is a block, return won't work. Use break.
+CONFIG["can_see_post"] = lambda do |user, post|
+ # By default, no posts are hidden.
+ true
+
+ # Some examples:
+ #
+ # Hide post if user isn't privileged and post is not safe:
+ # post.rating != "e" || user.is_privileged_or_higher?
+ #
+ # Hide post if user isn't a mod and post has the loli tag:
+ # !post.has_tag?("loli") || user.is_mod_or_higher?
+end
+
+# Determines who can see ads. Note that since this is a block, return won't work. Use break.
+CONFIG["can_see_ads"] = lambda do |user|
+ # By default, only show ads to non-priv users.
+ user.is_member_or_lower?
+
+ # Show no ads at all
+ # false
+end
+
+# Defines the default blacklists for new users.
+CONFIG["default_blacklists"] = [
+# "rating:e loli",
+# "rating:e shota",
+]
+
+# Enable the artists interface.
+CONFIG["enable_artists"] = true
+
+# This is required for Rails 2.0.
+CONFIG["session_secret_key"] = "This should be at least 30 characters long"
+
+# Users cannot search for more than X regular tags at a time.
+CONFIG["tag_query_limit"] = 6
+
+# Set this to insert custom CSS or JavaScript files into your app.
+CONFIG["custom_html_headers"] = nil
+
+# Set this to true to hand off time consuming tasks (downloading files, resizing images, any sort of heavy calculation) to a separate process. In general, if a user sees a page where a task was handed off, an HTTP status code of 503 will be returned. You need beanstalkd installed in order for this to work. This is only necessary if you are getting heavy traffic or you are doing several heavy calculations.
+CONFIG["enable_asynchronous_tasks"] = false
+
+CONFIG["avatar_max_width"] = 125
+CONFIG["avatar_max_height"] = 125
+
+# If you want to redirect traffic when the server's load average (for the 5-minute interval) spikes, set this to a threshold value. Set to false if you want to disable this feature.
+# CONFIG["load_average_threshold"] = 2
+CONFIG["load_average_threshold"] = false
+
+CONFIG["favorite_tag_limit"] = 60
+
diff --git a/config/environment.rb b/config/environment.rb
new file mode 100644
index 00000000..5c96842e
--- /dev/null
+++ b/config/environment.rb
@@ -0,0 +1,29 @@
+RAILS_GEM_VERSION = "2.1.0"
+
+require File.join(File.dirname(__FILE__), 'boot')
+
+Rails::Initializer.run do |config|
+ # Skip frameworks you're not going to use
+ config.frameworks -= [:action_web_service]
+
+ # Add additional load paths for your own custom dirs
+ config.load_paths += ["#{RAILS_ROOT}/app/models/post", "#{RAILS_ROOT}/app/models/post/image_store"]
+
+ # Force all environments to use the same logger level
+ # (by default production uses :info, the others :debug)
+ config.log_level = :info
+
+ # Enable page/fragment caching by setting a file-based store
+ # (remember to create the caching directory and make it readable to the application)
+ # config.action_controller.fragment_cache_store = :file_store, "#{RAILS_ROOT}/cache"
+
+ # Activate observers that should always be running
+ # config.active_record.observers = :cacher, :garbage_collector
+
+ # Make Active Record use UTC-based time instead of local time
+ # config.active_record.default_timezone = :utc
+
+ # Use Active Record's schema dumper instead of SQL when creating the test database
+ # (enables use of different database adapters for development and test environments)
+ config.active_record.schema_format = :sql
+end
diff --git a/config/environments/development.rb b/config/environments/development.rb
new file mode 100644
index 00000000..edaaef2f
--- /dev/null
+++ b/config/environments/development.rb
@@ -0,0 +1,17 @@
+# In the development environment your application's code is reloaded on
+# every request. This slows down response time but is perfect for development
+# since you don't have to restart the webserver when you make code changes.
+config.cache_classes = false
+
+# Log error messages when you accidentally call methods on nil.
+config.whiny_nils = true
+
+# Show full error reports and disable caching
+config.action_controller.consider_all_requests_local = true
+config.action_controller.perform_caching = false
+
+# Don't care if the mailer can't send
+config.action_mailer.raise_delivery_errors = false
+
+config.log_level = :debug
+
diff --git a/config/environments/job_task.rb b/config/environments/job_task.rb
new file mode 100644
index 00000000..edaaef2f
--- /dev/null
+++ b/config/environments/job_task.rb
@@ -0,0 +1,17 @@
+# In the development environment your application's code is reloaded on
+# every request. This slows down response time but is perfect for development
+# since you don't have to restart the webserver when you make code changes.
+config.cache_classes = false
+
+# Log error messages when you accidentally call methods on nil.
+config.whiny_nils = true
+
+# Show full error reports and disable caching
+config.action_controller.consider_all_requests_local = true
+config.action_controller.perform_caching = false
+
+# Don't care if the mailer can't send
+config.action_mailer.raise_delivery_errors = false
+
+config.log_level = :debug
+
diff --git a/config/environments/production.rb b/config/environments/production.rb
new file mode 100644
index 00000000..a904bcff
--- /dev/null
+++ b/config/environments/production.rb
@@ -0,0 +1,20 @@
+# The production environment is meant for finished, "live" apps.
+# Code is not reloaded between requests
+config.cache_classes = true
+
+# Use a different logger for distributed setups
+# config.logger = SyslogLogger.new
+
+# Full error reports are disabled and caching is turned on
+config.action_controller.consider_all_requests_local = false
+config.action_controller.perform_caching = true
+
+# Enable serving of images, stylesheets, and javascripts from an asset server
+# config.action_controller.asset_host = "http://assets.example.com"
+
+# Disable delivery errors if bad email addresses should just be ignored
+# config.action_mailer.raise_delivery_errors = false
+
+config.log_path = "#{RAILS_ROOT}/log/production.log"
+config.log_level = :error
+#config.log_level = :debug
diff --git a/config/environments/production_with_logging.rb b/config/environments/production_with_logging.rb
new file mode 100644
index 00000000..d1e276b7
--- /dev/null
+++ b/config/environments/production_with_logging.rb
@@ -0,0 +1,20 @@
+# The production environment is meant for finished, "live" apps.
+# Code is not reloaded between requests
+config.cache_classes = true
+
+# Use a different logger for distributed setups
+# config.logger = SyslogLogger.new
+
+# Full error reports are disabled and caching is turned on
+config.action_controller.consider_all_requests_local = false
+config.action_controller.perform_caching = true
+
+# Enable serving of images, stylesheets, and javascripts from an asset server
+# config.action_controller.asset_host = "http://assets.example.com"
+
+# Disable delivery errors if bad email addresses should just be ignored
+# config.action_mailer.raise_delivery_errors = false
+
+config.log_path = "#{RAILS_ROOT}/log/production_with_logging.log"
+#config.log_level = :error
+config.log_level = :debug
diff --git a/config/environments/test.rb b/config/environments/test.rb
new file mode 100644
index 00000000..31a6a12b
--- /dev/null
+++ b/config/environments/test.rb
@@ -0,0 +1,25 @@
+# The test environment is used exclusively to run your application's
+# test suite. You never need to work with it otherwise. Remember that
+# your test database is "scratch space" for the test suite and is wiped
+# and recreated between test runs. Don't rely on the data there!
+config.cache_classes = true
+
+# Log error messages when you accidentally call methods on nil.
+config.whiny_nils = true
+
+# Show full error reports and disable caching
+config.action_controller.consider_all_requests_local = true
+config.action_controller.perform_caching = false
+
+# Tell ActionMailer not to deliver emails to the real world.
+# The :test delivery method accumulates sent emails in the
+# ActionMailer::Base.deliveries array.
+config.action_mailer.delivery_method = :test
+
+# Overwrite the default settings for fixtures in tests. See Fixtures
+# for more details about these settings.
+# config.transactional_fixtures = true
+# config.instantiated_fixtures = false
+# config.pre_loaded_fixtures = false
+
+config.log_level = :debug
\ No newline at end of file
diff --git a/config/initializers/000_load_config.rb b/config/initializers/000_load_config.rb
new file mode 100644
index 00000000..3524ba2b
--- /dev/null
+++ b/config/initializers/000_load_config.rb
@@ -0,0 +1,36 @@
+require "#{RAILS_ROOT}/config/default_config"
+require "#{RAILS_ROOT}/config/local_config"
+
+CONFIG["url_base"] ||= "http://" + CONFIG["server_host"]
+
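+# If ServerKey provides values for these keys, prefer them over the config files.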
+%w(session_secret_key user_password_salt).each do |key|
+ CONFIG[key] = ServerKey[key] if ServerKey[key]
+end
+
+ActionController::Base.session = {:session_key => CONFIG["app_name"], :secret => CONFIG["session_secret_key"]}
+
+require 'post_save'
+require 'base64'
+require 'diff/lcs/array'
+require 'image_size'
+require 'ipaddr'
+require 'open-uri'
+require 'socket'
+require 'time'
+require 'uri'
+require 'net/http'
+require 'aws/s3' if [:amazon_s3, :local_flat_with_amazon_s3_backup].include?(CONFIG["image_store"])
+require 'danbooru_image_resizer/danbooru_image_resizer'
+require 'html_4_tags'
+require 'google_chart' if CONFIG["enable_reporting"]
+require 'core_extensions'
+require 'json'
+require 'json/add/core'
+require 'json/add/rails'
+require 'fix_form_tag'
+require 'download'
+require 'sys/cpu' if CONFIG["load_average_threshold"]
+require 'fileutils'
+require 'versioning'
+require 'error_logging'
+require 'dtext'
diff --git a/config/initializers/001_action_mailer.rb b/config/initializers/001_action_mailer.rb
new file mode 100644
index 00000000..1acf7c11
--- /dev/null
+++ b/config/initializers/001_action_mailer.rb
@@ -0,0 +1,11 @@
+ActionMailer::Base.default_charset = "utf-8"
+#ActionMailer::Base.delivery_method = :sendmail
+ActionMailer::Base.delivery_method = :smtp
+ActionMailer::Base.raise_delivery_errors = true
+ActionMailer::Base.perform_deliveries = true
+
+ActionMailer::Base.smtp_settings = {
+ :address => "localhost",
+ :port => 25,
+ :domain => CONFIG["server_host"]
+}
diff --git a/config/initializers/002_caching.rb b/config/initializers/002_caching.rb
new file mode 100644
index 00000000..72680fb4
--- /dev/null
+++ b/config/initializers/002_caching.rb
@@ -0,0 +1,14 @@
+if CONFIG["enable_caching"]
+ require 'memcache_util'
+ require 'cache'
+ require 'memcache_util_store'
+else
+ require 'cache_dummy'
+end
+
+ CACHE = MemCache.new :c_threshold => 10_000, :compression => true, :debug => false, :namespace => CONFIG["app_name"], :readonly => false, :urlencode => false
+ CACHE.servers = CONFIG["memcache_servers"]
+ begin
+ CACHE.flush_all
+ rescue MemCache::MemCacheError
+ end
diff --git a/config/initializers/003_clear_js_cache.rb b/config/initializers/003_clear_js_cache.rb
new file mode 100644
index 00000000..8931099e
--- /dev/null
+++ b/config/initializers/003_clear_js_cache.rb
@@ -0,0 +1,3 @@
+require "asset_cache"
+
+AssetCache.clear_js_cache
diff --git a/config/initializers/004_exception_notifier.rb b/config/initializers/004_exception_notifier.rb
new file mode 100644
index 00000000..0f5b7fd0
--- /dev/null
+++ b/config/initializers/004_exception_notifier.rb
@@ -0,0 +1,3 @@
+#ExceptionNotifier.exception_recipients = [CONFIG["admin_contact"]]
+#ExceptionNotifier.sender_address = CONFIG["admin_contact"]
+#ExceptionNotifier.email_prefix = "[" + CONFIG["app_name"] + "] "
diff --git a/config/initializers/005_mime_types.rb b/config/initializers/005_mime_types.rb
new file mode 100644
index 00000000..01ebdc2a
--- /dev/null
+++ b/config/initializers/005_mime_types.rb
@@ -0,0 +1 @@
+# Mime::Type.register("application/json", :js)
diff --git a/config/initializers/006_check_javascripts_writable.rb b/config/initializers/006_check_javascripts_writable.rb
new file mode 100644
index 00000000..63486ee3
--- /dev/null
+++ b/config/initializers/006_check_javascripts_writable.rb
@@ -0,0 +1,9 @@
+if true
+ path = ""
+ path += "#{RAILS_ROOT}/" if defined?(RAILS_ROOT)
+ path += "public/javascripts"
+
+ if not File.writable?(path)
+ raise "Path must be writable: %s" % path
+ end
+end
diff --git a/config/local_config.rb b/config/local_config.rb
new file mode 100644
index 00000000..b01f95e3
--- /dev/null
+++ b/config/local_config.rb
@@ -0,0 +1,81 @@
+# This is the file you use to overwrite the default config values.
+# Look at default_config.rb and copy over any settings you want to change.
+
+# You MUST configure these settings for your own server!
+CONFIG["app_name"] = "moe.imouto"
+CONFIG["server_host"] = "moe.imouto.org"
+#CONFIG["server_host"] = "76.73.1.90"
+CONFIG["url_base"] = "http://" + CONFIG["server_host"] # set this to "" to get relative image urls
+CONFIG["admin_contact"] = "dobacco@gmail.com"
+CONFIG["email_from"] = "noreply@moe.imouto.org"
+CONFIG["image_store"] = :remote_hierarchy
+#CONFIG["image_servers"] = ["http://moe.imouto.org", "http://ranka.imouto.org", "http://moe.e-n-m.net"]
+CONFIG["image_servers"] = [
+ #{ :server => "http://elis.imouto.org", :traffic => 2, },
+ { :server => "http://yotsuba.imouto.org", :traffic => 2, :nopreview => false },
+# { :server => "http://elis.imouto.org", :traffic => 3, :nopreview => true },
+# { :server => "http://ranka.imouto.org", :traffic => 1, :previews_only => true } #:nozipfile => true, :nopreview => true }
+]
+#CONFIG["image_servers"] = ["http://sheryl.imouto.org", "http://ranka.imouto.org", "http://moe.e-n-m.net"]
+CONFIG["mirrors"] = [
+ { :user => "moe", :host => "ranka.imouto.org", :data_dir => "/home/moe/moe-live/public/data" },
+# { :user => "moe", :host => "208.43.138.197", :data_dir => "/home/moe/moe-live/public/data" },
+# { :user => "moe", :host => "shana.imouto.org", :data_dir => "/home/moe/moe-live/public/data" },
+# { :user => "moe", :host => "elis.imouto.org", :data_dir => "/home/moe/moe-live/public/data" },
+# { :user => "moe", :host => "188.95.50.2", :data_dir => "/home/moe/moe-live/public/data" },
+# { :user => "moe", :host => "85.12.23.35", :data_dir => "/home/moe/data" },
+]
+CONFIG["dupe_check_on_upload"] = true
+CONFIG["enable_caching"] = true
+CONFIG["enable_anonymous_comment_access"] = true
+CONFIG["enable_anonymous_safe_post_mode"] = false
+CONFIG["use_pretty_image_urls"] = true
+CONFIG["download_filename_prefix"] = "moe"
+CONFIG["member_post_limit"] = 99
+CONFIG["member_comment_limit"] = 20
+CONFIG["enable_parent_posts"] = true
+CONFIG["starting_level"] = 30
+CONFIG["memcache_servers"] = ["localhost:11211"]
+CONFIG["hide_loli_posts"] = false
+CONFIG["enable_reporting"] = true
+CONFIG["web_server"] = "lighttpd"
+CONFIG["enable_trac"] = false
+CONFIG["sample_ratio"] = 1
+CONFIG["tag_types"]["Circle"] = 5
+CONFIG["tag_types"]["cir"] = 5
+CONFIG["tag_types"]["circle"] = 5
+CONFIG["tag_types"]["Faults"] = 6
+CONFIG["tag_types"]["faults"] = 6
+CONFIG["tag_types"]["fault"] = 6
+CONFIG["tag_types"]["flt"] = 6
+CONFIG["exclude_from_tag_sidebar"] = [0, 6]
+CONFIG["local_image_service"] = "moe.imouto.org"
+CONFIG["default_blacklists"] = [
+ "rating:e loli",
+ "rating:e shota",
+ "extreme_content",
+]
+# List of image services available for similar image searching.
+CONFIG["image_service_list"] = {
+ "danbooru.donmai.us" => "http://iqdb.hanyuu.net/index.xml",
+ "moe.imouto.org" => "http://iqdb.hanyuu.net/index.xml",
+ "konachan.com" => "http://iqdb.hanyuu.net/index.xml",
+ "e-shuushuu.net" => "http://iqdb.hanyuu.net/index.xml",
+ "gelbooru.com" => "http://iqdb.hanyuu.net/index.xml",
+}
+# This sets the minimum and maximum amount by which a single user's vote can affect a post's total score.
+CONFIG["vote_sum_max"] = 1
+CONFIG["vote_sum_min"] = 0
+CONFIG["can_see_ads"] = lambda do |user|
+
+user.is_privileged_or_lower?
+
+end
+
+CONFIG["pool_zips"] = true
+CONFIG["comment_threshold"] = 9999
+CONFIG["image_samples"] = true
+CONFIG["jpeg_enable"] = true
+
+#ActionMailer::Base.delivery_method = :smtp
+
diff --git a/config/local_config.rb.example b/config/local_config.rb.example
new file mode 100644
index 00000000..c38f0a8d
--- /dev/null
+++ b/config/local_config.rb.example
@@ -0,0 +1,13 @@
+# This is the file you use to overwrite the default config values.
+# Look at default_config.rb and copy over any settings you want to change.
+
+# You MUST configure these settings for your own server!
+CONFIG["app_name"] = "DAN_SITENAME"
+CONFIG["server_host"] = "DAN_HOSTNAME"
+CONFIG["admin_contact"] = "webmaster@" + CONFIG["server_host"]
+# CONFIG["session_secret_key"] = "This should be at least 30 characters long"
+
+CONFIG["url_base"] = "http://" + CONFIG["server_host"] # set this to "" to get relative image urls
+CONFIG["admin_contact"] = "webmaster@" + CONFIG["server_host"]
+CONFIG["local_image_service"] = CONFIG["app_name"]
+# CONFIG["image_service_list"][CONFIG["local_image_service"]] = "http://127.0.0.1/iqdb/iqdb-xml.php"
diff --git a/config/routes.rb b/config/routes.rb
new file mode 100644
index 00000000..3eed414b
--- /dev/null
+++ b/config/routes.rb
@@ -0,0 +1,9 @@
+ActionController::Routing::Routes.draw do |map|
+ map.connect "", :controller => "static", :action => "index"
+ map.connect "post/show/:id/:tag_title", :controller => "post", :action => "show", :requirements => {:id => /\d+/}
+ map.connect "pool/zip/:id/:filename", :controller => "pool", :action => "zip", :requirements => {:id => /\d+/, :filename => /.*/}
+ map.connect ":controller/:action/:id.:format", :requirements => {:id => /[-\d]+/}
+ map.connect ":controller/:action/:id", :requirements => {:id => /[-\d]+/}
+ map.connect ":controller/:action.:format"
+ map.connect ":controller/:action"
+end
diff --git a/db/migrate/001_add_post_links.rb b/db/migrate/001_add_post_links.rb
new file mode 100644
index 00000000..94a4e60b
--- /dev/null
+++ b/db/migrate/001_add_post_links.rb
@@ -0,0 +1,13 @@
+class AddPostLinks < ActiveRecord::Migration
+ def self.up
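+ # Link each post to its neighbors by id: the correlated subqueries pick the
+ # nearest higher id as next_post_id and the nearest lower id as prev_post_id.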
+ execute("ALTER TABLE posts ADD COLUMN next_post_id INTEGER REFERENCES posts ON DELETE SET NULL")
+ execute("ALTER TABLE posts ADD COLUMN prev_post_id INTEGER REFERENCES posts ON DELETE SET NULL")
+ execute("UPDATE posts SET next_post_id = (SELECT _.id FROM posts _ WHERE _.id > posts.id ORDER BY _.id LIMIT 1)")
+ execute("UPDATE posts SET prev_post_id = (SELECT _.id FROM posts _ WHERE _.id < posts.id ORDER BY _.id DESC LIMIT 1)")
+ end
+
+ def self.down
+ execute("ALTER TABLE posts DROP COLUMN next_post_id")
+ execute("ALTER TABLE posts DROP COLUMN prev_post_id")
+ end
+end
diff --git a/db/migrate/002_create_artists.rb b/db/migrate/002_create_artists.rb
new file mode 100644
index 00000000..be1dc79a
--- /dev/null
+++ b/db/migrate/002_create_artists.rb
@@ -0,0 +1,23 @@
+class CreateArtists < ActiveRecord::Migration
+ def self.up
+ execute(<<-EOS)
+ CREATE TABLE artists (
+ id SERIAL,
+ japanese_name TEXT,
+ personal_name TEXT,
+ handle_name TEXT,
+ circle_name TEXT,
+ site_name TEXT,
+ site_url TEXT,
+ image_url TEXT
+ )
+ EOS
+ execute("CREATE INDEX idx_artists__image_url ON artists (image_url)")
+ execute("CREATE INDEX idx_artists__personal_name ON artists (personal_name) WHERE personal_name IS NOT NULL")
+ execute("CREATE INDEX idx_artists__handle_name ON artists (handle_name) WHERE handle_name IS NOT NULL")
+ end
+
+ def self.down
+ execute("DROP TABLE artists")
+ end
+end
diff --git a/db/migrate/003_extend_post_tag_histories.rb b/db/migrate/003_extend_post_tag_histories.rb
new file mode 100644
index 00000000..8be31de8
--- /dev/null
+++ b/db/migrate/003_extend_post_tag_histories.rb
@@ -0,0 +1,13 @@
+class ExtendPostTagHistories < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE post_tag_histories ADD COLUMN user_id INTEGER REFERENCES users ON DELETE SET NULL"
+ execute "ALTER TABLE post_tag_histories ADD COLUMN ip_addr TEXT"
+ execute "ALTER TABLE post_tag_histories ADD COLUMN created_at TIMESTAMP NOT NULL DEFAULT now()"
+ end
+
+ def self.down
+ execute "ALTER TABLE post_tag_histories DROP COLUMN user_id"
+ execute "ALTER TABLE post_tag_histories DROP COLUMN ip_addr"
+ execute "ALTER TABLE post_tag_histories DROP COLUMN created_at"
+ end
+end
diff --git a/db/migrate/004_post_tag_history_constraints.rb b/db/migrate/004_post_tag_history_constraints.rb
new file mode 100644
index 00000000..063e9b7e
--- /dev/null
+++ b/db/migrate/004_post_tag_history_constraints.rb
@@ -0,0 +1,13 @@
+class PostTagHistoryConstraints < ActiveRecord::Migration
+ def self.up
+ execute("UPDATE post_tag_histories SET created_at = now() WHERE created_at IS NULL")
+ execute("UPDATE post_tag_histories SET ip_addr = '' WHERE ip_addr IS NULL")
+ execute("ALTER TABLE post_tag_histories ALTER COLUMN created_at SET NOT NULL")
+ execute("ALTER TABLE post_tag_histories ALTER COLUMN ip_addr SET NOT NULL")
+ end
+
+ def self.down
+ execute("ALTER TABLE post_tag_histories ALTER COLUMN created_at DROP NOT NULL")
+ execute("ALTER TABLE post_tag_histories ALTER COLUMN ip_addr DROP NOT NULL")
+ end
+end
diff --git a/db/migrate/005_create_forum_post.rb b/db/migrate/005_create_forum_post.rb
new file mode 100644
index 00000000..60fd2012
--- /dev/null
+++ b/db/migrate/005_create_forum_post.rb
@@ -0,0 +1,19 @@
+class CreateForumPost < ActiveRecord::Migration
+ def self.up
+ execute(<<-EOS)
+ CREATE TABLE forum_posts (
+ id SERIAL PRIMARY KEY,
+ created_at TIMESTAMP NOT NULL,
+ updated_at TIMESTAMP NOT NULL,
+ title TEXT NOT NULL,
+ body TEXT NOT NULL,
+ creator_id INTEGER NOT NULL REFERENCES users ON DELETE CASCADE,
+ parent_id INTEGER REFERENCES forum_posts ON DELETE CASCADE
+ )
+ EOS
+ end
+
+ def self.down
+ execute("DROP TABLE forum_posts")
+ end
+end
diff --git a/db/migrate/006_create_forum_posts.rb b/db/migrate/006_create_forum_posts.rb
new file mode 100644
index 00000000..8928c278
--- /dev/null
+++ b/db/migrate/006_create_forum_posts.rb
@@ -0,0 +1,7 @@
+class CreateForumPosts < ActiveRecord::Migration
+ def self.up
+ end
+
+ def self.down
+ end
+end
diff --git a/db/migrate/007_add_spam_field_to_comments.rb b/db/migrate/007_add_spam_field_to_comments.rb
new file mode 100644
index 00000000..da0ebc27
--- /dev/null
+++ b/db/migrate/007_add_spam_field_to_comments.rb
@@ -0,0 +1,11 @@
+class AddSpamFieldToComments < ActiveRecord::Migration
+ def self.up
+ execute("ALTER TABLE comments DROP COLUMN signal_level")
+ execute("ALTER TABLE comments ADD COLUMN is_spam BOOLEAN")
+ end
+
+ def self.down
+ execute("ALTER TABLE comments ADD COLUMN signal_level")
+ execute("ALTER TABLE comments DROP COLUMN is_spam BOOLEAN")
+ end
+end
diff --git a/db/migrate/008_upgrade_forums.rb b/db/migrate/008_upgrade_forums.rb
new file mode 100644
index 00000000..9b50d330
--- /dev/null
+++ b/db/migrate/008_upgrade_forums.rb
@@ -0,0 +1,15 @@
+class UpgradeForums < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE forum_posts ADD COLUMN reply_count INTEGER NOT NULL DEFAULT 0"
+ execute "ALTER TABLE forum_posts ADD COLUMN last_updated_by INTEGER REFERENCES users ON DELETE SET NULL"
+ execute "ALTER TABLE forum_posts ADD COLUMN is_sticky BOOLEAN NOT NULL DEFAULT FALSE"
+ execute "ALTER TABLE users ADD COLUMN last_seen_forum_post_id INTEGER REFERENCES forum_posts ON DELETE SET NULL"
+ end
+
+ def self.down
+ execute "ALTER TABLE forum_posts DROP COLUMN reply_count"
+ execute "ALTER TABLE forum_posts DROP COLUMN last_updated_by"
+ execute "ALTER TABLE forum_posts DROP COLUMN is_sticky"
+ execute "ALTER TABLE users DROP COLUMN last_seen_forum_post_id"
+ end
+end
diff --git a/db/migrate/009_add_last_seen_forum_post_date.rb b/db/migrate/009_add_last_seen_forum_post_date.rb
new file mode 100644
index 00000000..b4c39aec
--- /dev/null
+++ b/db/migrate/009_add_last_seen_forum_post_date.rb
@@ -0,0 +1,11 @@
+class AddLastSeenForumPostDate < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE users DROP COLUMN last_seen_forum_post_id"
+ execute "ALTER TABLE users ADD COLUMN last_seen_forum_post_date TIMESTAMP NOT NULL DEFAULT now()"
+ end
+
+ def self.down
+ execute "ALTER TABLE users ADD COLUMN last_seen_forum_post_id INTEGER REFERENCES users ON DELETE SET NULL"
+ execute "ALTER TABLE users DROP COLUMN last_seen_forum_post_date"
+ end
+end
diff --git a/db/migrate/010_add_user_fields.rb b/db/migrate/010_add_user_fields.rb
new file mode 100644
index 00000000..bbb93696
--- /dev/null
+++ b/db/migrate/010_add_user_fields.rb
@@ -0,0 +1,17 @@
+class AddUserFields < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE users ADD COLUMN email TEXT NOT NULL DEFAULT ''"
+ execute "ALTER TABLE users ADD COLUMN tag_blacklist TEXT NOT NULL DEFAULT ''"
+ execute "ALTER TABLE users ADD COLUMN user_blacklist TEXT NOT NULL DEFAULT ''"
+ execute "ALTER TABLE users ADD COLUMN my_tags TEXT NOT NULL DEFAULT ''"
+ execute "ALTER TABLE users ADD COLUMN post_threshold INTEGER NOT NULL DEFAULT -100"
+ end
+
+ def self.down
+ execute "ALTER TABLE users DROP COLUMN email"
+ execute "ALTER TABLE users DROP COLUMN tag_blacklist"
+ execute "ALTER TABLE users DROP COLUMN user_blacklist"
+ execute "ALTER TABLE users DROP COLUMN my_tags"
+ execute "ALTER TABLE users DROP COLUMN post_threshold"
+ end
+end
diff --git a/db/migrate/011_add_invites.rb b/db/migrate/011_add_invites.rb
new file mode 100644
index 00000000..999b4659
--- /dev/null
+++ b/db/migrate/011_add_invites.rb
@@ -0,0 +1,18 @@
+class AddInvites < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE users ADD COLUMN invite_count INTEGER NOT NULL DEFAULT 0"
+ execute <<-EOS
+ CREATE TABLE invites (
+ id SERIAL PRIMARY KEY,
+ user_id INTEGER NOT NULL REFERENCES users ON DELETE CASCADE,
+ activation_key TEXT NOT NULL,
+ invite_email TEXT NOT NULL
+ )
+ EOS
+ end
+
+ def self.down
+ execute "ALTER TABLE users DROP COLUMN invite_count"
+ execute "DROP TABLE invites"
+ end
+end
diff --git a/db/migrate/012_rename_invite_email_field.rb b/db/migrate/012_rename_invite_email_field.rb
new file mode 100644
index 00000000..7a6e57cd
--- /dev/null
+++ b/db/migrate/012_rename_invite_email_field.rb
@@ -0,0 +1,9 @@
+class RenameInviteEmailField < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE invites RENAME COLUMN invite_email TO email"
+ end
+
+ def self.down
+ execute "ALTER TABLE invites RENAME COLUMN email TO invite_email"
+ end
+end
diff --git a/db/migrate/013_drop_is_ambiguous_field_from_tags.rb b/db/migrate/013_drop_is_ambiguous_field_from_tags.rb
new file mode 100644
index 00000000..b016d405
--- /dev/null
+++ b/db/migrate/013_drop_is_ambiguous_field_from_tags.rb
@@ -0,0 +1,7 @@
+class DropIsAmbiguousFieldFromTags < ActiveRecord::Migration
+ def self.up
+ end
+
+ def self.down
+ end
+end
diff --git a/db/migrate/014_add_pending_to_aliases_and_implications.rb b/db/migrate/014_add_pending_to_aliases_and_implications.rb
new file mode 100644
index 00000000..2681db23
--- /dev/null
+++ b/db/migrate/014_add_pending_to_aliases_and_implications.rb
@@ -0,0 +1,11 @@
+class AddPendingToAliasesAndImplications < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE tag_aliases ADD COLUMN is_pending BOOLEAN NOT NULL DEFAULT FALSE"
+ execute "ALTER TABLE tag_implications ADD COLUMN is_pending BOOLEAN NOT NULL DEFAULT FALSE"
+ end
+
+ def self.down
+ execute "ALTER TABLE tag_aliases DROP COLUMN is_pending"
+ execute "ALTER TABLE tag_implications DROP COLUMN is_pending"
+ end
+end
diff --git a/db/migrate/015_rename_implication_fields.rb b/db/migrate/015_rename_implication_fields.rb
new file mode 100644
index 00000000..641a714b
--- /dev/null
+++ b/db/migrate/015_rename_implication_fields.rb
@@ -0,0 +1,11 @@
+class RenameImplicationFields < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE tag_implications RENAME COLUMN parent_id TO consequent_id"
+ execute "ALTER TABLE tag_implications RENAME COLUMN child_id TO predicate_id"
+ end
+
+ def self.down
+ execute "ALTER TABLE tag_implications RENAME COLUMN consequent_id TO parent_id"
+ execute "ALTER TABLE tag_implications RENAME COLUMN predicate_id TO child_id"
+ end
+end
diff --git a/db/migrate/016_add_forum_posts_user_views.rb b/db/migrate/016_add_forum_posts_user_views.rb
new file mode 100644
index 00000000..46a60500
--- /dev/null
+++ b/db/migrate/016_add_forum_posts_user_views.rb
@@ -0,0 +1,18 @@
+class AddForumPostsUserViews < ActiveRecord::Migration
+ def self.up
+ execute <<-EOS
+ CREATE TABLE forum_posts_user_views (
+ forum_post_id INTEGER NOT NULL REFERENCES forum_posts ON DELETE CASCADE,
+ user_id INTEGER NOT NULL REFERENCES users ON DELETE CASCADE,
+ last_viewed_at TIMESTAMP NOT NULL
+ )
+ EOS
+
+ execute "CREATE INDEX forum_posts_user_views__forum_post_id__idx ON forum_posts_user_views (forum_post_id)"
+ execute "CREATE INDEX forum_posts_user_views__user_id__idx ON forum_posts_user_views (user_id)"
+ end
+
+ def self.down
+ execute "DROP TABLE forum_posts_user_views"
+ end
+end
diff --git a/db/migrate/017_drop_last_seen_forum_post_date_from_users.rb b/db/migrate/017_drop_last_seen_forum_post_date_from_users.rb
new file mode 100644
index 00000000..f3b9cf7f
--- /dev/null
+++ b/db/migrate/017_drop_last_seen_forum_post_date_from_users.rb
@@ -0,0 +1,9 @@
+class DropLastSeenForumPostDateFromUsers < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE users DROP COLUMN last_seen_forum_post_date"
+ end
+
+ def self.down
+ execute "ALTER TABLE users ADD COLUMN last_seen_forum_post_date TIMESTAMP NOT NULL DEFAULT now()"
+ end
+end
diff --git a/db/migrate/018_add_constraints_to_forum_posts_user_views.rb b/db/migrate/018_add_constraints_to_forum_posts_user_views.rb
new file mode 100644
index 00000000..d41b3906
--- /dev/null
+++ b/db/migrate/018_add_constraints_to_forum_posts_user_views.rb
@@ -0,0 +1,11 @@
+class AddConstraintsToForumPostsUserViews < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE forum_posts_user_views ADD CONSTRAINT forum_posts_user_views__unique_forum_post_id_user_id UNIQUE (forum_post_id, user_id)"
+ execute "CREATE INDEX forum_posts__parent_id_idx ON forum_posts (parent_id) WHERE parent_id IS NULL"
+ end
+
+ def self.down
+ execute "ALTER TABLE forum_posts_user_views DROP CONSTRAINT forum_posts_user_views__unique_forum_post_id_user_id"
+ execute "DROP INDEX forum_posts__parent_id_idx"
+ end
+end
diff --git a/db/migrate/019_add_id_to_forum_posts_user_views.rb b/db/migrate/019_add_id_to_forum_posts_user_views.rb
new file mode 100644
index 00000000..a365c66f
--- /dev/null
+++ b/db/migrate/019_add_id_to_forum_posts_user_views.rb
@@ -0,0 +1,9 @@
+class AddIdToForumPostsUserViews < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE forum_posts_user_views ADD COLUMN id SERIAL PRIMARY KEY"
+ end
+
+ def self.down
+ execute "ALTER TABLE forum_posts_user_views DROP COLUMN id"
+ end
+end
diff --git a/db/migrate/020_change_artists_a.rb b/db/migrate/020_change_artists_a.rb
new file mode 100644
index 00000000..de2de0e2
--- /dev/null
+++ b/db/migrate/020_change_artists_a.rb
@@ -0,0 +1,16 @@
+class ChangeArtistsA < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE artists ADD PRIMARY KEY (id)"
+ execute "ALTER TABLE artists ADD COLUMN alias_id INTEGER REFERENCES artists ON DELETE SET NULL"
+ execute "ALTER TABLE artists ADD COLUMN group_id INTEGER REFERENCES artists ON DELETE SET NULL"
+ execute "ALTER TABLE artists RENAME COLUMN site_url TO url_a"
+ execute "ALTER TABLE artists RENAME COLUMN image_url TO url_b"
+ execute "ALTER TABLE artists ADD COLUMN url_c TEXT"
+ execute "ALTER TABLE artists ADD COLUMN name TEXT NOT NULL DEFAULT ''"
+ execute "ALTER TABLE artists ALTER COLUMN name DROP DEFAULT"
+ end
+
+ def self.down
+ raise ActiveRecord::IrreversibleMigration.new
+ end
+end
diff --git a/db/migrate/021_change_artists_b.rb b/db/migrate/021_change_artists_b.rb
new file mode 100644
index 00000000..94efb361
--- /dev/null
+++ b/db/migrate/021_change_artists_b.rb
@@ -0,0 +1,18 @@
+class ChangeArtistsB < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE artists DROP COLUMN japanese_name"
+ execute "ALTER TABLE artists DROP COLUMN personal_name"
+ execute "ALTER TABLE artists DROP COLUMN handle_name"
+ execute "ALTER TABLE artists DROP COLUMN circle_name"
+ execute "ALTER TABLE artists DROP COLUMN site_name"
+ execute "DELETE FROM artists WHERE name = ''"
+ execute "ALTER TABLE artists ADD CONSTRAINT artists_name_uniq UNIQUE (name)"
+ execute "CREATE INDEX artists_url_a_idx ON artists (url_a)"
+ execute "CREATE INDEX artists_url_b_idx ON artists (url_b) WHERE url_b IS NOT NULL"
+ execute "CREATE INDEX artists_url_c_idx ON artists (url_c) WHERE url_c IS NOT NULL"
+ end
+
+ def self.down
+ raise ActiveRecord::IrreversibleMigration.new
+ end
+end
diff --git a/db/migrate/022_add_notes_to_artists.rb b/db/migrate/022_add_notes_to_artists.rb
new file mode 100644
index 00000000..65f43a9a
--- /dev/null
+++ b/db/migrate/022_add_notes_to_artists.rb
@@ -0,0 +1,9 @@
+class AddNotesToArtists < ActiveRecord::Migration
+ def self.up
+ execute "alter table artists add column notes text not null default ''"
+ end
+
+ def self.down
+ execute "alter table artists drop column notes"
+ end
+end
diff --git a/db/migrate/023_add_updated_at_to_artists.rb b/db/migrate/023_add_updated_at_to_artists.rb
new file mode 100644
index 00000000..1b3b81a9
--- /dev/null
+++ b/db/migrate/023_add_updated_at_to_artists.rb
@@ -0,0 +1,9 @@
+class AddUpdatedAtToArtists < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE artists ADD COLUMN updated_at TIMESTAMP NOT NULL DEFAULT now()"
+ end
+
+ def self.down
+ execute "ALTER TABLE artists DROP COLUMN updated_at"
+ end
+end
diff --git a/db/migrate/024_drop_extra_indexes_on_artists.rb b/db/migrate/024_drop_extra_indexes_on_artists.rb
new file mode 100644
index 00000000..3e701a11
--- /dev/null
+++ b/db/migrate/024_drop_extra_indexes_on_artists.rb
@@ -0,0 +1,11 @@
+class DropExtraIndexesOnArtists < ActiveRecord::Migration
+ def self.up
+ execute "DROP INDEX idx_artists__image_url"
+ execute "DROP INDEX idx_favorites__post_user"
+ end
+
+ def self.down
+ execute "CREATE INDEX idx_artists__image_url ON artists (url_b)"
+ execute "CREATE INDEX idx_favorites__post_user ON favorites (post_id, user_id)"
+ end
+end
diff --git a/db/migrate/025_add_updater_id_to_artists.rb b/db/migrate/025_add_updater_id_to_artists.rb
new file mode 100644
index 00000000..209494a1
--- /dev/null
+++ b/db/migrate/025_add_updater_id_to_artists.rb
@@ -0,0 +1,9 @@
+class AddUpdaterIdToArtists < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE artists ADD COLUMN updater_id INTEGER REFERENCES users ON DELETE SET NULL"
+ end
+
+ def self.down
+ execute "ALTER TABLE artists DROP COLUMN updater_id"
+ end
+end
diff --git a/db/migrate/026_add_ambiguous_field_to_tags.rb b/db/migrate/026_add_ambiguous_field_to_tags.rb
new file mode 100644
index 00000000..4affbc3a
--- /dev/null
+++ b/db/migrate/026_add_ambiguous_field_to_tags.rb
@@ -0,0 +1,10 @@
+class AddAmbiguousFieldToTags < ActiveRecord::Migration
+ def self.up
+ execute "alter table tags add column is_ambiguous boolean not null default false"
+ execute "update tags set is_ambiguous = true where tag_type = 2"
+ execute "update tags set tag_type = 0 where tag_type = 2"
+ end
+
+ def self.down
+ end
+end
diff --git a/db/migrate/027_add_response_count_to_forum.rb b/db/migrate/027_add_response_count_to_forum.rb
new file mode 100644
index 00000000..5e535214
--- /dev/null
+++ b/db/migrate/027_add_response_count_to_forum.rb
@@ -0,0 +1,9 @@
+class AddResponseCountToForum < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE forum_posts ADD COLUMN response_count INTEGER NOT NULL DEFAULT 0"
+ end
+
+ def self.down
+ execute "ALTER TABLE forum_posts DROP COLUMN response_count"
+ end
+end
diff --git a/db/migrate/028_add_image_resize_user_setting.rb b/db/migrate/028_add_image_resize_user_setting.rb
new file mode 100644
index 00000000..4d819563
--- /dev/null
+++ b/db/migrate/028_add_image_resize_user_setting.rb
@@ -0,0 +1,9 @@
+class AddImageResizeUserSetting < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE users ADD COLUMN always_resize_images BOOLEAN NOT NULL DEFAULT FALSE"
+ end
+
+ def self.down
+ execute "ALTER TABLE users DROP COLUMN always_resize_images"
+ end
+end
diff --git a/db/migrate/029_add_safe_post_count_to_tags.rb b/db/migrate/029_add_safe_post_count_to_tags.rb
new file mode 100644
index 00000000..039223dd
--- /dev/null
+++ b/db/migrate/029_add_safe_post_count_to_tags.rb
@@ -0,0 +1,53 @@
+class AddSafePostCountToTags < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE tags ADD COLUMN safe_post_count INTEGER NOT NULL DEFAULT 0"
+ execute "UPDATE tags SET safe_post_count = (SELECT COUNT(*) FROM posts p, posts_tags pt WHERE p.id = pt.post_id AND pt.tag_id = tags.id AND p.rating = 's')"
+ execute "DROP TRIGGER trg_posts_tags__delete ON posts_tags"
+ execute "DROP TRIGGER trg_posts_tags__insert ON posts_tags"
+ execute "INSERT INTO table_data (name, row_count) VALUES ('safe_posts', (SELECT COUNT(*) FROM posts WHERE rating = 's'))"
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION trg_posts_tags__delete() RETURNS "trigger" AS $$
+ BEGIN
+ UPDATE tags SET post_count = post_count - 1 WHERE tags.id = OLD.tag_id;
+ UPDATE tags SET safe_post_count = safe_post_count - 1 FROM posts WHERE tags.id = OLD.tag_id AND OLD.post_id = posts.id AND posts.rating = 's';
+ RETURN OLD;
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION trg_posts_tags__insert() RETURNS "trigger" AS $$
+ BEGIN
+ UPDATE tags SET post_count = post_count + 1 WHERE tags.id = NEW.tag_id;
+ UPDATE tags SET safe_post_count = safe_post_count + 1 FROM posts WHERE tags.id = NEW.tag_id AND NEW.post_id = posts.id AND posts.rating = 's';
+ RETURN NEW;
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+ execute "CREATE TRIGGER trg_posts_tags__delete BEFORE DELETE ON posts_tags FOR EACH ROW EXECUTE PROCEDURE trg_posts_tags__delete()"
+ execute "CREATE TRIGGER trg_posts_tags__insert BEFORE INSERT ON posts_tags FOR EACH ROW EXECUTE PROCEDURE trg_posts_tags__insert()"
+ end
+
+ def self.down
+ execute "ALTER TABLE tags DROP COLUMN safe_post_count"
+ execute "DROP TRIGGER trg_posts_tags__delete ON posts_tags"
+ execute "DROP TRIGGER trg_posts_tags__insert ON posts_tags"
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION trg_posts_tags__delete() RETURNS "trigger" AS $$
+ BEGIN
+ UPDATE tags SET post_count = post_count - 1 WHERE tags.id = OLD.tag_id;
+ RETURN OLD;
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION trg_posts_tags__insert() RETURNS "trigger" AS $$
+ BEGIN
+ UPDATE tags SET post_count = post_count + 1 WHERE tags.id = NEW.tag_id;
+ RETURN NEW;
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+ execute "CREATE TRIGGER trg_posts_tags__delete BEFORE DELETE ON posts_tags FOR EACH ROW EXECUTE PROCEDURE trg_posts_tags__delete()"
+ execute "CREATE TRIGGER trg_posts_tags__insert BEFORE INSERT ON posts_tags FOR EACH ROW EXECUTE PROCEDURE trg_posts_tags__insert()"
+ end
+end
diff --git a/db/migrate/030_add_invited_by_to_users.rb b/db/migrate/030_add_invited_by_to_users.rb
new file mode 100644
index 00000000..76688e99
--- /dev/null
+++ b/db/migrate/030_add_invited_by_to_users.rb
@@ -0,0 +1,9 @@
+class AddInvitedByToUsers < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE users ADD COLUMN invited_by INTEGER"
+ end
+
+ def self.down
+ execute "ALTER TABLE users DROP COLUMN invited_by"
+ end
+end
diff --git a/db/migrate/031_create_news_updates.rb b/db/migrate/031_create_news_updates.rb
new file mode 100644
index 00000000..0c45be6d
--- /dev/null
+++ b/db/migrate/031_create_news_updates.rb
@@ -0,0 +1,18 @@
+class CreateNewsUpdates < ActiveRecord::Migration
+ def self.up
+ execute <<-EOS
+ CREATE TABLE news_updates (
+ id SERIAL PRIMARY KEY,
+ created_at TIMESTAMP NOT NULL DEFAULT now(),
+ updated_at TIMESTAMP NOT NULL DEFAULT now(),
+ user_id INTEGER NOT NULL REFERENCES users ON DELETE CASCADE,
+ title TEXT NOT NULL,
+ body TEXT NOT NULL
+ )
+ EOS
+ end
+
+ def self.down
+ execute "DROP TABLE news_updates"
+ end
+end
diff --git a/db/migrate/032_forum_posts_fix_creator_id.rb b/db/migrate/032_forum_posts_fix_creator_id.rb
new file mode 100644
index 00000000..15ad860c
--- /dev/null
+++ b/db/migrate/032_forum_posts_fix_creator_id.rb
@@ -0,0 +1,10 @@
+class ForumPostsFixCreatorId < ActiveRecord::Migration
+ def self.up
+ execute "alter table forum_posts drop constraint forum_posts_creator_id_fkey"
+ execute "alter table forum_posts alter column creator_id drop not null"
+ execute "alter table forum_posts add foreign key (creator_id) references users on delete set null"
+ end
+
+ def self.down
+ end
+end
diff --git a/db/migrate/033_posts_add_is_flagged.rb b/db/migrate/033_posts_add_is_flagged.rb
new file mode 100644
index 00000000..c9119526
--- /dev/null
+++ b/db/migrate/033_posts_add_is_flagged.rb
@@ -0,0 +1,9 @@
+class PostsAddIsFlagged < ActiveRecord::Migration
+ def self.up
+ execute "alter table posts add column is_flagged boolean not null default false"
+ end
+
+ def self.down
+ execute "alter table posts drop column is_flagged"
+ end
+end
diff --git a/db/migrate/034_pools_create.rb b/db/migrate/034_pools_create.rb
new file mode 100644
index 00000000..3ce5ced1
--- /dev/null
+++ b/db/migrate/034_pools_create.rb
@@ -0,0 +1,72 @@
+class PoolsCreate < ActiveRecord::Migration
+ def self.up
+ ActiveRecord::Base.transaction do
+ execute <<-EOS
+ CREATE TABLE pools (
+ id SERIAL PRIMARY KEY,
+ name TEXT NOT NULL,
+ created_at TIMESTAMP NOT NULL,
+ updated_at TIMESTAMP NOT NULL,
+ user_id INTEGER NOT NULL REFERENCES users ON DELETE CASCADE,
+ is_public BOOLEAN NOT NULL DEFAULT FALSE,
+ post_count INTEGER NOT NULL DEFAULT 0,
+ description TEXT NOT NULL DEFAULT ''
+ )
+ EOS
+ execute <<-EOS
+ CREATE TABLE pools_posts (
+ id SERIAL PRIMARY KEY,
+ sequence INTEGER NOT NULL DEFAULT 0,
+ pool_id INTEGER NOT NULL REFERENCES pools ON DELETE CASCADE,
+ post_id INTEGER NOT NULL REFERENCES posts ON DELETE CASCADE
+ )
+ EOS
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION pools_posts_delete_trg() RETURNS "trigger" AS $$
+ BEGIN
+ UPDATE pools SET post_count = post_count - 1 WHERE id = OLD.pool_id;
+ RETURN OLD;
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION pools_posts_insert_trg() RETURNS "trigger" AS $$
+ BEGIN
+ UPDATE pools SET post_count = post_count + 1 WHERE id = NEW.pool_id;
+ RETURN NEW;
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+ execute <<-EOS
+ CREATE TRIGGER pools_posts_insert_trg
+ BEFORE INSERT ON pools_posts
+ FOR EACH ROW
+ EXECUTE PROCEDURE pools_posts_insert_trg();
+ EOS
+ execute <<-EOS
+ CREATE TRIGGER pools_posts_delete_trg
+ BEFORE DELETE ON pools_posts
+ FOR EACH ROW
+ EXECUTE PROCEDURE pools_posts_delete_trg();
+ EOS
+ execute <<-EOS
+ CREATE INDEX pools_user_id_idx ON pools (user_id)
+ EOS
+ execute <<-EOS
+ CREATE INDEX pools_posts_pool_id_idx ON pools_posts (pool_id)
+ EOS
+ execute <<-EOS
+ CREATE INDEX pools_posts_post_id_idx ON pools_posts (post_id)
+ EOS
+ end
+ end
+
+ def self.down
+ ActiveRecord::Base.transaction do
+ execute "DROP TABLE pools_posts"
+ execute "DROP TABLE pools"
+ execute "DROP FUNCTION pools_posts_insert_trg()"
+ execute "DROP FUNCTION pools_posts_delete_trg()"
+ end
+ end
+end
diff --git a/db/migrate/035_users_rename_password.rb b/db/migrate/035_users_rename_password.rb
new file mode 100644
index 00000000..a272999c
--- /dev/null
+++ b/db/migrate/035_users_rename_password.rb
@@ -0,0 +1,9 @@
+class UsersRenamePassword < ActiveRecord::Migration
+ def self.up
+ rename_column :users, :password, :password_hash
+ end
+
+ def self.down
+ rename_column :users, :password_hash, :password
+ end
+end
diff --git a/db/migrate/036_users_update_level.rb b/db/migrate/036_users_update_level.rb
new file mode 100644
index 00000000..ffa5a585
--- /dev/null
+++ b/db/migrate/036_users_update_level.rb
@@ -0,0 +1,8 @@
+class UsersUpdateLevel < ActiveRecord::Migration
+ def self.up
+ execute "update users set level = 3 where level = 2"
+ end
+
+ def self.down
+ end
+end
diff --git a/db/migrate/037_users_add_created_at.rb b/db/migrate/037_users_add_created_at.rb
new file mode 100644
index 00000000..80726ca7
--- /dev/null
+++ b/db/migrate/037_users_add_created_at.rb
@@ -0,0 +1,9 @@
+class UsersAddCreatedAt < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE users ADD COLUMN created_at TIMESTAMP NOT NULL DEFAULT now()"
+ end
+
+ def self.down
+ execute "ALTER TABLE users DROP COLUMN created_at"
+ end
+end
diff --git a/db/migrate/038_favorites_add_created_at.rb b/db/migrate/038_favorites_add_created_at.rb
new file mode 100644
index 00000000..8f70fb88
--- /dev/null
+++ b/db/migrate/038_favorites_add_created_at.rb
@@ -0,0 +1,10 @@
+class FavoritesAddCreatedAt < ActiveRecord::Migration
+ def self.up
+ execute "alter table favorites add column created_at timestamp not null default now()"
+ execute "update favorites set created_at = (select created_at from posts where id = favorites.post_id)"
+ end
+
+ def self.down
+ execute "alter table favorites drop column created_at"
+ end
+end
diff --git a/db/migrate/039_posts_add_is_pending.rb b/db/migrate/039_posts_add_is_pending.rb
new file mode 100644
index 00000000..38a0b9ac
--- /dev/null
+++ b/db/migrate/039_posts_add_is_pending.rb
@@ -0,0 +1,9 @@
+class PostsAddIsPending < ActiveRecord::Migration
+ def self.up
+ execute "alter table posts add column is_pending boolean not null default false"
+ end
+
+ def self.down
+ execute "alter table posts drop column is_pending"
+ end
+end
diff --git a/db/migrate/040_cleanup.rb b/db/migrate/040_cleanup.rb
new file mode 100644
index 00000000..e10b864f
--- /dev/null
+++ b/db/migrate/040_cleanup.rb
@@ -0,0 +1,19 @@
+class Cleanup < ActiveRecord::Migration
+ def self.up
+ remove_column :forum_posts, :reply_count
+ remove_column :users, :user_blacklist
+ remove_column :users, :post_threshold
+ drop_table :invites
+ drop_table :news_updates
+ end
+
+ def self.down
+ add_column :forum_posts, :reply_count, :integer, :null => false, :default => 0
+ add_column :users, :user_blacklist, :text, :null => false, :default => ""
+ add_column :users, :post_threshold, :integer, :null => false, :default => -100
+ create_table :invites do |t|
+ end
+ create_table :news_updates do |t|
+ end
+ end
+end
diff --git a/db/migrate/041_users_add_ip_addr.rb b/db/migrate/041_users_add_ip_addr.rb
new file mode 100644
index 00000000..497cd46d
--- /dev/null
+++ b/db/migrate/041_users_add_ip_addr.rb
@@ -0,0 +1,11 @@
+class UsersAddIpAddr < ActiveRecord::Migration
+ def self.up
+ execute "alter table users add column ip_addr text not null default ''"
+ execute "alter table users add column last_logged_in_at timestamp not null default now()"
+ end
+
+ def self.down
+ execute "alter table users drop column ip_addr"
+ execute "alter table users drop column last_logged_in_at"
+ end
+end
diff --git a/db/migrate/042_note_versions_add_index_on_user_id.rb b/db/migrate/042_note_versions_add_index_on_user_id.rb
new file mode 100644
index 00000000..27a64cd2
--- /dev/null
+++ b/db/migrate/042_note_versions_add_index_on_user_id.rb
@@ -0,0 +1,9 @@
+class NoteVersionsAddIndexOnUserId < ActiveRecord::Migration
+ def self.up
+ add_index :note_versions, :user_id
+ end
+
+ def self.down
+ remove_index :note_versions, :user_id
+ end
+end
diff --git a/db/migrate/043_flagged_posts_create.rb b/db/migrate/043_flagged_posts_create.rb
new file mode 100644
index 00000000..a1c13bb3
--- /dev/null
+++ b/db/migrate/043_flagged_posts_create.rb
@@ -0,0 +1,19 @@
+class FlaggedPostsCreate < ActiveRecord::Migration
+ def self.up
+ execute <<-EOS
+ create table flagged_posts (
+ id serial primary key,
+ created_at timestamp not null default now(),
+ post_id integer not null references posts on delete cascade,
+ reason text not null
+ )
+ EOS
+
+ execute "alter table posts drop column is_flagged"
+ end
+
+ def self.down
+ execute "alter table posts add column is_flagged boolean not null default false"
+ execute "drop table flagged_posts"
+ end
+end
diff --git a/db/migrate/044_forum_add_is_locked.rb b/db/migrate/044_forum_add_is_locked.rb
new file mode 100644
index 00000000..b5a05e3e
--- /dev/null
+++ b/db/migrate/044_forum_add_is_locked.rb
@@ -0,0 +1,30 @@
+class ForumAddIsLocked < ActiveRecord::Migration
+ def self.up
+ transaction do
+ add_column :forum_posts, :is_locked, :boolean, :null => false, :default => false
+ execute "alter table users add column last_forum_topic_read_at timestamp not null default '1960-01-01'"
+ drop_table :forum_posts_user_views
+ add_index :forum_posts, :updated_at
+ end
+ end
+
+ def self.down
+ transaction do
+ remove_column :forum_posts, :is_locked
+ remove_column :users, :last_forum_topic_read_at
+ remove_index :forum_posts, :updated_at
+ execute <<-EOS
+ CREATE TABLE forum_posts_user_views (
+ id serial primary key,
+ forum_post_id INTEGER NOT NULL REFERENCES forum_posts ON DELETE CASCADE,
+ user_id INTEGER NOT NULL REFERENCES users ON DELETE CASCADE,
+ last_viewed_at TIMESTAMP NOT NULL
+ )
+ EOS
+
+ execute "CREATE INDEX forum_posts_user_views__forum_post_id__idx ON forum_posts_user_views (forum_post_id)"
+ execute "CREATE INDEX forum_posts_user_views__user_id__idx ON forum_posts_user_views (user_id)"
+ execute "ALTER TABLE forum_posts_user_views ADD CONSTRAINT forum_posts_user_views__unique_forum_post_id_user_id UNIQUE (forum_post_id, user_id)"
+ end
+ end
+end
diff --git a/db/migrate/045_user_records_create.rb b/db/migrate/045_user_records_create.rb
new file mode 100644
index 00000000..5eb48f30
--- /dev/null
+++ b/db/migrate/045_user_records_create.rb
@@ -0,0 +1,18 @@
+class UserRecordsCreate < ActiveRecord::Migration
+ def self.up
+ execute <<-EOS
+ create table user_records (
+ id serial primary key,
+ user_id integer not null references users on delete cascade,
+ reported_by integer not null references users on delete cascade,
+ created_at timestamp not null default now(),
+ is_positive boolean not null default true,
+ body text not null
+ )
+ EOS
+ end
+
+ def self.down
+ drop_table :user_records
+ end
+end
diff --git a/db/migrate/046_posts_tags_add_foreign_keys.rb b/db/migrate/046_posts_tags_add_foreign_keys.rb
new file mode 100644
index 00000000..c0818ee1
--- /dev/null
+++ b/db/migrate/046_posts_tags_add_foreign_keys.rb
@@ -0,0 +1,30 @@
+class PostsTagsAddForeignKeys < ActiveRecord::Migration
+ def self.up
+ transaction do
+ execute "update tags set post_count = (select count(*) from posts_tags pt where pt.tag_id = tags.id)"
+ execute "update tags set safe_post_count = (select count(*) from posts_tags pt, posts p where pt.tag_id = tags.id and pt.post_id = p.id and p.rating = 's')"
+
+ begin
+ execute "alter table posts_tags add constraint fk_posts_tags__post foreign key (post_id) references posts on delete cascade"
+ rescue Exception
+ end
+
+ begin
+ execute "alter table posts_tags add constraint fk_posts_tags__tag foreign key (tag_id) references tags on delete cascade"
+ rescue Exception
+ end
+ end
+ end
+
+ def self.down
+ begin
+ execute "alter table posts_tags drop constraint fk_posts_tags__post"
+ rescue Exception
+ end
+
+ begin
+ execute "alter table posts_tags drop constraint fk_posts_tags__tag"
+ rescue Exception
+ end
+ end
+end
diff --git a/db/migrate/047_posts_add_parent_id.rb b/db/migrate/047_posts_add_parent_id.rb
new file mode 100644
index 00000000..d1a1fb80
--- /dev/null
+++ b/db/migrate/047_posts_add_parent_id.rb
@@ -0,0 +1,10 @@
+class PostsAddParentId < ActiveRecord::Migration
+ def self.up
+ execute "alter table posts add column parent_id integer references posts on delete set null"
+ execute "create index idx_posts_parent_id on posts (parent_id) where parent_id is not null"
+ end
+
+ def self.down
+ execute "alter table posts drop column parent_id"
+ end
+end
diff --git a/db/migrate/048_posts_add_has_children.rb b/db/migrate/048_posts_add_has_children.rb
new file mode 100644
index 00000000..277478c7
--- /dev/null
+++ b/db/migrate/048_posts_add_has_children.rb
@@ -0,0 +1,9 @@
+class PostsAddHasChildren < ActiveRecord::Migration
+ def self.up
+ execute "alter table posts add column has_children boolean not null default false"
+ end
+
+ def self.down
+ execute "alter table posts drop column has_children"
+ end
+end
diff --git a/db/migrate/049_flagged_posts_drop_foreign_key.rb b/db/migrate/049_flagged_posts_drop_foreign_key.rb
new file mode 100644
index 00000000..966dff23
--- /dev/null
+++ b/db/migrate/049_flagged_posts_drop_foreign_key.rb
@@ -0,0 +1,9 @@
+class FlaggedPostsDropForeignKey < ActiveRecord::Migration
+ def self.up
+ execute "alter table flagged_posts drop constraint flagged_posts_post_id_fkey"
+ end
+
+ def self.down
+ execute "alter table flagged_posts add constraint flagged_posts_post_id_fkey foreign key (post_id) references posts (id) on delete cascade"
+ end
+end
diff --git a/db/migrate/050_posts_tags_fix_foreign_keys.rb b/db/migrate/050_posts_tags_fix_foreign_keys.rb
new file mode 100644
index 00000000..eb52ae11
--- /dev/null
+++ b/db/migrate/050_posts_tags_fix_foreign_keys.rb
@@ -0,0 +1,25 @@
+class PostsTagsFixForeignKeys < ActiveRecord::Migration
+ def self.up
+ begin
+ execute "alter table posts_tags add constraint fk_posts_tags__post foreign key (post_id) references posts on delete cascade"
+ rescue Exception
+ end
+
+ begin
+ execute "alter table posts_tags add constraint fk_posts_tags__tag foreign key (tag_id) references tags on delete cascade"
+ rescue Exception
+ end
+ end
+
+ def self.down
+ begin
+ execute "alter table posts_tags drop constraint fk_posts_tags__post"
+ rescue Exception
+ end
+
+ begin
+ execute "alter table posts_tags drop constraint fk_posts_tags__tag"
+ rescue Exception
+ end
+ end
+end
diff --git a/db/migrate/051_posts_drop_has_children.rb b/db/migrate/051_posts_drop_has_children.rb
new file mode 100644
index 00000000..42297fec
--- /dev/null
+++ b/db/migrate/051_posts_drop_has_children.rb
@@ -0,0 +1,9 @@
+class PostsDropHasChildren < ActiveRecord::Migration
+ def self.up
+ #execute "alter table posts drop column has_children"
+ end
+
+ def self.down
+ #execute "alter table posts add column has_children boolean not null default false"
+ end
+end
diff --git a/db/migrate/052_flagged_posts_add_user_id.rb b/db/migrate/052_flagged_posts_add_user_id.rb
new file mode 100644
index 00000000..72292fd2
--- /dev/null
+++ b/db/migrate/052_flagged_posts_add_user_id.rb
@@ -0,0 +1,12 @@
+class FlaggedPostsAddUserId < ActiveRecord::Migration
+ def self.up
+ execute "alter table flagged_posts add column user_id integer references users on delete cascade"
+ execute "alter table flagged_posts add column is_resolved boolean not null default false"
+ execute "update flagged_posts set is_resolved = false"
+ end
+
+ def self.down
+ execute "alter table flagged_posts drop column user_id"
+ execute "alter table flagged_posts drop column is_resolved"
+ end
+end
diff --git a/db/migrate/053_convert_ip_text_to_inet.rb b/db/migrate/053_convert_ip_text_to_inet.rb
new file mode 100644
index 00000000..a6da2f52
--- /dev/null
+++ b/db/migrate/053_convert_ip_text_to_inet.rb
@@ -0,0 +1,35 @@
+class ConvertIpTextToInet < ActiveRecord::Migration
+ def self.up
+ transaction do
+ execute "update posts set last_voter_ip = null where last_voter_ip = ''"
+ execute "update post_tag_histories set ip_addr = '127.0.0.1' where ip_addr = ''"
+ execute "alter table users alter column ip_addr drop default"
+ execute "update users set ip_addr = '127.0.0.1' where ip_addr = ''"
+ execute "update comments set ip_addr = '127.0.0.1' where ip_addr = 'unknown'"
+ execute "alter table posts alter column last_voter_ip type inet using inet(last_voter_ip)"
+ execute "alter table posts alter column ip_addr type inet using inet(ip_addr)"
+ execute "alter table comments alter column ip_addr type inet using inet(ip_addr)"
+ execute "alter table note_versions alter column ip_addr type inet using inet(ip_addr)"
+ execute "alter table notes alter column ip_addr type inet using inet(ip_addr)"
+ execute "alter table post_tag_histories alter column ip_addr type inet using inet(ip_addr)"
+ execute "alter table users alter column ip_addr type inet using inet(ip_addr)"
+ execute "alter table wiki_page_versions alter column ip_addr type inet using inet(ip_addr)"
+ execute "alter table wiki_pages alter column ip_addr type inet using inet(ip_addr)"
+ end
+ end
+
+ def self.down
+ transaction do
+ execute "alter table posts alter column last_voter_ip type text"
+ execute "alter table posts alter column ip_addr type text"
+ execute "alter table comments alter column ip_addr type text"
+ execute "alter table note_versions alter column ip_addr type text"
+ execute "alter table notes alter column ip_addr type text"
+ execute "alter table post_tag_histories alter column ip_addr type text"
+ execute "alter table users alter column ip_addr type text"
+ execute "alter table wiki_page_versions alter column ip_addr type text"
+ execute "alter table wiki_pages alter column ip_addr type text"
+ execute "alter table users alter column ip_addr set default ''"
+ end
+ end
+end
diff --git a/db/migrate/054_posts_add_status.rb b/db/migrate/054_posts_add_status.rb
new file mode 100644
index 00000000..d079304e
--- /dev/null
+++ b/db/migrate/054_posts_add_status.rb
@@ -0,0 +1,20 @@
+class PostsAddStatus < ActiveRecord::Migration
+ def self.up
+ transaction do
+ execute "create type post_status as enum ('deleted', 'flagged', 'pending', 'active')"
+ execute "alter table posts add column status post_status not null default 'active'"
+ execute "update posts set status = 'pending' where is_pending = true"
+ execute "alter table posts drop column is_pending"
+ execute "update posts set status = 'flagged' where id in (select post_id from flagged_posts)"
+ execute "alter table posts add column deletion_reason text not null default ''"
+ execute "update posts set deletion_reason = (select reason from flagged_posts where post_id = posts.id) where id in (select post_id from flagged_posts)"
+ execute "drop table flagged_posts"
+ execute "create index post_status_idx on posts (status) where status < 'active'"
+ end
+ end
+
+ def self.down
+ # I'm lazy
+ raise ActiveRecord::IrreversibleMigration
+ end
+end
diff --git a/db/migrate/055_add_full_text_search.rb b/db/migrate/055_add_full_text_search.rb
new file mode 100644
index 00000000..e9b4715f
--- /dev/null
+++ b/db/migrate/055_add_full_text_search.rb
@@ -0,0 +1,24 @@
+class AddFullTextSearch < ActiveRecord::Migration
+ def self.up
+ transaction do
+ execute "alter table notes add column text_search_index tsvector"
+ execute "update notes set text_search_index = to_tsvector('english', body)"
+ execute "create trigger trg_note_search_update before insert or update on notes for each row execute procedure tsvector_update_trigger(text_search_index, 'pg_catalog.english', body)"
+ execute "create index notes_text_search_idx on notes using gin(text_search_index)"
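+      # Illustrative query against the new index (not executed by this migration):
+      #   SELECT id FROM notes WHERE text_search_index @@ plainto_tsquery('english', 'some words');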
+
+ execute "alter table wiki_pages add column text_search_index tsvector"
+ execute "update wiki_pages set text_search_index = to_tsvector('english', title || ' ' || body)"
+ execute "create trigger trg_wiki_page_search_update before insert or update on wiki_pages for each row execute procedure tsvector_update_trigger(text_search_index, 'pg_catalog.english', title, body)"
+ execute "create index wiki_pages_search_idx on wiki_pages using gin(text_search_index)"
+
+ execute "alter table forum_posts add column text_search_index tsvector"
+ execute "update forum_posts set text_search_index = to_tsvector('english', title || ' ' || body)"
+ execute "create trigger trg_forum_post_search_update before insert or update on forum_posts for each row execute procedure tsvector_update_trigger(text_search_index, 'pg_catalog.english', title, body)"
+ execute "create index forum_posts_search_idx on forum_posts using gin(text_search_index)"
+ end
+ end
+
+ def self.down
+    raise ActiveRecord::IrreversibleMigration
+ end
+end
diff --git a/db/migrate/056_add_text_search_to_versions.rb b/db/migrate/056_add_text_search_to_versions.rb
new file mode 100644
index 00000000..d848179d
--- /dev/null
+++ b/db/migrate/056_add_text_search_to_versions.rb
@@ -0,0 +1,10 @@
+class AddTextSearchToVersions < ActiveRecord::Migration
+ def self.up
+ execute "alter table note_versions add column text_search_index tsvector"
+ execute "alter table wiki_page_versions add column text_search_index tsvector"
+ end
+
+ def self.down
+    raise ActiveRecord::IrreversibleMigration
+ end
+end
diff --git a/db/migrate/057_drop_post_count_triggers.rb b/db/migrate/057_drop_post_count_triggers.rb
new file mode 100644
index 00000000..5db53b35
--- /dev/null
+++ b/db/migrate/057_drop_post_count_triggers.rb
@@ -0,0 +1,39 @@
+class DropPostCountTriggers < ActiveRecord::Migration
+ def self.up
+ execute "drop trigger trg_posts__insert on posts"
+ execute "drop trigger trg_posts_delete on posts"
+ execute "drop function trg_posts__insert()"
+ execute "drop function trg_posts__delete()"
+ execute "drop trigger trg_users_delete on users"
+ execute "drop trigger trg_users_insert on users"
+ execute "drop function trg_users__delete()"
+ execute "drop function trg_users__insert()"
+ execute "insert into table_data (name, row_count) values ('non-explicit_posts', (select count(*) from posts where rating <> 'e'))"
+ execute "delete from table_data where name = 'safe_posts'"
+ execute "drop trigger trg_posts_tags__delete on posts_tags"
+ execute "drop trigger trg_posts_tags__insert on posts_tags"
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION trg_posts_tags__delete() RETURNS "trigger" AS $$
+ BEGIN
+ UPDATE tags SET post_count = post_count - 1 WHERE tags.id = OLD.tag_id;
+ RETURN OLD;
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION trg_posts_tags__insert() RETURNS "trigger" AS $$
+ BEGIN
+ UPDATE tags SET post_count = post_count + 1 WHERE tags.id = NEW.tag_id;
+ RETURN NEW;
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+ execute "CREATE TRIGGER trg_posts_tags__delete BEFORE DELETE ON posts_tags FOR EACH ROW EXECUTE PROCEDURE trg_posts_tags__delete()"
+ execute "CREATE TRIGGER trg_posts_tags__insert BEFORE INSERT ON posts_tags FOR EACH ROW EXECUTE PROCEDURE trg_posts_tags__insert()"
+ execute "alter table tags drop column safe_post_count"
+ end
+
+ def self.down
+    raise ActiveRecord::IrreversibleMigration
+ end
+end
diff --git a/db/migrate/058_remove_notes_from_artists.rb b/db/migrate/058_remove_notes_from_artists.rb
new file mode 100644
index 00000000..99aa2931
--- /dev/null
+++ b/db/migrate/058_remove_notes_from_artists.rb
@@ -0,0 +1,20 @@
+class RemoveNotesFromArtists < ActiveRecord::Migration
+ def self.up
+ Artist.find(:all, :conditions => ["notes <> '' and notes is not null"]).each do |artist|
+ page = WikiPage.find_by_title(artist.name)
+ notes = artist.__send__(:read_attribute, :notes)
+
+ if page
+ page.update_attributes(:body => notes, :ip_addr => '127.0.0.1', :user_id => 1)
+ else
+ page = WikiPage.create(:title => artist.name, :body => notes, :ip_addr => '127.0.0.1', :user_id => 1)
+ end
+ end
+
+ remove_column :artists, :notes
+ end
+
+ def self.down
+ add_column :artists, :notes, :text, :default => "", :null => false
+ end
+end
diff --git a/db/migrate/059_create_dmails.rb b/db/migrate/059_create_dmails.rb
new file mode 100644
index 00000000..f750bf12
--- /dev/null
+++ b/db/migrate/059_create_dmails.rb
@@ -0,0 +1,26 @@
+class CreateDmails < ActiveRecord::Migration
+ def self.up
+ transaction do
+ create_table :dmails do |t|
+ t.column :from_id, :integer, :null => false
+ t.foreign_key :from_id, :users, :id, :on_delete => :cascade
+ t.column :to_id, :integer, :null => false
+ t.foreign_key :to_id, :users, :id, :on_delete => :cascade
+ t.column :title, :text, :null => false
+ t.column :body, :text, :null => false
+ t.column :created_at, :timestamp, :null => false
+ t.column :has_seen, :boolean, :null => false, :default => false
+ end
+
+ add_index :dmails, :from_id
+ add_index :dmails, :to_id
+
+ add_column :users, :has_mail, :boolean, :default => false, :null => false
+ end
+ end
+
+ def self.down
+ drop_table :dmails
+ remove_column :users, :has_mail
+ end
+end
diff --git a/db/migrate/060_add_receive_mails_to_users.rb b/db/migrate/060_add_receive_mails_to_users.rb
new file mode 100644
index 00000000..31a3e67f
--- /dev/null
+++ b/db/migrate/060_add_receive_mails_to_users.rb
@@ -0,0 +1,9 @@
+class AddReceiveMailsToUsers < ActiveRecord::Migration
+ def self.up
+ add_column :users, :receive_dmails, :boolean, :default => false, :null => false
+ end
+
+ def self.down
+ remove_column :users, :receive_dmails
+ end
+end
diff --git a/db/migrate/061_enhance_dmails.rb b/db/migrate/061_enhance_dmails.rb
new file mode 100644
index 00000000..b4b4e9d7
--- /dev/null
+++ b/db/migrate/061_enhance_dmails.rb
@@ -0,0 +1,11 @@
+class EnhanceDmails < ActiveRecord::Migration
+ def self.up
+ add_column :dmails, :parent_id, :integer
+ add_foreign_key :dmails, :parent_id, :dmails, :id
+ add_index :dmails, :parent_id
+ end
+
+ def self.down
+ remove_column :dmails, :parent_id
+ end
+end
diff --git a/db/migrate/062_create_bans.rb b/db/migrate/062_create_bans.rb
new file mode 100644
index 00000000..724165a8
--- /dev/null
+++ b/db/migrate/062_create_bans.rb
@@ -0,0 +1,23 @@
+class CreateBans < ActiveRecord::Migration
+ def self.up
+ create_table :bans do |t|
+ t.column :user_id, :integer, :null => false
+ t.foreign_key :user_id, :users, :id, :on_delete => :cascade
+ t.column :reason, :text, :null => false
+ t.column :expires_at, :datetime, :null => false
+ t.column :banned_by, :integer, :null => false
+ t.foreign_key :banned_by, :users, :id, :on_delete => :cascade
+ end
+
+ add_index :bans, :user_id
+
+ User.find(:all, :conditions => ["level = 0 or level = 1"]).each do |user|
+ user.update_attribute(:level, User::LEVEL_BLOCKED)
+ Ban.create(:user_id => user.id, :reason => "Grandfathered", :banned_by => 1, :expires_at => 7.days.from_now)
+ end
+ end
+
+ def self.down
+ drop_table :bans
+ end
+end
diff --git a/db/migrate/063_add_blacklisted_tags_to_users.rb b/db/migrate/063_add_blacklisted_tags_to_users.rb
new file mode 100644
index 00000000..a458b5d3
--- /dev/null
+++ b/db/migrate/063_add_blacklisted_tags_to_users.rb
@@ -0,0 +1,9 @@
+class AddBlacklistedTagsToUsers < ActiveRecord::Migration
+ def self.up
+ add_column :users, :blacklisted_tags, :text, :null => false, :default => ""
+ end
+
+ def self.down
+ remove_column :users, :blacklisted_tags
+ end
+end
diff --git a/db/migrate/064_remove_neighbor_constraints.rb b/db/migrate/064_remove_neighbor_constraints.rb
new file mode 100644
index 00000000..de005f77
--- /dev/null
+++ b/db/migrate/064_remove_neighbor_constraints.rb
@@ -0,0 +1,11 @@
+class RemoveNeighborConstraints < ActiveRecord::Migration
+ def self.up
+ remove_foreign_key :posts, :posts_next_post_id_fkey
+ remove_foreign_key :posts, :posts_prev_post_id_fkey
+ end
+
+ def self.down
+ add_foreign_key :posts, :next_post_id, :posts, :id, :on_delete => :set_null
+ add_foreign_key :posts, :prev_post_id, :posts, :id, :on_delete => :set_null
+ end
+end
diff --git a/db/migrate/065_remove_yaml.rb b/db/migrate/065_remove_yaml.rb
new file mode 100644
index 00000000..21388dc6
--- /dev/null
+++ b/db/migrate/065_remove_yaml.rb
@@ -0,0 +1,14 @@
+require 'yaml'
+
+class RemoveYaml < ActiveRecord::Migration
+ def self.up
+ Tag.find(:all).each do |tag|
+ mapping = YAML::load(tag.cached_related)
+ tag.cached_related = mapping.flatten.join(",")
+ tag.save
+ end
+ end
+
+ def self.down
+ end
+end
diff --git a/db/migrate/066_fix_user_levels.rb b/db/migrate/066_fix_user_levels.rb
new file mode 100644
index 00000000..802403e5
--- /dev/null
+++ b/db/migrate/066_fix_user_levels.rb
@@ -0,0 +1,19 @@
+class FixUserLevels < ActiveRecord::Migration
+ def self.up
+ execute("UPDATE users SET level = 50 WHERE level = 20")
+ execute("UPDATE users SET level = 40 WHERE level = 10")
+ execute("UPDATE users SET level = 30 WHERE level = 3")
+ execute("UPDATE users SET level = 20 WHERE level = 2")
+ execute("UPDATE users SET level = 10 WHERE level = 0")
+    execute("UPDATE users SET level = 0 WHERE level = -1")
+ end
+
+ def self.down
+ execute("UPDATE users SET level = -1 WHERE level = 0")
+ execute("UPDATE users SET level = 0 WHERE level = 10")
+ execute("UPDATE users SET level = 2 WHERE level = 20")
+ execute("UPDATE users SET level = 3 WHERE level = 30")
+ execute("UPDATE users SET level = 10 WHERE level = 40")
+    execute("UPDATE users SET level = 20 WHERE level = 50")
+ end
+end
diff --git a/db/migrate/067_create_artist_urls.rb b/db/migrate/067_create_artist_urls.rb
new file mode 100644
index 00000000..bf76e926
--- /dev/null
+++ b/db/migrate/067_create_artist_urls.rb
@@ -0,0 +1,34 @@
+class Artist < ActiveRecord::Base
+end
+
+class CreateArtistUrls < ActiveRecord::Migration
+ def self.up
+ create_table :artist_urls do |t|
+ t.column :artist_id, :integer, :null => false
+ t.column :url, :text, :null => false
+ t.column :normalized_url, :text, :null => false
+ end
+
+ add_index :artist_urls, :artist_id
+ add_index :artist_urls, :url
+ add_index :artist_urls, :normalized_url
+
+ add_foreign_key :artist_urls, :artist_id, :artists, :id
+
+ Artist.find(:all, :order => "id").each do |artist|
+ [:url_a, :url_b, :url_c].each do |field|
+ unless artist[field].blank?
+ ArtistUrl.create(:artist_id => artist.id, :url => artist[field])
+ end
+ end
+ end
+
+ remove_column :artists, :url_a
+ remove_column :artists, :url_b
+ remove_column :artists, :url_c
+ end
+
+ def self.down
+ drop_table :artist_urls
+ end
+end
diff --git a/db/migrate/068_add_pixiv_to_artists.rb b/db/migrate/068_add_pixiv_to_artists.rb
new file mode 100644
index 00000000..39fcda8c
--- /dev/null
+++ b/db/migrate/068_add_pixiv_to_artists.rb
@@ -0,0 +1,10 @@
+class AddPixivToArtists < ActiveRecord::Migration
+ def self.up
+ add_column :artists, :pixiv_id, :integer
+ add_index :artists, :pixiv_id
+ end
+
+ def self.down
+ remove_column :artists, :pixiv_id
+ end
+end
diff --git a/db/migrate/069_clean_up_users.rb b/db/migrate/069_clean_up_users.rb
new file mode 100644
index 00000000..b2ea7517
--- /dev/null
+++ b/db/migrate/069_clean_up_users.rb
@@ -0,0 +1,13 @@
+class CleanUpUsers < ActiveRecord::Migration
+ def self.up
+ remove_column :users, :ip_addr
+ remove_column :users, :tag_blacklist
+ remove_column :users, :login_count
+ end
+
+ def self.down
+ execute "ALTER TABLE users ADD COLUMN ip_addr inet NOT NULL"
+ add_column :users, :tag_blacklist, :text, :null => false, :default => ""
+ add_column :users, :login_count, :integer, :null => false, :default => 0
+ end
+end
diff --git a/db/migrate/070_add_approved_by_to_posts.rb b/db/migrate/070_add_approved_by_to_posts.rb
new file mode 100644
index 00000000..c26d4aa6
--- /dev/null
+++ b/db/migrate/070_add_approved_by_to_posts.rb
@@ -0,0 +1,10 @@
+class AddApprovedByToPosts < ActiveRecord::Migration
+ def self.up
+ add_column :posts, :approved_by, :integer
+ add_foreign_key :posts, :approved_by, :users, :id
+ end
+
+ def self.down
+ remove_column :posts, :approved_by
+ end
+end
diff --git a/db/migrate/071_create_flagged_post_details.rb b/db/migrate/071_create_flagged_post_details.rb
new file mode 100644
index 00000000..a8a003c2
--- /dev/null
+++ b/db/migrate/071_create_flagged_post_details.rb
@@ -0,0 +1,36 @@
+class Post < ActiveRecord::Base
+end
+
+class FlaggedPostDetail < ActiveRecord::Base
+end
+
+class CreateFlaggedPostDetails < ActiveRecord::Migration
+ def self.up
+ remove_column :posts, :approved_by
+
+ create_table :flagged_post_details do |t|
+ t.column :created_at, :datetime, :null => false
+ t.column :post_id, :integer, :null => false
+ t.column :reason, :text, :null => false
+ t.column :user_id, :integer, :null => false
+ t.column :is_resolved, :boolean, :null => false
+ end
+
+ add_index :flagged_post_details, :post_id
+ add_foreign_key :flagged_post_details, :post_id, :posts, :id
+ add_foreign_key :flagged_post_details, :user_id, :users, :id
+
+ Post.find(:all, :conditions => "deletion_reason <> ''", :select => "deletion_reason, id, status").each do |post|
+ FlaggedPostDetail.create(:post_id => post.id, :reason => post.deletion_reason, :user_id => 1, :is_resolved => (post.status == 'deleted'))
+ end
+
+ remove_column :posts, :deletion_reason
+ end
+
+ def self.down
+ add_column :posts, :approved_by, :integer
+ add_foreign_key :posts, :approved_by, :users, :id
+ drop_table :flagged_post_details
+ add_column :posts, :deletion_reason, :text, :null => false, :default => ""
+ end
+end
diff --git a/db/migrate/072_add_reason_to_aliases_and_implications.rb b/db/migrate/072_add_reason_to_aliases_and_implications.rb
new file mode 100644
index 00000000..dde3d44f
--- /dev/null
+++ b/db/migrate/072_add_reason_to_aliases_and_implications.rb
@@ -0,0 +1,11 @@
+class AddReasonToAliasesAndImplications < ActiveRecord::Migration
+ def self.up
+ add_column :tag_aliases, :reason, :text, :null => false, :default => ""
+ add_column :tag_implications, :reason, :text, :null => false, :default => ""
+ end
+
+ def self.down
+ remove_column :tag_aliases, :reason
+ remove_column :tag_implications, :reason
+ end
+end
diff --git a/db/migrate/073_fix_flagged_post_details_foreign_keys.rb b/db/migrate/073_fix_flagged_post_details_foreign_keys.rb
new file mode 100644
index 00000000..1e9e4d6c
--- /dev/null
+++ b/db/migrate/073_fix_flagged_post_details_foreign_keys.rb
@@ -0,0 +1,15 @@
+class FixFlaggedPostDetailsForeignKeys < ActiveRecord::Migration
+ def self.up
+ remove_foreign_key :flagged_post_details, :flagged_post_details_post_id_fkey
+ remove_foreign_key :flagged_post_details, :flagged_post_details_user_id_fkey
+ add_foreign_key :flagged_post_details, :post_id, :posts, :id, :on_delete => :cascade
+ add_foreign_key :flagged_post_details, :user_id, :users, :id, :on_delete => :cascade
+ end
+
+ def self.down
+ remove_foreign_key :flagged_post_details, :flagged_post_details_post_id_fkey
+ remove_foreign_key :flagged_post_details, :flagged_post_details_user_id_fkey
+ add_foreign_key :flagged_post_details, :post_id, :posts, :id
+ add_foreign_key :flagged_post_details, :user_id, :users, :id
+ end
+end
diff --git a/db/migrate/074_remove_pixiv_field.rb b/db/migrate/074_remove_pixiv_field.rb
new file mode 100644
index 00000000..f817c1ea
--- /dev/null
+++ b/db/migrate/074_remove_pixiv_field.rb
@@ -0,0 +1,9 @@
+class RemovePixivField < ActiveRecord::Migration
+ def self.up
+ remove_column :artists, :pixiv_id
+ end
+
+ def self.down
+ add_column :artists, :pixiv_id, :integer
+ end
+end
diff --git a/db/migrate/075_add_sample_columns.rb b/db/migrate/075_add_sample_columns.rb
new file mode 100644
index 00000000..0ebfb4ff
--- /dev/null
+++ b/db/migrate/075_add_sample_columns.rb
@@ -0,0 +1,14 @@
+class AddSampleColumns < ActiveRecord::Migration
+ def self.up
+ add_column :posts, :sample_width, :integer
+ add_column :posts, :sample_height, :integer
+ add_column :users, :show_samples, :boolean
+ end
+
+ def self.down
+ remove_column :posts, :sample_width
+ remove_column :posts, :sample_height
+ remove_column :users, :show_samples
+ end
+end
+
diff --git a/db/migrate/076_create_user_blacklisted_tags.rb b/db/migrate/076_create_user_blacklisted_tags.rb
new file mode 100644
index 00000000..df7d55d0
--- /dev/null
+++ b/db/migrate/076_create_user_blacklisted_tags.rb
@@ -0,0 +1,35 @@
+class User < ActiveRecord::Base
+end
+
+class UserBlacklistedTags < ActiveRecord::Base
+end
+
+class CreateUserBlacklistedTags < ActiveRecord::Migration
+ def self.up
+ create_table :user_blacklisted_tags do |t|
+ t.column :user_id, :integer, :null => false
+ t.column :tags, :text, :null => false
+ end
+
+ add_index :user_blacklisted_tags, :user_id
+
+ add_foreign_key :user_blacklisted_tags, :user_id, :users, :id, :on_delete => :cascade
+ UserBlacklistedTags.reset_column_information
+
+ User.find(:all, :order => "id").each do |user|
+ unless user[:blacklisted_tags].blank?
+ tags = user[:blacklisted_tags].scan(/\S+/).each do |tag|
+ UserBlacklistedTags.create(:user_id => user.id, :tags => tag)
+ end
+ end
+ end
+
+ remove_column :users, :blacklisted_tags
+ end
+
+ def self.down
+ drop_table :user_blacklisted_tags
+ add_column :users, :blacklisted_tags, :text, :null => false, :default => ""
+ end
+end
+
diff --git a/db/migrate/077_create_server_keys.rb b/db/migrate/077_create_server_keys.rb
new file mode 100644
index 00000000..32c2f9f9
--- /dev/null
+++ b/db/migrate/077_create_server_keys.rb
@@ -0,0 +1,22 @@
+require 'digest/sha1'
+
+class CreateServerKeys < ActiveRecord::Migration
+ def self.up
+ create_table :server_keys do |t|
+ t.column :name, :string, :null => false
+ t.column :value, :text
+ end
+
+ add_index :server_keys, :name, :unique => true
+
+    session_secret_key = CONFIG["session_secret_key"] || Digest::SHA1.hexdigest(rand(10 ** 32).to_s)
+    user_password_salt = CONFIG["password_salt"] || Digest::SHA1.hexdigest(rand(10 ** 32).to_s)
+
+ execute "insert into server_keys (name, value) values ('session_secret_key', '#{session_secret_key}')"
+ execute "insert into server_keys (name, value) values ('user_password_salt', '#{user_password_salt}')"
+ end
+
+ def self.down
+ drop_table :server_keys
+ end
+end
diff --git a/db/migrate/078_add_neighbors_to_pools.rb b/db/migrate/078_add_neighbors_to_pools.rb
new file mode 100644
index 00000000..0573b23c
--- /dev/null
+++ b/db/migrate/078_add_neighbors_to_pools.rb
@@ -0,0 +1,34 @@
+class PoolPost < ActiveRecord::Base
+ set_table_name "pools_posts"
+ belongs_to :pool
+end
+
+class Pool < ActiveRecord::Base
+ has_many :pool_posts, :class_name => "PoolPost", :order => "sequence"
+end
+
+class AddNeighborsToPools < ActiveRecord::Migration
+ def self.up
+ add_column :pools_posts, :next_post_id, :integer
+ add_column :pools_posts, :prev_post_id, :integer
+ add_foreign_key :pools_posts, :next_post_id, :posts, :id, :on_delete => :set_null
+ add_foreign_key :pools_posts, :prev_post_id, :posts, :id, :on_delete => :set_null
+
+ PoolPost.reset_column_information
+
+ Pool.find(:all).each do |pool|
+ pp = pool.pool_posts
+
+ pp.each_index do |i|
+ pp[i].next_post_id = pp[i + 1].post_id unless i == pp.size - 1
+ pp[i].prev_post_id = pp[i - 1].post_id unless i == 0
+ pp[i].save
+ end
+ end
+ end
+
+ def self.down
+ remove_column :pools_posts, :next_post_id
+ remove_column :pools_posts, :prev_post_id
+ end
+end
diff --git a/db/migrate/079_create_post_change_seq.rb b/db/migrate/079_create_post_change_seq.rb
new file mode 100644
index 00000000..3b71a0bc
--- /dev/null
+++ b/db/migrate/079_create_post_change_seq.rb
@@ -0,0 +1,14 @@
+class CreatePostChangeSeq < ActiveRecord::Migration
+ def self.up
+ execute "CREATE SEQUENCE post_change_seq INCREMENT BY 1 CACHE 10;"
+ execute "ALTER TABLE posts ADD COLUMN change_seq INTEGER DEFAULT nextval('post_change_seq'::regclass) NOT NULL;"
+ execute "ALTER SEQUENCE post_change_seq OWNED BY posts.change_seq"
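+    # OWNED BY ties the sequence to the column, so dropping posts.change_seq drops the sequence as well.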
+ add_index :posts, :change_seq
+ end
+
+ def self.down
+ remove_index :posts, :change_seq
+ execute "ALTER TABLE posts DROP COLUMN change_seq;"
+ end
+end
+
diff --git a/db/migrate/080_remove_neighbor_fields_from_posts.rb b/db/migrate/080_remove_neighbor_fields_from_posts.rb
new file mode 100644
index 00000000..44d0726f
--- /dev/null
+++ b/db/migrate/080_remove_neighbor_fields_from_posts.rb
@@ -0,0 +1,11 @@
+class RemoveNeighborFieldsFromPosts < ActiveRecord::Migration
+ def self.up
+ remove_column :posts, :next_post_id
+ remove_column :posts, :prev_post_id
+ end
+
+ def self.down
+ add_column :posts, :next_post_id, :integer
+ add_column :posts, :prev_post_id, :integer
+ end
+end
diff --git a/db/migrate/081_disable_change_seq_cache.rb b/db/migrate/081_disable_change_seq_cache.rb
new file mode 100644
index 00000000..2f5a28e6
--- /dev/null
+++ b/db/migrate/081_disable_change_seq_cache.rb
@@ -0,0 +1,12 @@
+class DisableChangeSeqCache < ActiveRecord::Migration
+ def self.up
+ execute "ALTER SEQUENCE post_change_seq CACHE 1"
+ execute "ALTER TABLE posts ALTER COLUMN change_seq DROP NOT NULL"
+ end
+
+ def self.down
+ execute "ALTER SEQUENCE post_change_seq CACHE 10"
+ execute "ALTER TABLE posts ALTER COLUMN change_seq SET NOT NULL"
+ end
+end
+
diff --git a/db/migrate/082_add_post_votes.rb b/db/migrate/082_add_post_votes.rb
new file mode 100644
index 00000000..f2172770
--- /dev/null
+++ b/db/migrate/082_add_post_votes.rb
@@ -0,0 +1,33 @@
+require 'activerecord.rb'
+
+class AddPostVotes < ActiveRecord::Migration
+ def self.up
+ create_table :post_votes do |t|
+ t.column :user_id, :integer, :null => false
+ t.foreign_key :user_id, :users, :id, :on_delete => :cascade
+ t.column :post_id, :integer, :null => false
+ t.foreign_key :post_id, :posts, :id, :on_delete => :cascade
+ t.column :score, :integer, :null => false, :default => 0
+ t.column :updated_at, :timestamp, :null => false, :default => "now()"
+ end
+
+ # This should probably be the primary key, but ActiveRecord assumes the primary
+ # key is a single column.
+ execute "ALTER TABLE post_votes ADD UNIQUE (user_id, post_id)"
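+    # For reference only, the composite-key form would be:
+    #   ALTER TABLE post_votes ADD PRIMARY KEY (user_id, post_id);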
+
+ add_index :post_votes, :user_id
+ add_index :post_votes, :post_id
+
+ add_column :posts, :last_vote, :integer, :null => false, :default => 0
+ add_column :posts, :anonymous_votes, :integer, :null => false, :default => 0
+
+ # Set anonymous_votes = score - num favorited
+ execute "UPDATE posts SET anonymous_votes = posts.score - (SELECT COUNT(*) FROM favorites f WHERE f.post_id = posts.id)"
+ end
+ def self.down
+ drop_table :post_votes
+ remove_column :posts, :last_vote
+ remove_column :posts, :anonymous_votes
+ end
+end
+
diff --git a/db/migrate/083_add_user_id_to_aliases_and_implicatons.rb b/db/migrate/083_add_user_id_to_aliases_and_implicatons.rb
new file mode 100644
index 00000000..e4b3ab56
--- /dev/null
+++ b/db/migrate/083_add_user_id_to_aliases_and_implicatons.rb
@@ -0,0 +1,13 @@
+class AddUserIdToAliasesAndImplicatons < ActiveRecord::Migration
+ def self.up
+ add_column "tag_aliases", "creator_id", :integer
+ add_column "tag_implications", "creator_id", :integer
+ add_foreign_key "tag_aliases", "creator_id", "users", "id", :on_delete => :cascade
+ add_foreign_key "tag_implications", "creator_id", "users", "id", :on_delete => :cascade
+ end
+
+ def self.down
+ remove_column "tag_aliases", "creator_id"
+ remove_column "tag_implications", "creator_id"
+ end
+end
diff --git a/db/migrate/084_add_user_id_index_on_post_tag_histories.rb b/db/migrate/084_add_user_id_index_on_post_tag_histories.rb
new file mode 100644
index 00000000..fabc4f23
--- /dev/null
+++ b/db/migrate/084_add_user_id_index_on_post_tag_histories.rb
@@ -0,0 +1,9 @@
+class AddUserIdIndexOnPostTagHistories < ActiveRecord::Migration
+ def self.up
+ add_index "post_tag_histories", "user_id"
+ end
+
+ def self.down
+ remove_index "post_tag_histories", "user_id"
+ end
+end
diff --git a/db/migrate/085_revert_post_votes.rb b/db/migrate/085_revert_post_votes.rb
new file mode 100644
index 00000000..0e8705ae
--- /dev/null
+++ b/db/migrate/085_revert_post_votes.rb
@@ -0,0 +1,8 @@
+class RevertPostVotes < ActiveRecord::Migration
+ # bad change disabled
+ def self.down
+ end
+
+ def self.up
+ end
+end
diff --git a/db/migrate/086_add_is_active_field_to_pools.rb b/db/migrate/086_add_is_active_field_to_pools.rb
new file mode 100644
index 00000000..16d90de8
--- /dev/null
+++ b/db/migrate/086_add_is_active_field_to_pools.rb
@@ -0,0 +1,9 @@
+class AddIsActiveFieldToPools < ActiveRecord::Migration
+ def self.up
+ add_column :pools, :is_active, :boolean, :null => false, :default => true
+ end
+
+ def self.down
+ remove_column :pools, :is_active
+ end
+end
diff --git a/db/migrate/087_add_dimensions_index_on_posts.rb b/db/migrate/087_add_dimensions_index_on_posts.rb
new file mode 100644
index 00000000..15913e9c
--- /dev/null
+++ b/db/migrate/087_add_dimensions_index_on_posts.rb
@@ -0,0 +1,13 @@
+class AddDimensionsIndexOnPosts < ActiveRecord::Migration
+ def self.up
+ add_index "posts", "width"
+ add_index "posts", "height"
+ execute "CREATE INDEX posts_mpixels ON posts ((width*height/1000000.0))"
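+    # In practice the planner only uses this expression index when a query filters on
+    # the same expression, e.g. WHERE width*height/1000000.0 >= 10.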
+ end
+
+ def self.down
+ remove_index "posts", "width"
+ remove_index "posts", "height"
+ execute "DROP INDEX posts_mpixels"
+ end
+end
diff --git a/db/migrate/088_add_approver_id_to_posts.rb b/db/migrate/088_add_approver_id_to_posts.rb
new file mode 100644
index 00000000..ebae0ba8
--- /dev/null
+++ b/db/migrate/088_add_approver_id_to_posts.rb
@@ -0,0 +1,10 @@
+class AddApproverIdToPosts < ActiveRecord::Migration
+ def self.up
+ add_column :posts, :approver_id, :integer
+ add_foreign_key :posts, :approver_id, :users, :id, :on_delete => :set_null
+ end
+
+ def self.down
+ remove_column :posts, :approver_id
+ end
+end
diff --git a/db/migrate/089_add_index_on_post_source.rb b/db/migrate/089_add_index_on_post_source.rb
new file mode 100644
index 00000000..4dae2aeb
--- /dev/null
+++ b/db/migrate/089_add_index_on_post_source.rb
@@ -0,0 +1,9 @@
+class AddIndexOnPostSource < ActiveRecord::Migration
+ def self.up
+ add_index :posts, :source
+ end
+
+ def self.down
+ remove_index :posts, :source
+ end
+end
diff --git a/db/migrate/090_add_rating_to_tag_history.rb b/db/migrate/090_add_rating_to_tag_history.rb
new file mode 100644
index 00000000..9259b846
--- /dev/null
+++ b/db/migrate/090_add_rating_to_tag_history.rb
@@ -0,0 +1,9 @@
+class AddRatingToTagHistory < ActiveRecord::Migration
+ def self.up
+# execute "ALTER TABLE post_tag_histories ADD COLUMN rating CHARACTER"
+ end
+
+ def self.down
+# remove_column :post_tag_histories, :rating
+ end
+end
diff --git a/db/migrate/091_migrate_users_to_contributor.rb b/db/migrate/091_migrate_users_to_contributor.rb
new file mode 100644
index 00000000..83c302cf
--- /dev/null
+++ b/db/migrate/091_migrate_users_to_contributor.rb
@@ -0,0 +1,17 @@
+class MigrateUsersToContributor < ActiveRecord::Migration
+ def self.up
+ User.find(:all, :conditions => "level = 30").each do |user|
+ post_count = Post.count(:conditions => ["user_id = ? AND status <> 'deleted'", user.id])
+
+ if post_count > 50
+ user.update_attribute(:level, 33)
+ end
+ end
+
+ User.update_all("invite_count = 0", "level < 35")
+ end
+
+ def self.down
+ User.update_all("level = 30", "level = 33")
+ end
+end
diff --git a/db/migrate/092_create_job_tasks.rb b/db/migrate/092_create_job_tasks.rb
new file mode 100644
index 00000000..81c79e6d
--- /dev/null
+++ b/db/migrate/092_create_job_tasks.rb
@@ -0,0 +1,15 @@
+class CreateJobTasks < ActiveRecord::Migration
+ def self.up
+ create_table :job_tasks do |t|
+ t.column :task_type, :string, :null => false
+ t.column :data_as_json, :string, :null => false
+ t.column :status, :string, :null => false
+ t.column :status_message, :text
+ t.timestamps
+ end
+ end
+
+ def self.down
+ drop_table :job_tasks
+ end
+end
diff --git a/db/migrate/093_change_type_of_data_in_job_tasks.rb b/db/migrate/093_change_type_of_data_in_job_tasks.rb
new file mode 100644
index 00000000..3e2043da
--- /dev/null
+++ b/db/migrate/093_change_type_of_data_in_job_tasks.rb
@@ -0,0 +1,11 @@
+class ChangeTypeOfDataInJobTasks < ActiveRecord::Migration
+ def self.up
+ remove_column :job_tasks, :data_as_json
+ add_column :job_tasks, :data_as_json, :text, :null => false, :default => "{}"
+ end
+
+ def self.down
+ remove_column :job_tasks, :data_as_json
+ add_column :job_tasks, :data_as_json, :string, :null => false, :default => "{}"
+ end
+end
diff --git a/db/migrate/094_favorite_tags.rb b/db/migrate/094_favorite_tags.rb
new file mode 100644
index 00000000..cd186c0b
--- /dev/null
+++ b/db/migrate/094_favorite_tags.rb
@@ -0,0 +1,15 @@
+class FavoriteTags < ActiveRecord::Migration
+ def self.up
+ create_table :favorite_tags do |t|
+ t.column :user_id, :integer, :null => false
+ t.column :tag_query, :text, :null => false
+ t.column :cached_post_ids, :text, :null => false, :default => ""
+ end
+
+ add_index :favorite_tags, :user_id
+ end
+
+ def self.down
+ drop_table :favorite_tags
+ end
+end
diff --git a/db/migrate/095_add_repeat_count_to_job_tasks.rb b/db/migrate/095_add_repeat_count_to_job_tasks.rb
new file mode 100644
index 00000000..f514bef1
--- /dev/null
+++ b/db/migrate/095_add_repeat_count_to_job_tasks.rb
@@ -0,0 +1,11 @@
+class AddRepeatCountToJobTasks < ActiveRecord::Migration
+ def self.up
+ add_column :job_tasks, :repeat_count, :integer, :null => false, :default => 0
+ JobTask.create(:task_type => "calculate_favorite_tags", :status => "pending", :repeat_count => -1)
+ end
+
+ def self.down
+ remove_column :job_tasks, :repeat_count
+ JobTask.destroy_all(["task_type = 'calculate_favorite_tags'"])
+ end
+end
diff --git a/db/migrate/096_create_advertisements.rb b/db/migrate/096_create_advertisements.rb
new file mode 100644
index 00000000..bcbfe601
--- /dev/null
+++ b/db/migrate/096_create_advertisements.rb
@@ -0,0 +1,27 @@
+class CreateAdvertisements < ActiveRecord::Migration
+ def self.up
+ create_table :advertisements do |t|
+ t.column :image_url, :string, :null => false
+ t.column :referral_url, :string, :null => false
+ t.column :ad_type, :string, :null => false
+ t.column :status, :string, :null => false
+ t.column :hit_count, :integer, :null => false, :default => 0
+ t.column :width, :integer, :null => false
+ t.column :height, :integer, :null => false
+ end
+
+ execute "insert into advertisements (image_url, referral_url, ad_type, status, hit_count, width, height) values ('/images/180x300_1.jpg', 'http://affiliates.jlist.com/click/2253?url=http://www.jlist.com/index.html', 'vertical', 'active', 0, 180, 300)"
+ execute "insert into advertisements (image_url, referral_url, ad_type, status, hit_count, width, height) values ('/images/180x300_2.jpg', 'http://affiliates.jlist.com/click/2253?url=http://www.jlist.com/index.html', 'vertical', 'active', 0, 180, 300)"
+ execute "insert into advertisements (image_url, referral_url, ad_type, status, hit_count, width, height) values ('/images/180x300_3.jpg', 'http://affiliates.jlist.com/click/2253?url=http://www.jlist.com/index.html', 'vertical', 'active', 0, 180, 300)"
+
+ execute "insert into advertisements (image_url, referral_url, ad_type, status, hit_count, width, height) values ('/images/728x90_1.jpg', 'http://affiliates.jlist.com/click/2253?url=http://www.jlist.com/index.html', 'horizontal', 'active', 0, 728, 90)"
+ execute "insert into advertisements (image_url, referral_url, ad_type, status, hit_count, width, height) values ('/images/728x90_2.jpg', 'http://affiliates.jlist.com/click/2253?url=http://www.jlist.com/index.html', 'horizontal', 'active', 0, 728, 90)"
+ execute "insert into advertisements (image_url, referral_url, ad_type, status, hit_count, width, height) values ('/images/728x90_3.jpg', 'http://affiliates.jlist.com/click/2253?url=http://www.jlist.com/index.html', 'horizontal', 'active', 0, 728, 90)"
+ execute "insert into advertisements (image_url, referral_url, ad_type, status, hit_count, width, height) values ('/images/728x90_4.jpg', 'http://affiliates.jlist.com/click/2253?url=http://www.jlist.com/index.html', 'horizontal', 'active', 0, 728, 90)"
+
+ end
+
+ def self.down
+ drop_table :advertisements
+ end
+end
diff --git a/db/migrate/20080901000000_no_really_add_post_votes.rb b/db/migrate/20080901000000_no_really_add_post_votes.rb
new file mode 100644
index 00000000..12baa2b5
--- /dev/null
+++ b/db/migrate/20080901000000_no_really_add_post_votes.rb
@@ -0,0 +1,38 @@
+require 'activerecord.rb'
+
+# Upstream 085 removes post votes. Ours doesn't. Ours is right. If the site was
+# migrated with our migrations, leave things alone; we're all set. If the site was
+# migrated with upstream 085, the post_votes table is missing and needs to be
+# recreated.
+class NoReallyAddPostVotes < ActiveRecord::Migration
+ def self.up
+ return if select_value_sql "SELECT 1 FROM information_schema.tables WHERE table_name = 'post_votes'"
+
+ # We don't have this table, so this migration is needed.
+ create_table :post_votes do |t|
+ t.column :user_id, :integer, :null => false
+ t.foreign_key :user_id, :users, :id, :on_delete => :cascade
+ t.column :post_id, :integer, :null => false
+ t.foreign_key :post_id, :posts, :id, :on_delete => :cascade
+ t.column :score, :integer, :null => false, :default => 0
+ t.column :updated_at, :timestamp, :null => false, :default => "now()"
+ end
+
+ # This should probably be the primary key, but ActiveRecord assumes the primary
+ # key is a single column.
+ execute "ALTER TABLE post_votes ADD UNIQUE (user_id, post_id)"
+
+ add_index :post_votes, :user_id
+ add_index :post_votes, :post_id
+
+ add_column :posts, :last_vote, :integer, :null => false, :default => 0
+ add_column :posts, :anonymous_votes, :integer, :null => false, :default => 0
+
+ # Set anonymous_votes = score - num favorited
+ execute "UPDATE posts SET anonymous_votes = posts.score - (SELECT COUNT(*) FROM favorites f WHERE f.post_id = posts.id)"
+ end
+
+ def self.down
+ end
+end
+
diff --git a/db/migrate/20080927145957_make_wiki_titles_unique.rb b/db/migrate/20080927145957_make_wiki_titles_unique.rb
new file mode 100644
index 00000000..e10fbbbd
--- /dev/null
+++ b/db/migrate/20080927145957_make_wiki_titles_unique.rb
@@ -0,0 +1,11 @@
+class MakeWikiTitlesUnique < ActiveRecord::Migration
+ def self.up
+ execute "DROP INDEX idx_wiki_pages__title"
+ execute "CREATE UNIQUE INDEX idx_wiki_pages__title ON wiki_pages (title)"
+ end
+
+ def self.down
+ execute "DROP INDEX idx_wiki_pages__title"
+ execute "CREATE INDEX idx_wiki_pages__title ON wiki_pages (title)"
+ end
+end
diff --git a/db/migrate/20081015004825_create_user_log.rb b/db/migrate/20081015004825_create_user_log.rb
new file mode 100644
index 00000000..83eb2da8
--- /dev/null
+++ b/db/migrate/20081015004825_create_user_log.rb
@@ -0,0 +1,19 @@
+class CreateUserLog < ActiveRecord::Migration
+ def self.up
+ execute <<-EOS
+ CREATE TABLE user_logs (
+ id SERIAL PRIMARY KEY,
+ user_id integer NOT NULL REFERENCES users ON DELETE CASCADE,
+ created_at timestamp NOT NULL DEFAULT now(),
+ ip_addr inet NOT NULL
+ )
+ EOS
+
+ add_index :user_logs, :user_id
+ add_index :user_logs, :created_at
+ end
+
+ def self.down
+ drop_table :user_logs
+ end
+end
diff --git a/db/migrate/20081015004855_add_random_to_posts.rb b/db/migrate/20081015004855_add_random_to_posts.rb
new file mode 100644
index 00000000..d6af9692
--- /dev/null
+++ b/db/migrate/20081015004855_add_random_to_posts.rb
@@ -0,0 +1,10 @@
+class AddRandomToPosts < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE posts ADD COLUMN random REAL DEFAULT RANDOM() NOT NULL;"
+ add_index :posts, :random
+ end
+
+ def self.down
+ execute "ALTER TABLE posts DROP COLUMN random;"
+ end
+end
diff --git a/db/migrate/20081015004938_convert_favorites_to_votes.rb b/db/migrate/20081015004938_convert_favorites_to_votes.rb
new file mode 100644
index 00000000..cf801bbc
--- /dev/null
+++ b/db/migrate/20081015004938_convert_favorites_to_votes.rb
@@ -0,0 +1,24 @@
+require 'post'
+
+class ConvertFavoritesToVotes < ActiveRecord::Migration
+ def self.up
+ # Favorites doesn't have a dupe constraint and post_votes does, so make sure
+ # there are no dupes before we copy.
+ execute "DELETE FROM favorites " +
+ "WHERE id IN (" +
+ "SELECT f.id FROM favorites f, favorites f2 " +
+ " WHERE f.user_id = f2.user_id AND " +
+ " f.post_id = f2.post_id AND " +
+ " f.id <> f2.id AND f.id > f2.id)"
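+    # (The self-join above deletes the higher-id duplicate, keeping the earliest favorite.)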
+ execute "DELETE FROM post_votes pv WHERE pv.id IN " +
+ " (SELECT pv.id FROM post_votes pv JOIN favorites f ON (pv.user_id = f.user_id AND pv.post_id = f.post_id))"
+ execute "INSERT INTO post_votes (user_id, post_id, score, updated_at) " +
+ " SELECT f.user_id, f.post_id, 3, f.created_at FROM favorites f"
+ p Post
+ debugger
+ Post.recalculate_score
+ end
+
+ def self.down
+ end
+end
diff --git a/db/migrate/20081015005018_create_ip_bans.rb b/db/migrate/20081015005018_create_ip_bans.rb
new file mode 100644
index 00000000..b490bd45
--- /dev/null
+++ b/db/migrate/20081015005018_create_ip_bans.rb
@@ -0,0 +1,20 @@
+class CreateIpBans < ActiveRecord::Migration
+ def self.up
+ execute <<-EOS
+ CREATE TABLE ip_bans (
+ id SERIAL PRIMARY KEY,
+ created_at timestamp NOT NULL DEFAULT now(),
+ expires_at timestamp,
+ ip_addr inet NOT NULL,
+ reason text NOT NULL,
+ banned_by integer NOT NULL
+ )
+ EOS
+ add_foreign_key "ip_bans", "banned_by", "users", "id", :on_delete => :cascade
+ add_index :ip_bans, :ip_addr
+ end
+
+ def self.down
+ execute "DROP TABLE ip_bans"
+ end
+end
diff --git a/db/migrate/20081015005051_add_avatar_to_user.rb b/db/migrate/20081015005051_add_avatar_to_user.rb
new file mode 100644
index 00000000..81a4b947
--- /dev/null
+++ b/db/migrate/20081015005051_add_avatar_to_user.rb
@@ -0,0 +1,25 @@
+class AddAvatarToUser < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE users ADD COLUMN avatar_post_id INTEGER"
+ execute "ALTER TABLE users ADD COLUMN avatar_width REAL"
+ execute "ALTER TABLE users ADD COLUMN avatar_height REAL"
+ execute "ALTER TABLE users ADD COLUMN avatar_top REAL"
+ execute "ALTER TABLE users ADD COLUMN avatar_bottom REAL"
+ execute "ALTER TABLE users ADD COLUMN avatar_left REAL"
+ execute "ALTER TABLE users ADD COLUMN avatar_right REAL"
+ execute "ALTER TABLE users ADD COLUMN avatar_timestamp TIMESTAMP"
+
+ add_foreign_key "users", "avatar_post_id", "posts", "id", :on_delete => :set_null
+ add_index :users, :avatar_post_id
+ end
+
+ def self.down
+ execute "ALTER TABLE users DROP COLUMN avatar_post_id"
+ execute "ALTER TABLE users DROP COLUMN avatar_top"
+ execute "ALTER TABLE users DROP COLUMN avatar_bottom"
+ execute "ALTER TABLE users DROP COLUMN avatar_left"
+ execute "ALTER TABLE users DROP COLUMN avatar_right"
+ execute "ALTER TABLE users DROP COLUMN avatar_width"
+    execute "ALTER TABLE users DROP COLUMN avatar_height"
+    execute "ALTER TABLE users DROP COLUMN avatar_timestamp"
+ end
+end
diff --git a/db/migrate/20081015005124_add_last_comment_read_at_to_user.rb b/db/migrate/20081015005124_add_last_comment_read_at_to_user.rb
new file mode 100644
index 00000000..ed9da3b5
--- /dev/null
+++ b/db/migrate/20081015005124_add_last_comment_read_at_to_user.rb
@@ -0,0 +1,9 @@
+class AddLastCommentReadAtToUser < ActiveRecord::Migration
+ def self.up
+ execute "alter table users add column last_comment_read_at timestamp not null default '1960-01-01'"
+ end
+
+ def self.down
+    remove_column :users, :last_comment_read_at
+ end
+end
diff --git a/db/migrate/20081015005201_add_history_table.rb b/db/migrate/20081015005201_add_history_table.rb
new file mode 100644
index 00000000..05310a7e
--- /dev/null
+++ b/db/migrate/20081015005201_add_history_table.rb
@@ -0,0 +1,60 @@
+class AddHistoryTable < ActiveRecord::Migration
+ def self.up
+ execute <<-EOS
+ CREATE TABLE history_changes (
+ id SERIAL PRIMARY KEY,
+ field TEXT NOT NULL,
+ remote_id INTEGER NOT NULL,
+ table_name TEXT NOT NULL,
+ value TEXT,
+ history_id INTEGER NOT NULL,
+ previous_id INTEGER
+ )
+ EOS
+
+ execute <<-EOS
+ CREATE TABLE histories (
+ id SERIAL PRIMARY KEY,
+ created_at TIMESTAMP NOT NULL DEFAULT now(),
+ user_id INTEGER,
+ group_by_id INTEGER NOT NULL,
+ group_by_table TEXT NOT NULL
+ )
+ EOS
+
+ # cleanup_history entries can be deleted by a rule (see update_versioned_tables). When
+ # the last change for a history is deleted, delete the history, so it doesn't show up
+ # as an empty line in the history list.
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION trg_purge_histories() RETURNS "trigger" AS $$
+ BEGIN
+ DELETE FROM histories h WHERE h.id = OLD.history_id AND
+ (SELECT COUNT(*) FROM history_changes hc WHERE hc.history_id = OLD.history_id LIMIT 1) = 0;
+ RETURN OLD;
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+ execute "CREATE TRIGGER trg_cleanup_history AFTER DELETE ON history_changes FOR EACH ROW EXECUTE PROCEDURE trg_purge_histories()"
+
+ add_foreign_key :history_changes, :history_id, :histories, :id, :on_delete => :cascade
+ add_foreign_key :history_changes, :previous_id, :history_changes, :id, :on_delete => :set_null
+
+ add_index :histories, :group_by_table
+ add_index :histories, :group_by_id
+ add_index :histories, :user_id
+ add_index :histories, :created_at
+ add_index :history_changes, :table_name
+ add_index :history_changes, :remote_id
+ add_index :history_changes, :history_id
+
+ add_column :pools_posts, :active, :boolean, :default => true, :null => false
+ add_index :pools_posts, :active
+ end
+
+ def self.down
+ execute "DROP TABLE history_changes CASCADE"
+ execute "DROP TABLE histories"
+ remove_column :pools_posts, :active
+ end
+end
+
diff --git a/db/migrate/20081015005919_import_post_tag_histories.rb b/db/migrate/20081015005919_import_post_tag_histories.rb
new file mode 100644
index 00000000..d8c82430
--- /dev/null
+++ b/db/migrate/20081015005919_import_post_tag_histories.rb
@@ -0,0 +1,8 @@
+class ImportPostTagHistories < ActiveRecord::Migration
+ def self.up
+ ActiveRecord::Base.import_post_tag_history
+ end
+
+ def self.down
+ end
+end
diff --git a/db/migrate/20081015010657_update_histories.rb b/db/migrate/20081015010657_update_histories.rb
new file mode 100644
index 00000000..dff4df82
--- /dev/null
+++ b/db/migrate/20081015010657_update_histories.rb
@@ -0,0 +1,8 @@
+class UpdateHistories < ActiveRecord::Migration
+ def self.up
+ ActiveRecord::Base.update_all_versioned_tables
+ end
+
+ def self.down
+ end
+end
diff --git a/db/migrate/20081016002814_post_source_not_null.rb b/db/migrate/20081016002814_post_source_not_null.rb
new file mode 100644
index 00000000..d67807e5
--- /dev/null
+++ b/db/migrate/20081016002814_post_source_not_null.rb
@@ -0,0 +1,11 @@
+class PostSourceNotNull < ActiveRecord::Migration
+ def self.up
+ execute "UPDATE posts SET source='' WHERE source IS NULL"
+ execute "UPDATE history_changes SET value='' WHERE table_name='posts' AND field='source' AND value IS NULL"
+ execute "ALTER TABLE posts ALTER COLUMN source SET NOT NULL"
+ end
+
+ def self.down
+ execute "ALTER TABLE posts ALTER COLUMN source DROP NOT NULL"
+ end
+end
diff --git a/db/migrate/20081018175545_post_source_default.rb b/db/migrate/20081018175545_post_source_default.rb
new file mode 100644
index 00000000..89b4eb01
--- /dev/null
+++ b/db/migrate/20081018175545_post_source_default.rb
@@ -0,0 +1,9 @@
+class PostSourceDefault < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE posts ALTER COLUMN source SET DEFAULT ''"
+ end
+
+ def self.down
+ execute "ALTER TABLE posts ALTER COLUMN source DROP DEFAULT"
+ end
+end
diff --git a/db/migrate/20081023224739_add_mirror_posts_to_job_tasks.rb b/db/migrate/20081023224739_add_mirror_posts_to_job_tasks.rb
new file mode 100644
index 00000000..ffca571d
--- /dev/null
+++ b/db/migrate/20081023224739_add_mirror_posts_to_job_tasks.rb
@@ -0,0 +1,9 @@
+class AddMirrorPostsToJobTasks < ActiveRecord::Migration
+ def self.up
+ JobTask.create(:task_type => "upload_posts_to_mirrors", :status => "pending", :repeat_count => -1)
+ end
+
+ def self.down
+ JobTask.destroy_all(["task_type = 'upload_posts_to_mirrors'"])
+ end
+end
diff --git a/db/migrate/20081024083115_pools_default_to_public.rb b/db/migrate/20081024083115_pools_default_to_public.rb
new file mode 100644
index 00000000..8eaf1473
--- /dev/null
+++ b/db/migrate/20081024083115_pools_default_to_public.rb
@@ -0,0 +1,9 @@
+class PoolsDefaultToPublic < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE pools ALTER COLUMN is_public SET DEFAULT TRUE"
+ end
+
+ def self.down
+ execute "ALTER TABLE pools ALTER COLUMN is_public SET DEFAULT FALSE"
+ end
+end
diff --git a/db/migrate/20081024223856_add_old_level_to_bans.rb b/db/migrate/20081024223856_add_old_level_to_bans.rb
new file mode 100644
index 00000000..4e307774
--- /dev/null
+++ b/db/migrate/20081024223856_add_old_level_to_bans.rb
@@ -0,0 +1,9 @@
+class AddOldLevelToBans < ActiveRecord::Migration
+ def self.up
+ add_column :bans, :old_level, :integer
+ end
+
+ def self.down
+ remove_column :bans, :old_level
+ end
+end
diff --git a/db/migrate/20081025222424_add_fts_to_comments.rb b/db/migrate/20081025222424_add_fts_to_comments.rb
new file mode 100644
index 00000000..2888069a
--- /dev/null
+++ b/db/migrate/20081025222424_add_fts_to_comments.rb
@@ -0,0 +1,13 @@
+class AddFtsToComments < ActiveRecord::Migration
+ def self.up
+ execute "alter table comments add column text_search_index tsvector"
+ execute "update comments set text_search_index = to_tsvector('english', body)"
+ execute "create trigger trg_comment_search_update before insert or update on comments for each row execute procedure tsvector_update_trigger(text_search_index, 'pg_catalog.english', body)"
+ execute "create index comments_text_search_idx on comments using gin(text_search_index)"
+ end
+
+ def self.down
+ execute "drop trigger trg_comment_search_update on comments"
+ execute "alter table comments drop column text_search_index"
+ end
+end
diff --git a/db/migrate/20081105030832_add_periodic_maintenance_to_job_tasks.rb b/db/migrate/20081105030832_add_periodic_maintenance_to_job_tasks.rb
new file mode 100644
index 00000000..1e3ad239
--- /dev/null
+++ b/db/migrate/20081105030832_add_periodic_maintenance_to_job_tasks.rb
@@ -0,0 +1,9 @@
+class AddPeriodicMaintenanceToJobTasks < ActiveRecord::Migration
+ def self.up
+ JobTask.create(:task_type => "periodic_maintenance", :status => "pending", :repeat_count => -1)
+ end
+
+ def self.down
+ JobTask.destroy_all(["task_type = 'periodic_maintenance'"])
+ end
+end
diff --git a/db/migrate/20081122055610_add_last_deleted_post_seen_at.rb b/db/migrate/20081122055610_add_last_deleted_post_seen_at.rb
new file mode 100644
index 00000000..3d394d47
--- /dev/null
+++ b/db/migrate/20081122055610_add_last_deleted_post_seen_at.rb
@@ -0,0 +1,14 @@
+class AddLastDeletedPostSeenAt < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE users ADD COLUMN last_deleted_post_seen_at timestamp not null default '1960-01-01'"
+ add_index :flagged_post_details, :created_at
+
+ # Set all existing users to now, so we don't notify everyone of previous deletions.
+ execute "UPDATE users SET last_deleted_post_seen_at=now()"
+ end
+
+ def self.down
+ remove_column :users, :last_deleted_post_seen_at
+ remove_index :flagged_post_details, :created_at
+ end
+end
diff --git a/db/migrate/20081130190723_add_file_size_to_posts.rb b/db/migrate/20081130190723_add_file_size_to_posts.rb
new file mode 100644
index 00000000..723e6422
--- /dev/null
+++ b/db/migrate/20081130190723_add_file_size_to_posts.rb
@@ -0,0 +1,21 @@
+class AddFileSizeToPosts < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE posts ADD COLUMN file_size INTEGER NOT NULL DEFAULT 0"
+ execute "ALTER TABLE posts ADD COLUMN sample_size INTEGER NOT NULL DEFAULT 0"
+
+ p "Updating file sizes..."
+ Post.find(:all, :order => "ID ASC").each do |post|
+ update = []
+ update << "file_size=#{File.size(post.file_path) rescue 0}"
+ if post.has_sample?
+ update << "sample_size=#{File.size(post.sample_path) rescue 0}"
+ end
+ execute "UPDATE posts SET #{update.join(",")} WHERE id=#{post.id}"
+ end
+ end
+
+ def self.down
+ execute "ALTER TABLE posts DROP COLUMN file_size"
+ execute "ALTER TABLE posts DROP COLUMN sample_size"
+ end
+end
diff --git a/db/migrate/20081130191226_add_crc32_to_posts.rb b/db/migrate/20081130191226_add_crc32_to_posts.rb
new file mode 100644
index 00000000..70c15603
--- /dev/null
+++ b/db/migrate/20081130191226_add_crc32_to_posts.rb
@@ -0,0 +1,15 @@
+class AddCrc32ToPosts < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE posts ADD COLUMN crc32 BIGINT"
+ execute "ALTER TABLE posts ADD COLUMN sample_crc32 BIGINT"
+ execute "ALTER TABLE pools ADD COLUMN zip_created_at TIMESTAMP"
+ execute "ALTER TABLE pools ADD COLUMN zip_is_warehoused BOOLEAN NOT NULL DEFAULT FALSE"
+ end
+
+ def self.down
+ execute "ALTER TABLE posts DROP COLUMN crc32"
+ execute "ALTER TABLE posts DROP COLUMN sample_crc32"
+ execute "ALTER TABLE pools DROP COLUMN zip_created_at"
+ execute "ALTER TABLE pools DROP COLUMN zip_is_warehoused"
+ end
+end
diff --git a/db/migrate/20081203035506_add_is_held_to_posts.rb b/db/migrate/20081203035506_add_is_held_to_posts.rb
new file mode 100644
index 00000000..dd7d1170
--- /dev/null
+++ b/db/migrate/20081203035506_add_is_held_to_posts.rb
@@ -0,0 +1,13 @@
+class AddIsHeldToPosts < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE posts ADD COLUMN is_held BOOLEAN NOT NULL DEFAULT FALSE"
+ execute "ALTER TABLE posts ADD COLUMN index_timestamp TIMESTAMP NOT NULL DEFAULT now()"
+ execute "UPDATE posts SET index_timestamp = created_at"
+ add_index :posts, :is_held
+ end
+
+ def self.down
+ execute "ALTER TABLE posts DROP COLUMN is_held"
+ execute "ALTER TABLE posts DROP COLUMN index_timestamp"
+ end
+end
diff --git a/db/migrate/20081204062728_add_shown_to_posts.rb b/db/migrate/20081204062728_add_shown_to_posts.rb
new file mode 100644
index 00000000..0b87cf04
--- /dev/null
+++ b/db/migrate/20081204062728_add_shown_to_posts.rb
@@ -0,0 +1,11 @@
+class AddShownToPosts < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE posts ADD COLUMN is_shown_in_index BOOLEAN NOT NULL DEFAULT TRUE"
+ ActiveRecord::Base.update_versioned_tables Post, :attrs => [:is_shown_in_index]
+ end
+
+ def self.down
+ execute "ALTER TABLE posts DROP COLUMN is_shown_in_index"
+ execute "DELETE FROM history_changes WHERE table_name = 'posts' AND field = 'is_shown_in_index'"
+ end
+end
diff --git a/db/migrate/20081205061033_add_natural_sort_to_pools.rb b/db/migrate/20081205061033_add_natural_sort_to_pools.rb
new file mode 100644
index 00000000..a4fe6e46
--- /dev/null
+++ b/db/migrate/20081205061033_add_natural_sort_to_pools.rb
@@ -0,0 +1,34 @@
+class AddNaturalSortToPools < ActiveRecord::Migration
+ def self.up
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION nat_sort_pad(t text) RETURNS text IMMUTABLE AS $$
+ DECLARE
+ match text;
+ BEGIN
+ IF t ~ '[0-9]' THEN
+ match := '0000000000' || t;
+ match := SUBSTRING(match FROM '^0*([0-9]{10}[0-9]*)$');
+ return match;
+ END IF;
+ return t;
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION nat_sort(t text) RETURNS text IMMUTABLE AS $$
+ BEGIN
+ return array_to_string(array(select nat_sort_pad((regexp_matches(t, '([0-9]+|[^0-9]+)', 'g'))[1])), '');
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+
+ execute "CREATE INDEX idx_pools__name_nat ON pools (nat_sort(name))"
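+    # For example, nat_sort('page 2') => 'page 0000000002', so 'page 2' sorts before 'page 10'.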
+ end
+
+ def self.down
+ execute "DROP INDEX idx_pools__name_nat"
+ execute "DROP FUNCTION nat_sort_pad(t text)"
+ execute "DROP FUNCTION nat_sort(t text)"
+ end
+end
diff --git a/db/migrate/20081205072029_add_index_timestamp_index.rb b/db/migrate/20081205072029_add_index_timestamp_index.rb
new file mode 100644
index 00000000..f56f2cc9
--- /dev/null
+++ b/db/migrate/20081205072029_add_index_timestamp_index.rb
@@ -0,0 +1,9 @@
+class AddIndexTimestampIndex < ActiveRecord::Migration
+ def self.up
+ add_index :posts, :index_timestamp
+ end
+
+ def self.down
+ remove_index :posts, :index_timestamp
+ end
+end
diff --git a/db/migrate/20081208220020_pool_sequence_as_string.rb b/db/migrate/20081208220020_pool_sequence_as_string.rb
new file mode 100644
index 00000000..8f4865c2
--- /dev/null
+++ b/db/migrate/20081208220020_pool_sequence_as_string.rb
@@ -0,0 +1,11 @@
+class PoolSequenceAsString < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE pools_posts ALTER COLUMN sequence TYPE TEXT"
+ execute "CREATE INDEX idx_pools_posts__sequence_nat ON pools_posts (nat_sort(sequence))"
+ end
+
+ def self.down
+ execute "DROP INDEX idx_pools_posts__sequence_nat"
+ execute "ALTER TABLE pools_posts ALTER COLUMN sequence TYPE INTEGER USING sequence::integer"
+ end
+end
diff --git a/db/migrate/20081209221550_add_slave_pool_posts.rb b/db/migrate/20081209221550_add_slave_pool_posts.rb
new file mode 100644
index 00000000..e859d7cd
--- /dev/null
+++ b/db/migrate/20081209221550_add_slave_pool_posts.rb
@@ -0,0 +1,21 @@
+require 'pool_post'
+
+class AddSlavePoolPosts < ActiveRecord::Migration
+ def self.up
+ execute "ALTER TABLE pools_posts ADD COLUMN master_id INTEGER REFERENCES pools_posts ON DELETE SET NULL"
+ execute "ALTER TABLE pools_posts ADD COLUMN slave_id INTEGER REFERENCES pools_posts ON DELETE SET NULL"
+
+ PoolPost.find(:all).each { |pp|
+ pp.need_slave_update = true
+ pp.copy_changes_to_slave
+ }
+
+ #execute "CREATE INDEX idx_pools_posts_child_id on pools_posts (child_id) WHERE child_id IS NOT NULL"
+ end
+
+ def self.down
+ execute "DELETE FROM pools_posts WHERE master_id IS NOT NULL"
+ execute "ALTER TABLE pools_posts DROP COLUMN master_id"
+ execute "ALTER TABLE pools_posts DROP COLUMN slave_id"
+ end
+end
diff --git a/db/migrate/20081210193125_add_index_history_changes_previous_id.rb b/db/migrate/20081210193125_add_index_history_changes_previous_id.rb
new file mode 100644
index 00000000..cf392b88
--- /dev/null
+++ b/db/migrate/20081210193125_add_index_history_changes_previous_id.rb
@@ -0,0 +1,10 @@
+class AddIndexHistoryChangesPreviousId < ActiveRecord::Migration
+ def self.up
+ # We need an index on this for its ON DELETE SET NULL.
+ add_index :history_changes, :previous_id
+ end
+
+ def self.down
+ remove_index :history_changes, :previous_id
+ end
+end
diff --git a/db/migrate/20090215000207_add_inline_images.rb b/db/migrate/20090215000207_add_inline_images.rb
new file mode 100644
index 00000000..e3bfc002
--- /dev/null
+++ b/db/migrate/20090215000207_add_inline_images.rb
@@ -0,0 +1,33 @@
+class AddInlineImages < ActiveRecord::Migration
+ def self.up
+ execute <<-EOS
+ CREATE TABLE inlines (
+ id SERIAL PRIMARY KEY,
+ user_id integer REFERENCES users ON DELETE SET NULL,
+ created_at timestamp NOT NULL DEFAULT now(),
+ description text NOT NULL DEFAULT ''
+ )
+ EOS
+ execute <<-EOS
+ CREATE TABLE inline_images (
+ id SERIAL PRIMARY KEY,
+ inline_id integer NOT NULL REFERENCES inlines ON DELETE CASCADE,
+ md5 text NOT NULL,
+ file_ext text NOT NULL,
+ description text NOT NULL DEFAULT '',
+ sequence INTEGER NOT NULL,
+ width INTEGER NOT NULL,
+ height INTEGER NOT NULL,
+ sample_width INTEGER,
+ sample_height INTEGER
+ )
+ EOS
+
+ add_index :inline_images, :inline_id
+ end
+
+ def self.down
+ drop_table :inlines
+ drop_table :inline_images
+ end
+end
diff --git a/db/migrate/20090903232732_update_post_text.rb b/db/migrate/20090903232732_update_post_text.rb
new file mode 100644
index 00000000..4b46d187
--- /dev/null
+++ b/db/migrate/20090903232732_update_post_text.rb
@@ -0,0 +1,21 @@
+class UpdatePostText < ActiveRecord::Migration
+ def self.up
+    Comment.find(:all, :conditions => ["body ILIKE '%%<i>%%' OR body ILIKE '%%<b>%%'"]).each { |comment|
+      comment.body = comment.body.gsub(/<i>/i, "[i]")
+      comment.body = comment.body.gsub(/<\/i>/i, "[/i]")
+      comment.body = comment.body.gsub(/<b>/i, "[b]")
+      comment.body = comment.body.gsub(/<\/b>/i, "[/b]")
+ comment.save!
+ }
+ end
+
+ def self.down
+ Comment.find(:all, :conditions => ["body ILIKE '%%[i]%%' OR body ILIKE '%%[b]%%'"]).each { |comment|
+      comment.body = comment.body.gsub(/\[i\]/i, "<i>")
+      comment.body = comment.body.gsub(/\[\/i\]/i, "</i>")
+      comment.body = comment.body.gsub(/\[b\]/i, "<b>")
+      comment.body = comment.body.gsub(/\[\/b\]/i, "</b>")
+ comment.save!
+ }
+ end
+end
diff --git a/db/migrate/20091228170149_add_jpeg_columns.rb b/db/migrate/20091228170149_add_jpeg_columns.rb
new file mode 100644
index 00000000..525fb766
--- /dev/null
+++ b/db/migrate/20091228170149_add_jpeg_columns.rb
@@ -0,0 +1,16 @@
+class AddJpegColumns < ActiveRecord::Migration
+ def self.up
+ add_column :posts, :jpeg_width, :integer
+ add_column :posts, :jpeg_height, :integer
+ add_column :posts, :jpeg_size, :integer, :default => 0, :null => false
+ add_column :posts, :jpeg_crc32, :bigint
+ end
+
+ def self.down
+ remove_column :posts, :jpeg_width
+ remove_column :posts, :jpeg_height
+ remove_column :posts, :jpeg_size
+ remove_column :posts, :jpeg_crc32
+ end
+end
+
diff --git a/db/migrate/20100101225942_constrain_user_logs.rb b/db/migrate/20100101225942_constrain_user_logs.rb
new file mode 100644
index 00000000..e9ad7dbf
--- /dev/null
+++ b/db/migrate/20100101225942_constrain_user_logs.rb
@@ -0,0 +1,55 @@
+class ConstrainUserLogs < ActiveRecord::Migration
+ def self.up
+ execute <<-EOS
+ CREATE TEMPORARY TABLE user_logs_new (
+ id SERIAL PRIMARY KEY,
+ user_id integer NOT NULL,
+ created_at timestamp NOT NULL DEFAULT now(),
+ ip_addr inet NOT NULL,
+ CONSTRAINT user_logs_new_user_ip UNIQUE (user_id, ip_addr)
+ )
+ EOS
+
+ execute <<-EOS
+ INSERT INTO user_logs_new (user_id, ip_addr, created_at)
+ SELECT user_id, ip_addr, MAX(created_at) FROM user_logs GROUP BY user_id, ip_addr;
+ EOS
+
+ execute "DELETE FROM user_logs;"
+
+ execute <<-EOS
+ INSERT INTO user_logs (user_id, ip_addr, created_at)
+ SELECT user_id, ip_addr, created_at FROM user_logs_new;
+ EOS
+
+ # Make user_logs user/ip pairs unique.
+ execute "ALTER TABLE user_logs ADD CONSTRAINT user_logs_user_ip UNIQUE (user_id, ip_addr);"
+
+ # If a log for a user/ip pair exists, update its timestamp. Otherwise, create a new
+ # record. Updating an existing record is the fast path.
+ execute <<-EOS
+ CREATE OR REPLACE FUNCTION user_logs_touch(new_user_id integer, new_ip inet) RETURNS VOID AS $$
+ BEGIN
+ FOR i IN 1..3 LOOP
+ UPDATE user_logs SET created_at = now() where user_id = new_user_id and ip_addr = new_ip;
+ IF found THEN
+ RETURN;
+ END IF;
+
+ BEGIN
+ INSERT INTO user_logs (user_id, ip_addr) VALUES (new_user_id, new_ip);
+ RETURN;
+ EXCEPTION WHEN unique_violation THEN
+ -- Try again.
+ END;
+ END LOOP;
+ END;
+ $$ LANGUAGE plpgsql;
+ EOS
+ end
+
+ def self.down
+ execute "ALTER TABLE user_logs DROP CONSTRAINT user_logs_user_ip;"
+ execute "DROP FUNCTION user_logs_touch(integer, inet);"
+ end
+end
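The migration above only defines user_logs_touch(); its call site is outside this diff. A hypothetical caller (names like `user` and `request` are illustrative) would issue one statement per request and rely on the function's internal retry on unique_violation:

    conn = ActiveRecord::Base.connection
    conn.execute("SELECT user_logs_touch(#{user.id.to_i}, #{conn.quote(request.remote_ip)})")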
diff --git a/db/postgres.sql b/db/postgres.sql
new file mode 100644
index 00000000..e48a27b9
--- /dev/null
+++ b/db/postgres.sql
@@ -0,0 +1,1108 @@
+--
+-- PostgreSQL database dump
+--
+
+SET client_encoding = 'SQL_ASCII';
+SET standard_conforming_strings = off;
+SET check_function_bodies = false;
+SET client_min_messages = warning;
+SET escape_string_warning = off;
+
+--
+-- Name: SCHEMA public; Type: COMMENT; Schema: -; Owner: -
+--
+
+COMMENT ON SCHEMA public IS 'Standard public schema';
+
+
+--
+-- Name: plpgsql; Type: PROCEDURAL LANGUAGE; Schema: -; Owner: -
+--
+
+CREATE PROCEDURAL LANGUAGE plpgsql;
+
+
+SET search_path = public, pg_catalog;
+
+--
+-- Name: trg_posts__delete(); Type: FUNCTION; Schema: public; Owner: -
+--
+
+CREATE FUNCTION trg_posts__delete() RETURNS "trigger"
+ AS $$
+BEGIN
+ UPDATE table_data SET row_count = row_count - 1 WHERE name = 'posts';
+ RETURN OLD;
+END;
+$$
+ LANGUAGE plpgsql;
+
+
+--
+-- Name: trg_posts__insert(); Type: FUNCTION; Schema: public; Owner: -
+--
+
+CREATE FUNCTION trg_posts__insert() RETURNS "trigger"
+ AS $$
+BEGIN
+ UPDATE table_data SET row_count = row_count + 1 WHERE name = 'posts';
+ RETURN NEW;
+END;
+$$
+ LANGUAGE plpgsql;
+
+
+--
+-- Name: trg_posts_tags__delete(); Type: FUNCTION; Schema: public; Owner: -
+--
+
+CREATE FUNCTION trg_posts_tags__delete() RETURNS "trigger"
+ AS $$
+BEGIN
+ UPDATE tags SET post_count = post_count - 1 WHERE tags.id = OLD.tag_id;
+ RETURN OLD;
+END;
+$$
+ LANGUAGE plpgsql;
+
+
+--
+-- Name: trg_posts_tags__insert(); Type: FUNCTION; Schema: public; Owner: -
+--
+
+CREATE FUNCTION trg_posts_tags__insert() RETURNS "trigger"
+ AS $$
+BEGIN
+ UPDATE tags SET post_count = post_count + 1 WHERE tags.id = NEW.tag_id;
+ RETURN NEW;
+END;
+$$
+ LANGUAGE plpgsql;
+
+
+--
+-- Name: trg_users__delete(); Type: FUNCTION; Schema: public; Owner: -
+--
+
+CREATE FUNCTION trg_users__delete() RETURNS "trigger"
+ AS $$
+BEGIN
+ UPDATE table_data SET row_count = row_count - 1 WHERE name = 'users';
+ RETURN OLD;
+END;
+$$
+ LANGUAGE plpgsql;
+
+
+--
+-- Name: trg_users__insert(); Type: FUNCTION; Schema: public; Owner: -
+--
+
+CREATE FUNCTION trg_users__insert() RETURNS "trigger"
+ AS $$
+BEGIN
+ UPDATE table_data SET row_count = row_count + 1 WHERE name = 'users';
+ RETURN NEW;
+END;
+$$
+ LANGUAGE plpgsql;
+
+
+SET default_tablespace = '';
+
+SET default_with_oids = false;
+
+--
+-- Name: comments; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE comments (
+ id integer NOT NULL,
+ created_at timestamp without time zone NOT NULL,
+ post_id integer NOT NULL,
+ user_id integer,
+ body text NOT NULL,
+ ip_addr text NOT NULL,
+ signal_level smallint DEFAULT 1 NOT NULL
+);
+
+
+--
+-- Name: comments_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE comments_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: comments_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE comments_id_seq OWNED BY comments.id;
+
+
+--
+-- Name: favorites; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE favorites (
+ id integer NOT NULL,
+ post_id integer NOT NULL,
+ user_id integer NOT NULL
+);
+
+
+--
+-- Name: favorites_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE favorites_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: favorites_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE favorites_id_seq OWNED BY favorites.id;
+
+
+--
+-- Name: note_versions; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE note_versions (
+ id integer NOT NULL,
+ created_at timestamp without time zone NOT NULL,
+ updated_at timestamp without time zone NOT NULL,
+ x integer NOT NULL,
+ y integer NOT NULL,
+ width integer NOT NULL,
+ height integer NOT NULL,
+ body text NOT NULL,
+ version integer NOT NULL,
+ ip_addr text NOT NULL,
+ is_active boolean DEFAULT true NOT NULL,
+ note_id integer NOT NULL,
+ post_id integer NOT NULL,
+ user_id integer
+);
+
+
+--
+-- Name: note_versions_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE note_versions_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: note_versions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE note_versions_id_seq OWNED BY note_versions.id;
+
+
+--
+-- Name: notes; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE notes (
+ id integer NOT NULL,
+ created_at timestamp without time zone NOT NULL,
+ updated_at timestamp without time zone NOT NULL,
+ user_id integer,
+ x integer NOT NULL,
+ y integer NOT NULL,
+ width integer NOT NULL,
+ height integer NOT NULL,
+ ip_addr text NOT NULL,
+ version integer DEFAULT 1 NOT NULL,
+ is_active boolean DEFAULT true NOT NULL,
+ post_id integer NOT NULL,
+ body text NOT NULL
+);
+
+
+--
+-- Name: notes_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE notes_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: notes_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE notes_id_seq OWNED BY notes.id;
+
+
+--
+-- Name: post_tag_histories; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE post_tag_histories (
+ id integer NOT NULL,
+ post_id integer NOT NULL,
+ tags text NOT NULL
+);
+
+
+--
+-- Name: post_tag_histories_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE post_tag_histories_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: post_tag_histories_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE post_tag_histories_id_seq OWNED BY post_tag_histories.id;
+
+
+--
+-- Name: posts; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE posts (
+ id integer NOT NULL,
+ created_at timestamp without time zone NOT NULL,
+ user_id integer,
+ score integer DEFAULT 0 NOT NULL,
+ source text NOT NULL,
+ md5 text NOT NULL,
+ last_commented_at timestamp without time zone,
+ rating character(1) DEFAULT 'q'::bpchar NOT NULL,
+ width integer,
+ height integer,
+ is_warehoused boolean DEFAULT false NOT NULL,
+ last_voter_ip text,
+ ip_addr text NOT NULL,
+ cached_tags text DEFAULT ''::text NOT NULL,
+ is_note_locked boolean DEFAULT false NOT NULL,
+ fav_count integer DEFAULT 0 NOT NULL,
+ file_ext text DEFAULT ''::text NOT NULL,
+ last_noted_at timestamp without time zone,
+ is_rating_locked boolean DEFAULT false NOT NULL
+);
+
+
+--
+-- Name: posts_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE posts_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: posts_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE posts_id_seq OWNED BY posts.id;
+
+
+--
+-- Name: posts_tags; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE posts_tags (
+ post_id integer NOT NULL,
+ tag_id integer NOT NULL
+);
+
+
+--
+-- Name: table_data; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE table_data (
+ name text NOT NULL,
+ row_count integer NOT NULL
+);
+
+
+--
+-- Name: tag_aliases; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE tag_aliases (
+ id integer NOT NULL,
+ name text NOT NULL,
+ alias_id integer NOT NULL
+);
+
+
+--
+-- Name: tag_aliases_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE tag_aliases_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: tag_aliases_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE tag_aliases_id_seq OWNED BY tag_aliases.id;
+
+
+--
+-- Name: tag_implications; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE tag_implications (
+ id integer NOT NULL,
+ parent_id integer NOT NULL,
+ child_id integer NOT NULL
+);
+
+
+--
+-- Name: tag_implications_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE tag_implications_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: tag_implications_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE tag_implications_id_seq OWNED BY tag_implications.id;
+
+
+--
+-- Name: tags; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE tags (
+ id integer NOT NULL,
+ name text NOT NULL,
+ post_count integer DEFAULT 0 NOT NULL,
+ cached_related text DEFAULT '[]'::text NOT NULL,
+ cached_related_expires_on timestamp without time zone DEFAULT now() NOT NULL,
+ tag_type smallint DEFAULT 0 NOT NULL
+);
+
+
+--
+-- Name: tags_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE tags_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: tags_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE tags_id_seq OWNED BY tags.id;
+
+
+--
+-- Name: users; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE users (
+ id integer NOT NULL,
+ name text NOT NULL,
+ "password" text NOT NULL,
+ "level" integer DEFAULT 0 NOT NULL,
+ login_count integer DEFAULT 0 NOT NULL
+);
+
+
+--
+-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE users_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE users_id_seq OWNED BY users.id;
+
+
+--
+-- Name: wiki_page_versions; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE wiki_page_versions (
+ id integer NOT NULL,
+ created_at timestamp without time zone NOT NULL,
+ updated_at timestamp without time zone NOT NULL,
+ version integer DEFAULT 1 NOT NULL,
+ title text NOT NULL,
+ body text NOT NULL,
+ user_id integer,
+ ip_addr text NOT NULL,
+ wiki_page_id integer NOT NULL,
+ is_locked boolean DEFAULT false NOT NULL
+);
+
+
+--
+-- Name: wiki_page_versions_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE wiki_page_versions_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: wiki_page_versions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE wiki_page_versions_id_seq OWNED BY wiki_page_versions.id;
+
+
+--
+-- Name: wiki_pages; Type: TABLE; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE TABLE wiki_pages (
+ id integer NOT NULL,
+ created_at timestamp without time zone NOT NULL,
+ updated_at timestamp without time zone NOT NULL,
+ version integer DEFAULT 1 NOT NULL,
+ title text NOT NULL,
+ body text NOT NULL,
+ user_id integer,
+ ip_addr text NOT NULL,
+ is_locked boolean DEFAULT false NOT NULL
+);
+
+
+--
+-- Name: wiki_pages_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE wiki_pages_id_seq
+ INCREMENT BY 1
+ NO MAXVALUE
+ NO MINVALUE
+ CACHE 1;
+
+
+--
+-- Name: wiki_pages_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE wiki_pages_id_seq OWNED BY wiki_pages.id;
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE comments ALTER COLUMN id SET DEFAULT nextval('comments_id_seq'::regclass);
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE favorites ALTER COLUMN id SET DEFAULT nextval('favorites_id_seq'::regclass);
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE note_versions ALTER COLUMN id SET DEFAULT nextval('note_versions_id_seq'::regclass);
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE notes ALTER COLUMN id SET DEFAULT nextval('notes_id_seq'::regclass);
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE post_tag_histories ALTER COLUMN id SET DEFAULT nextval('post_tag_histories_id_seq'::regclass);
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE posts ALTER COLUMN id SET DEFAULT nextval('posts_id_seq'::regclass);
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE tag_aliases ALTER COLUMN id SET DEFAULT nextval('tag_aliases_id_seq'::regclass);
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE tag_implications ALTER COLUMN id SET DEFAULT nextval('tag_implications_id_seq'::regclass);
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE tags ALTER COLUMN id SET DEFAULT nextval('tags_id_seq'::regclass);
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE users ALTER COLUMN id SET DEFAULT nextval('users_id_seq'::regclass);
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE wiki_page_versions ALTER COLUMN id SET DEFAULT nextval('wiki_page_versions_id_seq'::regclass);
+
+
+--
+-- Name: id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE wiki_pages ALTER COLUMN id SET DEFAULT nextval('wiki_pages_id_seq'::regclass);
+
+
+--
+-- Name: comments_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY comments
+ ADD CONSTRAINT comments_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: favorites_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY favorites
+ ADD CONSTRAINT favorites_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: note_versions_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY note_versions
+ ADD CONSTRAINT note_versions_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: notes_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY notes
+ ADD CONSTRAINT notes_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: post_tag_histories_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY post_tag_histories
+ ADD CONSTRAINT post_tag_histories_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: posts_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY posts
+ ADD CONSTRAINT posts_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: table_data_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY table_data
+ ADD CONSTRAINT table_data_pkey PRIMARY KEY (name);
+
+
+--
+-- Name: tag_aliases_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY tag_aliases
+ ADD CONSTRAINT tag_aliases_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: tag_implications_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY tag_implications
+ ADD CONSTRAINT tag_implications_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: tags_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY tags
+ ADD CONSTRAINT tags_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: users_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY users
+ ADD CONSTRAINT users_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: wiki_page_versions_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY wiki_page_versions
+ ADD CONSTRAINT wiki_page_versions_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: wiki_pages_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
+--
+
+ALTER TABLE ONLY wiki_pages
+ ADD CONSTRAINT wiki_pages_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: idx_comments__post; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_comments__post ON comments USING btree (post_id);
+
+
+--
+-- Name: idx_favorites__post; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_favorites__post ON favorites USING btree (post_id);
+
+
+--
+-- Name: idx_favorites__post_user; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE UNIQUE INDEX idx_favorites__post_user ON favorites USING btree (post_id, user_id);
+
+
+--
+-- Name: idx_favorites__user; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_favorites__user ON favorites USING btree (user_id);
+
+
+--
+-- Name: idx_note_versions__post; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_note_versions__post ON note_versions USING btree (post_id);
+
+
+--
+-- Name: idx_notes__note; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_notes__note ON note_versions USING btree (note_id);
+
+
+--
+-- Name: idx_notes__post; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_notes__post ON notes USING btree (post_id);
+
+
+--
+-- Name: idx_post_tag_histories__post; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_post_tag_histories__post ON post_tag_histories USING btree (post_id);
+
+
+--
+-- Name: idx_posts__created_at; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_posts__created_at ON posts USING btree (created_at);
+
+
+--
+-- Name: idx_posts__last_commented_at; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_posts__last_commented_at ON posts USING btree (last_commented_at) WHERE last_commented_at IS NOT NULL;
+
+--
+-- Name: idx_posts__last_noted_at; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_posts__last_noted_at ON posts USING btree (last_noted_at) WHERE last_noted_at IS NOT NULL;
+
+
+--
+-- Name: idx_posts__md5; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE UNIQUE INDEX idx_posts__md5 ON posts USING btree (md5);
+
+
+--
+-- Name: idx_posts__user; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_posts__user ON posts USING btree (user_id) WHERE user_id IS NOT NULL;
+
+
+--
+-- Name: idx_posts_tags__post; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_posts_tags__post ON posts_tags USING btree (post_id);
+
+
+--
+-- Name: idx_posts_tags__tag; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_posts_tags__tag ON posts_tags USING btree (tag_id);
+
+
+--
+-- Name: idx_tag_aliases__name; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE UNIQUE INDEX idx_tag_aliases__name ON tag_aliases USING btree (name);
+
+
+--
+-- Name: idx_tag_implications__child; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_tag_implications__child ON tag_implications USING btree (child_id);
+
+
+--
+-- Name: idx_tag_implications__parent; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_tag_implications__parent ON tag_implications USING btree (parent_id);
+
+
+--
+-- Name: idx_tags__name; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE UNIQUE INDEX idx_tags__name ON tags USING btree (name);
+
+
+--
+-- Name: idx_tags__post_count; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_tags__post_count ON tags USING btree (post_count);
+
+
+--
+-- Name: idx_users__name; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_users__name ON users USING btree (lower(name));
+
+
+--
+-- Name: idx_wiki_page_versions__wiki_page; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_wiki_page_versions__wiki_page ON wiki_page_versions USING btree (wiki_page_id);
+
+
+--
+-- Name: idx_wiki_pages__title; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_wiki_pages__title ON wiki_pages USING btree (lower(title));
+
+
+--
+-- Name: idx_wiki_pages__updated_at; Type: INDEX; Schema: public; Owner: -; Tablespace:
+--
+
+CREATE INDEX idx_wiki_pages__updated_at ON wiki_pages USING btree (updated_at);
+
+
+--
+-- Name: trg_posts__insert; Type: TRIGGER; Schema: public; Owner: -
+--
+
+CREATE TRIGGER trg_posts__insert
+ BEFORE INSERT ON posts
+ FOR EACH ROW
+ EXECUTE PROCEDURE trg_posts__insert();
+
+
+--
+-- Name: trg_posts_delete; Type: TRIGGER; Schema: public; Owner: -
+--
+
+CREATE TRIGGER trg_posts_delete
+ BEFORE DELETE ON posts
+ FOR EACH ROW
+ EXECUTE PROCEDURE trg_posts__delete();
+
+
+--
+-- Name: trg_posts_tags__delete; Type: TRIGGER; Schema: public; Owner: -
+--
+
+CREATE TRIGGER trg_posts_tags__delete
+ BEFORE DELETE ON posts_tags
+ FOR EACH ROW
+ EXECUTE PROCEDURE trg_posts_tags__delete();
+
+
+--
+-- Name: trg_posts_tags__insert; Type: TRIGGER; Schema: public; Owner: -
+--
+
+CREATE TRIGGER trg_posts_tags__insert
+ BEFORE INSERT ON posts_tags
+ FOR EACH ROW
+ EXECUTE PROCEDURE trg_posts_tags__insert();
+
+
+--
+-- Name: trg_users_delete; Type: TRIGGER; Schema: public; Owner: -
+--
+
+CREATE TRIGGER trg_users_delete
+ BEFORE DELETE ON users
+ FOR EACH ROW
+ EXECUTE PROCEDURE trg_users__delete();
+
+
+--
+-- Name: trg_users_insert; Type: TRIGGER; Schema: public; Owner: -
+--
+
+CREATE TRIGGER trg_users_insert
+ BEFORE INSERT ON users
+ FOR EACH ROW
+ EXECUTE PROCEDURE trg_users__insert();
+
+
+--
+-- Name: fk_comments__post; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY comments
+ ADD CONSTRAINT fk_comments__post FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE;
+
+
+--
+-- Name: fk_comments__user; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY comments
+ ADD CONSTRAINT fk_comments__user FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL;
+
+
+--
+-- Name: fk_favorites__post; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY favorites
+ ADD CONSTRAINT fk_favorites__post FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE;
+
+
+--
+-- Name: fk_favorites__user; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY favorites
+ ADD CONSTRAINT fk_favorites__user FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
+
+
+--
+-- Name: fk_note_versions__note; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY note_versions
+ ADD CONSTRAINT fk_note_versions__note FOREIGN KEY (note_id) REFERENCES notes(id) ON DELETE CASCADE;
+
+
+--
+-- Name: fk_note_versions__post; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY note_versions
+ ADD CONSTRAINT fk_note_versions__post FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE;
+
+
+--
+-- Name: fk_note_versions__user; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY note_versions
+ ADD CONSTRAINT fk_note_versions__user FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL;
+
+
+--
+-- Name: fk_notes__post; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY notes
+ ADD CONSTRAINT fk_notes__post FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE;
+
+
+--
+-- Name: fk_notes__user; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY notes
+ ADD CONSTRAINT fk_notes__user FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL;
+
+
+--
+-- Name: fk_post_tag_histories__post; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY post_tag_histories
+ ADD CONSTRAINT fk_post_tag_histories__post FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE;
+
+
+--
+-- Name: fk_posts__user; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY posts
+ ADD CONSTRAINT fk_posts__user FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL;
+
+
+--
+-- Name: fk_tag_aliases__alias; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY tag_aliases
+ ADD CONSTRAINT fk_tag_aliases__alias FOREIGN KEY (alias_id) REFERENCES tags(id) ON DELETE CASCADE;
+
+
+--
+-- Name: fk_tag_implications__child; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY tag_implications
+ ADD CONSTRAINT fk_tag_implications__child FOREIGN KEY (child_id) REFERENCES tags(id) ON DELETE CASCADE;
+
+
+--
+-- Name: fk_tag_implications__parent; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY tag_implications
+ ADD CONSTRAINT fk_tag_implications__parent FOREIGN KEY (parent_id) REFERENCES tags(id) ON DELETE CASCADE;
+
+
+--
+-- Name: fk_wiki_page_versions__user; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY wiki_page_versions
+ ADD CONSTRAINT fk_wiki_page_versions__user FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL;
+
+
+--
+-- Name: fk_wiki_page_versions__wiki_page; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY wiki_page_versions
+ ADD CONSTRAINT fk_wiki_page_versions__wiki_page FOREIGN KEY (wiki_page_id) REFERENCES wiki_pages(id) ON DELETE CASCADE;
+
+
+--
+-- Name: fk_wiki_pages__user; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY wiki_pages
+ ADD CONSTRAINT fk_wiki_pages__user FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL;
+
+
+INSERT INTO table_data (name, row_count) VALUES ('posts', 0), ('users', 0);
+
+--
+-- Name: public; Type: ACL; Schema: -; Owner: -
+--
+
+REVOKE ALL ON SCHEMA public FROM PUBLIC;
+REVOKE ALL ON SCHEMA public FROM postgres;
+GRANT ALL ON SCHEMA public TO postgres;
+GRANT ALL ON SCHEMA public TO PUBLIC;
+
+
+--
+-- PostgreSQL database dump complete
+--
+
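The trg_posts__* and trg_users__* triggers above keep table_data.row_count current, and trg_posts_tags__* does the same for tags.post_count, so callers can avoid COUNT(*) scans over large tables. An illustrative read, not part of the dump:

    post_count = ActiveRecord::Base.connection.select_value(
      "SELECT row_count FROM table_data WHERE name = 'posts'").to_i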
diff --git a/db/schema.rb b/db/schema.rb
new file mode 100644
index 00000000..5900c610
--- /dev/null
+++ b/db/schema.rb
@@ -0,0 +1,284 @@
+# This file is auto-generated from the current state of the database. Instead of editing this file,
+# please use the migrations feature of ActiveRecord to incrementally modify your database, and
+# then regenerate this schema definition.
+#
+# Note that this schema.rb definition is the authoritative source for your database schema. If you need
+# to create the application database on another system, you should be using db:schema:load, not running
+# all the migrations from scratch. The latter is a flawed and unsustainable approach (the more migrations
+# you'll amass, the slower it'll run and the greater likelihood for issues).
+#
+# It's strongly recommended to check this file into your version control system.
+
+ActiveRecord::Schema.define(:version => 72) do
+
+ create_table "amazon_keywords", :force => true do |t|
+ t.string "keywords", :null => false
+ t.datetime "expires_on", :null => false
+ end
+
+ create_table "amazon_results", :force => true do |t|
+ t.integer "amazon_keyword_id", :null => false
+ t.string "asin", :null => false
+ t.string "title", :default => "Unknown"
+ t.string "author", :default => "Unknown"
+ t.string "image_url", :default => "unknown.jpg"
+ t.string "detail_url", :null => false
+ t.string "price", :default => "Unknown"
+ t.datetime "date_relased"
+ t.string "company", :default => "Unknown"
+ end
+
+ create_table "artist_urls", :force => true do |t|
+ t.integer "artist_id", :null => false
+ t.text "url", :null => false
+ t.text "normalized_url", :null => false
+ end
+
+ add_index "artist_urls", ["artist_id"], :name => "index_artist_urls_on_artist_id"
+ add_index "artist_urls", ["normalized_url"], :name => "index_artist_urls_on_normalized_url"
+ add_index "artist_urls", ["url"], :name => "index_artist_urls_on_url"
+
+ create_table "artists", :force => true do |t|
+ t.integer "alias_id"
+ t.integer "group_id"
+ t.text "name", :null => false
+ t.datetime "updated_at", :null => false
+ t.integer "updater_id"
+ t.integer "pixiv_id"
+ end
+
+ add_index "artists", ["name"], :name => "artists_name_uniq", :unique => true
+ add_index "artists", ["pixiv_id"], :name => "index_artists_on_pixiv_id"
+
+ create_table "bans", :force => true do |t|
+ t.integer "user_id", :null => false
+ t.text "reason", :null => false
+ t.datetime "expires_at", :null => false
+ t.integer "banned_by", :null => false
+ end
+
+ add_index "bans", ["user_id"], :name => "index_bans_on_user_id"
+
+ create_table "coefficients", :id => false, :force => true do |t|
+ t.integer "post_id"
+ t.string "color", :limit => 1
+ t.integer "bin"
+ t.integer "v"
+ t.integer "x"
+ t.integer "y"
+ end
+
+ create_table "comments", :force => true do |t|
+ t.datetime "created_at", :null => false
+ t.integer "post_id", :null => false
+ t.integer "user_id"
+ t.text "body", :null => false
+ t.string "ip_addr", :limit => nil, :null => false
+ t.boolean "is_spam"
+ end
+
+ add_index "comments", ["post_id"], :name => "idx_comments__post"
+
+ create_table "dmails", :force => true do |t|
+ t.integer "from_id", :null => false
+ t.integer "to_id", :null => false
+ t.text "title", :null => false
+ t.text "body", :null => false
+ t.datetime "created_at", :null => false
+ t.boolean "has_seen", :default => false, :null => false
+ t.integer "parent_id"
+ end
+
+ add_index "dmails", ["from_id"], :name => "index_dmails_on_from_id"
+ add_index "dmails", ["parent_id"], :name => "index_dmails_on_parent_id"
+ add_index "dmails", ["to_id"], :name => "index_dmails_on_to_id"
+
+ create_table "favorites", :force => true do |t|
+ t.integer "post_id", :null => false
+ t.integer "user_id", :null => false
+ t.datetime "created_at", :null => false
+ end
+
+ add_index "favorites", ["post_id"], :name => "idx_favorites__post"
+ add_index "favorites", ["user_id"], :name => "idx_favorites__user"
+
+ create_table "flagged_post_details", :force => true do |t|
+ t.datetime "created_at", :null => false
+ t.integer "post_id", :null => false
+ t.text "reason", :null => false
+ t.integer "user_id", :null => false
+ t.boolean "is_resolved", :null => false
+ end
+
+ add_index "flagged_post_details", ["post_id"], :name => "index_flagged_post_details_on_post_id"
+
+ create_table "flagged_posts", :force => true do |t|
+ t.datetime "created_at", :null => false
+ t.integer "post_id", :null => false
+ t.text "reason", :null => false
+ t.integer "user_id"
+ t.boolean "is_resolved", :default => false, :null => false
+ end
+
+# Could not dump table "forum_posts" because of following StandardError
+# Unknown type 'tsvector' for column 'text_search_index'
+
+# Could not dump table "note_versions" because of following StandardError
+# Unknown type 'tsvector' for column 'text_search_index'
+
+# Could not dump table "notes" because of following StandardError
+# Unknown type 'tsvector' for column 'text_search_index'
+
+ create_table "pools", :force => true do |t|
+ t.text "name", :null => false
+ t.datetime "created_at", :null => false
+ t.datetime "updated_at", :null => false
+ t.integer "user_id", :null => false
+ t.boolean "is_public", :default => false, :null => false
+ t.integer "post_count", :default => 0, :null => false
+ t.text "description", :default => "", :null => false
+ end
+
+ add_index "pools", ["user_id"], :name => "pools_user_id_idx"
+
+ create_table "pools_posts", :force => true do |t|
+ t.integer "sequence", :default => 0, :null => false
+ t.integer "pool_id", :null => false
+ t.integer "post_id", :null => false
+ end
+
+ add_index "pools_posts", ["pool_id"], :name => "pools_posts_pool_id_idx"
+ add_index "pools_posts", ["post_id"], :name => "pools_posts_post_id_idx"
+
+ create_table "post_tag_histories", :force => true do |t|
+ t.integer "post_id", :null => false
+ t.text "tags", :null => false
+ t.integer "user_id"
+ t.string "ip_addr", :limit => nil
+ t.datetime "created_at", :null => false
+ end
+
+ add_index "post_tag_histories", ["post_id"], :name => "idx_post_tag_histories__post"
+
+# Could not dump table "posts" because of following StandardError
+# Unknown type 'post_status' for column 'status'
+
+ create_table "posts_tags", :id => false, :force => true do |t|
+ t.integer "post_id", :null => false
+ t.integer "tag_id", :null => false
+ end
+
+ add_index "posts_tags", ["post_id"], :name => "idx_posts_tags__post"
+ add_index "posts_tags", ["tag_id"], :name => "idx_posts_tags__tag"
+
+ create_table "table_data", :id => false, :force => true do |t|
+ t.text "name", :null => false
+ t.integer "row_count", :null => false
+ end
+
+ create_table "tag_aliases", :force => true do |t|
+ t.text "name", :null => false
+ t.integer "alias_id", :null => false
+ t.boolean "is_pending", :default => false, :null => false
+ t.text "reason", :default => "", :null => false
+ end
+
+ add_index "tag_aliases", ["name"], :name => "idx_tag_aliases__name", :unique => true
+
+ create_table "tag_implications", :force => true do |t|
+ t.integer "consequent_id", :null => false
+ t.integer "predicate_id", :null => false
+ t.boolean "is_pending", :default => false, :null => false
+ t.text "reason", :default => "", :null => false
+ end
+
+ add_index "tag_implications", ["predicate_id"], :name => "idx_tag_implications__child"
+ add_index "tag_implications", ["consequent_id"], :name => "idx_tag_implications__parent"
+
+ create_table "tags", :force => true do |t|
+ t.text "name", :null => false
+ t.integer "post_count", :default => 0, :null => false
+ t.text "cached_related", :default => "[]", :null => false
+ t.datetime "cached_related_expires_on", :null => false
+ t.integer "tag_type", :default => 0, :null => false
+ t.boolean "is_ambiguous", :default => false, :null => false
+ t.integer "safe_post_count", :default => 0, :null => false
+ end
+
+ add_index "tags", ["name"], :name => "idx_tags__name", :unique => true
+ add_index "tags", ["post_count"], :name => "idx_tags__post_count"
+
+ create_table "user_records", :force => true do |t|
+ t.integer "user_id", :null => false
+ t.integer "reported_by", :null => false
+ t.datetime "created_at", :null => false
+ t.boolean "is_positive", :default => true, :null => false
+ t.text "body", :null => false
+ end
+
+ create_table "users", :force => true do |t|
+ t.text "name", :null => false
+ t.text "password_hash", :null => false
+ t.integer "level", :default => 0, :null => false
+ t.text "email", :default => "", :null => false
+ t.text "my_tags", :default => "", :null => false
+ t.integer "invite_count", :default => 0, :null => false
+ t.boolean "always_resize_images", :default => false, :null => false
+ t.integer "invited_by"
+ t.datetime "created_at", :null => false
+ t.datetime "last_logged_in_at", :null => false
+ t.datetime "last_forum_topic_read_at", :default => '1960-01-01 00:00:00', :null => false
+ t.boolean "has_mail", :default => false, :null => false
+ t.boolean "receive_dmails", :default => false, :null => false
+ t.text "blacklisted_tags", :default => "", :null => false
+ t.boolean "show_samples", :default => true
+ end
+
+# Could not dump table "wiki_page_versions" because of following StandardError
+# Unknown type 'tsvector' for column 'text_search_index'
+
+# Could not dump table "wiki_pages" because of following StandardError
+# Unknown type 'tsvector' for column 'text_search_index'
+
+ add_foreign_key "artist_urls", ["artist_id"], "artists", ["id"], :name => "artist_urls_artist_id_fkey"
+
+ add_foreign_key "artists", ["alias_id"], "artists", ["id"], :on_delete => :set_null, :name => "artists_alias_id_fkey"
+ add_foreign_key "artists", ["group_id"], "artists", ["id"], :on_delete => :set_null, :name => "artists_group_id_fkey"
+ add_foreign_key "artists", ["updater_id"], "users", ["id"], :on_delete => :set_null, :name => "artists_updater_id_fkey"
+
+ add_foreign_key "bans", ["banned_by"], "users", ["id"], :on_delete => :cascade, :name => "bans_banned_by_fkey"
+ add_foreign_key "bans", ["user_id"], "users", ["id"], :on_delete => :cascade, :name => "bans_user_id_fkey"
+
+ add_foreign_key "comments", ["post_id"], "posts", ["id"], :on_delete => :cascade, :name => "fk_comments__post"
+ add_foreign_key "comments", ["user_id"], "users", ["id"], :on_delete => :set_null, :name => "fk_comments__user"
+
+ add_foreign_key "dmails", ["from_id"], "users", ["id"], :on_delete => :cascade, :name => "dmails_from_id_fkey"
+ add_foreign_key "dmails", ["parent_id"], "dmails", ["id"], :name => "dmails_parent_id_fkey"
+ add_foreign_key "dmails", ["to_id"], "users", ["id"], :on_delete => :cascade, :name => "dmails_to_id_fkey"
+
+ add_foreign_key "favorites", ["post_id"], "posts", ["id"], :on_delete => :cascade, :name => "fk_favorites__post "
+ add_foreign_key "favorites", ["user_id"], "users", ["id"], :on_delete => :cascade, :name => "fk_favorites__user"
+
+ add_foreign_key "flagged_post_details", ["post_id"], "posts", ["id"], :name => "flagged_post_details_post_id_fkey"
+ add_foreign_key "flagged_post_details", ["user_id"], "users", ["id"], :name => "flagged_post_details_user_id_fkey"
+
+ add_foreign_key "flagged_posts", ["user_id"], "users", ["id"], :on_delete => :cascade, :name => "flagged_posts_user_id_fkey"
+
+ add_foreign_key "pools", ["user_id"], "users", ["id"], :on_delete => :cascade, :name => "pools_user_id_fkey"
+
+ add_foreign_key "pools_posts", ["pool_id"], "pools", ["id"], :on_delete => :cascade, :name => "pools_posts_pool_id_fkey"
+ add_foreign_key "pools_posts", ["post_id"], "posts", ["id"], :on_delete => :cascade, :name => "pools_posts_post_id_fkey"
+
+ add_foreign_key "post_tag_histories", ["user_id"], "users", ["id"], :on_delete => :set_null, :name => "post_tag_histories_user_id_fkey"
+
+ add_foreign_key "posts_tags", ["tag_id"], "tags", ["id"], :on_delete => :cascade, :name => "fk_posts_tags__tag"
+
+ add_foreign_key "tag_aliases", ["alias_id"], "tags", ["id"], :on_delete => :cascade, :name => "fk_tag_aliases__alias"
+
+ add_foreign_key "tag_implications", ["predicate_id"], "tags", ["id"], :on_delete => :cascade, :name => "fk_tag_implications__child"
+ add_foreign_key "tag_implications", ["consequent_id"], "tags", ["id"], :on_delete => :cascade, :name => "fk_tag_implications__parent"
+
+ add_foreign_key "user_records", ["reported_by"], "users", ["id"], :on_delete => :cascade, :name => "user_records_reported_by_fkey"
+ add_foreign_key "user_records", ["user_id"], "users", ["id"], :on_delete => :cascade, :name => "user_records_user_id_fkey"
+
+end
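The "Could not dump table" notes above are the Ruby schema dumper hitting PostgreSQL-specific column types (tsvector, the post_status enum) that it cannot express. If that ever becomes a problem for db:schema:load, one option is to tell Rails to dump SQL instead, e.g. in config/environment.rb:

    config.active_record.schema_format = :sql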
diff --git a/lib/asset_cache.rb b/lib/asset_cache.rb
new file mode 100644
index 00000000..3cfd03d8
--- /dev/null
+++ b/lib/asset_cache.rb
@@ -0,0 +1,54 @@
+require "action_view/helpers/tag_helper.rb"
+require "action_view/helpers/asset_tag_helper.rb"
+
+# Fix a bug in expand_javascript_sources: if the cache file exists, but the server
+# is started in development, the old cache will be included among all of the individual
+# source files.
+module ActionView
+ module Helpers
+ module AssetTagHelper
+ private
+ alias_method :orig_expand_javascript_sources, :expand_javascript_sources
+ def expand_javascript_sources(sources)
+ x = orig_expand_javascript_sources sources
+ x.delete("application")
+ x
+ end
+ end
+ end
+end
+
+# Fix another bug: if the javascript sources are changed, the cache is never
+# regenerated. Call on init.
+module AssetCache
+ # This is dumb. How do I call this function without wrapping it in a class?
+ class RegenerateJavascriptCache
+ include ActionView::Helpers::TagHelper
+ include ActionView::Helpers::AssetTagHelper
+ end
+
+ def clear_js_cache
+ # Don't do anything if caching is disabled; we won't use the file anyway, and
+ # if we're in a rake script, we'll delete the file and then not regenerate it.
+ return if not ActionController::Base.perform_caching
+
+ # Overwrite the file atomically, so nothing breaks if a user requests the file
+ # before we finish writing it.
+ path = (defined?(RAILS_ROOT) ? "#{RAILS_ROOT}/public" : "public")
+ # HACK: Many processes will do this simultaneously, and they'll pick up
+ # the temporary application-new-12345 file being created by other processes
+ # as a regular Javascript file and try to include it in their own, causing
+ # weird race conditions. Write the file in the parent directory.
+ cache_temp = "../../tmp/application-new-#{$PROCESS_ID}"
+ temp = "#{path}/javascripts/#{cache_temp}.js"
+ file = "#{path}/javascripts/application.js"
+ File.unlink(temp) if File.exist?(temp)
+ c = RegenerateJavascriptCache.new
+ c.javascript_include_tag(:all, :cache => cache_temp)
+
+ FileUtils.mv(temp, file)
+ end
+
+ module_function :clear_js_cache
+end
+
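The module comment says to call this on init; the hook itself is not part of this file. A minimal sketch of the intended wiring (the initializer location is an assumption):

    # e.g. config/initializers/asset_cache.rb
    require 'asset_cache'
    AssetCache.clear_js_cache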
diff --git a/lib/cache.rb b/lib/cache.rb
new file mode 100644
index 00000000..486b608f
--- /dev/null
+++ b/lib/cache.rb
@@ -0,0 +1,28 @@
+module Cache
+ def expire(options = {})
+ if CONFIG["enable_caching"]
+ tags = options[:tags]
+ cache_version = Cache.get("$cache_version").to_i
+
+ Cache.put("$cache_version", cache_version + 1)
+
+ if tags
+ tags.scan(/\S+/).each do |x|
+ key = "tag:#{x}"
+ key_version = Cache.get(key).to_i
+ Cache.put(key, key_version + 1)
+ end
+ end
+ end
+ end
+
+ def incr(key)
+ if CONFIG["enable_caching"]
+ val = Cache.get(key)
+ Cache.put(key, val.to_i + 1)
+ end
+ end
+
+ module_function :expire
+ module_function :incr
+end
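Cache.expire never deletes entries; it bumps the global "$cache_version" counter and a "tag:<name>" counter per tag, so invalidation only works if readers fold those counters into their keys. A hypothetical read-side sketch (the real key construction lives elsewhere in the app):

    def listing_cache_key(tags)
      version = Cache.get("$cache_version").to_i
      tag_versions = tags.scan(/\S+/).map { |t| Cache.get("tag:#{t}").to_i }.join(",")
      "post-listing/v#{version}/#{tag_versions}/#{tags}"
    end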
diff --git a/lib/cache_dummy.rb b/lib/cache_dummy.rb
new file mode 100644
index 00000000..0f062587
--- /dev/null
+++ b/lib/cache_dummy.rb
@@ -0,0 +1,13 @@
+class MemCache
+ def flush_all
+ end
+end
+
+module Cache
+ def self.get(key, expiry = 0)
+ if block_given? then
+ yield
+ end
+ end
+end
+
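This stub keeps the block form of Cache.get working when caching is disabled: it simply yields, so callers always recompute. Illustrative usage (the helper name is made up):

    html = Cache.get("comment/#{comment.id}/html", 1.hour) do
      format_comment_body(comment)   # hypothetical helper
    end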
diff --git a/lib/danbooru_image_resizer/ConvertToRGB.cpp b/lib/danbooru_image_resizer/ConvertToRGB.cpp
new file mode 100644
index 00000000..eb1e950d
--- /dev/null
+++ b/lib/danbooru_image_resizer/ConvertToRGB.cpp
@@ -0,0 +1,66 @@
+#include <stdint.h>
+#include <stdlib.h>
+#include <assert.h>
+#include "ConvertToRGB.h"
+#include "Filter.h"
+#include <memory>
+using namespace std;
+
+ConvertToRGB::ConvertToRGB(auto_ptr<Filter> pCompressor):
+ m_pCompressor(pCompressor)
+{
+ m_pBuffer = NULL;
+}
+
+ConvertToRGB::~ConvertToRGB()
+{
+ delete[] m_pBuffer;
+}
+
+bool ConvertToRGB::Init(int iSourceWidth, int iSourceHeight, int iBPP)
+{
+ m_iSourceWidth = iSourceWidth;
+ // m_iSourceHeight = iSourceHeight;
+ m_iBPP = iBPP;
+ m_pBuffer = new uint8_t[iSourceWidth * 3];
+ assert(m_iBPP == 1 || m_iBPP == 3 || m_iBPP == 4); // greyscale, RGB or RGBA
+
+ return m_pCompressor->Init(iSourceWidth, iSourceHeight, 3);
+}
+
+bool ConvertToRGB::WriteRow(uint8_t *pNewRow)
+{
+ if(m_iBPP == 3)
+ return m_pCompressor->WriteRow(pNewRow);
+ if(m_iBPP == 1)
+ {
+ uint8_t *pBuffer = m_pBuffer;
+ for(int i = 0; i < m_iSourceWidth; ++i)
+ {
+ *pBuffer++ = *pNewRow;
+ *pBuffer++ = *pNewRow;
+ *pBuffer++ = *pNewRow;
+ ++pNewRow;
+ }
+ }
+ else if(m_iBPP == 4)
+ {
+ uint8_t *pBuffer = m_pBuffer;
+ for(int i = 0; i < m_iSourceWidth; ++i)
+ {
+ uint8_t iR = *pNewRow++;
+ uint8_t iG = *pNewRow++;
+ uint8_t iB = *pNewRow++;
+ uint8_t iA = *pNewRow++;
+ iR = uint8_t((iR * iA) / 255.0f);
+ iG = uint8_t((iG * iA) / 255.0f);
+ iB = uint8_t((iB * iA) / 255.0f);
+ *pBuffer++ = iR;
+ *pBuffer++ = iG;
+ *pBuffer++ = iB;
+ }
+ }
+
+ return m_pCompressor->WriteRow(m_pBuffer);
+}
+
diff --git a/lib/danbooru_image_resizer/ConvertToRGB.h b/lib/danbooru_image_resizer/ConvertToRGB.h
new file mode 100644
index 00000000..be117c90
--- /dev/null
+++ b/lib/danbooru_image_resizer/ConvertToRGB.h
@@ -0,0 +1,27 @@
+#ifndef CONVERT_TO_RGB_H
+#define CONVERT_TO_RGB_H
+
+#include "Filter.h"
+#include <memory>
+using namespace std;
+
+class ConvertToRGB: public Filter
+{
+public:
+ ConvertToRGB(auto_ptr<Filter> pCompressor);
+ ~ConvertToRGB();
+
+ bool Init(int iSourceWidth, int iSourceHeight, int BPP);
+ bool WriteRow(uint8_t *pNewRow);
+ bool Finish() { return true; }
+
+ const char *GetError() const { return NULL; }
+
+private:
+ uint8_t *m_pBuffer;
+ auto_ptr<Filter> m_pCompressor;
+ int m_iSourceWidth;
+ int m_iBPP;
+};
+
+#endif
diff --git a/lib/danbooru_image_resizer/Crop.cpp b/lib/danbooru_image_resizer/Crop.cpp
new file mode 100644
index 00000000..bfc11c5c
--- /dev/null
+++ b/lib/danbooru_image_resizer/Crop.cpp
@@ -0,0 +1,39 @@
+#include "Crop.h"
+
+Crop::Crop(auto_ptr<Filter> pOutput):
+ m_pOutput(pOutput)
+{
+ m_iRow = 0;
+}
+
+void Crop::SetCrop(int iTop, int iBottom, int iLeft, int iRight)
+{
+ m_iTop = iTop;
+ m_iBottom = iBottom;
+ m_iLeft = iLeft;
+ m_iRight = iRight;
+}
+
+bool Crop::Init(int iWidth, int iHeight, int iBPP)
+{
+ m_iSourceWidth = iWidth;
+ m_iSourceHeight = iHeight;
+ m_iSourceBPP = iBPP;
+
+ return m_pOutput->Init(m_iRight - m_iLeft, m_iBottom - m_iTop, iBPP);
+}
+
+bool Crop::WriteRow(uint8_t *pNewRow)
+{
+ if(m_iRow >= m_iTop && m_iRow < m_iBottom)
+ {
+ pNewRow += m_iLeft * m_iSourceBPP;
+ if(!m_pOutput->WriteRow(pNewRow))
+ return false;
+ }
+
+ ++m_iRow;
+
+ return true;
+}
+
diff --git a/lib/danbooru_image_resizer/Crop.h b/lib/danbooru_image_resizer/Crop.h
new file mode 100644
index 00000000..11be870a
--- /dev/null
+++ b/lib/danbooru_image_resizer/Crop.h
@@ -0,0 +1,31 @@
+#ifndef CROP_H
+#define CROP_H
+
+#include "Filter.h"
+#include <memory>
+using namespace std;
+
+class Crop: public Filter
+{
+public:
+ Crop(auto_ptr<Filter> pOutput);
+ void SetCrop(int iTop, int iBottom, int iLeft, int iRight);
+ bool Init(int iWidth, int iHeight, int iBPP);
+ bool WriteRow(uint8_t *pNewRow);
+ bool Finish() { return m_pOutput->Finish(); }
+ const char *GetError() const { return m_pOutput->GetError(); }
+
+private:
+ auto_ptr<Filter> m_pOutput;
+
+ int m_iRow;
+ int m_iTop;
+ int m_iBottom;
+ int m_iLeft;
+ int m_iRight;
+ int m_iSourceWidth;
+ int m_iSourceHeight;
+ int m_iSourceBPP;
+};
+
+#endif
diff --git a/lib/danbooru_image_resizer/Filter.h b/lib/danbooru_image_resizer/Filter.h
new file mode 100644
index 00000000..03b1093e
--- /dev/null
+++ b/lib/danbooru_image_resizer/Filter.h
@@ -0,0 +1,16 @@
+#ifndef FILTER_H
+#define FILTER_H
+
+#include <stdint.h>
+
+class Filter
+{
+public:
+ virtual ~Filter() { }
+ virtual bool Init(int iSourceWidth, int iSourceHeight, int iSourceBPP) = 0;
+ virtual bool WriteRow(uint8_t *row) = 0;
+ virtual bool Finish() = 0;
+ virtual const char *GetError() const = 0;
+};
+
+#endif
diff --git a/lib/danbooru_image_resizer/GIFReader.cpp b/lib/danbooru_image_resizer/GIFReader.cpp
new file mode 100644
index 00000000..b4bbc9eb
--- /dev/null
+++ b/lib/danbooru_image_resizer/GIFReader.cpp
@@ -0,0 +1,60 @@
+#include <stdlib.h>
+#include <string.h>
+#include <gd.h>
+#include "GIFReader.h"
+#include "Resize.h"
+
+bool GIF::Read(FILE *f, Filter *pOutput, char error[1024])
+{
+ bool Ret = false;
+ gdImage *image = gdImageCreateFromGif(f);
+
+ if(!image)
+ {
+ strcpy(error, "couldn't read GIF");
+ return false;
+ }
+
+ uint8_t *pBuf = NULL;
+ pBuf = (uint8_t *) malloc(image->sx * 3);
+ if(pBuf == NULL)
+ {
+ strcpy(error, "out of memory");
+ goto cleanup;
+ }
+
+ pOutput->Init(image->sx, image->sy, 3);
+ for(int y = 0; y < image->sy; ++y)
+ {
+ uint8_t *p = pBuf;
+
+ for(int x = 0; x < image->sx; ++x)
+ {
+ int c = gdImageGetTrueColorPixel(image, x, y);
+ (*p++) = gdTrueColorGetRed(c);
+ (*p++) = gdTrueColorGetGreen(c);
+ (*p++) = gdTrueColorGetBlue(c);
+ }
+
+ if(!pOutput->WriteRow(pBuf))
+ {
+ strcpy(error, pOutput->GetError());
+ goto cleanup;
+ }
+ }
+
+ if(!pOutput->Finish())
+ {
+ strcpy(error, pOutput->GetError());
+ goto cleanup;
+ }
+
+ Ret = true;
+
+cleanup:
+ if(pBuf != NULL)
+ free(pBuf);
+
+ gdImageDestroy(image);
+ return Ret;
+}
diff --git a/lib/danbooru_image_resizer/GIFReader.h b/lib/danbooru_image_resizer/GIFReader.h
new file mode 100644
index 00000000..42488c1b
--- /dev/null
+++ b/lib/danbooru_image_resizer/GIFReader.h
@@ -0,0 +1,12 @@
+#ifndef GIF_READER_H
+#define GIF_READER_H
+
+#include "Reader.h"
+class Filter;
+class GIF: public Reader
+{
+public:
+ bool Read(FILE *f, Filter *pOutput, char error[1024]);
+};
+
+#endif
diff --git a/lib/danbooru_image_resizer/JPEGReader.cpp b/lib/danbooru_image_resizer/JPEGReader.cpp
new file mode 100644
index 00000000..8e45a2fc
--- /dev/null
+++ b/lib/danbooru_image_resizer/JPEGReader.cpp
@@ -0,0 +1,175 @@
+#include <string.h>
+#include <stdlib.h>
+#include "JPEGReader.h"
+#include "Resize.h"
+#include <assert.h>
+using namespace std;
+
+static void jpeg_error_exit(j_common_ptr CInfo)
+{
+ jpeg_error *myerr = (jpeg_error *) CInfo->err;
+ (*CInfo->err->format_message) (CInfo, myerr->buffer);
+ longjmp(myerr->setjmp_buffer, 1);
+}
+
+static void jpeg_warning(j_common_ptr cinfo, int msg_level)
+{
+}
+
+JPEGCompressor::JPEGCompressor(FILE *f)
+{
+ m_File = f;
+ memset(&m_CInfo, 0, sizeof(m_CInfo));
+}
+
+JPEGCompressor::~JPEGCompressor()
+{
+ jpeg_destroy_compress(&m_CInfo);
+}
+
+const char *JPEGCompressor::GetError() const
+{
+ return m_JErr.buffer;
+}
+
+void JPEGCompressor::SetQuality(int quality)
+{
+ m_iQuality = quality;
+}
+
+bool JPEGCompressor::Init(int width, int height, int bpp)
+{
+ assert(bpp == 3);
+ m_CInfo.err = jpeg_std_error(&m_JErr.pub);
+
+ m_JErr.pub.error_exit = jpeg_error_exit;
+ m_JErr.pub.emit_message = jpeg_warning;
+
+ if(setjmp(m_JErr.setjmp_buffer))
+ return false;
+
+ jpeg_create_compress(&m_CInfo);
+
+ jpeg_stdio_dest(&m_CInfo, m_File);
+
+ m_CInfo.image_width = width;
+ m_CInfo.image_height = height;
+ m_CInfo.input_components = 3; /* # of color components per pixel */
+ m_CInfo.in_color_space = JCS_RGB; /* colorspace of input image */
+
+ jpeg_set_defaults(&m_CInfo);
+ jpeg_set_quality(&m_CInfo, m_iQuality, TRUE); // limit to baseline-JPEG values
+
+ /* For high-quality compression, disable color subsampling. */
+ if(m_iQuality >= 95)
+ {
+ m_CInfo.comp_info[0].h_samp_factor = 1;
+ m_CInfo.comp_info[0].v_samp_factor = 1;
+ m_CInfo.comp_info[1].h_samp_factor = 1;
+ m_CInfo.comp_info[1].v_samp_factor = 1;
+ m_CInfo.comp_info[2].h_samp_factor = 1;
+ m_CInfo.comp_info[2].v_samp_factor = 1;
+ }
+
+ jpeg_start_compress(&m_CInfo, TRUE);
+
+ return true;
+}
+
+int JPEGCompressor::GetWidth() const
+{
+ return m_CInfo.image_width;
+}
+
+int JPEGCompressor::GetHeight() const
+{
+ return m_CInfo.image_height;
+}
+
+bool JPEGCompressor::WriteRow(uint8_t *row)
+{
+ if(setjmp(m_JErr.setjmp_buffer))
+ return false;
+
+ jpeg_write_scanlines(&m_CInfo, (JSAMPLE **) &row, 1);
+ return true;
+}
+
+bool JPEGCompressor::Finish()
+{
+ if(setjmp(m_JErr.setjmp_buffer))
+ return false;
+
+ jpeg_finish_compress(&m_CInfo);
+ return true;
+}
+
+bool JPEG::Read(FILE *f, Filter *pOutput, char error[1024])
+{
+ // JMSG_LENGTH_MAX (200) is comfortably below the 1024-byte error buffer used below.
+ m_pOutputFilter = pOutput;
+
+ struct jpeg_decompress_struct CInfo;
+ CInfo.err = jpeg_std_error(&m_JErr.pub);
+ m_JErr.pub.error_exit = jpeg_error_exit;
+ m_JErr.pub.emit_message = jpeg_warning;
+
+ bool Ret = false;
+ uint8_t *pBuf = NULL;
+ if(setjmp(m_JErr.setjmp_buffer))
+ {
+ memcpy(error, m_JErr.buffer, JMSG_LENGTH_MAX);
+ goto cleanup;
+ }
+
+ jpeg_create_decompress(&CInfo);
+
+ jpeg_stdio_src(&CInfo, f);
+ jpeg_read_header(&CInfo, TRUE);
+ CInfo.out_color_space = JCS_RGB;
+
+ jpeg_start_decompress(&CInfo);
+
+ if(!m_pOutputFilter->Init(CInfo.output_width, CInfo.output_height, 3))
+ {
+ // "error" is a pointer parameter here, so sizeof(error) is not the buffer size; use the declared 1024.
+ strncpy(error, m_pOutputFilter->GetError(), 1024);
+ error[1023] = 0;
+ goto cleanup;
+ }
+
+ pBuf = (uint8_t *) malloc(CInfo.output_width * 3);
+ if(pBuf == NULL)
+ {
+ strcpy(error, "out of memory");
+ goto cleanup;
+ }
+
+ while(CInfo.output_scanline < CInfo.output_height)
+ {
+ jpeg_read_scanlines(&CInfo, &pBuf, 1);
+
+ if(!m_pOutputFilter->WriteRow(pBuf))
+ {
+ strcpy(error, m_pOutputFilter->GetError());
+ goto cleanup;
+ }
+ }
+
+ if(!m_pOutputFilter->Finish())
+ {
+ strcpy(error, m_pOutputFilter->GetError());
+ goto cleanup;
+ }
+
+ jpeg_finish_decompress(&CInfo);
+
+ Ret = true;
+
+cleanup:
+ if(pBuf != NULL)
+ free(pBuf);
+ jpeg_destroy_decompress(&CInfo);
+
+ return Ret;
+}
+
diff --git a/lib/danbooru_image_resizer/JPEGReader.h b/lib/danbooru_image_resizer/JPEGReader.h
new file mode 100644
index 00000000..06c8625c
--- /dev/null
+++ b/lib/danbooru_image_resizer/JPEGReader.h
@@ -0,0 +1,50 @@
+#ifndef JPEG_READER_H
+#define JPEG_READER_H
+
+#include <stdio.h>
+#include <setjmp.h>
+#include <stdint.h>
+#include "jpeglib-extern.h"
+#include "Reader.h"
+#include "Filter.h"
+
+struct jpeg_error
+{
+ struct jpeg_error_mgr pub;
+ jmp_buf setjmp_buffer;
+ char buffer[JMSG_LENGTH_MAX];
+};
+
+class JPEG: public Reader
+{
+public:
+ bool Read(FILE *f, Filter *pOutput, char error[1024]);
+
+private:
+ Filter *m_pOutputFilter;
+ struct jpeg_error m_JErr;
+};
+
+class JPEGCompressor: public Filter
+{
+public:
+ JPEGCompressor(FILE *f);
+ ~JPEGCompressor();
+
+ bool Init(int iSourceWidth, int iSourceHeight, int iBPP);
+ void SetQuality(int quality);
+ bool WriteRow(uint8_t *row);
+ bool Finish();
+
+ int GetWidth() const;
+ int GetHeight() const;
+ const char *GetError() const;
+
+private:
+ FILE *m_File;
+ int m_iQuality;
+ struct jpeg_compress_struct m_CInfo;
+ struct jpeg_error m_JErr;
+};
+
+#endif
diff --git a/lib/danbooru_image_resizer/Makefile b/lib/danbooru_image_resizer/Makefile
new file mode 100644
index 00000000..6a7e68e0
--- /dev/null
+++ b/lib/danbooru_image_resizer/Makefile
@@ -0,0 +1,149 @@
+
+SHELL = /bin/sh
+
+#### Start of system configuration section. ####
+
+srcdir = .
+topdir = /usr/lib/ruby/1.8/x86_64-linux
+hdrdir = $(topdir)
+VPATH = $(srcdir):$(topdir):$(hdrdir)
+prefix = $(DESTDIR)/usr
+exec_prefix = $(prefix)
+sitedir = $(DESTDIR)/usr/local/lib/site_ruby
+rubylibdir = $(libdir)/ruby/$(ruby_version)
+docdir = $(datarootdir)/doc/$(PACKAGE)
+dvidir = $(docdir)
+datarootdir = $(prefix)/share
+archdir = $(rubylibdir)/$(arch)
+sbindir = $(exec_prefix)/sbin
+psdir = $(docdir)
+localedir = $(datarootdir)/locale
+htmldir = $(docdir)
+datadir = $(datarootdir)
+includedir = $(prefix)/include
+infodir = $(prefix)/share/info
+sysconfdir = $(DESTDIR)/etc
+mandir = $(prefix)/share/man
+libdir = $(exec_prefix)/lib
+sharedstatedir = $(prefix)/com
+oldincludedir = $(DESTDIR)/usr/include
+pdfdir = $(docdir)
+sitearchdir = $(sitelibdir)/$(sitearch)
+bindir = $(exec_prefix)/bin
+localstatedir = $(DESTDIR)/var
+sitelibdir = $(sitedir)/$(ruby_version)
+libexecdir = $(prefix)/lib/ruby1.8
+
+CC = g++
+LIBRUBY = $(LIBRUBY_SO)
+LIBRUBY_A = lib$(RUBY_SO_NAME)-static.a
+LIBRUBYARG_SHARED = -l$(RUBY_SO_NAME)
+LIBRUBYARG_STATIC = -l$(RUBY_SO_NAME)-static
+
+RUBY_EXTCONF_H =
+CFLAGS = -fPIC -O2 -Wall
+INCFLAGS = -I. -I. -I/usr/lib/ruby/1.8/x86_64-linux -I. -I/usr/local/include
+CPPFLAGS = -DHAVE_GD_H -DHAVE_GDIMAGECREATEFROMGIF -DHAVE_GDIMAGEJPEG -DHAVE_JPEG_SET_QUALITY -DHAVE_PNG_SET_EXPAND_GRAY_1_2_4_TO_8
+CXXFLAGS = $(CFLAGS)
+DLDFLAGS = -L. -Wl,-Bsymbolic-functions -rdynamic -Wl,-export-dynamic
+LDSHARED = $(CC) -shared
+AR = ar
+EXEEXT =
+
+RUBY_INSTALL_NAME = ruby1.8
+RUBY_SO_NAME = ruby1.8
+arch = x86_64-linux
+sitearch = x86_64-linux
+ruby_version = 1.8
+ruby = /usr/bin/ruby1.8
+RUBY = $(ruby)
+RM = rm -f
+MAKEDIRS = mkdir -p
+INSTALL = /usr/bin/install -c
+INSTALL_PROG = $(INSTALL) -m 0755
+INSTALL_DATA = $(INSTALL) -m 644
+COPY = cp
+
+#### End of system configuration section. ####
+
+preload =
+
+libpath = . $(libdir)
+LIBPATH = -L"." -L"$(libdir)"
+DEFFILE =
+
+CLEANFILES = mkmf.log
+DISTCLEANFILES =
+
+extout =
+extout_prefix =
+target_prefix =
+LOCAL_LIBS =
+LIBS = $(LIBRUBYARG_SHARED) -lpng -ljpeg -lgd -lpthread -ldl -lcrypt -lm -lc
+SRCS = ConvertToRGB.cpp GIFReader.cpp Resize.cpp JPEGReader.cpp Crop.cpp danbooru_image_resizer.cpp PNGReader.cpp
+OBJS = ConvertToRGB.o GIFReader.o Resize.o JPEGReader.o Crop.o danbooru_image_resizer.o PNGReader.o
+TARGET = danbooru_image_resizer
+DLLIB = $(TARGET).so
+EXTSTATIC =
+STATIC_LIB =
+
+RUBYCOMMONDIR = $(sitedir)$(target_prefix)
+RUBYLIBDIR = $(sitelibdir)$(target_prefix)
+RUBYARCHDIR = $(sitearchdir)$(target_prefix)
+
+TARGET_SO = $(DLLIB)
+CLEANLIBS = $(TARGET).so $(TARGET).il? $(TARGET).tds $(TARGET).map
+CLEANOBJS = *.o *.a *.s[ol] *.pdb *.exp *.bak
+
+all: $(DLLIB)
+static: $(STATIC_LIB)
+
+clean:
+ @-$(RM) $(CLEANLIBS) $(CLEANOBJS) $(CLEANFILES)
+
+distclean: clean
+ @-$(RM) Makefile $(RUBY_EXTCONF_H) conftest.* mkmf.log
+ @-$(RM) core ruby$(EXEEXT) *~ $(DISTCLEANFILES)
+
+realclean: distclean
+install: install-so install-rb
+
+install-so: $(RUBYARCHDIR)
+install-so: $(RUBYARCHDIR)/$(DLLIB)
+$(RUBYARCHDIR)/$(DLLIB): $(DLLIB)
+ $(INSTALL_PROG) $(DLLIB) $(RUBYARCHDIR)
+install-rb: pre-install-rb install-rb-default
+install-rb-default: pre-install-rb-default
+pre-install-rb: Makefile
+pre-install-rb-default: Makefile
+$(RUBYARCHDIR):
+ $(MAKEDIRS) $@
+
+site-install: site-install-so site-install-rb
+site-install-so: install-so
+site-install-rb: install-rb
+
+.SUFFIXES: .c .m .cc .cxx .cpp .C .o
+
+.cc.o:
+ $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) -c $<
+
+.cxx.o:
+ $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) -c $<
+
+.cpp.o:
+ $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) -c $<
+
+.C.o:
+ $(CXX) $(INCFLAGS) $(CPPFLAGS) $(CXXFLAGS) -c $<
+
+.c.o:
+ $(CC) $(INCFLAGS) $(CPPFLAGS) $(CFLAGS) -c $<
+
+$(DLLIB): $(OBJS)
+ @-$(RM) $@
+ $(LDSHARED) -o $@ $(OBJS) $(LIBPATH) $(DLDFLAGS) $(LOCAL_LIBS) $(LIBS)
+
+
+
+$(OBJS): ruby.h defines.h
diff --git a/lib/danbooru_image_resizer/PNGReader.cpp b/lib/danbooru_image_resizer/PNGReader.cpp
new file mode 100644
index 00000000..7fa1bd7a
--- /dev/null
+++ b/lib/danbooru_image_resizer/PNGReader.cpp
@@ -0,0 +1,139 @@
+#include <string.h>
+#include <errno.h>
+#include "PNGReader.h"
+#include "Resize.h"
+#include <stdio.h>
+using namespace std;
+
+void PNG::Error(png_struct *png, const char *error)
+{
+ png_error_info *info = (png_error_info *) png->error_ptr;
+ strncpy(info->err, error, 1024);
+ info->err[1023] = 0;
+ longjmp(png->jmpbuf, 1);
+}
+
+void PNG::Warning(png_struct *png, const char *warning)
+{
+}
+
+void PNG::InfoCallback(png_struct *png, png_info *info_ptr)
+{
+ PNG *data = (PNG *) png_get_progressive_ptr(png);
+
+ png_uint_32 width, height;
+ int bit_depth, color_type;
+ png_get_IHDR(png, info_ptr, &width, &height, &bit_depth, &color_type, NULL, NULL, NULL);
+
+ png_set_palette_to_rgb(png);
+ png_set_tRNS_to_alpha(png);
+ png_set_filler(png, 0xFF, PNG_FILLER_AFTER);
+ if(bit_depth < 8)
+ png_set_packing(png);
+ if(color_type == PNG_COLOR_TYPE_GRAY && bit_depth < 8)
+ png_set_expand_gray_1_2_4_to_8(png);
+ if(bit_depth == 16)
+ png_set_strip_16(png);
+ data->m_Passes = png_set_interlace_handling(png);
+
+ if (color_type == PNG_COLOR_TYPE_GRAY || color_type == PNG_COLOR_TYPE_GRAY_ALPHA)
+ png_set_gray_to_rgb(png);
+
+ if(!data->m_Rows.Init(width, height, 4))
+ Error(png, "out of memory");
+
+ png_read_update_info(png, info_ptr);
+
+ data->m_pOutputFilter->Init(width, height, 4);
+}
+
+void PNG::RowCallback(png_struct *png, png_byte *new_row, png_uint_32 row_num, int pass)
+{
+ PNG *data = (PNG *) png_get_progressive_ptr(png);
+
+ uint8_t *p = data->m_Rows.GetRow(row_num);
+ if(p == NULL)
+ Error(png, "out of memory");
+
+ png_progressive_combine_row(png, p, new_row);
+
+ if(pass != data->m_Passes - 1)
+ return;
+
+ /* We've allocated data->m_RowsAllocated, but if we're doing multiple passes, only
+ * rows 0 to row_num will actually have usable data. */
+ if(!data->m_pOutputFilter->WriteRow(p))
+ Error(png, data->m_pOutputFilter->GetError());
+
+ /* If we're interlaced, never discard rows. */
+ if(data->m_Passes == 1)
+ data->m_Rows.DiscardRows(row_num+1);
+}
+
+void PNG::EndCallback(png_struct *png, png_info *info)
+{
+ PNG *data = (PNG *) png_get_progressive_ptr(png);
+ data->m_Done = true;
+}
+
+
+bool PNG::Read(FILE *f, Filter *pOutput, char error[1024])
+{
+ m_pOutputFilter = pOutput;
+
+ png_error_info err;
+ err.err = error;
+
+ png_struct *png = png_create_read_struct(PNG_LIBPNG_VER_STRING, &err, Error, Warning);
+ if(png == NULL)
+ {
+ sprintf(error, "creating png_create_read_struct failed");
+ return false;
+ }
+
+ png_info *info_ptr = png_create_info_struct(png);
+ if(info_ptr == NULL)
+ {
+ png_destroy_read_struct(&png, NULL, NULL);
+ sprintf(error, "creating png_create_info_struct failed");
+ return false;
+ }
+
+ if(setjmp(png->jmpbuf))
+ {
+ png_destroy_read_struct(&png, &info_ptr, NULL);
+ return false;
+ }
+
+ png_set_progressive_read_fn(png, this, InfoCallback, RowCallback, EndCallback);
+
+ while(1)
+ {
+ png_byte buf[1024*16];
+ int ret = fread(buf, 1, sizeof(buf), f);
+ if(ret == 0)
+ break;
+ if(ferror(f))
+ {
+ strcpy(error, strerror(errno));
+ png_destroy_read_struct(&png, &info_ptr, NULL);
+ return false;
+ }
+
+ png_process_data(png, info_ptr, buf, ret);
+ }
+
+ if(!m_pOutputFilter->Finish())
+ Error(png, m_pOutputFilter->GetError());
+
+ if(!m_Done)
+ {
+ strcpy(error, "incomplete file");
+ png_destroy_read_struct(&png, &info_ptr, NULL);
+ return false;
+ }
+
+ png_destroy_read_struct(&png, &info_ptr, NULL);
+ return true;
+}
+
diff --git a/lib/danbooru_image_resizer/PNGReader.h b/lib/danbooru_image_resizer/PNGReader.h
new file mode 100644
index 00000000..a065152d
--- /dev/null
+++ b/lib/danbooru_image_resizer/PNGReader.h
@@ -0,0 +1,38 @@
+#ifndef PNG_READER_H
+#define PNG_READER_H
+
+#include <png.h>
+#include "Reader.h"
+#include "Filter.h"
+#include "RowBuffer.h"
+
+struct png_error_info
+{
+ char *err;
+};
+
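+/*
+ * Progressive libpng decoder.  Read() streams the file through libpng's
+ * progressive reader, expanding palette, grayscale and 16-bit input to
+ * 8-bit RGBA, and hands each completed row to the supplied output Filter.
+ */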
+class PNG: public Reader
+{
+public:
+ PNG()
+ {
+ m_Done = false;
+ }
+
+ bool Read(FILE *f, Filter *pOutput, char error[1024]);
+
+private:
+ RowBuffer<uint8_t> m_Rows;
+ Filter *m_pOutputFilter;
+
+ bool m_Done;
+ int m_Passes;
+
+ static void Error(png_struct *png, const char *error);
+ static void Warning(png_struct *png, const char *warning);
+ static void InfoCallback(png_struct *png, png_info *info_ptr);
+ static void RowCallback(png_struct *png, png_byte *new_row, png_uint_32 row_num, int pass);
+ static void EndCallback(png_struct *png, png_info *info);
+};
+
+#endif
diff --git a/lib/danbooru_image_resizer/Reader.h b/lib/danbooru_image_resizer/Reader.h
new file mode 100644
index 00000000..eb217ccf
--- /dev/null
+++ b/lib/danbooru_image_resizer/Reader.h
@@ -0,0 +1,14 @@
+#ifndef READER_H
+#define READER_H
+
+#include <stdio.h>
+
+class Filter;
+class Reader
+{
+public:
+ virtual ~Reader() { }
+ virtual bool Read(FILE *f, Filter *rp, char errorbuf[1024]) = 0;
+};
+
+#endif
diff --git a/lib/danbooru_image_resizer/Resize.cpp b/lib/danbooru_image_resizer/Resize.cpp
new file mode 100644
index 00000000..353f0b8a
--- /dev/null
+++ b/lib/danbooru_image_resizer/Resize.cpp
@@ -0,0 +1,286 @@
+#include <math.h>
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+#include <stdint.h>
+#include "Resize.h"
+#include "Filter.h"
+#include <algorithm>
+using namespace std;
+
+namespace
+{
+ inline float sincf(float x)
+ {
+ if(fabsf(x) < 1e-9)
+ return 1.0;
+
+ return sinf(x) / x;
+ }
+
+ inline double fract(double f)
+ {
+ return f - floor(f);
+ }
+}
+
+static const int KERNEL_SIZE = 3;
+
+LanczosFilter::LanczosFilter()
+{
+ m_pFilters = NULL;
+}
+
+LanczosFilter::~LanczosFilter()
+{
+ delete[] m_pFilters;
+}
+
+void LanczosFilter::Init(float fFactor)
+{
+ /* If we're reducing the image, each output pixel samples each input pixel in the
+ * range once, so we step one pixel. If we're enlarging it by 2x, each output pixel
+ * samples each input pixel twice, so we step half a pixel. */
+ m_fStep = 1;
+ if(fFactor > 1.0)
+ m_fStep = 1.0 / fFactor;
+
+ /* If we're sampling each pixel twice (m_fStep is .5), then we need twice as many taps
+ * to sample KERNEL_SIZE pixels. */
+ m_iTaps = (int) ceil(KERNEL_SIZE / m_fStep) * 2;
+
+ delete[] m_pFilters;
+ m_pFilters = NULL; // in case of exception
+ m_pFilters = new float[m_iTaps * 256];
+
+ float *pOutput = m_pFilters;
+ for(int i=0; i < 256; ++i)
+ {
+ float fOffset = i / 256.0f;
+
+ float fSum = 0;
+ for(int i = 0; i < m_iTaps; ++i)
+ {
+ float fPos = -(m_iTaps/2-1) - fOffset + i;
+ fPos *= m_fStep;
+
+ float fValue = 0;
+ if(fabs(fPos) < KERNEL_SIZE)
+ fValue = sincf(M_PI*fPos) * sincf(M_PI / KERNEL_SIZE * fPos);
+
+ pOutput[i] = fValue;
+ fSum += fValue;
+ }
+
+ /* Scale the filter so it sums to 1. */
+ for(int i = 0; i < m_iTaps; ++i)
+ pOutput[i] /= fSum;
+
+ pOutput += m_iTaps;
+ }
+}
+
+/* Return the precomputed taps closest to the fractional source offset fOffset in [0,1). */
+const float *LanczosFilter::GetFilter(float fOffset) const
+{
+ int iOffset = int(fOffset * 256);
+ if(iOffset > 255)
+ iOffset = 255;
+ return m_pFilters + iOffset * m_iTaps;
+}
+
+Resizer::Resizer(auto_ptr<Filter> pOutput):
+ m_pCompressor(pOutput)
+{
+ m_DestWidth = -1;
+ m_DestHeight = -1;
+ m_CurrentY = 0;
+ m_OutBuf = NULL;
+ m_szError = NULL;
+ m_iInputY = 0;
+}
+
+Resizer::~Resizer()
+{
+ if(m_OutBuf)
+ free(m_OutBuf);
+}
+
+const char *Resizer::GetError() const
+{
+ if(m_szError != NULL)
+ return m_szError;
+ return m_pCompressor->GetError();
+}
+
+bool Resizer::Init(int iSourceWidth, int iSourceHeight, int iBPP)
+{
+ assert(m_DestWidth != -1);
+ assert(m_DestHeight != -1);
+ assert(iBPP == 3);
+ m_SourceWidth = iSourceWidth;
+ m_SourceHeight = iSourceHeight;
+ m_SourceBPP = iBPP;
+
+ float fXFactor = float(m_SourceWidth) / m_DestWidth;
+ m_XFilter.Init(fXFactor);
+
+ float fYFactor = float(m_SourceHeight) / m_DestHeight;
+ m_YFilter.Init(fYFactor);
+
+ if(!m_Rows.Init(m_DestWidth, m_SourceHeight, m_SourceBPP, m_YFilter.m_iTaps))
+ {
+ m_szError = "out of memory";
+ return false;
+ }
+
+ m_OutBuf = (uint8_t *) malloc(m_DestWidth * m_SourceBPP);
+ if(m_OutBuf == NULL)
+ {
+ m_szError = "out of memory";
+ return false;
+ }
+
+ return m_pCompressor->Init(m_DestWidth, m_DestHeight, m_SourceBPP);
+}
+
+void Resizer::SetDest(int iDestWidth, int iDestHeight)
+{
+ m_DestWidth = iDestWidth;
+ m_DestHeight = iDestHeight;
+}
+
+static uint8_t *PadRow(const uint8_t *pSourceRow, int iWidth, int iBPP, int iPadding)
+{
+ uint8_t *pRow = new uint8_t[(iWidth + iPadding*2) * iBPP];
+ uint8_t *pDest = pRow;
+ for(int x = 0; x < iPadding; ++x)
+ {
+ for(int i = 0; i < iBPP; ++i)
+ pDest[i] = pSourceRow[i];
+ pDest += iBPP;
+ }
+
+ memcpy(pDest, pSourceRow, iWidth*iBPP*sizeof(uint8_t));
+ pDest += iWidth*iBPP;
+
+ for(int x = 0; x < iPadding; ++x)
+ {
+ for(int i = 0; i < iBPP; ++i)
+ pDest[i] = pSourceRow[i];
+ pDest += iBPP;
+ }
+
+ return pRow;
+}
+
+bool Resizer::WriteRow(uint8_t *pNewRow)
+{
+ if(m_SourceWidth == m_DestWidth && m_SourceHeight == m_DestHeight)
+ {
+ ++m_CurrentY;
+
+ /* We don't actually have any resizing to do, so short-circuit. */
+ if(!m_pCompressor->WriteRow((uint8_t *) pNewRow))
+ return false;
+
+ if(m_CurrentY != m_DestHeight)
+ return true;
+
+ return m_pCompressor->Finish();
+ }
+
+ /* Make a copy of pNewRow with the first and last pixel duplicated, so we don't have to do
+ * bounds checking in the inner loop below. */
+ uint8_t *pActualPaddedRow = PadRow(pNewRow, m_SourceWidth, m_SourceBPP, m_XFilter.m_iTaps/2);
+ const uint8_t *pPaddedRow = pActualPaddedRow + (m_XFilter.m_iTaps/2)*m_SourceBPP;
+
+ const float fXFactor = float(m_SourceWidth) / m_DestWidth;
+ const float fYFactor = float(m_SourceHeight) / m_DestHeight;
+
+ /* Run the horizontal filter on the incoming row, and drop the result into m_Rows. */
+ {
+ float *pRow = m_Rows.GetRow(m_iInputY);
+ ++m_iInputY;
+
+ float *pOutput = pRow;
+ for(int x = 0; x < m_DestWidth; ++x)
+ {
+ const double fSourceX = (x + 0.5f) * fXFactor;
+ const double fOffset = fract(fSourceX + 0.5);
+ const float *pFilter = m_XFilter.GetFilter(fOffset);
+ const int iStartX = lrint(fSourceX - m_XFilter.m_iTaps/2 + 1e-6);
+
+ const uint8_t *pSource = pPaddedRow + iStartX*3;
+
+ float fR = 0, fG = 0, fB = 0;
+ for(int i = 0; i < m_XFilter.m_iTaps; ++i)
+ {
+ float fWeight = *pFilter++;
+
+ fR += pSource[0] * fWeight;
+ fG += pSource[1] * fWeight;
+ fB += pSource[2] * fWeight;
+ pSource += 3;
+ }
+
+ pOutput[0] = fR;
+ pOutput[1] = fG;
+ pOutput[2] = fB;
+
+ pOutput += m_SourceBPP;
+ }
+ }
+ delete[] pActualPaddedRow;
+
+ const float *const *pSourceRows = m_Rows.GetRows();
+ while(m_CurrentY < m_DestHeight)
+ {
+ const double fSourceY = (m_CurrentY + 0.5) * fYFactor;
+ const double fOffset = fract(fSourceY + 0.5);
+ const int iStartY = lrint(fSourceY - m_YFilter.m_iTaps/2 + 1e-6);
+
+ /* iStartY is the first row we'll need, and we never move backwards. Discard rows
+ * before it to save memory. */
+ m_Rows.DiscardRows(iStartY);
+
+ if(m_iInputY != m_SourceHeight && iStartY+m_YFilter.m_iTaps >= m_iInputY)
+ return true;
+
+ /* Process the next output row. */
+ uint8_t *pOutput = m_OutBuf;
+ for(int x = 0; x < m_DestWidth; ++x)
+ {
+ const float *pFilter = m_YFilter.GetFilter(fOffset);
+
+ float fR = 0, fG = 0, fB = 0;
+ for(int i = 0; i < m_YFilter.m_iTaps; ++i)
+ {
+ const float *pSource = pSourceRows[iStartY+i];
+ pSource += x * m_SourceBPP;
+
+ float fWeight = *pFilter++;
+ fR += pSource[0] * fWeight;
+ fG += pSource[1] * fWeight;
+ fB += pSource[2] * fWeight;
+ }
+
+ pOutput[0] = (uint8_t) max(0, min(255, (int) lrintf(fR)));
+ pOutput[1] = (uint8_t) max(0, min(255, (int) lrintf(fG)));
+ pOutput[2] = (uint8_t) max(0, min(255, (int) lrintf(fB)));
+
+ pOutput += 3;
+ }
+
+ if(!m_pCompressor->WriteRow((uint8_t *) m_OutBuf))
+ return false;
+ ++m_CurrentY;
+ }
+
+ if(m_CurrentY == m_DestHeight)
+ {
+ if(!m_pCompressor->Finish())
+ return false;
+ }
+
+ return true;
+}
+
diff --git a/lib/danbooru_image_resizer/Resize.h b/lib/danbooru_image_resizer/Resize.h
new file mode 100644
index 00000000..946daada
--- /dev/null
+++ b/lib/danbooru_image_resizer/Resize.h
@@ -0,0 +1,56 @@
+#ifndef RESIZE_H
+#define RESIZE_H
+
+#include "RowBuffer.h"
+#include "Filter.h"
+#include <memory>
+using namespace std;
+#include <stdint.h>
+
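+/*
+ * One-dimensional Lanczos (windowed-sinc) filter.  Init() precomputes 256
+ * phase-shifted sets of m_iTaps weights for a given scale factor; GetFilter()
+ * returns the set matching a fractional source offset.
+ */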
+struct LanczosFilter
+{
+ LanczosFilter();
+ ~LanczosFilter();
+ void Init(float fFactor);
+ const float *GetFilter(float fOffset) const;
+
+ float m_fStep;
+ int m_iTaps;
+ float *m_pFilters;
+};
+
+class Resizer: public Filter
+{
+public:
+ Resizer(auto_ptr<Filter> pCompressor);
+ ~Resizer();
+
+ // BPP is 3 or 4, indicating RGB or RGBA.
+ bool Init(int iSourceWidth, int iSourceHeight, int BPP);
+ void SetDest(int iDestWidth, int iDestHeight);
+ bool WriteRow(uint8_t *pNewRow);
+ bool Finish() { return true; }
+
+ const char *GetError() const;
+
+private:
+ auto_ptr<Filter> m_pCompressor;
+ uint8_t *m_OutBuf;
+ RowBuffer<float> m_Rows;
+ const char *m_szError;
+
+ int m_SourceWidth;
+ int m_SourceHeight;
+ int m_SourceBPP;
+
+ int m_DestWidth;
+ int m_DestHeight;
+
+ LanczosFilter m_XFilter;
+ LanczosFilter m_YFilter;
+
+ int m_iInputY;
+ int m_CurrentY;
+};
+
+#endif
diff --git a/lib/danbooru_image_resizer/RowBuffer.h b/lib/danbooru_image_resizer/RowBuffer.h
new file mode 100644
index 00000000..2c961830
--- /dev/null
+++ b/lib/danbooru_image_resizer/RowBuffer.h
@@ -0,0 +1,137 @@
+#ifndef ROW_BUFFER_H
+#define ROW_BUFFER_H
+
+#include <assert.h>
+#include <string.h>
+#include <stdlib.h>
+#include <stdint.h>
+#include "RowBuffer.h"
+#include <algorithm>
+using namespace std;
+
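+/*
+ * Sliding window of image rows.  Rows are allocated lazily by GetRow() and freed
+ * with DiscardRows(), so only the rows still needed by the downstream filter stay
+ * resident in memory.
+ */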
+template<typename T>
+class RowBuffer
+{
+public:
+ RowBuffer()
+ {
+ m_Rows = NULL;
+ m_ActualRows = NULL;
+ m_StartRow = 0;
+ m_EndRow = 0;
+ m_BPP = 0;
+ m_Height = 0;
+ }
+
+ ~RowBuffer()
+ {
+ for(int i = 0; i < m_Height; ++i)
+ delete [] m_Rows[i];
+
+ delete [] m_ActualRows;
+ }
+
+ /*
+ * If iVertPadding is non-zero, simulate padding on the top and bottom of the image. After
+ * row 0 is written, rows [-1 ... -iVertPadding] will point to the same row. After the bottom
+ * row is written, the following iVertPadding will also point to the last row. These rows
+ * are discarded when the row they refer to is discarded.
+ */
+ bool Init(int iWidth, int iHeight, int iBPP, int iVertPadding = 0)
+ {
+ m_Width = iWidth;
+ m_Height = iHeight;
+ m_BPP = iBPP;
+ m_iVertPadding = iVertPadding;
+
+ m_ActualRows = new T *[iHeight + iVertPadding*2];
+ m_Rows = m_ActualRows + iVertPadding;
+ memset(m_ActualRows, 0, sizeof(T *) * (iHeight + iVertPadding*2));
+
+ return true;
+ }
+
+ /* Return row, allocating if necessary. */
+ T *GetRow(int Row)
+ {
+ assert(m_BPP > 0);
+
+ if(m_Rows[Row] == NULL)
+ {
+ m_Rows[Row] = new T[m_Width*m_BPP];
+ if(Row == 0)
+ {
+ for(int i = -m_iVertPadding; i < 0; ++i)
+ m_Rows[i] = m_Rows[0];
+ }
+ if(Row == m_Height - 1)
+ {
+ for(int i = m_Height; i < m_Height + m_iVertPadding; ++i)
+ m_Rows[i] = m_Rows[m_Height - 1];
+ }
+ if(m_Rows[Row] == NULL)
+ return NULL;
+ if(m_StartRow == m_EndRow)
+ {
+ m_StartRow = Row;
+ m_EndRow = m_StartRow + 1;
+ }
+ }
+
+ if(int(Row) == m_StartRow+1)
+ {
+ while(m_StartRow != 0 && m_Rows[m_StartRow-1])
+ --m_StartRow;
+ }
+
+ if(int(Row) == m_EndRow)
+ {
+ while(m_EndRow < m_Height && m_Rows[m_EndRow])
+ ++m_EndRow;
+ }
+ return m_Rows[Row];
+ }
+
+ // Free rows [0,DiscardRow).
+ void DiscardRows(int DiscardRow)
+ {
+ assert(m_BPP > 0);
+ if(DiscardRow > m_Height)
+ DiscardRow = m_Height;
+
+ for(int i = m_StartRow; i < DiscardRow; ++i)
+ {
+ delete [] m_Rows[i];
+ m_Rows[i] = NULL;
+ }
+
+ m_StartRow = max(m_StartRow, DiscardRow);
+ m_EndRow = max(m_EndRow, DiscardRow);
+ }
+
+ /* Get a range of rows allocated in m_Rows: [m_StartRow,m_EndRow). If
+ * more than one allocated range exists, which range is returned is undefined. */
+ int GetStartRow() const { return m_StartRow; }
+ int GetEndRow() const { return m_EndRow; }
+ const T *const *GetRows() const { return m_Rows; }
+
+private:
+ /* Array of image rows. These are allocated as needed. */
+ T **m_Rows;
+
+ /* The actual pointer m_Rows is contained in. m_Rows may be offset from this to
+ * implement padding. */
+ T **m_ActualRows;
+
+ /* in m_Rows is allocated: */
+ int m_StartRow;
+ int m_EndRow;
+
+ int m_Width;
+ int m_Height;
+ int m_BPP;
+ int m_iVertPadding;
+};
+
+#endif
+
diff --git a/lib/danbooru_image_resizer/danbooru_image_resizer.bundle b/lib/danbooru_image_resizer/danbooru_image_resizer.bundle
new file mode 100644
index 00000000..06c8d74e
Binary files /dev/null and b/lib/danbooru_image_resizer/danbooru_image_resizer.bundle differ
diff --git a/lib/danbooru_image_resizer/danbooru_image_resizer.cpp b/lib/danbooru_image_resizer/danbooru_image_resizer.cpp
new file mode 100644
index 00000000..f77dbbc2
--- /dev/null
+++ b/lib/danbooru_image_resizer/danbooru_image_resizer.cpp
@@ -0,0 +1,106 @@
+#include <ruby.h>
+#include <stdio.h>
+#include <string.h>
+#include <memory>
+using namespace std;
+#include "PNGReader.h"
+#include "GIFReader.h"
+#include "JPEGReader.h"
+#include "Resize.h"
+#include "Crop.h"
+#include "ConvertToRGB.h"
+
+static VALUE danbooru_module;
+
+static VALUE danbooru_resize_image(VALUE module, VALUE file_ext_val, VALUE read_path_val, VALUE write_path_val,
+ VALUE output_width_val, VALUE output_height_val,
+ VALUE crop_top_val, VALUE crop_bottom_val, VALUE crop_left_val, VALUE crop_right_val,
+ VALUE output_quality_val)
+{
+ const char * file_ext = StringValueCStr(file_ext_val);
+ const char * read_path = StringValueCStr(read_path_val);
+ const char * write_path = StringValueCStr(write_path_val);
+ int output_width = NUM2INT(output_width_val);
+ int output_height = NUM2INT(output_height_val);
+ int output_quality = NUM2INT(output_quality_val);
+ int crop_top = NUM2INT(crop_top_val);
+ int crop_bottom = NUM2INT(crop_bottom_val);
+ int crop_left = NUM2INT(crop_left_val);
+ int crop_right = NUM2INT(crop_right_val);
+
+ FILE *read_file = fopen(read_path, "rb");
+ if(read_file == NULL)
+ rb_raise(rb_eIOError, "can't open %s\n", read_path);
+
+ FILE *write_file = fopen(write_path, "wb");
+ if(write_file == NULL)
+ {
+ fclose(read_file);
+ rb_raise(rb_eIOError, "can't open %s\n", write_path);
+ }
+
+ bool ret = false;
+ char error[1024];
+
+ try
+ {
+ auto_ptr<Reader> pReader(NULL);
+ if (!strcmp(file_ext, "jpg") || !strcmp(file_ext, "jpeg"))
+ pReader.reset(new JPEG);
+ else if (!strcmp(file_ext, "gif"))
+ pReader.reset(new GIF);
+ else if (!strcmp(file_ext, "png"))
+ pReader.reset(new PNG);
+ else
+ {
+ strcpy(error, "unknown filetype");
+ goto cleanup;
+ }
+
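+ /* Build the output chain back to front: the JPEG compressor is wrapped by the
+ * resizer, then an optional cropper, then an RGB converter. The reader feeds
+ * decoded rows into the front of the chain (pFilter). */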
+ auto_ptr<Filter> pFilter(NULL);
+
+ {
+ auto_ptr<JPEGCompressor> pCompressor(new JPEGCompressor(write_file));
+ pCompressor->SetQuality(output_quality);
+ pFilter.reset(pCompressor.release());
+ }
+
+ {
+ auto_ptr<Resizer> pResizer(new Resizer(pFilter));
+ pResizer->SetDest(output_width, output_height);
+ pFilter.reset(pResizer.release());
+ }
+
+ if(crop_bottom > crop_top && crop_right > crop_left)
+ {
+ auto_ptr<Crop> pCropper(new Crop(pFilter));
+ pCropper->SetCrop(crop_top, crop_bottom, crop_left, crop_right);
+ pFilter.reset(pCropper.release());
+ }
+
+ {
+ auto_ptr<ConvertToRGB> pConverter(new ConvertToRGB(pFilter));
+ pFilter.reset(pConverter.release());
+ }
+
+ ret = pReader->Read(read_file, pFilter.get(), error);
+ }
+ catch(const std::bad_alloc &e)
+ {
+ strcpy(error, "out of memory");
+ }
+
+cleanup:
+ fclose(read_file);
+ fclose(write_file);
+
+ if(!ret)
+ rb_raise(rb_eException, "%s", error);
+
+ return INT2FIX(0);
+}
+
+extern "C" void Init_danbooru_image_resizer() {
+ danbooru_module = rb_define_module("Danbooru");
+ rb_define_module_function(danbooru_module, "resize_image", (VALUE(*)(...))danbooru_resize_image, 10);
+}
diff --git a/lib/danbooru_image_resizer/danbooru_image_resizer.rb b/lib/danbooru_image_resizer/danbooru_image_resizer.rb
new file mode 100644
index 00000000..74726e42
--- /dev/null
+++ b/lib/danbooru_image_resizer/danbooru_image_resizer.rb
@@ -0,0 +1,83 @@
+require 'danbooru_image_resizer/danbooru_image_resizer.so'
+
+module Danbooru
+ class ResizeError < Exception; end
+
+ # If output_quality is an integer, it specifies the JPEG output quality to use.
+ #
+ # If it's a hash, it's of this form:
+ # { :min => 90, :max => 100, :filesize => 1048576 }
+ #
+ # This will search for the highest quality compression under :filesize between 90 and 100.
+ # This lets clean, low-noise images keep a high quality setting, while noisy
+ # images that would exceed :filesize are recompressed at progressively lower quality.
+ def resize(file_ext, read_path, write_path, output_size, output_quality)
+ if output_quality.class == Fixnum
+ output_quality = { :min => output_quality, :max => output_quality, :filesize => 1024*1024*1024 }
+ end
+
+ # A binary search is a poor fit here: we'd always have to do at least two compressions
+ # to find out whether the conversion we've done is the maximum fit, and most images will
+ # generally fit with maximum-quality compression anyway. Just search linearly from :max
+ # down.
+ quality = output_quality[:max]
+ begin
+ while true
+ # If :crop is set, crop between [crop_top,crop_bottom) and [crop_left,crop_right)
+ # before resizing.
+ Danbooru.resize_image(file_ext, read_path, write_path, output_size[:width], output_size[:height],
+ output_size[:crop_top] || 0, output_size[:crop_bottom] || 0, output_size[:crop_left] || 0, output_size[:crop_right] || 0,
+ quality)
+
+ # If the file is small enough, or if we're at the lowest allowed quality setting
+ # already, finish.
+ return if !output_quality[:filesize].nil? && File.size(write_path) <= output_quality[:filesize]
+ return if quality <= output_quality[:min]
+ quality -= 1
+ end
+ rescue IOError
+ raise
+ rescue Exception => e
+ raise ResizeError, e.to_s
+ end
+ end
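+
+ # Example usage (illustrative; the paths and sizes are made up):
+ #
+ # Danbooru.resize("jpg", "/tmp/in.jpg", "/tmp/out.jpg",
+ # {:width => 150, :height => 150},
+ # {:min => 90, :max => 95, :filesize => 30 * 1024})
+ #
+ # writes a 150x150 JPEG, starting at quality 95 and stepping down toward 90
+ # until the output file is no larger than 30KB.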
+
+ # If allow_enlarge is true, always scale to fit, even if the source area is
+ # smaller than max_size.
+ def reduce_to(size, max_size, ratio = 1, allow_enlarge = false)
+ ret = size.dup
+
+ if allow_enlarge
+ if ret[:width] < max_size[:width]
+ scale = max_size[:width].to_f / ret[:width].to_f
+ ret[:width] = ret[:width] * scale
+ ret[:height] = ret[:height] * scale
+ end
+
+ if max_size[:height] && (ret[:height] < ratio * max_size[:height])
+ scale = max_size[:height].to_f / ret[:height].to_f
+ ret[:width] = ret[:width] * scale
+ ret[:height] = ret[:height] * scale
+ end
+ end
+
+ if ret[:width] > ratio * max_size[:width]
+ scale = max_size[:width].to_f / ret[:width].to_f
+ ret[:width] = ret[:width] * scale
+ ret[:height] = ret[:height] * scale
+ end
+
+ if max_size[:height] && (ret[:height] > ratio * max_size[:height])
+ scale = max_size[:height].to_f / ret[:height].to_f
+ ret[:width] = ret[:width] * scale
+ ret[:height] = ret[:height] * scale
+ end
+
+ ret[:width] = ret[:width].round
+ ret[:height] = ret[:height].round
+ ret
+ end
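+
+ # Example (illustrative): reduce_to({:width => 1600, :height => 1200},
+ # {:width => 150, :height => 150}) scales by 150/1600 and returns
+ # {:width => 150, :height => 113}.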
+
+ module_function :resize
+ module_function :reduce_to
+end
diff --git a/lib/danbooru_image_resizer/danbooru_image_resizer.so b/lib/danbooru_image_resizer/danbooru_image_resizer.so
new file mode 100755
index 00000000..c51db064
Binary files /dev/null and b/lib/danbooru_image_resizer/danbooru_image_resizer.so differ
diff --git a/lib/danbooru_image_resizer/extconf.rb b/lib/danbooru_image_resizer/extconf.rb
new file mode 100644
index 00000000..78db2ac7
--- /dev/null
+++ b/lib/danbooru_image_resizer/extconf.rb
@@ -0,0 +1,28 @@
+#!/bin/env ruby
+
+require 'mkmf'
+
+CONFIG['CC'] = "g++"
+CONFIG['LDSHARED'] = CONFIG['LDSHARED'].sub(/^cc /,'g++ ') # otherwise we would not link with the C++ runtime
+$INCFLAGS << " -I/usr/local/include"
+
+dir_config("gd")
+dir_config("jpeg")
+dir_config("png")
+
+have_header("gd.h")
+
+have_library("gd")
+have_library("jpeg")
+have_library("png")
+
+have_func("gdImageCreateFromGif", "gd.h")
+have_func("gdImageJpeg", "gd.h")
+have_func("jpeg_set_quality", ["stdlib.h", "stdio.h", "jpeglib-extern.h"])
+have_func("png_set_expand_gray_1_2_4_to_8", "png.h")
+
+#with_cflags("-O0 -g -Wall") {true}
+with_cflags("-O2 -Wall") {true}
+#with_cflags("-O0 -g -fno-exceptions -Wall") {true}
+
+create_makefile("danbooru_image_resizer")
diff --git a/lib/danbooru_image_resizer/jpeglib-extern.h b/lib/danbooru_image_resizer/jpeglib-extern.h
new file mode 100644
index 00000000..de92d400
--- /dev/null
+++ b/lib/danbooru_image_resizer/jpeglib-extern.h
@@ -0,0 +1,16 @@
+// Needed for OS X
+
+#ifndef JPEGLIB_EXTERN_H
+#define JPEGLIB_EXTERN_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <jpeglib.h>
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/lib/danbooru_image_resizer/test-out-95.jpg b/lib/danbooru_image_resizer/test-out-95.jpg
new file mode 100644
index 00000000..5e9601a8
Binary files /dev/null and b/lib/danbooru_image_resizer/test-out-95.jpg differ
diff --git a/lib/danbooru_image_resizer/test-out-96.jpg b/lib/danbooru_image_resizer/test-out-96.jpg
new file mode 100644
index 00000000..bf4147b6
Binary files /dev/null and b/lib/danbooru_image_resizer/test-out-96.jpg differ
diff --git a/lib/danbooru_image_resizer/test-out-97.jpg b/lib/danbooru_image_resizer/test-out-97.jpg
new file mode 100644
index 00000000..fa857bc2
Binary files /dev/null and b/lib/danbooru_image_resizer/test-out-97.jpg differ
diff --git a/lib/danbooru_image_resizer/test-out-98.jpg b/lib/danbooru_image_resizer/test-out-98.jpg
new file mode 100644
index 00000000..96c89349
Binary files /dev/null and b/lib/danbooru_image_resizer/test-out-98.jpg differ
diff --git a/lib/danbooru_image_resizer/test.png b/lib/danbooru_image_resizer/test.png
new file mode 100644
index 00000000..629db7b4
Binary files /dev/null and b/lib/danbooru_image_resizer/test.png differ
diff --git a/lib/danbooru_image_resizer/test.rb b/lib/danbooru_image_resizer/test.rb
new file mode 100644
index 00000000..aab00c33
--- /dev/null
+++ b/lib/danbooru_image_resizer/test.rb
@@ -0,0 +1,7 @@
+#!/usr/local/bin/ruby
+require 'danbooru_image_resizer.so'
+[95,96,97,98].each { |n|
+ Danbooru.resize_image("png", "test.png", "test-out-#{n}.jpg", 2490, 3500,
+ 0, 0, 0, 0, n)
+}
+
diff --git a/lib/diff.rb b/lib/diff.rb
new file mode 100644
index 00000000..3596f197
--- /dev/null
+++ b/lib/diff.rb
@@ -0,0 +1,61 @@
+module Danbooru
+ TAG_DEL = '<del>'
+ TAG_INS = '<ins>'
+ TAG_DEL_CLOSE = '</del>'
+ TAG_INS_CLOSE = '</ins>'
+ TAG_NEWLINE = "↲\n"
+ TAG_BREAK = "<br>\n"
+
+ # Produce a formatted page that shows the difference between two versions of a page.
+ def diff(old, new)
+ pattern = Regexp.new('(?:<.+?>)|(?:[0-9_A-Za-z\x80-\xff]+[\x09\x20]?)|(?:[ \t]+)|(?:\r?\n)|(?:.+?)')
+
+ thisarr = old.scan(pattern)
+ otharr = new.scan(pattern)
+
+ cbo = Diff::LCS::ContextDiffCallbacks.new
+ diffs = thisarr.diff(otharr, cbo)
+
+ escape_html = lambda {|str| str.gsub(/&/,'&amp;').gsub(/</,'&lt;').gsub(/>/,'&gt;')}
+
+ output = thisarr;
+ output.each { |q| q.replace(escape_html[q]) }
+
+ diffs.reverse_each do |hunk|
+ newchange = hunk.max{|a,b| a.old_position <=> b.old_position}
+ newstart = newchange.old_position
+ oldstart = hunk.min{|a,b| a.old_position <=> b.old_position}.old_position
+
+ if newchange.action == '+'
+ output.insert(newstart, TAG_INS_CLOSE)
+ end
+
+ hunk.reverse_each do |chg|
+ case chg.action
+ when '-'
+ oldstart = chg.old_position
+ output[chg.old_position] = TAG_NEWLINE if chg.old_element.match(/^\r?\n$/)
+ when '+'
+ if chg.new_element.match(/^\r?\n$/)
+ output.insert(chg.old_position, TAG_NEWLINE)
+ else
+ output.insert(chg.old_position, "#{escape_html[chg.new_element]}")
+ end
+ end
+ end
+
+ if newchange.action == '+'
+ output.insert(newstart, TAG_INS)
+ end
+
+ if hunk[0].action == '-'
+ output.insert((newstart == oldstart || newchange.action != '+') ? newstart+1 : newstart, TAG_DEL_CLOSE)
+ output.insert(oldstart, TAG_DEL)
+ end
+ end
+
+ output.join.gsub(/\r?\n/, TAG_BREAK)
+ end
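+
+ # Example (illustrative): Danbooru.diff(old_text, new_text) returns old_text as
+ # HTML, with removed tokens wrapped in TAG_DEL/TAG_DEL_CLOSE, inserted tokens
+ # wrapped in TAG_INS/TAG_INS_CLOSE, and newlines rendered with TAG_BREAK.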
+
+ module_function :diff
+end
diff --git a/lib/download.rb b/lib/download.rb
new file mode 100644
index 00000000..5aa66d3b
--- /dev/null
+++ b/lib/download.rb
@@ -0,0 +1,61 @@
+module Danbooru
+ # Download the given URL, following redirects; once we have the result, yield the response.
+ def http_get_streaming(source, options = {}, &block)
+ max_size = options[:max_size] || CONFIG["max_image_size"]
+ max_size = nil if max_size == 0 # unlimited
+
+ limit = 4
+
+ while true
+ url = URI.parse(source)
+
+ unless url.is_a?(URI::HTTP)
+ raise SocketError, "URL must be HTTP"
+ end
+
+ Net::HTTP.start(url.host, url.port) do |http|
+ http.read_timeout = 10
+
+ headers = {
+ "User-Agent" => "#{CONFIG["app_name"]}/#{CONFIG["version"]}",
+ "Referer" => source
+ }
+
+ if source =~ /pixiv\.net/
+ headers["Referer"] = "http://www.pixiv.net"
+
+ # Don't download the small version
+ if source =~ %r!(/img/.+?/.+?)_m.+$!
+ match = $1
+ source.sub!(match + "_m", match)
+ end
+ end
+
+ http.request_get(url.request_uri, headers) do |res|
+ case res
+ when Net::HTTPSuccess then
+ if max_size
+ len = res["Content-Length"]
+ raise SocketError, "File is too large (#{len} bytes)" if len && len.to_i > max_size
+ end
+
+ return yield(res)
+
+ when Net::HTTPRedirection then
+ if limit == 0 then
+ raise SocketError, "Too many redirects"
+ end
+ source = res["location"]
+ limit -= 1
+
+ else
+ raise SocketError, "HTTP error code: #{res.code} #{res.message}"
+ end
+ end
+ end
+ end
+ end
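+
+ # Example usage (illustrative; the URL is made up):
+ #
+ # Danbooru.http_get_streaming("http://example.com/image.jpg") do |res|
+ # File.open("/tmp/image.jpg", "wb") { |f| f.write(res.read_body) }
+ # end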
+
+ module_function :http_get_streaming
+end
+
diff --git a/lib/dtext.rb b/lib/dtext.rb
new file mode 100644
index 00000000..ab4c8ad9
--- /dev/null
+++ b/lib/dtext.rb
@@ -0,0 +1,124 @@
+#!/usr/bin/env ruby
+
+require 'cgi'
+
+module DText
+ def parse_inline(str)
+ str = CGI.escapeHTML(str)
+ str.gsub!(/\[\[.+?\]\]/m) do |tag|
+ tag = tag[2..-3]
+ if tag =~ /^(.+?)\|(.+)$/
+ tag = $1
+ name = $2
+ '' + name + ''
+ else
+ '' + tag + ''
+ end
+ end
+ str.gsub!(/\{\{.+?\}\}/m) do |tag|
+ tag = tag[2..-3]
+ '' + tag + ''
+ end
+ str.gsub!(/[Pp]ost #(\d+)/, 'post #\1')
+ str.gsub!(/[Ff]orum #(\d+)/, 'forum #\1')
+ str.gsub!(/[Cc]omment #(\d+)/, 'comment #\1')
+ str.gsub!(/[Pp]ool #(\d+)/, 'pool #\1')
+ str.gsub!(/\n/m, " ")
+ str.gsub!(/\[b\](.+?)\[\/b\]/, '\1')
+ str.gsub!(/\[i\](.+?)\[\/i\]/, '\1')
+ str.gsub!(/\[spoilers?\](.+?)\[\/spoilers?\]/m, 'spoiler\1')
+ str.gsub!(/\[spoilers?(=(.+))\](.+?)\[\/spoilers?\]/m, '\2\3')
+
+ # Ruby regexes are in the localization dark ages, so we need to match UTF-8 characters
+ # manually:
+ utf8_char = '[\xC0-\xFF][\x80-\xBF]+'
+
+ url = "(h?ttps?:\\/\\/(?:[a-zA-Z0-9_\\-#~%.,:;\\(\\)\\[\\]$@!&=+?\\/#]|#{utf8_char})+)"
+ str.gsub!(/#{url}|<<#{url}(?:\|(.+?))?>>/m) do |link| # url or <>
+ if $1 then
+ link = $1
+ url = link.gsub(/[.;,:'"]+$/, "")
+ if url =~ /^ttp/ then url = "h" + url end
+ '' + link + ''
+ else
+ link = $2
+ text = $3
+ '' + text + ''
+ end
+ end
+ str
+ end
+
+ def parse_list(str)
+ html = ""
+ layout = []
+ nest = 0
+
+ str.split(/\n/).each do |line|
+ if line =~ /^\s*(\*+) (.+)/
+ nest = $1.size
+ content = parse_inline($2)
+ else
+ content = parse_inline(line)
+ end
+
+ if nest > layout.size
+ html += "<ul>"
+ layout << "ul"
+ end
+
+ while nest < layout.size
+ elist = layout.pop
+ if elist
+ html += "</#{elist}>"
+ end
+ end
+
+ html += "<li>#{content}</li>"
+ end
+
+ while layout.any?
+ elist = layout.pop
+ html += "</#{elist}>"
+ end
+
+ html
+ end
+
+ def parse(str)
+ # Make sure quote tags are surrounded by newlines
+ str.gsub!(/\s*\[quote\]\s*/m, "\n\n[quote]\n\n")
+ str.gsub!(/\s*\[\/quote\]\s*/m, "\n\n[/quote]\n\n")
+ str.gsub!(/(?:\r?\n){3,}/, "\n\n")
+ str.strip!
+ blocks = str.split(/(?:\r?\n){2}/)
+
+ html = blocks.map do |block|
+ case block
+ when /^(h[1-6])\.\s*(.+)$/
+ tag = $1
+ content = $2
+ "<#{tag}>" + parse_inline(content) + "</#{tag}>"
+
+ when /^\s*\*+ /
+ parse_list(block)
+
+ when "[quote]"
+ '<blockquote>'
+
+ when "[/quote]"
+ '</blockquote>'
+
+ else
+ '<p>' + parse_inline(block) + '</p>'
+ end
+ end
+
+ html.join("")
+ end
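+
+ # Example (illustrative):
+ # DText.parse("h1. Title\n\n* one\n* two")
+ # # => "<h1>Title</h1><ul><li>one</li><li>two</li></ul>"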
+
+ module_function :parse_inline
+ module_function :parse_list
+ module_function :parse
+end
+
diff --git a/lib/error_logging.rb b/lib/error_logging.rb
new file mode 100644
index 00000000..1e44c633
--- /dev/null
+++ b/lib/error_logging.rb
@@ -0,0 +1,39 @@
+module ActionController #:nodoc:
+ module Rescue
+ protected
+ alias_method :orig_log_error, :log_error
+ def log_error(exception) #:doc:
+ case exception
+ when
+ ActiveRecord::RecordNotFound,
+ ActionController::UnknownController,
+ ActionController::UnknownAction,
+ ActionController::RoutingError
+ return
+ end
+
+ ActiveSupport::Deprecation.silence do
+ if ActionView::TemplateError === exception
+ logger.fatal(exception.to_s)
+ else
+ text = "\n\n"
+ text << "#{exception.class} (#{exception.message}) #{self.controller_name}/#{self.action_name}\n"
+ text << "Host: #{request.env["REMOTE_ADDR"]}\n"
+ text << "U-A: #{request.env["HTTP_USER_AGENT"]}\n"
+
+
+
+ text << "Parameters: #{request.parameters.inspect}\n" if not request.parameters.empty?
+ text << "Cookies: #{request.cookies.inspect}\n" if not request.cookies.empty?
+ text << " "
+ text << clean_backtrace(exception).join("\n ")
+ text << "\n\n"
+ logger.fatal(text)
+ end
+ end
+
+# orig_log_error exception
+ end
+ end
+end
+
diff --git a/lib/external_post.rb b/lib/external_post.rb
new file mode 100644
index 00000000..352a585c
--- /dev/null
+++ b/lib/external_post.rb
@@ -0,0 +1,35 @@
+class ExternalPost
+ # These mimic the equivalent attributes in Post directly.
+ attr_accessor :md5, :url, :preview_url, :service, :width, :height, :tags, :rating, :id
+
+ class << self
+ def get_service_icon(service)
+ if service == CONFIG["local_image_service"] then
+ "/favicon.ico"
+ elsif service == "gelbooru.com" then # hack
+ "/favicon-" + service + ".png"
+ else
+ "/favicon-" + service + ".ico"
+ end
+ end
+ end
+
+ def service_icon
+ ExternalPost.get_service_icon(service)
+ end
+ def ext
+ true
+ end
+ def cached_tags
+ tags
+ end
+
+ def to_xml(options = {})
+ {:md5 => md5, :url => url, :preview_url => preview_url, :service => service}.to_xml(options.merge(:root => "external-post"))
+ end
+
+ def preview_dimensions
+ dim = Danbooru.reduce_to({:width => width, :height => height}, {:width => 150, :height => 150})
+ return [dim[:width], dim[:height]]
+ end
+end
diff --git a/lib/fix_form_tag.rb b/lib/fix_form_tag.rb
new file mode 100644
index 00000000..801e7049
--- /dev/null
+++ b/lib/fix_form_tag.rb
@@ -0,0 +1,20 @@
+require "action_view/helpers/tag_helper.rb"
+
+# submit_tag "Search" generates a submit tag that adds "commit=Search" to the URL,
+# which is ugly and unnecessary. Override TagHelper#tag and remove this globally.
+module ActionView
+ module Helpers
+ module TagHelper
+ alias_method :orig_tag, :tag
+ def tag(name, options = nil, open = false, escape = true)
+
+ if name == :input && options["type"] == "submit" && options["name"] == "commit" && options["value"] == "Search"
+ options.delete("name")
+ end
+
+ orig_tag name, options, open, escape
+ end
+ end
+ end
+end
+
diff --git a/lib/html_4_tags.rb b/lib/html_4_tags.rb
new file mode 100644
index 00000000..507a5dbb
--- /dev/null
+++ b/lib/html_4_tags.rb
@@ -0,0 +1,29 @@
+# Override default tag helper to output HTML 4 code
+module ActionView
+ module Helpers #:nodoc:
+ module TagHelper
+ # Disable open; validates better...
+ def tag(name, options = nil, open = true, escape = true)
+ # workaround: PicLens is rendered as HTML, instead of XML, so don't force open tags
+ # based on MIME type instead of template_format
+ if headers["Content-Type"] != "application/rss+xml"
+ open = true
+ end
+
+ "<#{name}#{tag_options(options, escape) if options}" + (open ? ">" : " />")
+ end
+ end
+
+ module AssetTagHelper
+ def stylesheet_tag(source, options)
+ tag("link", { "rel" => "stylesheet", "type" => Mime::CSS, "media" => "screen", "href" => html_escape(path_to_stylesheet(source)) }.merge(options), false, false)
+ end
+ end
+
+ class InstanceTag
+ def tag(name, options = nil, open = true, escape = true)
+ "<#{name}#{tag_options(options, escape) if options}" + (open ? ">" : " />")
+ end
+ end
+ end
+end
diff --git a/lib/memcache_util_store.rb b/lib/memcache_util_store.rb
new file mode 100644
index 00000000..2c57ec9a
--- /dev/null
+++ b/lib/memcache_util_store.rb
@@ -0,0 +1,100 @@
+##
+# A copy of the MemCacheStore that uses memcache-client instead of ruby-memcache.
+#
+# Mod by Geoffrey Grosenbach http://topfunky.com
+
+begin
+ require 'cgi/session'
+ require 'memcache_util'
+
+ class CGI
+ class Session
+ # MemCache-based session storage class.
+ #
+ # This builds upon the top-level MemCache class provided by the
+ # library file memcache.rb. Session data is marshalled and stored
+ # in a memcached cache.
+ class MemcacheUtilStore
+ def check_id(id) #:nodoc:#
+ /[^0-9a-zA-Z]+/ =~ id.to_s ? false : true
+ end
+
+ # Create a new CGI::Session::MemcacheUtilStore instance
+ #
+ # This constructor is used internally by CGI::Session. The
+ # user does not generally need to call it directly.
+ #
+ # +session+ is the session for which this instance is being
+ # created. The session id must only contain alphanumeric
+ # characters; automatically generated session ids observe
+ # this requirement.
+ #
+ # +options+ is a hash of options for the initializer. The
+ # following options are recognized:
+ #
+ # cache:: an instance of a MemCache client to use as the
+ # session cache.
+ #
+ # expires:: an expiry time value to use for session entries in
+ # the session cache. +expires+ is interpreted in seconds
+ # relative to the current time if it is less than 60*60*24*30
+ # (30 days), or as an absolute Unix time (e.g., Time#to_i) if
+ # greater. If +expires+ is +0+, or not passed on +options+,
+ # the entry will never expire.
+ #
+ # This session's memcache entry will be created if it does
+ # not exist, or retrieved if it does.
+ def initialize(session, options = {})
+ id = session.session_id
+ unless check_id(id)
+ raise ArgumentError, "session_id '%s' is invalid" % id
+ end
+ @expires = options['expires'] || 0
+ @session_key = "session:#{id}"
+ @session_data = {}
+ end
+
+ # Restore session state from the session's memcache entry.
+ #
+ # Returns the session state as a hash.
+ def restore
+ begin
+ @session_data = Cache.get(@session_key) || {}
+ rescue
+ @session_data = {}
+ end
+ end
+
+ # Save session state to the session's memcache entry.
+ def update
+ begin
+ Cache.put(@session_key, @session_data, @expires)
+ rescue
+ # Ignore session update failures.
+ end
+ end
+
+ # Update and close the session's memcache entry.
+ def close
+ update
+ end
+
+ # Delete the session's memcache entry.
+ def delete
+ begin
+ Cache.delete(@session_key)
+ rescue
+ # Ignore session delete failures.
+ end
+ @session_data = {}
+ end
+
+ def data
+ @session_data
+ end
+ end
+ end
+ end
+rescue LoadError
+ # MemCache wasn't available so neither can the store be
+end
diff --git a/lib/mirror.rb b/lib/mirror.rb
new file mode 100644
index 00000000..09fa1bfd
--- /dev/null
+++ b/lib/mirror.rb
@@ -0,0 +1,158 @@
+module Mirrors
+ class MirrorError < Exception; end
+
+ def ssh_open_pipe(mirror, command, timeout=30)
+ remote_user_host = "#{mirror[:user]}@#{mirror[:host]}"
+ ret = nil
+ IO.popen("/usr/bin/ssh -o Compression=no -o BatchMode=yes -o ConnectTimeout=#{timeout} #{remote_user_host} '#{command}'") do |f|
+ ret = yield(f)
+ end
+ if ($? & 0xFF) != 0 then
+ raise MirrorError, "Command \"%s\" to %s exited with signal %i" % [command, mirror[:host], $? & 0xFF]
+ end
+ if ($? >> 8) != 0 then
+ raise MirrorError, "Command \"%s\" to %s exited with status %i" % [command, mirror[:host], $? >> 8]
+ end
+ return ret
+ end
+ module_function :ssh_open_pipe
+
+ # Copy a file to all mirrors. file is an absolute path which must be
+ # located in public/data; the files will land in the equivalent public/data
+ # on each mirror.
+ #
+ # Because we have no mechanism for indicating that a file is only available on
+ # certain mirrors, if any mirror fails to upload, MirrorError will be thrown
+ # and the file should be treated as completely unwarehoused.
+ def copy_file_to_mirrors(file, options={})
+ # CONFIG[:data_dir] is equivalent to our local_base.
+ local_base = "#{RAILS_ROOT}/public/data/"
+ options = { :timeout => 30 }.merge(options)
+
+ if file[0,local_base.length] != local_base then
+ raise "Invalid filename to mirror: \"%s\"" % file
+ end
+
+ expected_md5 = File.open(file, 'rb') {|fp| Digest::MD5.hexdigest(fp.read)}
+
+ CONFIG["mirrors"].each { |mirror|
+ remote_user_host = "#{mirror[:user]}@#{mirror[:host]}"
+ remote_filename = "#{mirror[:data_dir]}/#{file[local_base.length, file.length]}"
+
+ # Tolerate a few errors in case of communication problems.
+ retry_count = 0
+
+ begin
+ # Check if the file is already mirrored before we spend time uploading it.
+ # Linux needs md5sum; FreeBSD needs md5 -q.
+ actual_md5 = Mirrors.ssh_open_pipe(mirror,
+ "if [ -f #{remote_filename} ]; then (which md5sum >/dev/null) && md5sum #{remote_filename} || md5 -q #{remote_filename}; fi",
+ timeout=options[:timeout]) do |f| f.gets end
+ if actual_md5 =~ /^[0-9a-f]{32}/
+ actual_md5 = actual_md5.slice(0, 32)
+ if expected_md5 == actual_md5
+ next
+ end
+ end
+
+ if not system("/usr/bin/scp", "-pq", "-o", "Compression no", "-o", "BatchMode=yes",
+ "-o", "ConnectTimeout=%i" % timeout,
+ file, "#{remote_user_host}:#{remote_filename}") then
+ raise MirrorError, "Error copying #{file} to #{remote_user_host}:#{remote_filename}"
+ end
+
+ # Don't trust scp; verify the files.
+ actual_md5 = Mirrors.ssh_open_pipe(mirror, "if [ -f #{remote_filename} ]; then (which md5sum >/dev/null) && md5sum #{remote_filename} || md5 -q #{remote_filename}; fi") do |f| f.gets end
+ if actual_md5 !~ /^[0-9a-f]{32}/
+ raise MirrorError, "Error verifying #{remote_user_host}:#{remote_filename}: #{actual_md5}"
+ end
+
+ actual_md5 = actual_md5.slice(0, 32)
+
+ if expected_md5 != actual_md5
+ raise MirrorError, "Verifying #{remote_user_host}:#{remote_filename} failed: got #{actual_md5}, expected #{expected_md5}"
+ end
+ rescue MirrorError => e
+ retry_count += 1
+ raise if retry_count == 3
+
+ retry
+ end
+ }
+ end
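+
+ # Example usage (illustrative; the file must live under public/data):
+ #
+ # Mirrors.copy_file_to_mirrors("#{RAILS_ROOT}/public/data/sample/abcd1234.jpg")
+ #
+ # raises MirrorError if a mirror still can't be updated after three attempts.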
+ module_function :copy_file_to_mirrors
+
+ # Return a URL prefix for a file. If not warehoused, always returns the main
+ # server. If a seed is specified, seeds the server selection; otherwise, each
+ # IP will always use the same server.
+ #
+ # If :zipfile is set, ignore mirrors with the :nozipfile flag.
+ if CONFIG["image_store"] == :remote_hierarchy
+ def select_main_image_server
+ return CONFIG["url_base"] if !CONFIG["image_servers"] || CONFIG["image_servers"].empty?
+ raise 'CONFIG["url_base"] is set incorrectly; please see config/default_config.rb' if CONFIG["image_servers"][0].class == String
+
+ return CONFIG["image_servers"][0][:server]
+ end
+
+ def select_image_server(is_warehoused, seed = 0, options = {})
+ return CONFIG["url_base"] if !CONFIG["image_servers"] || CONFIG["image_servers"].empty?
+ raise 'CONFIG["url_base"] is set incorrectly; please see config/default_config.rb' if CONFIG["image_servers"][0].class == String
+
+ if not is_warehoused
+ # return CONFIG["url_base"]
+ return CONFIG["image_servers"][0][:server]
+ end
+
+ mirrors = CONFIG["image_servers"]
+ if(options[:preview]) then
+ mirrors = mirrors.select { |mirror|
+ mirror[:nopreview] != true
+ }
+ end
+
+ if not options[:preview] then
+ mirrors = mirrors.select { |mirror|
+ mirror[:previews_only] != true
+ }
+ end
+
+ if(options[:zipfile]) then
+ mirrors = mirrors.select { |mirror|
+ mirror[:nozipfile] != true
+ }
+ end
+
+ raise "No usable mirrors" if mirrors.empty?
+
+ total_weights = 0
+ mirrors.each { |s| total_weights += s[:traffic] }
+
+ seed += Thread.current["danbooru-ip_addr_seed"] || 0
+ seed %= total_weights
+
+ server = nil
+ mirrors.each { |s|
+ w = s[:traffic]
+ if seed < w
+ server = s
+ break
+ end
+
+ seed -= w
+ }
+ server ||= mirrors[0]
+
+ return server[:server]
+ end
+ else
+ def select_main_image_server
+ return CONFIG["url_base"]
+ end
+ def select_image_server(is_warehoused, seed = 0, options = {})
+ return CONFIG["url_base"]
+ end
+ end
+ module_function :select_main_image_server
+ module_function :select_image_server
+end
diff --git a/lib/multipart.rb b/lib/multipart.rb
new file mode 100644
index 00000000..e61ccda9
--- /dev/null
+++ b/lib/multipart.rb
@@ -0,0 +1,30 @@
+require 'net/http'
+require 'mime/types'
+
+class Net::HTTP::Post
+ def multipart=(params=[])
+ boundary_token = "--multipart-boundary"
+ self.content_type = "multipart/form-data; boundary=#{boundary_token}"
+
+ self.body = ""
+ params.each { |p|
+ self.body += "--#{boundary_token}\r\n"
+ self.body += "Content-Disposition: form-data; name=#{p[:name]}"
+ self.body += "; filename=#{p[:filename]}" if p[:filename]
+ self.body += "\r\n"
+ if p[:binary] then
+ self.body += "Content-Transfer-Encoding: binary\r\n"
+
+ mime_type = "application/octet-stream"
+ if p[:filename]
+ mime_types = MIME::Types.of(p[:filename])
+ mime_type = mime_types.first.content_type unless mime_types.empty?
+ end
+
+ self.body += "Content-Type: #{mime_type}\r\n"
+ end
+ self.body += "\r\n#{p[:data].to_s}\r\n"
+ }
+ self.body += "--#{boundary_token}--\r\n"
+ end
+end
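+
+# Example usage (illustrative; the path and field names are made up):
+#
+# req = Net::HTTP::Post.new("/post/create")
+# req.multipart = [
+# { :name => "post[title]", :data => "example" },
+# { :name => "post[file]", :filename => "example.png", :data => file_data, :binary => true }
+# ]
+# Net::HTTP.start("example.com") { |http| http.request(req) }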
diff --git a/lib/nagato.rb b/lib/nagato.rb
new file mode 100644
index 00000000..c3b900a9
--- /dev/null
+++ b/lib/nagato.rb
@@ -0,0 +1,186 @@
+# Nagato is a library that allows you to programmatically build SQL queries.
+module Nagato
+ # Represents a single subquery.
+ class Subquery
+ # === Parameters
+ # * :join:: Can be either "and" or "or". All the conditions will be joined using this string.
+ def initialize(join = "and")
+ @join = join.upcase
+ @conditions = []
+ @condition_params = []
+ end
+
+ # Returns true if the subquery is empty.
+ def empty?
+ return @conditions.empty?
+ end
+
+ # Returns an array of 1 or more elements, the first being a SQL fragment and the rest being placeholder parameters.
+ def conditions
+ if @conditions.empty?
+ return ["TRUE"]
+ else
+ return [@conditions.join(" " + @join + " "), *@condition_params]
+ end
+ end
+
+ # Creates a subquery (within the current subquery).
+ #
+ # === Parameters
+ # * :join:: Can be either "and" or "or". This will be passed on to the generated subquery.
+ def subquery(join = "and")
+ subconditions = self.class.new(join)
+ yield(subconditions)
+ c = subconditions.conditions
+ @conditions << "(#{c[0]})"
+ @condition_params += c[1..-1]
+ end
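+
+ # Example (illustrative; assumes a condition-adding method like the one
+ # documented below):
+ #
+ # q = Nagato::Subquery.new("and")
+ # q.subquery("or") {|sub| ... add "score > ?" and "fav_count > ?" here ... }
+ # q.conditions # => ["(score > ? OR fav_count > ?)", 10, 5]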
+
+ # Adds a condition to the subquery. If the condition has placeholder parameters, you can pass them in directly in :params:.
+ #
+ # === Parameters
+ # * :sql:: A SQL fragment.
+ # * :params
No comments.
+ <% end %> + + <% @posts.each do |post| %> +