From 56f077db6973cd98732e2ae5277d0b4fd18ecd7c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?R=C3=A9gis=20Hanol?= <regis@hanol.fr>
Date: Mon, 1 Jun 2015 11:13:56 +0200
Subject: [PATCH] FIX: optimized images fail if source is remote and S3 is
 disabled

---
 Gemfile.lock                  |  3 ---
 lib/file_store/base_store.rb  | 34 +++++++++++++++++++++++++++
 lib/file_store/local_store.rb |  3 ++-
 lib/file_store/s3_store.rb    | 43 ++---------------------------------
 4 files changed, 38 insertions(+), 45 deletions(-)

diff --git a/Gemfile.lock b/Gemfile.lock
index 2b2f3aa8d..e69072faf 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -484,6 +484,3 @@ DEPENDENCIES
   uglifier
   unf
   unicorn
-
-BUNDLED WITH
-   1.10.2
diff --git a/lib/file_store/base_store.rb b/lib/file_store/base_store.rb
index 29e9fe97f..e416df240 100644
--- a/lib/file_store/base_store.rb
+++ b/lib/file_store/base_store.rb
@@ -53,6 +53,19 @@ module FileStore
     end
 
     def download(upload)
+      DistributedMutex.synchronize("download_#{upload.sha1}") do
+        filename = "#{upload.sha1}#{File.extname(upload.original_filename)}"
+        file = get_from_cache(filename)
+
+        if !file
+          max_file_size_kb = [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes
+          url = SiteSetting.scheme + ":" + upload.url
+          file = FileHelper.download(url, max_file_size_kb, "discourse-download", true)
+          cache_file(file, filename)
+        end
+
+        file
+      end
     end
 
     def purge_tombstone(grace_period)
@@ -74,6 +87,27 @@ module FileStore
       get_path_for("optimized".freeze, upload.id, upload.sha1, extension)
     end
 
+    CACHE_DIR ||= "#{Rails.root}/tmp/download_cache/"
+    CACHE_MAXIMUM_SIZE ||= 500
+
+    def get_cache_path_for(filename)
+      "#{CACHE_DIR}#{filename}"
+    end
+
+    def get_from_cache(filename)
+      path = get_cache_path_for(filename)
+      File.open(path) if File.exists?(path)
+    end
+
+    def cache_file(file, filename)
+      path = get_cache_path_for(filename)
+      dir = File.dirname(path)
+      FileUtils.mkdir_p(dir) unless Dir[dir].present?
+      FileUtils.cp(file.path, path)
+      # keep up to 500 files
+      `ls -tr #{CACHE_DIR} | head -n +#{CACHE_MAXIMUM_SIZE} | xargs rm -f`
+    end
+
   end
 
 end
diff --git a/lib/file_store/local_store.rb b/lib/file_store/local_store.rb
index d173bec43..20255931d 100644
--- a/lib/file_store/local_store.rb
+++ b/lib/file_store/local_store.rb
@@ -1,4 +1,4 @@
-require 'file_store/base_store'
+require_dependency 'file_store/base_store'
 
 module FileStore
 
@@ -41,6 +41,7 @@ module FileStore
     end
 
     def path_for(upload)
+      return unless upload && has_been_uploaded?(upload.url)
       "#{public_dir}#{upload.url}"
     end
 
diff --git a/lib/file_store/s3_store.rb b/lib/file_store/s3_store.rb
index 44ed2cdda..352bbd3eb 100644
--- a/lib/file_store/s3_store.rb
+++ b/lib/file_store/s3_store.rb
@@ -1,7 +1,7 @@
-require "file_store/base_store"
+require_dependency "file_store/base_store"
+require_dependency "file_store/local_store"
 require_dependency "s3_helper"
 require_dependency "file_helper"
-require_dependency "file_store/local_store"
 
 module FileStore
 
@@ -66,24 +66,6 @@ module FileStore
       true
     end
 
-    def download(upload)
-      return unless has_been_uploaded?(upload.url)
-
-      DistributedMutex.synchronize("s3_download_#{upload.sha1}") do
-        filename = "#{upload.sha1}#{File.extname(upload.original_filename)}"
-        file = get_from_cache(filename)
-
-        if !file
-          max_file_size_kb = [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes
-          url = SiteSetting.scheme + ":" + upload.url
-          file = FileHelper.download(url, max_file_size_kb, "discourse-s3", true)
-          cache_file(file, filename)
-        end
-
-        file
-      end
-    end
-
     def purge_tombstone(grace_period)
       @s3_helper.update_tombstone_lifecycle(grace_period)
     end
@@ -110,27 +92,6 @@ module FileStore
       UserAvatar.external_avatar_url(user_id, avatar.upload_id, avatar.width)
     end
 
-    CACHE_DIR ||= "#{Rails.root}/tmp/s3_cache/"
-    CACHE_MAXIMUM_SIZE ||= 500
-
-    def get_cache_path_for(filename)
-      "#{CACHE_DIR}#{filename}"
-    end
-
-    def get_from_cache(filename)
-      path = get_cache_path_for(filename)
-      File.open(path) if File.exists?(path)
-    end
-
-    def cache_file(file, filename)
-      path = get_cache_path_for(filename)
-      dir = File.dirname(path)
-      FileUtils.mkdir_p(dir) unless Dir[dir].present?
-      FileUtils.cp(file.path, path)
-      # keep up to 500 files
-      `ls -tr #{CACHE_DIR} | head -n +#{CACHE_MAXIMUM_SIZE} | xargs rm -f`
-    end
-
     def s3_bucket
       return @s3_bucket if @s3_bucket
       raise Discourse::SiteSettingMissing.new("s3_upload_bucket") if SiteSetting.s3_upload_bucket.blank?