# discourse/lib/file_store/base_store.rb
module FileStore

  # Abstract base class for upload storage backends (e.g. local disk, S3).
  # Defines the storage contract; methods that call +not_implemented+ must be
  # overridden by concrete subclasses. Also provides a shared, size-bounded
  # local download cache keyed by the upload's sha1.
  class BaseStore

    # Local directory for the download cache (trailing slash intentional —
    # paths are built by direct concatenation in get_cache_path_for).
    CACHE_DIR ||= "#{Rails.root}/tmp/download_cache/"
    # Maximum number of files retained in the download cache.
    CACHE_MAXIMUM_SIZE ||= 500

    # Stores the physical file for an Upload record at its sharded path.
    # content_type is accepted for subclass compatibility but unused here.
    def store_upload(file, upload, content_type = nil)
      path = get_path_for_upload(upload)
      store_file(file, path)
    end

    # Stores the physical file for an OptimizedImage record.
    def store_optimized_image(file, optimized_image)
      path = get_path_for_optimized_image(optimized_image)
      store_file(file, path)
    end

    # Subclass hook: persist +file+ at +path+ and return its URL.
    def store_file(file, path, opts = {})
      not_implemented
    end

    def remove_upload(upload)
      remove_file(upload.url, get_path_for_upload(upload))
    end

    def remove_optimized_image(optimized_image)
      remove_file(optimized_image.url, get_path_for_optimized_image(optimized_image))
    end

    # Subclass hook: delete the stored file identified by +url+ / +path+.
    def remove_file(url, path)
      not_implemented
    end

    # Subclass hook: true when +url+ points at a file this store manages.
    def has_been_uploaded?(url)
      not_implemented
    end

    def download_url(upload)
      not_implemented
    end

    def cdn_url(url)
      not_implemented
    end

    def absolute_base_url
      not_implemented
    end

    def relative_base_url
      not_implemented
    end

    # Subclass hook: true for stores living outside this host (e.g. S3).
    def external?
      not_implemented
    end

    def internal?
      !external?
    end

    # Subclass hook: local filesystem path for +upload+, when applicable.
    def path_for(upload)
      not_implemented
    end

    # Fetches the upload's file to a local tmp file, memoized in the download
    # cache by sha1. The distributed mutex prevents concurrent processes from
    # downloading the same upload twice.
    def download(upload)
      DistributedMutex.synchronize("download_#{upload.sha1}") do
        filename = "#{upload.sha1}#{File.extname(upload.original_filename)}"
        file = get_from_cache(filename)

        if !file
          max_file_size_kb = [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes
          url = SiteSetting.scheme + ":" + upload.url
          file = FileHelper.download(
            url,
            max_file_size: max_file_size_kb,
            tmp_file_name: "discourse-download",
            follow_redirect: true
          )
          cache_file(file, filename)
        end

        file
      end
    end

    # Subclass hook: permanently delete tombstoned files older than
    # +grace_period+. No-op by default.
    def purge_tombstone(grace_period)
    end

    # Depth of the sharding tree for a given id: 0 below 1,000 records,
    # then one additional level per power of 16.
    def get_depth_for(id)
      [0, Math.log(id / 1_000.0, 16).ceil].max
    end

    # Builds the sharded storage path, e.g. "original/2X/a/abcdef….png":
    # the first +depth+ hex chars of the sha become nested directories.
    def get_path_for(type, id, sha, extension)
      depth = get_depth_for(id)
      tree = File.join(*sha[0, depth].split(""), "")
      "#{type}/#{depth + 1}X/#{tree}#{sha}#{extension}"
    end

    def get_path_for_upload(upload)
      extension =
        if upload.extension
          ".#{upload.extension}"
        else
          # Maintain backward compatibility before Jobs::MigrateUploadExtensions
          # runs
          File.extname(upload.original_filename)
        end

      get_path_for("original".freeze, upload.id, upload.sha1, extension)
    end

    def get_path_for_optimized_image(optimized_image)
      upload = optimized_image.upload
      extension = "_#{OptimizedImage::VERSION}_#{optimized_image.width}x#{optimized_image.height}#{optimized_image.extension}"
      get_path_for("optimized".freeze, upload.id, upload.sha1, extension)
    end

    # Cache path is simply CACHE_DIR + filename (CACHE_DIR ends in "/").
    def get_cache_path_for(filename)
      "#{CACHE_DIR}#{filename}"
    end

    # Returns an open File handle for a cached download, or nil on cache miss.
    def get_from_cache(filename)
      path = get_cache_path_for(filename)
      # File.exist? — File.exists? is a deprecated alias
      File.open(path) if File.exist?(path)
    end

    # Copies +file+ into the download cache and evicts the oldest entries so
    # at most CACHE_MAXIMUM_SIZE files remain.
    def cache_file(file, filename)
      path = get_cache_path_for(filename)
      # mkdir_p is idempotent, no need to check for existence first
      FileUtils.mkdir_p(File.dirname(path))
      FileUtils.cp(file.path, path)

      # Keep only the newest CACHE_MAXIMUM_SIZE files. (The previous shell
      # pipeline `ls -tr | head -n +N | xargs rm -f` selected the OLDEST N
      # entries — i.e. every entry while under the limit — and passed bare
      # filenames to rm, which resolves them against the process cwd rather
      # than CACHE_DIR.)
      cached = Dir[File.join(CACHE_DIR, "*")].sort_by { |f| File.mtime(f) }
      excess = cached.size - CACHE_MAXIMUM_SIZE
      FileUtils.rm_f(cached.first(excess)) if excess > 0
    end

    private

    def not_implemented
      raise "Not implemented."
    end

  end
end