Xenofex
10/10/2014 - 7:52 AM

CarrierWave with backup url from the fog, quick and dirty. Useful for pulling production data to development, in case you have to pull all the UGC as well

CarrierWave with backup url from the fog, quick and dirty. Useful for pulling production data to development, in case you have to pull all the UGC as well

  # Copy the file from S3 to local public folder
  def copy_to_local!(recursive=true)
    public_folder = Rails.root.join("public")

    path = self.path
    if path.start_with?(public_folder.to_s)
      local_path = path
      path = path[(public_folder.to_s.length+1)..path.length]
    else
      local_path = public_folder.join(path)
    end

    remote_url = "s3://#{S3_CONFIG['S3_BUCKET_NAME']}/#{path}"

    if File.exists?(local_path)
      puts " * Skipped existing file: #{local_path}"
    else
      cmd = "aws s3 cp #{remote_url} #{local_path}"
      puts " * #{cmd}"
      system cmd
    end

    versions.values.each { |v| v.copy_to_local! } if recursive
  end

  # Asset URL with a remote fallback for development.
  #
  # In development, if the file exists locally the normal CarrierWave URL is
  # returned; otherwise a direct S3 URL is returned, so a database pulled
  # from production/staging renders without downloading every asset (which
  # may be 10GB+). Outside development this is plain CarrierWave behavior.
  #
  # @return [String, nil] the local or S3 URL (nil per CarrierWave when unset)
  def url(*args)
    if Rails.env.development?
      # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
      # `||` instead of `or`: same result here, but `or`'s low precedence is
      # a well-known trap and the style guide forbids it in boolean logic.
      if self.path.nil? || File.exist?(self.path)
        super
      else
        public_folder = Rails.root.join("public")

        path = self.path
        if path.start_with?(public_folder.to_s)
          # Strip "<rails_root>/public/" to recover the S3 object key.
          path = path[(public_folder.to_s.length + 1)..path.length]
        end

        "http://s3.amazonaws.com/#{S3_CONFIG['S3_BUCKET_NAME']}/#{path}"
      end
    else
      super
    end
  end