Sha256: 8fc446213d6e5af2ca9dcae2de67d7edd40ab10da9532d6d20256b96fa6b484f

Size: 1.58 KB

Versions: 3

Contents

require "json"
class Cache
	attr_accessor :size, :last_fetched

	def initialize(path:)
		@config = {
			:path => path
		}

		@size = File.readable?(@config[:path]) ? JSON.parse(File.read(@config[:path]).to_s)["data"].count : 0

		@last_fetched = File.readable?(@config[:path]) ? JSON.parse(File.read(@config[:path]).to_s)["last_fetched"] : nil
	end

	# Returns the parsed cache, or an empty skeleton when the cache file
	# does not exist yet or is not readable.
	def read_from_file
		if File.readable? @config[:path]
			JSON.parse File.read(@config[:path])
		else
			{
				"size" => 0,
				"last_fetched" => nil,
				"data" => []
			}
		end
	end

	# Drops every cached entry whose id appears in ids, then persists the
	# result. A single reject! pass avoids the classic bug of deleting from
	# an array while iterating over it, which silently skips elements.
	def remove_entries_from_file(ids:)
		cache = read_from_file
		ids = ids.map(&:to_i)

		cache["data"].reject! { |cached_entry| ids.include? cached_entry["id"].to_i }

		self.size = cache["data"].count
		write_to_file data: cache
	end

	# Appends entries that are not yet in the cache. The "content" field is
	# stripped before writing so the cache file stays small.
	def add_entries_to_file(data:)
		cache = read_from_file
		new_entries_count = 0

		data.each do |new_entry|
			unless cache["data"].any? { |cache_entry| cache_entry["id"] == new_entry["id"] }
				cache["data"].push new_entry.reject { |key, _| key == "content" }
				new_entries_count += 1
			end
		end

		self.size = cache["data"].count
		self.last_fetched = Date.today
		write_to_file data: cache

		puts "#{new_entries_count} new entries were written to cache."
	end

	# Stamps the cache with the current size and last_fetched values and
	# serialises it to disk. Note that Date values round-trip as strings.
	def write_to_file(data:)
		data["size"] = @size
		data["last_fetched"] = @last_fetched
		File.write @config[:path], data.to_json
	end
end
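
A minimal usage sketch; the cache path and entry hashes below are illustrative, not part of the gem:

cache = Cache.new path: "miniflux_cache.json"

entries = [
	{ "id" => 1, "title" => "First entry", "content" => "<p>…</p>" },
	{ "id" => 2, "title" => "Second entry", "content" => "<p>…</p>" }
]

cache.add_entries_to_file data: entries   # persists both entries, minus "content"
cache.remove_entries_from_file ids: [1]   # drops entry 1 and rewrites the file
puts cache.size                           # => 1

# The file now contains, roughly:
# {"size":1,"last_fetched":"…","data":[{"id":2,"title":"Second entry"}]}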

Version data entries

3 entries across 3 versions of 1 gem

Version                 Path
miniflux_sanity-0.2.2   lib/utils/cache.rb
miniflux_sanity-0.2.1   lib/utils/cache.rb
miniflux_sanity-0.2.0   lib/utils/cache.rb