lib/scrivito/cms_backend.rb in scrivito_sdk-1.0.0 vs lib/scrivito/cms_backend.rb in scrivito_sdk-1.1.0.rc1
- old
+ new
@@ -1,81 +1,31 @@
-require 'addressable/uri'
-
 module Scrivito
-
-  class ContentServiceObjQueries
-    def initialize(queries)
-      @queries = queries
-      @open_queries = queries.dup
-      @results = {}
-    end
-
-    def open_queries
-      @open_queries[0..99]
-    end
-
-    def handle_response(response)
-      objs = {}
-      response["objs"].each do |obj|
-        objs[obj["_id"].first] = obj
-      end
-
-      queries_to_delete = []
-      response["results"].each_with_index do |response, i|
-        query = @open_queries[i]
-        if response["continuation_handle"]
-          query[:continuation_handle] = response["continuation_handle"]
-        else
-          queries_to_delete << i
-        end
-        result = (@results[query.__id__] ||= [])
-        response["refs"].each do |obj_ref|
-          id = obj_ref["id"]
-          # TODO fetch missing ObjData from Service
-          result << (objs[id] or raise "Data for Obj with id #{id} missing!")
-        end
-      end
-      queries_to_delete.reverse_each {|i| @open_queries.delete_at(i) }
-    end
-
-    def results
-      @queries.map {|query| @results[query.__id__] || [] }
-    end
-
-    def finished?
-      open_queries.empty?
-    end
-  end
-
   class CmsBackend
     VALID_INDEX_NAMES = %w[id path ppath permalink].freeze
     class << self
       def instance
         @instance ||= new
       end
     end
-    attr_accessor :die_content_service
-
     def initialize
       @query_counter = 0
       @caching = true
-      @die_content_service = true
     end
     def begin_caching
       @caching = true
     end
     def end_caching
-      CmsDataCache.cache.clear
+      CmsDataCache.clear_request_cache
       @caching = false
     end
     def clear_cache
-      CmsDataCache.cache.clear
+      CmsDataCache.clear_request_cache
     end
     def caching?
       !!@caching
     end
@@ -88,129 +38,80 @@
     def reset_query_counter!
       @query_counter = 0
     end
     def find_workspace_data_from_cache(id)
-      if die_content_service
-        cached_workspace_state = CmsDataCache.read_workspace_state(id)
-        cached_data_tag = cached_workspace_state.try(:second)
-        cached_content_state_id = cached_workspace_state.try(:first)
+      cached_workspace_state = CmsDataCache.read_workspace_state(id)
+      cached_data_tag = cached_workspace_state.try(:second)
+      cached_content_state_id = cached_workspace_state.try(:first)
-        if cached_data_tag && cached_content_state_id
-          if raw_workspace_data = fetch_cached_data_by_tag(cached_data_tag)
-            build_workspace_data(raw_workspace_data, cached_content_state_id)
-          end
+      if cached_data_tag && cached_content_state_id
+        if raw_workspace_data = fetch_cached_data_by_tag(cached_data_tag)
+          build_workspace_data(raw_workspace_data, cached_content_state_id)
         end
-      else
-        WorkspaceDataFromService.find_from_cache(id)
       end
     end
     def find_workspace_data_by_id(id, timeout=nil)
       options = timeout ? {timeout: timeout} : {}
-      if die_content_service
-        begin
-          cached_workspace_state = CmsDataCache.read_workspace_state(id)
+      cached_workspace_state = CmsDataCache.read_workspace_state(id)
-          cached_csid = cached_workspace_state.try(:first)
-          cached_workspace_data_tag = cached_workspace_state.try(:second)
+      cached_csid = cached_workspace_state.try(:first)
+      cached_workspace_data_tag = cached_workspace_state.try(:second)
-          changes = CmsRestApi.get("/workspaces/#{id}/changes", {from: cached_csid}, options)
+      changes = CmsRestApi.get("/workspaces/#{id}/changes", {from: cached_csid}, options)
-          update_obj_cache(id, cached_csid, changes)
+      update_obj_cache(id, cached_csid, changes)
-          raw_workspace_data, workspace_data_tag = update_workspace_cache(
-            id, cached_workspace_data_tag, changes["workspace"], options)
+      raw_workspace_data, workspace_data_tag = update_workspace_cache(
+        id, cached_workspace_data_tag, changes["workspace"], options)
-          current_csid = changes["current"]
-          current_workspace_state = [current_csid, workspace_data_tag]
+      current_csid = changes["current"]
+      current_workspace_state = [current_csid, workspace_data_tag]
-          if current_workspace_state != cached_workspace_state
-            CmsDataCache.write_workspace_state(id, current_workspace_state)
-          end
-
-          return build_workspace_data(raw_workspace_data, current_csid)
-
-        rescue Scrivito::ClientError => client_error
-          if client_error.http_code == 404
-            return nil
-          else
-            raise
-          end
-        end
+      if current_workspace_state != cached_workspace_state
+        CmsDataCache.write_workspace_state(id, current_workspace_state)
       end
-      workspace_data_from_cache = find_workspace_data_from_cache(id)
-      from_content_state_id = workspace_data_from_cache.try(:content_state_id)
-
-      request_params = {:workspace_id => id}
-      request_params[:content_state_id] = from_content_state_id if from_content_state_id
-
-      raw_data = ContentService.query('workspaces/query', request_params, options)
-
-      if raw_workspace_data = raw_data['workspace']
-        workspace_data = WorkspaceDataFromService.new(raw_workspace_data)
-        if from_content_state_id != workspace_data.content_state_id
-          workspace_data.store_in_cache_and_create_content_state
-        end
-        workspace_data
+      build_workspace_data(raw_workspace_data, current_csid)
+    rescue Scrivito::ClientError => client_error
+      if client_error.http_code == 404
+        nil
+      else
+        raise
       end
     end
     def find_obj_data_by(revision, index, keys)
       index = index.to_s
-      if die_content_service
-        obj_datas =
-          if index == "id"
-            obj_datas = Backend::ObjLoad.load(revision, keys)
-            obj_datas.map { |obj_data| obj_data ? [obj_data] : [] }
-          else
-            index_implementation = Backend::Index.by_name(index)
-            Backend::ObjQuery.query(revision, index_implementation, keys)
-          end
-
-        return obj_datas
+      if index == "id"
+        obj_datas = Backend::ObjLoad.load(revision, keys)
+        obj_datas.map { |obj_data| obj_data ? [obj_data] : [] }
+      else
+        index_implementation = Backend::Index.by_name(index)
+        Backend::ObjQuery.query(revision, index_implementation, keys)
       end
-
-      assert_valid_index_name(index)
-      raw_data = find_raw_data_from_cache_or_database_by(revision, index, keys)
-
-      raw_data.map do |raw_result|
-        raw_result.each_with_object([]) do |raw_data, result|
-          result << ObjDataFromService.new(raw_data)
-        end
-      end
     end
-    def find_blob_data(id, access, verb, transformation_definition)
-      if blob_data = find_blob_data_from_cache(id, access, verb, transformation_definition)
+    def find_blob_data(id, access, verb, options = {})
+      if blob_data = find_blob_data_from_cache(id, access, verb, options)
         blob_data
       else
         id = normalize_blob_id(id)
-        blob_datas = request_blob_datas_from_backend(id, transformation_definition)
-        store_blob_datas_in_cache(id, transformation_definition, blob_datas)
+        blob_datas = request_blob_datas_from_backend(id, options)
+        store_blob_datas_in_cache(id, options, blob_datas)
         blob_datas[access][verb]
       end
     end
-    def find_blob_data_from_cache(id, access, verb, transformation_definition)
-      cache_key = blob_data_cache_key(normalize_blob_id(id), access, verb, transformation_definition)
+    def find_blob_data_from_cache(id, access, verb, options)
+      cache_key = blob_data_cache_key(normalize_blob_id(id), access, verb, options)
       CmsDataCache.cache.read(cache_key)
     end
-    def find_blob_metadata(id, url)
-      if blob_metadata = fetch_blob_metadata_from_cache(id)
-        blob_metadata
-      else
-        blob_metadata = request_blob_metadata_from_s3(url)
-        store_blob_metadata_in_cache(id, blob_metadata)
-        blob_metadata
-      end
-    end
-
     def find_binary_meta_data(blob_id)
       blob_id = normalize_blob_id(blob_id)
       cache_key = "binary_meta_data/#{blob_id}"
       if meta_data = CmsDataCache.cache.read(cache_key)
         meta_data
@@ -222,34 +123,20 @@
     end
     def search_objs(workspace, params)
       cache_index = 'search'
       cache_key = params.to_param
+      cache = Backend::ObjDataCache.view_for_revision(workspace.revision)
-      if die_content_service
-        cache = Backend::ObjDataCache.view_for_revision(workspace.revision)
-
-        if hit = cache.read_index(cache_index, cache_key)
-          return hit
-        end
-
-        result = request_search_result_from_backend(workspace, params)
-
-        cache.write_index_not_updatable(cache_index, cache_key, result)
-
-        return result
+      if hit = cache.read_index(cache_index, cache_key)
+        return hit
       end
-      content_state = workspace.revision.content_state
-
-      if result = fetch_search_result_from_cache(content_state, cache_index, cache_key)
-        result
-      else
-        request_search_result_from_backend(workspace, params).tap do |result|
-          store_search_result_in_cache(content_state, cache_index, cache_key, result)
-        end
-      end
+      result = request_search_result_from_backend(workspace, params)
+      persistent = !result.delete('tentative')
+      cache.write_index_not_updatable(cache_index, cache_key, result, persistent: persistent)
+      result
     end
     def create_obj(workspace_id, attributes)
       write_obj(:post, "/workspaces/#{workspace_id}/objs", attributes)
     end
@@ -301,166 +188,54 @@
           cache.write_obj_tag(id, tag)
         end
       end
     end
-    def fetch_search_result_from_cache(content_state, cache_index, cache_key)
-      content_state.find_obj_data(cache_index, cache_key) if caching?
-    end
-
     def request_search_result_from_backend(workspace, params)
+      params = params.merge(consistent_with: workspace.content_state_id)
       CmsRestApi.get("workspaces/#{workspace.id}/objs/search", params)
     end
-    def store_search_result_in_cache(content_state, cache_index, cache_key, result)
-      content_state.save_obj_data(cache_index, cache_key, result) if caching?
-    end
-
-    def request_blob_datas_from_backend(id, transformation_definition)
+    def request_blob_datas_from_backend(id, options)
       @query_counter += 1
-      if transformation_definition
+      case
+      when transformation_definition = options[:transformation_definition]
         CmsRestApi.get("blobs/#{id}/transform", transformation: transformation_definition)
+      when options[:no_cache]
+        CmsRestApi.get("blobs/#{id}/no_cache")
       else
         CmsRestApi.get("blobs/#{id}")
       end
     end
-    def store_blob_datas_in_cache(id, transformation_definition, blob_datas)
+    def store_blob_datas_in_cache(id, options, blob_datas)
       %w[public_access private_access].each do |access|
         %w[get head].each do |verb|
-          blob_data = blob_datas[access][verb]
-          cache_key = blob_data_cache_key(id, access, verb, transformation_definition)
-          CmsDataCache.cache.write(cache_key, blob_data, blob_data['maxage'])
+          if access_blob_data = blob_datas[access]
+            if blob_data = access_blob_data[verb]
+              cache_key = blob_data_cache_key(id, access, verb, options)
+              CmsDataCache.cache.write(cache_key, blob_data, expires_in: blob_data['maxage'])
+            end
+          end
         end
       end
     end
-    def blob_data_cache_key(id, access, verb, transformation_definition)
+    def blob_data_cache_key(id, access, verb, options)
       cache_key = "blob_data/#{id}/#{access}/#{verb}"
-      cache_key << "/#{transformation_definition.to_query}" if transformation_definition
-      cache_key
-    end
-    def normalize_blob_id(id)
-      CmsRestApi.normalize_path_component(id)
-    end
-
-    def fetch_blob_metadata_from_cache(id)
-      CmsDataCache.cache.read(blob_metadata_cache_key(id))
-    end
-
-    def request_blob_metadata_from_s3(url)
-      uri = URI.parse(url)
-      retried = false
-      begin
-        response = ConnectionManager.request(uri, Net::HTTP::Head.new(uri))
-        @query_counter += 1
-      rescue NetworkError
-        raise if retried
-        retried = true
-        retry
+      if transformation_definition = options[:transformation_definition]
+        cache_key << "/#{transformation_definition.to_query}"
       end
-      raise ScrivitoError, "S3 responded with #{response.code}" unless response.code == '200'
-
-      {
-        content_length: response['content-length'],
-        content_type: response['content-type'],
-        cache_control: response['cache-control'],
-      }
-    end
-
-    def store_blob_metadata_in_cache(id, blob_metadata)
-      max_age = blob_metadata.delete(:cache_control) =~ /max-age=(.*),/ && $1
-      max_age = max_age.to_i if max_age
-      CmsDataCache.cache.write(blob_metadata_cache_key(id), blob_metadata, max_age)
-    end
-
-    def blob_metadata_cache_key(id)
-      "blob_metadata/#{id}"
-    end
-
-    def find_raw_data_from_cache_or_database_by(revision, index, keys)
-      keys_from_database = []
-      # load results from cache
-      results_from_cache = keys.map do |key|
-        find_raw_data_from_cache_by(revision, index, key).tap do |objs|
-          keys_from_database << key unless objs
-        end
+      if options[:no_cache]
+        cache_key << '/no_cache'
       end
-      # load cache misses from database and store them in cache
-      results_from_database =
-        find_raw_data_from_database_by(revision, index, keys_from_database)
-      keys_from_database.each_with_index do |key, key_number|
-        store_raw_data_list_in_cache(revision, index, key, results_from_database[key_number])
-      end
-
-      # combine the results
-      results_from_cache.map do |objs_from_cache|
-        objs_from_cache || results_from_database.shift
-      end
+      cache_key
     end
-    def find_raw_data_from_cache_by(revision, index, key)
-      ContentStateCaching.find_obj_data(revision.content_state, index, key) if caching?
+    def normalize_blob_id(id)
+      CmsRestApi.normalize_path_component(id)
     end
-
-    def find_raw_data_from_database_by(revision, index, keys)
-      return [] if keys.blank?
-      instrumenter = ActiveSupport::Notifications.instrumenter
-      instrumenter.instrument(
-        "cms_load.scrivito", :name => "Obj Load", :index => index, :keys => keys
-      ) do
-        @query_counter += 1
-        queries = ContentServiceObjQueries.new(keys.map {|key| {:type => index, :param => key} })
-        queries.handle_response(request_content_service(queries, revision)) until queries.finished?
-        queries.results
-      end
-    end
-
-    def request_content_service(queries, revision)
-      ContentService.query('objs/query', content_service_request_params(queries, revision))
-    end
-
-    def content_service_request_params(queries, revision)
-      params = {
-        queries: queries.open_queries,
-        revision_id: revision.id,
-        include_deleted: true
-      }
-
-      # A base revision doesn't have a directly corresponding workspace. Instead it uses its
-      # derivative workspace as fallback to access the contents. Thus fallback workspace of a base
-      # revision may not be used for backend requests.
-      params[:workspace_id] = revision.workspace.id unless revision.base?
-
-      params
-    end
-
-    UNIQUE_INDICES = [:id, :path, :permalink].freeze
-
-    def store_raw_data_list_in_cache(revision, index, key, raw_data_list)
-      raw_data_list.each do |values|
-        UNIQUE_INDICES.each do |unique_index|
-          unique_index_values = values["_#{unique_index}"]
-          if unique_index_values.present?
-            store_item_in_cache(revision, unique_index, unique_index_values.first, [values])
-          end
-        end
-      end
-      unless UNIQUE_INDICES.include?(index)
-        store_item_in_cache(revision, index, key, raw_data_list)
-      end
-    end
-
-    def store_item_in_cache(revision, index, key, item)
-      ContentStateCaching.store_obj_data(revision.content_state, index, key, item)
-    end
-
-    def assert_valid_index_name(index)
-      raise ArgumentError, "invalid index name '#{index}'" unless VALID_INDEX_NAMES.include?(index)
-    end
-
   end
-
 end