Skip to content

Commit

Permalink
Merge pull request #1219 from Foodee/feature/single-mget-from-cache
Browse files Browse the repository at this point in the history
Improve caching performance
  • Loading branch information
lgebhardt authored Feb 22, 2019
2 parents d2db72b + fbb6d36 commit 036e1a4
Show file tree
Hide file tree
Showing 3 changed files with 186 additions and 97 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -21,3 +21,5 @@ coverage
test/log
test_db
test_db-journal
.idea
*.iml
115 changes: 66 additions & 49 deletions lib/jsonapi/cached_response_fragment.rb
Original file line number Diff line number Diff line change
@@ -1,22 +1,42 @@
module JSONAPI
class CachedResponseFragment
def self.fetch_cached_fragments(resource_klass, serializer_config_key, cache_ids, context)
context_json = resource_klass.attribute_caching_context(context).to_json
context_b64 = JSONAPI.configuration.resource_cache_digest_function.call(context_json)
context_key = "ATTR-CTX-#{context_b64.gsub("/", "_")}"

results = self.lookup(resource_klass, serializer_config_key, context, context_key, cache_ids)

if JSONAPI.configuration.resource_cache_usage_report_function
miss_ids = results.select{|_k,v| v.nil? }.keys
JSONAPI.configuration.resource_cache_usage_report_function.call(
resource_klass.name,
cache_ids.size - miss_ids.size,
miss_ids.size
)

# Describes one batched cache lookup: the resource class, the scoping keys
# (serializer config + attribute-caching context) and the [id, cache_key]
# pairs to fetch. (A stray diff-residue line previously sat inside this
# block and raised NameError at load time.)
Lookup = Struct.new(:resource_klass, :serializer_config_key, :context, :context_key, :cache_ids) do

  # JSON:API type of the resource class being looked up.
  def type
    resource_klass._type
  end

  # Full cache keys, one per [id, cache_key] pair in cache_ids.
  def keys
    cache_ids.map do |(id, cache_key)|
      [type, id, cache_key, serializer_config_key, context_key]
    end
  end
end

# Captures everything needed to serialize one resource and write it to the
# cache: the resource itself, its serializer, and the scoping keys.
Write = Struct.new(:resource_klass, :resource, :serializer, :serializer_config_key, :context, :context_key, :relationship_data) do
  # Serializes the resource and returns a [cache_key_tuple, fragment] pair
  # ready to be stored.
  def to_key_value
    id, cache_key = resource.cache_id
    serialized = serializer.object_hash(resource, relationship_data)

    fragment = CachedResponseFragment.new(
      resource_klass,
      id,
      serialized['type'],
      context,
      resource.fetchable_fields,
      serialized['relationships'],
      serialized['links'],
      serialized['attributes'],
      serialized['meta']
    )

    [[resource_klass._type, id, cache_key, serializer_config_key, context_key], fragment]
  end
end

attr_reader :resource_klass, :id, :type, :context, :fetchable_fields, :relationships,
Expand Down Expand Up @@ -50,26 +70,46 @@ def to_cache_value
}
end

private
# Performs one batched read (read_multi) of every cache key described by
# +lookups+ against the configured resource cache. (This span previously
# interleaved the removed single-class signature with the new batched one;
# reconstructed to the batched version.)
#
# @param [Lookup[]] lookups
# @return [Hash<Class<Resource>, Hash<ID, CachedResponseFragment>>] ids that
#   missed the cache map to nil
def self.lookup(lookups, context)
  # Map JSON:API type back to its resource class so results can be grouped.
  type_to_klass = lookups.map {|l| [l.type, l.resource_klass]}.to_h

  keys = lookups.map(&:keys).flatten(1)

  # Drop nil entries so has_key? below cleanly distinguishes hit from miss.
  hits = JSONAPI.configuration.resource_cache.read_multi(*keys).reject {|_, v| v.nil?}

  return keys.inject({}) do |hash, key|
    (type, id, _, _) = key
    resource_klass = type_to_klass[type]
    hash[resource_klass] ||= {}

    if hits.has_key?(key)
      hash[resource_klass][id] = self.from_cache_value(resource_klass, context, hits[key])
    else
      # Misses are recorded as nil so callers can fetch them from the store.
      hash[resource_klass][id] = nil
    end

    hash
  end
end

# Serializes each pending Write and stores the fragments in the configured
# resource cache, batching via write_multi when the store supports it.
# @param [Write[]] writes
def self.write(writes)
  payload = {}
  writes.each do |write|
    key, fragment = write.to_key_value
    payload[key] = fragment.to_cache_value
  end

  cache = JSONAPI.configuration.resource_cache
  if cache.respond_to?(:write_multi)
    cache.write_multi(payload)
  else
    # Older cache stores: fall back to one write per entry.
    payload.each { |key, value| cache.write(key, value) }
  end
end

def self.from_cache_value(resource_klass, context, h)
new(
resource_klass,
Expand All @@ -83,28 +123,5 @@ def self.from_cache_value(resource_klass, context, h)
h.fetch(:meta, nil)
)
end

# Serializes +resource+, builds its CachedResponseFragment, writes the
# fragment to the configured resource cache, and returns [id, fragment].
def self.write(resource_klass, resource, serializer, serializer_config_key, context, context_key, relationship_data)
  id, cache_key = resource.cache_id
  serialized = serializer.object_hash(resource, relationship_data)

  fragment = new(
    resource_klass,
    id,
    serialized['type'],
    context,
    resource.fetchable_fields,
    serialized['relationships'],
    serialized['links'],
    serialized['attributes'],
    serialized['meta']
  )

  cache_key_tuple = [resource_klass._type, id, cache_key, serializer_config_key, context_key]
  JSONAPI.configuration.resource_cache.write(cache_key_tuple, fragment.to_cache_value)

  [id, fragment]
end

end
end
166 changes: 118 additions & 48 deletions lib/jsonapi/resource_set.rb
Original file line number Diff line number Diff line change
Expand Up @@ -5,80 +5,150 @@ class ResourceSet

attr_reader :resource_klasses, :populated

# (Diff residue left two conflicting `def initialize` lines here;
# reconstructed to the nil-tolerant version.)
# @param resource_id_tree [ResourceIdTree, nil] pre-computed id tree; when
#   nil the set starts empty and resources are added via #register_resource.
def initialize(resource_id_tree = nil)
  @populated = false
  @resource_klasses = resource_id_tree.nil? ? {} : flatten_resource_id_tree(resource_id_tree)
end

# Loads every resource in the set: resource classes with caching enabled are
# served from the fragment cache where possible; everything else — and every
# cache miss — is fetched via find_by_keys, with newly fetched cacheable
# resources queued for a (batched) cache write. Returns self.
# (This span previously interleaved the removed per-class flow with the new
# batched five-step flow; reconstructed to the batched version.)
def populate!(serializer, context, find_options)
  # Ids that must be fetched from the datastore, keyed by resource class.
  # @type [Hash<Class<Resource>, Id[]]]
  missed_resource_ids = {}

  # Pending cache lookups.
  # @type [Lookup[]]
  lookups = []

  # Step One: build a cache Lookup per caching resource class; non-caching
  # classes go straight onto the missed list.
  @resource_klasses.each_key do |resource_klass|
    serializer_config_key = serializer.config_key(resource_klass).gsub("/", "_")
    context_json = resource_klass.attribute_caching_context(context).to_json
    context_b64 = JSONAPI.configuration.resource_cache_digest_function.call(context_json)
    context_key = "ATTR-CTX-#{context_b64.gsub("/", "_")}"

    if resource_klass.caching?
      cache_ids = @resource_klasses[resource_klass].map do |(k, v)|
        # Store the hashcode of the cache_field to avoid storing objects and to ensure precision isn't lost
        # on timestamp types (i.e. string conversions dropping milliseconds)
        [k, resource_klass.hash_cache_field(v[:cache_id])]
      end

      lookups.push(
        CachedResponseFragment::Lookup.new(
          resource_klass,
          serializer_config_key,
          context,
          context_key,
          cache_ids
        )
      )
    else
      missed_resource_ids[resource_klass] = @resource_klasses[resource_klass].keys
    end
  end

  # Step Two: execute the batched cache lookup.
  if lookups.any?
    raise "You've declared some Resources as caching without providing a caching store" if JSONAPI.configuration.resource_cache.nil?

    found_resources = CachedResponseFragment.lookup(lookups, context)
  else
    found_resources = {}
  end

  # Step Three: register cache hits, collect misses, and tally hit/miss stats.
  stats = {}
  found_resources.each do |resource_klass, resources|
    resources.each do |id, cached_resource|
      stats[resource_klass] ||= {}

      if cached_resource.nil?
        stats[resource_klass][:misses] ||= 0
        stats[resource_klass][:misses] += 1

        missed_resource_ids[resource_klass] ||= []
        missed_resource_ids[resource_klass].push(id)
      else
        stats[resource_klass][:hits] ||= 0
        stats[resource_klass][:hits] += 1

        register_resource(resource_klass, cached_resource)
      end
    end
  end

  report_stats(stats)

  writes = []

  # Step Four: fetch the missing resources and queue cache writes for the
  # cacheable ones.
  missed_resource_ids.each_pair do |resource_klass, ids|
    find_opts = {context: context, fields: find_options[:fields]}
    found = resource_klass.find_by_keys(ids, find_opts)

    found.each do |resource|
      relationship_data = @resource_klasses[resource_klass][resource.id][:relationships]

      if resource_klass.caching?
        serializer_config_key = serializer.config_key(resource_klass).gsub("/", "_")
        context_json = resource_klass.attribute_caching_context(context).to_json
        context_b64 = JSONAPI.configuration.resource_cache_digest_function.call(context_json)
        context_key = "ATTR-CTX-#{context_b64.gsub("/", "_")}"

        writes.push(CachedResponseFragment::Write.new(
          resource_klass,
          resource,
          serializer,
          serializer_config_key,
          context,
          context_key,
          relationship_data
        ))
      end

      register_resource(resource_klass, resource)
    end
  end

  # Step Five: persist the queued fragments when a cache store is configured.
  CachedResponseFragment.write(writes) unless JSONAPI.configuration.resource_cache.nil?

  mark_populated!
  self
end

# Flags the set as fully loaded; the `populated` reader returns true afterwards.
def mark_populated!
  @populated = true
end

# Records +resource+ in the flattened klass => id => data index, creating the
# entry (with its primary flag and empty relationships) on first sight and
# always refreshing the stored resource instance.
def register_resource(resource_klass, resource, primary = false)
  entries = (@resource_klasses[resource_klass] ||= {})
  entry = (entries[resource.id] ||= {primary: resource.try(:primary) || primary, relationships: {}})
  entry[:resource] = resource
end

private

# Reports per-resource-class cache hit/miss counts through the configured
# resource_cache_usage_report_function.
# @param stats [Hash] resource_klass => {hits: Integer, misses: Integer}
def report_stats(stats)
  # NOTE(review): when the report function is nil but resource_cache is also
  # nil, this guard falls through to the loop; that is safe only if stats is
  # always empty when no cache is configured — confirm that invariant holds.
  return unless JSONAPI.configuration.resource_cache_usage_report_function || JSONAPI.configuration.resource_cache.nil?

  stats.each_pair do |resource_klass, stat|
    JSONAPI.configuration.resource_cache_usage_report_function.call(
      resource_klass.name,
      stat[:hits] || 0,
      stat[:misses] || 0
    )
  end
end

def flatten_resource_id_tree(resource_id_tree, flattened_tree = {})
resource_id_tree.fragments.each_pair do |resource_rid, fragment|

Expand All @@ -87,7 +157,7 @@ def flatten_resource_id_tree(resource_id_tree, flattened_tree = {})

flattened_tree[resource_klass] ||= {}

flattened_tree[resource_klass][id] ||= { primary: fragment.primary, relationships: {} }
flattened_tree[resource_klass][id] ||= {primary: fragment.primary, relationships: {}}
flattened_tree[resource_klass][id][:cache_id] ||= fragment.cache

fragment.related.try(:each_pair) do |relationship_name, related_rids|
Expand All @@ -104,4 +174,4 @@ def flatten_resource_id_tree(resource_id_tree, flattened_tree = {})
flattened_tree
end
end
end
end

0 comments on commit 036e1a4

Please sign in to comment.