Diff of lib/vmpooler/api/helpers.rb between vmpooler 2.2.0 and vmpooler 2.3.0

- old (lines removed; as they appear in vmpooler 2.2.0)
+ new (lines added; as they appear in vmpooler 2.3.0)

@@ -145,16 +145,16 @@ end end def export_tags(backend, hostname, tags) tracer.in_span("Vmpooler::API::Helpers.#{__method__}") do - backend.pipelined do + backend.pipelined do |pipeline| tags.each_pair do |tag, value| next if value.nil? or value.empty? - backend.hset("vmpooler__vm__#{hostname}", "tag:#{tag}", value) - backend.hset("vmpooler__tag__#{Date.today}", "#{hostname}:#{tag}", value) + pipeline.hset("vmpooler__vm__#{hostname}", "tag:#{tag}", value) + pipeline.hset("vmpooler__tag__#{Date.today}", "#{hostname}:#{tag}", value) end end end end @@ -182,10 +182,12 @@ end def hostname_shorten(hostname, domain=nil) if domain && hostname =~ /^[\w-]+\.#{domain}$/ hostname = hostname[/[^.]+/] + elsif hostname =~ /^[\w-]+\..+$/ + hostname = hostname[/[^.]+/] end hostname end @@ -199,13 +201,13 @@ # returns an integer for the total count def get_total_across_pools_redis_scard(pools, key, backend) tracer.in_span("Vmpooler::API::Helpers.#{__method__}") do # using pipelined is much faster than querying each of the pools and adding them # as we get the result. - res = backend.pipelined do + res = backend.pipelined do |pipeline| pools.each do |pool| - backend.scard(key + pool['name']) + pipeline.scard(key + pool['name']) end end res.inject(0) { |m, x| m + x }.to_i end end @@ -215,13 +217,13 @@ def get_list_across_pools_redis_scard(pools, key, backend) tracer.in_span("Vmpooler::API::Helpers.#{__method__}") do # using pipelined is much faster than querying each of the pools and adding them # as we get the result. 
temp_hash = {} - res = backend.pipelined do + res = backend.pipelined do |pipeline| pools.each do |pool| - backend.scard(key + pool['name']) + pipeline.scard(key + pool['name']) end end pools.each_with_index do |pool, i| temp_hash[pool['name']] = res[i].to_i end @@ -234,12 +236,12 @@ def get_list_across_pools_redis_hget(pools, key, backend) tracer.in_span("Vmpooler::API::Helpers.#{__method__}") do # using pipelined is much faster than querying each of the pools and adding them # as we get the result. temp_hash = {} - res = backend.pipelined do + res = backend.pipelined do |pipeline| pools.each do |pool| - backend.hget(key, pool['name']) + pipeline.hget(key, pool['name']) end end pools.each_with_index do |pool, i| temp_hash[pool['name']] = res[i].to_s end