require 'cgi'
require 'uri' # URI.parse is used below; not guaranteed to be pulled in by 'cgi'
require 'ostruct'
require_relative 'hal_resource'
require_relative 'indifferent_hash'

module Finix
  # Walks a HAL-style paged collection. Pages are fetched lazily through
  # the hyperlinks (:self, :first, :next, :prev, :last) returned by the
  # API, and items are exposed through Enumerable.
  class Pagination
    include ::Enumerable
    include HalResource

    attr_accessor :resource_class
    attr_reader :attributes
    attr_reader :hyperlinks

    # @param args [Array] optional first element is an options Hash;
    #   :href seeds the :self hyperlink, :limit/:offset seed paging.
    def initialize(*args)
      opts = args.slice!(0) || {}
      href = opts.delete(:href)
      @hyperlinks = Finix::Utils.eval_class(self, IndifferentHash).new
      @hyperlinks[:self] = href
      @attributes = {}
      @resource_class = nil
      extract_opts opts
    end

    # Re-applies paging options in place; returns self for chaining.
    def init!(*args)
      opts = args.slice(0) || {}
      extract_opts opts
      self
    end

    # Yields every item across all pages, starting from the first page.
    # Relies on #fetch raising StopIteration (swallowed by Kernel#loop)
    # once there is no :next hyperlink.
    def each
      return enum_for :each unless block_given?
      fetch :first
      loop do
        items.each { |item| yield item }
        fetch :next
      end
    end

    # Item count as reported by the API (always refreshes first so the
    # count is current). Returns 0 when the API reports none.
    def count(*args)
      refresh # always refresh to get last items
      # BUGFIX: original passed the args Array as a single argument
      # (method_missing(:count, [])); splat it instead.
      cnt = block_given? ? super(*args) : send(:method_missing, :count, *args)
      cnt || 0
    end

    # Loads the page referenced by +scope+ (:next, :last, :first, :prev,
    # :self). When +scope+ is a Hash it is treated as extra query params
    # for the current page. Raises StopIteration when the requested
    # hyperlink is absent (this is how #each terminates).
    def fetch(scope = nil)
      opts = {}
      if scope.is_a? Hash
        opts = Finix::Utils.indifferent_read_access scope
        scope = nil
      end
      scope = :self if scope.nil?
      scope = scope.to_s.to_sym
      href = @hyperlinks[scope]
      unless href
        if scope == :first
          # rewind to offset 0 and strip any query string from :self
          @attributes['page']['offset'] = 0 unless @attributes['page']['offset'].nil?
          href = @hyperlinks[:self]
          href = href[/[^\?]+/]
        end
      end
      if href
        load_from href, opts
        return items
      end
      raise StopIteration
    end

    # Reloads the current page.
    # BUGFIX: original called `fetch self`, passing this Pagination object
    # as the scope; fetch would stringify it into a garbage symbol and
    # raise StopIteration. :self is the intended scope.
    def refresh
      fetch :self
    end
    alias load! refresh
    # NOTE: the original also had `alias retrieve fetch`, which was
    # immediately shadowed by this alias; only the live one is kept.
    alias retrieve refresh

    # POSTs +attrs+ to the collection's :self href and returns the saved
    # resource.
    def create(attrs = {})
      attrs = attrs.attributes if attrs.is_a?(Resource)
      attrs = Finix::Utils.indifferent_read_access attrs
      href = @hyperlinks[:self]
      @resource_class = Finix.from_hypermedia_registry href, attrs
      attrs[:href] = href
      @resource_class.new(attrs).save
    end

    def next_page
      fetch :next
      self
    end

    def previous_page
      fetch :prev
      self
    end

    def last_page
      fetch :last
      self
    end

    def first_page
      fetch :first
      self
    end

    # Total number of items, from the current page's metadata.
    def total
      # refresh unless loaded
      page.count
    end

    # Number of pages implied by +total+ and +limit+ (integer arithmetic,
    # rounded up).
    def num_pages
      num = total / limit
      num += 1 if (total % limit) > 0
      num
    end

    # True once at least one page has been loaded.
    def loaded
      !items.nil?
    end
    alias loaded? loaded

    private

    # def current_page
    #   (offset / limit) + 1
    # end

    # Normalizes paging options into @attributes['page']. When limit or
    # offset changed, resets @hyperlinks so stale page links (computed for
    # the old paging) are not followed.
    def extract_opts(opts = {})
      opts = Finix::Utils.indifferent_read_access opts
      limit = opts.delete('limit')
      offset = opts.delete('offset')
      @attributes['page'] ||= {}
      @attributes['page']['limit'] = limit unless limit.nil?
      @attributes['page']['offset'] = offset unless offset.nil?
      @attributes.merge! opts unless opts.empty?
      if !limit.nil? || !offset.nil?
        # paging changed: keep only :self and drop its query string
        @hyperlinks.reject! { |k, _v| k.to_s != 'self' }
        parsed_url = URI.parse(@hyperlinks[:self])
        parsed_url.query = nil
        @hyperlinks[:self] = parsed_url.to_s
      end
    end

    # GETs +url+ with the page attributes, the URL's own query params and
    # +opts+ merged together, then loads the response into this object.
    def load_from(url, opts = {})
      parsed_url = URI.parse(url)
      params = {}
      params.merge! @attributes['page'] if @attributes.key? 'page'
      params.merge! parse_query(parsed_url.query)
      parsed_url.query = nil # query string moved into params

      opts ||= {}
      page = opts.delete('page')
      unless page.nil?
        page = Finix::Utils.indifferent_read_access page
        params.merge! page
      end
      params.merge! opts unless opts.empty?
      params.delete('count') # remove count from previous query

      response = Finix.get parsed_url.to_s, params
      load_page_from_response! response
    end

    # Stolen from Mongrel, with some small modifications:
    # Parses a query string by breaking it up at the '&' and ';'
    # characters. You can also use this to parse cookies by changing the
    # characters used in the second parameter (which defaults to '&;').
    def parse_query(qs, d = nil)
      params = {}
      (qs || '').split(d ? /[#{d}] */n : /[&;] */n).each do |p|
        k, v = p.split('=', 2).map { |x| CGI.unescape(x) }
        if (cur = params[k])
          if cur.is_a?(Array)
            params[k] << v
          else
            params[k] = [cur, v]
          end
        else
          params[k] = v
        end
      end
      params
    end
  end
end