lib/infoboxer/media_wiki.rb in infoboxer-0.3.1 vs lib/infoboxer/media_wiki.rb in infoboxer-0.3.2
- old
+ new
@@ -39,21 +39,24 @@
end
# @private
attr_reader :api_base_url, :traits
+ # @return [MediaWiktory::Wikipedia::Client]
+ attr_reader :api
+
# Creating new MediaWiki client. {Infoboxer.wiki} provides shortcut
# for it, as well as shortcuts for some well-known wikis, like
# {Infoboxer.wikipedia}.
#
# @param api_base_url [String] URL of `api.php` file in your MediaWiki
# installation. Typically, it's `<domain>/w/api.php`, but can vary
# in different wikis.
# @param user_agent [String] (also aliased as `:ua`) Custom User-Agent header.
def initialize(api_base_url, ua: nil, user_agent: ua)
@api_base_url = Addressable::URI.parse(api_base_url)
- @client = MediaWiktory::Wikipedia::Api.new(api_base_url, user_agent: user_agent(user_agent))
+ @api = MediaWiktory::Wikipedia::Api.new(api_base_url, user_agent: user_agent(user_agent))
@traits = Traits.get(@api_base_url.host, siteinfo)
end
# Receive "raw" data from Wikipedia (without parsing or wrapping in
# classes).
@@ -70,11 +73,11 @@
def raw(*titles, &processor)
# could emerge on "automatically" created page lists, should work
return {} if titles.empty?
titles.each_slice(50).map do |part|
- request = prepare_request(@client.query.titles(*part), &processor)
+ request = prepare_request(@api.query.titles(*part), &processor)
response = request.response
# If additional props are required, there may be additional pages, even despite each_slice(50)
response = response.continue while response.continue?
@@ -171,11 +174,11 @@
# @return [Tree::Nodes<Page>] array of parsed pages.
#
def category(title, limit: 'max', &processor)
title = normalize_category_title(title)
- list(@client.query.generator(:categorymembers).title(title), limit, &processor)
+ list(@api.query.generator(:categorymembers).title(title), limit, &processor)
end
# Receive list of parsed MediaWiki pages for provided search query.
# See [MediaWiki API docs](https://www.mediawiki.org/w/api.php?action=help&modules=query%2Bsearch)
# for details.
@@ -191,11 +194,11 @@
# while using it.
#
# @return [Tree::Nodes<Page>] array of parsed pages.
#
def search(query, limit: 'max', &processor)
- list(@client.query.generator(:search).search(query), limit, &processor)
+ list(@api.query.generator(:search).search(query), limit, &processor)
end
# Receive list of parsed MediaWiki pages with titles starting from prefix.
# See [MediaWiki API docs](https://www.mediawiki.org/w/api.php?action=help&modules=query%2Bprefixsearch)
# for details.
@@ -208,11 +211,11 @@
# while using it.
#
# @return [Tree::Nodes<Page>] array of parsed pages.
#
def prefixsearch(prefix, limit: 'max', &processor)
- list(@client.query.generator(:prefixsearch).search(prefix), limit, &processor)
+ list(@api.query.generator(:prefixsearch).search(prefix), limit, &processor)
end
# @return [String]
def inspect
"#<#{self.class}(#{@api_base_url.host})>"
@@ -258,10 +261,10 @@
def user_agent(custom)
custom || self.class.user_agent || UA
end
def siteinfo
- @siteinfo ||= @client.query.meta(:siteinfo).prop(:namespaces, :namespacealiases, :interwikimap).response.to_h
+ @siteinfo ||= @api.query.meta(:siteinfo).prop(:namespaces, :namespacealiases, :interwikimap).response.to_h
end
def interwikis(prefix)
@interwikis ||= Hash.new { |h, pre|
interwiki = siteinfo['interwikimap'].detect { |iw| iw['prefix'] == prefix } or