lib/infoboxer/media_wiki.rb in infoboxer-0.2.3 vs lib/infoboxer/media_wiki.rb in infoboxer-0.2.4
- old
+ new
@@ -58,18 +58,23 @@
# Receive "raw" data from Wikipedia (without parsing or wrapping in
# classes).
#
# Titles are queried in batches of 50 (the MediaWiki API per-request
# limit) and the combined result is re-sorted back into the order the
# titles were requested in.
#
# @param titles [Array<String>] page titles to fetch
# @return [Array<Hash>] raw page data, in the same order as +titles+;
#   pages that cannot be matched back to a requested title sort last
def raw(*titles)
  return [] if titles.empty? # could emerge on "automatically" created page lists, should work

  # Hoisted out of the sort block below: otherwise this array is
  # rebuilt for every page and every alt title during matching.
  downcased_titles = titles.map(&:downcase)

  titles.each_slice(50).map{|part|
    @client.query.
      titles(*part).
      prop(revisions: {prop: :content}, info: {prop: :url}).
      redirects(true). # FIXME: should be done transparently by MediaWiktory?
      perform.pages
  }.inject(:concat). # somehow flatten(1) fails!
    sort_by{|page|
      # Match the page back to a requested title case-insensitively
      # (redirects/normalization may change the title's casing).
      res_title = page.alt_titles.detect{|t| downcased_titles.include?(t.downcase)}
      # Index through the downcased list too — indexing the original
      # list was case-sensitive, so case-insensitive matches wrongly
      # fell through to the "sort last" sentinel.
      (res_title && downcased_titles.index(res_title.downcase)) || 1_000
    }
end
# Receive list of parsed MediaWiki pages for list of titles provided.
# All pages are received with single query to MediaWiki API.
#
@@ -121,10 +126,10 @@
# @param titles [Array<String>] page titles to fetch
# @return [Hash<String, Page>] maps each requested title to its parsed
#   page, or +nil+ when no fetched page matches the title (the match is
#   case-insensitive and considers all of a page's alternative titles,
#   so redirect/normalization renames are still found)
#
def get_h(*titles)
  pages = [*get(*titles)]
  # Pre-compute each page's downcased alt-title list once, instead of
  # re-downcasing all of them for every requested title.
  candidates = pages.map{|p| [p.source.alt_titles.map(&:downcase), p]}
  titles.map{|t|
    key = t.downcase
    found = candidates.detect{|alts, _page| alts.include?(key)}
    [t, found && found.last]
  }.to_h
end
# Receive list of parsed MediaWiki pages from specified category.
#