module Stanford::SearchworksHelper require_plugin_dependency 'vendor/plugins/blacklight/app/helpers/application_helper.rb' include Stanford::SolrHelper # for nearby on shelf lookups # def application_name # 'SearchWorks (SULAIR)' # end def vern_document_heading @document[Blacklight.config[:show][:vern_heading]] end def home_facet_field_names Blacklight.config[:home_facet][:solr] end def home_facet_field_labels Blacklight.config[:home_facet][:labels] end # overriding because we need to escape the '< Previous' at the linking level def link_to_previous_document(previous_document) return if previous_document == nil link_to_document previous_document, :label=>'« Previous', :counter => session[:search][:counter].to_i - 1 end # overriding because we need to escape the 'Next >' at the linking level def link_to_next_document(next_document) return if next_document == nil link_to_document next_document, :label=>'Next »', :counter => session[:search][:counter].to_i + 1 end # copies the current params (or whatever is passed in as the 3rd arg) # removes the field value from params[:f] # removes the field if there are no more values in params[:f][field] # removes additional params (page, id, etc..) def remove_query_params(value, source_params=params) p = source_params.dup.symbolize_keys! # need to dup the facet values too, # if the values aren't dup'd, then the values # from the session will get remove in the show view... p[:q] = p[:q].dup p.delete :page p.delete :id p.delete :total p.delete :counter p.delete :commit #return p unless p[field] p[:q] = p[:q].gsub(value,"").strip p.delete(:q) if p[:q].size == 0 p end # link_back_to_catalog(:label=>'Back to Search') # Create a link back to the index screen, keeping the user's facet, query and paging choices intact by using session. 
# NOTE(review): this region was damaged in transit — the HTML markup that the
# helpers below emit inside their string literals (presumably dt/dd/li-style
# tags; TODO confirm against version control) was stripped by an extraction
# tool and replaced with raw line breaks, so the multi-line string literals
# below no longer contain their original markup. The code is left byte-for-byte
# as found; restore the strings from VCS before making any behavioral change.
# Methods defined in this span: link_back_to_catalog, link_to_with_data,
# results_text, get_relevance_bar, get_data_with_label,
# get_data_with_label_from_marc, show_formats, link_to_data_with_label,
# get_toc, link_to_data_with_label_from_marc, link_to_contributor_from_marc,
# title_change_data_from_marc, get_subjects (stub), get_856 (stub),
# get_suppl_urls, get_vernacular, get_callnum, get_facet_tag_cloud,
# get_refine_facet, params_facet_has_value?, get_search_breadcrumb_terms,
# get_advanced_search_query_terms, get_advanced_search_filter_terms.
def link_back_to_catalog(opts={:label=>'Back to Search'}) query_params = session[:search].dup || {} query_params.delete :counter query_params.delete :total link_url = root_path(query_params) link_to opts[:label], link_url end # This is an updated +link_to+ that allows you to pass a +data+ hash along with the +html_options+ # which are then written to the generated form for non-GET requests. The key is the form element name # and the value is the value: # # link_to_with_data('Name', some_path(some_id), :method => :post, :html) def link_to_with_data(*args, &block) if block_given? options = args.first || {} html_options = args.second concat(link_to(capture(&block), options, html_options)) else name = args.first options = args.second || {} html_options = args.third url = url_for(options) if html_options html_options = html_options.stringify_keys href = html_options['href'] convert_options_to_javascript_with_data!(html_options, url) tag_options = tag_options(html_options) else tag_options = nil end href_attr = "href=\"#{url}\"" unless href "#{name || url}" end end # Generate the # - # of # results text def results_text(pp, p, result_num) if pp.nil? per_page = Blacklight.config[:index][:num_per_page].to_i else per_page = pp.to_i end if p.nil? start_num = 1 p = 1 else start_num = (p.to_i * per_page) - (per_page - 1) end if p == 1 and per_page < result_num end_num = per_page elsif ((per_page * p.to_i) > result_num) end_num = result_num else end_num = per_page * p.to_i end "#{start_num} - #{end_num} of " end # Genrate dt/dd with a relevance def get_relevance_bar(score,label) score_mod = (score * 9).round(2) if score_mod > 100 score_mod = 100 elsif score_mod == 0.0 score_mod = (score * 9).round(4) end text = "
#{label}
" text += "
" text += "
" text += "#{score_mod}%" text += "
" text += "
" text += "
" text += "
" end # Generate a dt/dd pair given a Solr field # If you provide a :default value in the opts hash, # then when the solr field is empty, the default value will be used. # If you don't provide a default value, this method will not generate html when the field is empty. def get_data_with_label(doc, label, field_string, opts={}) if opts[:default] && !doc[field_string] doc[field_string] = opts[:default] end if doc[field_string] field = doc[field_string] text = "
#{label}
" if field.is_a?(Array) field.each do |l| text += "#{h(l)}" if l != h(field.last) text += "
" end end else text += h(field) end #Does the field have a vernacular equivalent? if doc["vern_#{field_string}"] vern_field = doc["vern_#{field_string}"] text += "
" if vern_field.is_a?(Array) vern_field.each do |l| text += "#{h(l)}" if l != h(vern_field.last) text += "
" end end else text += h(vern_field) end end text += "
" text end end # generate an dt/dd pair given a marc field def get_data_with_label_from_marc(doc,label,field,sFields=[]) if doc.marc[field] text = "
#{label}
" doc.marc.find_all{|f| (field) === f.tag}.each do |l| if sFields.length > 0 l.each{|sl| sFields.include?(sl.code) ? text << "#{h(sl.value)} " : ""} else temp_text = "" # get_vern method should be here? In each loop below? After? l.each {|sl| ['w','0', '5', '6', '8'].include?(sl.code) ? nil : temp_text += "#{sl.value} "} vernacular = get_vernacular(doc,l) text += h(temp_text) end vernacular = get_vernacular(doc,l) text += "
#{vernacular}" unless vernacular.nil? text += "
" unless l == doc.marc.find_all{|f| (field) === f.tag}.last end text += "
" text else # The below if statement is attempting to find unmatched vernacular fields that match the supplied field string if doc.marc['880'] doc.marc.find_all{|f| ('880') === f.tag}.each do |l| if l['6'].split("-")[1].gsub("//r","") == "00" and l['6'].split("-")[0] == field text = "
#{label}
" l.each {|sl| ['w','0', '5', '6', '8'].include?(sl.code) ? nil : text += "#{sl.value} "} text += "
" end end text end end end # Generate a dt/dd pair with a comma separated list of formats given an array of format strings def show_formats(field) if field text = "
Format:
" field.each do |l| text += "" text += h(l) text += ", " unless l == field.last text += "" end text += "
" text end end # Generate a dt/dd pair with a link with a label given a field in the SolrDocument def link_to_data_with_label(doc,label,field_string,url) if doc[field_string] field = doc[field_string] text = "
#{label}
" if field.is_a?(Array) field.each do |l| text += link_to l, url.merge!(:q => "\"#{l}\"") if l != field.last text += "
" end end else text += link_to field, url.merge!(:q => "\"#{field}\"") end if doc["vern_#{field_string}"] vern_field = doc["vern_#{field_string}"] text += "
" if vern_field.is_a?(Array) vern_field.each do |l| text += link_to l, url.merge!(:q => "\"#{l}\"") if l != vern_field.last text += "
" end end else text += link_to vern_field, url.merge!(:q => "\"#{vern_field}\"") end end text += "
" text end end # Generate dt/dd pair with an unordered list from the table of contents (IE marc 505s) def get_toc(doc) if doc.marc['505'] text = "
Contents:
" doc.marc.find_all{|f| ('505') === f.tag}.each do |l| text << "" text << "" unless get_vernacular(doc,l).nil? end text << "
" else if doc.marc['880'] doc.marc.find_all{|f| ('880') === f.tag}.each do |l| if l['6'].split("-")[1].gsub("//r","") == "00" and l['6'].split("-")[0] == "505" text = "
Contents:
" end end text end end end # Generate dt/dd pair with a link with a label given a marc field def link_to_data_with_label_from_marc(doc,label,field,url,sFields=[]) if doc.marc[field] text = "
#{label}
" doc.marc.find_all{|f| (field) === f.tag}.each do |l| if sFields.length > 0 link_text = "" sFields.each do |sf| if l.find{|s| s.code == sf.to_s} link_text << "#{l.find{|s| s.code == sf.to_s}.value} " end end text += link_to link_text, url.merge!(:q => "\"#{link_text}\"") else link_text = '' l.each {|sl| ['w','0', '5', '6', '8'].include?(sl.code) ? nil : link_text += "#{sl.value} " unless (sl.code == 'a' and sl.value[0,1] == "%") } text += link_to link_text, url.merge!(:q => "\"#{link_text}\"") end vernacular = get_vernacular(doc,l) temp_vern = "\"#{vernacular}\"" text += "
#{link_to vernacular, url.merge!(:q => temp_vern)}" unless vernacular.nil? text += "
" unless l == doc.marc.find_all{|f| (field) === f.tag}.last end text += "
" else if doc.marc['880'] doc.marc.find_all{|f| ('880') === f.tag}.each do |l| if l['6'].split("-")[1].gsub("//r","") == "00" and l['6'].split("-")[0] == field text = "
#{label}
" link_text = '' l.each {|sl| ['w','0', '5', '6', '8'].include?(sl.code) ? nil : link_text += "#{sl.value} "} text += link_to link_text, url.merge!(:q => "\"#{link_text}\"") text += "
" end end text end end end # Generate dt/dd pair of contributors with translations def link_to_contributor_from_marc(doc) text = "
Contributor:
" ['700', '710', '711', '720'].each do |field| if doc.marc[field] doc.marc.find_all{|f| (field) === f.tag}.each do |l| link_text = '' relator_text = [] l.each {|sl| sl.code == '4' ? relator_text << " #{relator_terms[sl.value]}" : sl.code == '6' ? nil : link_text << "#{sl.value} "} text << link_to(link_text.strip, :q => "\"#{link_text}\"", :controller => 'catalog', :action => 'index', :qt => 'search_author' ) text << relator_text.join(", ") unless relator_text.empty? vernacular = get_vernacular(doc,l) temp_vern = "\"#{vernacular}\"" text << "
#{link_to vernacular, :q => temp_vern, :controller => 'catalog', :action => 'index', :qt => 'search_author'}" unless vernacular.nil? text << "
" end else if doc.marc['880'] doc.marc.find_all{|f| ('880') === f.tag}.each do |l| if l['6'].split("-")[1].gsub("//r","") == "00" and l['6'].split("-")[0] == field text = "
Contributor:
" link_text = '' relator_text = [] l.each {|sl| sl.code == '4' ? relator_text << " #{relator_terms[sl.value]}" : link_text << "#{sl.value} "} text << link_to(link_text.strip,:q => "\"#{link_text}\"", :action => 'index', :qt => 'author_search') text << relator_text.join(", ") unless relator_text.empty? end end end end end text << "
" text unless text == "
Contributor:
" end def title_change_data_from_marc(doc) if doc.marc['780'] or doc.marc['785'] text = "" if doc.marc['780'] doc.marc.find_all{|f| ('780') === f.tag}.each do |field| text << "
#{name_change_780_translations[field.indicator2]}:
" temp_text = "" field.each{|subfield| if subfield.code == "w" nil elsif subfield.code == "t" query = "\"#{subfield.value}\"" temp_text << "#{link_to(subfield.value, params.dup.merge!(:action=>'index', :qt=>'search_title', :q=>query))} " elsif subfield.code == "x" temp_text << "(#{link_to(subfield.value, params.dup.merge!(:action=>'index', :qt=>'search', :q=>subfield.value))}) " else temp_text << "#{subfield.value} " end } text << "
#{temp_text}
" end end if doc.marc['785'] special_handler = [] doc.marc.find_all{|f| ('785') === f.tag}.each do |field| if field.indicator2 == "7" special_handler << field end end doc.marc.find_all{|f| ('785') === f.tag}.each do |field| text << "
" if field.indicator2 == "7" and field == special_handler.first text << "Merged with:" elsif field.indicator2 == "7" and field == special_handler.last text << "to form:" elsif field.indicator2 == "7" and field != special_handler.first and field != special_handler.last text << "and with:" else text << "#{name_change_785_translations[field.indicator2]}:" end text << "
" temp_text = "" field.each{|subfield| if subfield.code == "w" nil elsif subfield.code == "t" query = "\"#{subfield.value}\"" temp_text << "#{link_to(subfield.value, params.dup.merge!(:action=>'index', :qt=>'search_title', :q=>query))} " elsif subfield.code == "x" temp_text << "(#{link_to(subfield.value, params.dup.merge!(:action=>'index', :qt=>'search', :q=>subfield.value))}) " else temp_text << "#{subfield.value} " end } text << "
#{temp_text}
" end end text end end # Generate hierarchical structure of subject headings from marc def get_subjects(doc) text = "" return text unless text == "" end # Generate unordered list of Online Access Links (IE marc 856s) def get_856(doc) if doc.marc['856'] text = '' int = 1 text += "" text end end def get_suppl_urls(doc) text = "" if doc['url_fulltext'] urls = doc['url_fulltext'] text << "
Online:
" #urls.each do |url| fixed_url = urls[0].gsub("^","").strip url_host = URI.parse(fixed_url).host text << "#{url_host}" if urls.length > 1 text << " + #{pluralize(urls.length - 1, 'more source')}" end #end text << "
" end text rescue URI::InvalidURIError return "" end def get_vernacular(doc,field) return_text = "" if field['6'] field_original = field.tag match_original = field['6'].split("-")[1] doc.marc.find_all{|f| ('880') === f.tag}.each do |l| if l['6'] field_880 = l['6'].split("-")[0] match_880 = l['6'].split("-")[1].gsub("//r","") if match_original == match_880 and field_original == field_880 return_text = "" l.each{ |sl| if !['w','0', '5', '6', '8'].include?(sl.code) return_text += "#{sl.value} " end } end end end end return nil if return_text.blank? return_text end def get_callnum(doc) test_hash = {} if doc['item_display'] doc['item_display'].each do |item| item_array = item.split(' -|- ') if test_hash.has_key?(item_array[1]) if test_hash[item_array[1]].has_key?(item_array[2]) if params[:action] == 'index' test_hash[item_array[1]][item_array[2]] << [item_array[3],item_array[0],item_array[6],item_array[4],item_array[7]] unless test_hash[item_array[1]][item_array[2]].flatten.include?(item_array[3]) else test_hash[item_array[1]][item_array[2]] << [item_array[3],item_array[0],item_array[6],item_array[4],item_array[7]] #|Commenting out so that multiple copies show up on record view| unless test_hash[item_array[1]][item_array[2]].flatten.include?(item_array[6]) end else test_hash[item_array[1]][item_array[2]] = [[item_array[3],item_array[0],item_array[6],item_array[4],item_array[7]]] end else test_hash[item_array[1]] = {item_array[2] => [[item_array[3],item_array[0],item_array[6],item_array[4],item_array[7]]]} end end end test_hash end def get_facet_tag_cloud(facet,response) text = "" display_facet = response.facets.detect {|f| f.name == facet } facet_arr = [] display_facet.items.each do |item| facet_arr << [item.hits,item.value] end facet_arr = facet_arr.sort_by {rand} text += "
" facet_arr.each do |l| if l[0] > 500000 #font_size = "3" font_size = "jumbo" elsif l[0] > 100000 #font_size = "2.2" font_size = "large" elsif l[0] > 75000 #font_size = "1.8" font_size = "medium" elsif l[0] > 50000 #font_size = "1.4" font_size = "small" else #font_size = "1" font_size = "tiny" end if facet == 'building_facet' and translate_lib.has_key?(l[1]) value = translate_lib[l[1]] else value = l[1] end text += " #{link_to h(value), add_facet_params(facet, l[1])} " end text += "
" end # given the solr field name of a *refinement* facet (e.g. lc_alpha_facet), # return a string containing appropriate html to display the given facet # heading and its values def get_refine_facet(solr_fname, response) text = "" display_facet = response.facets.detect {|f| f.name == solr_fname} if !display_facet.nil? && !display_facet.items.nil? && display_facet.items.length > 0 text = "
  • " text << "

    " + facet_field_labels[solr_fname] + "

    " text << " " text << "
  • " end # have facet to display text end # true or false, depending on whether the field and a value is in params[:f] def params_facet_has_value?(field) if params[:f] and params[:f][field] !params[:f][field].compact.empty? else false end end def get_search_breadcrumb_terms(q_param) if q_param.scan(/"([^"\r\n]*)"/) q_arr = [] old_q = q_param.dup q_param.scan(/"([^"\r\n]*)"/).each{|t| q_arr << "\"#{h(t)}\""} q_arr.each do |l| old_q.gsub!(l,'') end unless old_q.blank? old_q.split(' ').each {|q| q_arr << h(q) } end q_arr else q_arr = q_param.split(' ') end end def get_advanced_search_query_terms(params) # if using the standard query parser and have an actual query we need to modify the q param after the search results are requested to something more visually friendly if params[:qt] == "standard" and params[:q] != "collection:sirsi" str = [] fields = [] new_query = params[:q][1,params[:q].length-2] new_query.gsub!(") AND (", " -|- ") new_query.gsub!(") OR (", " -|- ") new_query.gsub!(/\^\d+ OR /, " -|- ") new_query.split(" -|- ").each do |query_string| fields << query_string.split(":")[0] query = query_string.split(":")[1][/\(.*\)/] Blacklight.config[:advanced].each do |key,value| if value.keys.collect{|x| x.to_s}.sort == fields.sort str << "#{key.to_s == "description_checked" ? "Description-TOC" : key.to_s.capitalize} = #{query[1,query.length][0,query.length-2]}" unless str.include?("#{key.to_s == "description_checked" ? "Description-TOC" : key.to_s.capitalize} = #{query[1,query.length][0,query.length-2]}") fields = [] end end end h str.join(" #{params[:op]} ") end end def get_advanced_search_filter_terms(params) # Modifying the fq param to be what the UI is expecting from the f param, then setting the f param to this modified hash # Note that the query to Solr has already been made, anything beyond this will just be modifying how the UI interperets the query unless params[:fq].to_s.empty? 
a_hash = {} fq_params = params[:fq].split("), ") fq_params.each do |fq_param| fq_fields = fq_param.split(":") fq_field = fq_fields[0] fq_values = fq_fields[1][1,fq_fields[1].length][0,fq_fields[1].length-2].split('" OR "') fq_values.each do |value| if a_hash.has_key?("#{fq_field}") a_hash["#{fq_field}"] << value.gsub('"',"") else a_hash["#{fq_field}"] = [value.gsub('"',"")] end end end a_hash end end def previous_search_is_referrer? # If the referrer params are empty and there is no search history return false (User went directly to the record w/o a search session) if referrer_params.empty? and url_back_to_catalog.empty? false # If the search history == the referrer params return true. elsif url_back_to_catalog == referrer_params true # If the referrer includes the base URL (ie, links on record view) then return true elsif request.referrer.include?(url_for({:controller => 'catalog', :only_path => false})) true else false end end def referrer_params request_params = {} if request.referrer.to_s.include?("&") or request.referrer.to_s.include?("?") request.referrer.to_s[/\/\?.*/].split("&").each do |paramater| unless paramater == "/?" key = CGI::unescape(paramater.split("=")[0].gsub(/\/\?/,"")).to_sym key.to_s[0,2] == "/?" ? key.to_s.gsub!("/?","").to_sym : "" value = paramater.split("=").length > 1 ? h(paramater.split("=")[1].gsub("+"," ")) : "" if request_params.has_key?(key) request_params[key] << CGI::unescape(value) else request_params[key] = CGI::unescape(value) end end end end request_params end # url back to catalog # show the url back to the search result, keeping the user's facet, query and paging choices intact by using session. 
# NOTE(review): this region was also damaged in transit — markup inside string
# literals and inside several comments was stripped and replaced with raw line
# breaks (visible in get_spines_from_field / get_spines_from_doc below, and in
# the translate_lib / relator_terms hash literals, which now contain embedded
# newlines). Some former single-line comments now spill onto continuation
# lines that are no longer valid Ruby. Left byte-for-byte as found; restore
# from VCS before editing. Methods in this span: url_back_to_catalog,
# get_nearby_items (stub), and the protected helpers get_spines_from_field,
# get_spines_from_doc, reverse_alphanum (note: uses the Ruby 1.8 `when x:`
# syntax), translate_lib, relator_terms, name_change_780_translations,
# name_change_785_translations.
# this is to match against the http.referrer def url_back_to_catalog query_params = session[:search].dup || {} query_params.delete :counter query_params.delete :total if query_params.has_key?(:f) query_params[:f].each do |key,val| query_params["f[#{key}][]".to_sym] = val.to_s end query_params.delete(:f) end query_params end # Generate display text for "nearby" selections according to call number def get_nearby_items(document, response, how_many) text = "" return text unless text == "" end protected # create an array of sorted html list items containing the appropriate display text # (analogous to what would be visible if you were looking at the spine of # a book on a shelf) from relevant solr docs, given a particular solr # field and value for which to retrieve spine info. # The shelf key in each html list item must match a desired shelf key in the # desired_shelfkeys array def get_spines_from_field(values, field) # FIXME: I think we want to deal with reversing and the like in the calling # method. This should get spines given a particular list of shelf keys # in each doc, we look for item display matches for shelfkeys, not reverse shelfkeys desired_shelfkeys = [] if (field == "callnum_reverse_sort") values.each { |rev_shelfkey| # turn it back into a shelfkey desired_shelfkeys << reverse_alphanum(rev_shelfkey) } else desired_shelfkeys = values end unsorted_result = [] docs = get_docs_for_field_values(values, field) docs.each do |doc| # FIXME!!! "desired_shelfkeys" is call numbers, but we have shelfkeys ... unsorted_result = unsorted_result | get_spines_from_doc(doc, desired_shelfkeys) end unsorted_result.uniq! # result is: title [(pub year)] [
    author]
    callnum # need to sort results by callnum asc, then by title asc, then by pub date desc sort_hash = {} unsorted_result.each_index { |i| line_array = unsorted_result[i].split("
    ") callnum = line_array.last # need to get rid of
  • and link stuff title_year = line_array.first.sub(/
  • .*/, '') title = title_year.sub(/\(\d.*\)/, '') year = title_year.sub(/.*\(/, '') sort_hash[i]= callnum + ' ' + title + ' ' + reverse_alphanum(year) } # sort by values, then order result (then lift and separate?) sorted_array = sort_hash.sort { |a,b| a[1] <=> b[1]} sorted_result = [] sorted_array.each_index { |i| # sort_array is array of [unsorted_result_ix, sort_val] sort_ix = sorted_array[i][0] sorted_result[i]= unsorted_result[sorted_array[i][0]] } sorted_result end # create an array of html list items containing the appropriate display text # (analogous to what would be visible if you were looking at the spine of # a book on a shelf) from a solr doc. # The shelf key in each html list item must match a desired shelf key in the # desired_shelfkeys array def get_spines_from_doc(doc, desired_shelfkeys, max_len=30) result = [] return if doc[:item_display].nil? doc[:item_display].each { |item_disp| callnum = item_disp.split(" -|- ")[3] # if desired_shelfkeys.include?(callnum) if true id = doc[:id] title = doc[:title_245a_display] author = case when doc[:author_person_display] : doc[:author_person_display] when doc[:author_corp_display] : doc[:author_corp_display] when doc[:author_meeting_display] : doc[:author_meeting_display] else nil end pub_year = doc[:pub_date] spine_text = "
  • " spine_text << link_to_document(doc, :label=>title[0,max_len]) spine_text << " (" + pub_year + ")" unless pub_year.nil? || pub_year.length == 0 spine_text << "
    " + author[0,max_len] unless author.nil? spine_text << "
    " + callnum spine_text << "
  • " result << spine_text unless result.include?(spine_text) end } return result end def reverse_alphanum(str) rev_str = String.new(str) last = str.length-1 for i in 0..last case rev_str[i,1] when '~': rev_str[i]= ' ' when '0': rev_str[i]= 'z' when '1': rev_str[i]= 'y' when '2': rev_str[i]= 'x' when '3': rev_str[i]= 'w' when '4': rev_str[i]= 'v' when '5': rev_str[i]= 'u' when '6': rev_str[i]= 't' when '7': rev_str[i]= 's' when '8': rev_str[i]= 'r' when '9': rev_str[i]= 'q' when 'a': rev_str[i]= 'p' when 'b': rev_str[i]= 'o' when 'c': rev_str[i]= 'n' when 'd': rev_str[i]= 'm' when 'e': rev_str[i]= 'l' when 'f': rev_str[i]= 'k' when 'g': rev_str[i]= 'j' when 'h': rev_str[i]= 'i' when 'i': rev_str[i]= 'h' when 'j': rev_str[i]= 'g' when 'k': rev_str[i]= 'f' when 'l': rev_str[i]= 'e' when 'm': rev_str[i]= 'd' when 'n': rev_str[i]= 'c' when 'o': rev_str[i]= 'b' when 'p': rev_str[i]= 'a' when 'q','Q': rev_str[i]= '9' when 'r','R': rev_str[i]= '8' when 's','S': rev_str[i]= '7' when 't','T': rev_str[i]= '6' when 'u','U': rev_str[i]= '5' when 'v','V': rev_str[i]= '4' when 'w','W': rev_str[i]= '3' when 'x','X': rev_str[i]= '2' when 'y','Y': rev_str[i]= '1' when 'z','Z': rev_str[i]= '0' when 'A': rev_str[i]= 'P' when 'B': rev_str[i]= 'O' when 'C': rev_str[i]= 'N' when 'D': rev_str[i]= 'M' when 'E': rev_str[i]= 'L' when 'F': rev_str[i]= 'K' when 'G': rev_str[i]= 'J' when 'H': rev_str[i]= 'I' when 'I': rev_str[i]= 'H' when 'J': rev_str[i]= 'G' when 'K': rev_str[i]= 'F' when 'L': rev_str[i]= 'E' when 'M': rev_str[i]= 'D' when 'N': rev_str[i]= 'C' when 'O': rev_str[i]= 'B' when 'P': rev_str[i]= 'A' end end rev_str end def translate_lib {"Archive of Recorded Sound" => "Archive of Recorded Sound", "Art & Architecture" => "Art", "Branner (Earth Sciences & Maps)" => "Earth Sciences", "Classics" => "Classics", "Cubberley (Education)" => "Education", "Crown (Law)" => "Law", "East Asia" => "East Asia", "Engineering" => "Engineering", "Falconer (Biology)" => "Biology", "Green (Humanities 
& Social Sciences)" => "Green Library", "Hoover Library" => "Hoover Library", "Hoover Archives" => "Hoover Archives", "Jackson (Business)" => "Business", "Jonsson (Government Documents)" => "GovDocs", "Lane (Medical)" => "Medicine", "Miller (Hopkins Marine Station)" => "Hopkins Marine", "Math & Computer Science" => "Math & CompSci", "Meyer" => "", "Music" => "Music", "SAL3 (Off-campus)" => "", "SAL Newark (Off-campus)" => "", "Physics" => "Physics", "Stanford Auxiliary Library (On-campus)" => "", "Special Collections & Archives" => "Special Collections", "Stanford University Libraries" => "", "Swain (Chemistry & Chem. Engineering)" => "Chemistry", "Tanner (Philosophy Dept.)" => "Philosophy", "Applied Physics Department" => ""} end def relator_terms {"acp" => "Art copyist", "act" => "Actor", "adp" => "Adapter", "aft" => "Author of afterword, colophon, etc.", "anl" => "Analyst", "anm" => "Animator", "ann" => "Annotator", "ant" => "Bibliographic antecedent", "app" => "Applicant", "aqt" => "Author in quotations or text abstracts", "arc" => "Architect", "ard" => "Artistic director ", "arr" => "Arranger", "art" => "Artist", "asg" => "Assignee", "asn" => "Associated name", "att" => "Attributed name", "auc" => "Auctioneer", "aud" => "Author of dialog", "aui" => "Author of introduction", "aus" => "Author of screenplay", "aut" => "Author", "bdd" => "Binding designer", "bjd" => "Bookjacket designer", "bkd" => "Book designer", "bkp" => "Book producer", "bnd" => "Binder", "bpd" => "Bookplate designer", "bsl" => "Bookseller", "ccp" => "Conceptor", "chr" => "Choreographer", "clb" => "Collaborator", "cli" => "Client", "cll" => "Calligrapher", "clt" => "Collotyper", "cmm" => "Commentator", "cmp" => "Composer", "cmt" => "Compositor", "cng" => "Cinematographer", "cnd" => "Conductor", "cns" => "Censor", "coe" => "Contestant -appellee", "col" => "Collector", "com" => "Compiler", "cos" => "Contestant", "cot" => "Contestant -appellant", "cov" => "Cover designer", "cpc" => "Copyright 
claimant", "cpe" => "Complainant-appellee", "cph" => "Copyright holder", "cpl" => "Complainant", "cpt" => "Complainant-appellant", "cre" => "Creator", "crp" => "Correspondent", "crr" => "Corrector", "csl" => "Consultant", "csp" => "Consultant to a project", "cst" => "Costume designer", "ctb" => "Contributor", "cte" => "Contestee-appellee", "ctg" => "Cartographer", "ctr" => "Contractor", "cts" => "Contestee", "ctt" => "Contestee-appellant", "cur" => "Curator", "cwt" => "Commentator for written text", "dfd" => "Defendant", "dfe" => "Defendant-appellee", "dft" => "Defendant-appellant", "dgg" => "Degree grantor", "dis" => "Dissertant", "dln" => "Delineator", "dnc" => "Dancer", "dnr" => "Donor", "dpc" => "Depicted", "dpt" => "Depositor", "drm" => "Draftsman", "drt" => "Director", "dsr" => "Designer", "dst" => "Distributor", "dtc" => "Data contributor ", "dte" => "Dedicatee", "dtm" => "Data manager ", "dto" => "Dedicator", "dub" => "Dubious author", "edt" => "Editor", "egr" => "Engraver", "elg" => "Electrician ", "elt" => "Electrotyper", "eng" => "Engineer", "etr" => "Etcher", "exp" => "Expert", "fac" => "Facsimilist", "fld" => "Field director ", "flm" => "Film editor", "fmo" => "Former owner", "fpy" => "First party", "fnd" => "Funder", "frg" => "Forger", "gis" => "Geographic information specialist ", "grt" => "Graphic technician", "hnr" => "Honoree", "hst" => "Host", "ill" => "Illustrator", "ilu" => "Illuminator", "ins" => "Inscriber", "inv" => "Inventor", "itr" => "Instrumentalist", "ive" => "Interviewee", "ivr" => "Interviewer", "lbr" => "Laboratory ", "lbt" => "Librettist", "ldr" => "Laboratory director ", "led" => "Lead", "lee" => "Libelee-appellee", "lel" => "Libelee", "len" => "Lender", "let" => "Libelee-appellant", "lgd" => "Lighting designer", "lie" => "Libelant-appellee", "lil" => "Libelant", "lit" => "Libelant-appellant", "lsa" => "Landscape architect", "lse" => "Licensee", "lso" => "Licensor", "ltg" => "Lithographer", "lyr" => "Lyricist", "mcp" => "Music 
copyist", "mfr" => "Manufacturer", "mdc" => "Metadata contact", "mod" => "Moderator", "mon" => "Monitor", "mrk" => "Markup editor", "msd" => "Musical director", "mte" => "Metal-engraver", "mus" => "Musician", "nrt" => "Narrator", "opn" => "Opponent", "org" => "Originator", "orm" => "Organizer of meeting", "oth" => "Other", "own" => "Owner", "pat" => "Patron", "pbd" => "Publishing director", "pbl" => "Publisher", "pdr" => "Project director", "pfr" => "Proofreader", "pht" => "Photographer", "plt" => "Platemaker", "pma" => "Permitting agency", "pmn" => "Production manager", "pop" => "Printer of plates", "ppm" => "Papermaker", "ppt" => "Puppeteer", "prc" => "Process contact", "prd" => "Production personnel", "prf" => "Performer", "prg" => "Programmer", "prm" => "Printmaker", "pro" => "Producer", "prt" => "Printer", "pta" => "Patent applicant", "pte" => "Plaintiff -appellee", "ptf" => "Plaintiff", "pth" => "Patent holder", "ptt" => "Plaintiff-appellant", "rbr" => "Rubricator", "rce" => "Recording engineer", "rcp" => "Recipient", "red" => "Redactor", "ren" => "Renderer", "res" => "Researcher", "rev" => "Reviewer", "rps" => "Repository", "rpt" => "Reporter", "rpy" => "Responsible party", "rse" => "Respondent-appellee", "rsg" => "Restager", "rsp" => "Respondent", "rst" => "Respondent-appellant", "rth" => "Research team head", "rtm" => "Research team member", "sad" => "Scientific advisor", "sce" => "Scenarist", "scl" => "Sculptor", "scr" => "Scribe", "sds" => "Sound designer", "sec" => "Secretary", "sgn" => "Signer", "sht" => "Supporting host", "sng" => "Singer", "spk" => "Speaker", "spn" => "Sponsor", "spy" => "Second party", "srv" => "Surveyor", "std" => "Set designer", "stl" => "Storyteller", "stm" => "Stage manager", "stn" => "Standards body", "str" => "Stereotyper", "tcd" => "Technical director", "tch" => "Teacher", "ths" => "Thesis advisor", "trc" => "Transcriber", "trl" => "Translator", "tyd" => "Type designer", "tyg" => "Typographer", "vdg" => "Videographer", "voc" 
=> "Vocalist", "wam" => "Writer of accompanying material", "wdc" => "Woodcutter", "wde" => "Wood -engraver", "wit" => "Witness"} end def name_change_780_translations {"0" => "Continues", "1" => "Continues in part", "2" => "Supersedes", "3" => "Supersedes in part", "4" => "Merged from", "5" => "Absorbed", "6" => "Absorbed in part", "7" => "Separated from"} end def name_change_785_translations {"0" => "Continued by", "1" => "Continued in part by", "2" => "Superseded by", "3" => "Superseded in part by", "4" => "Absorbed by", "5" => "Absorbed in part by", "6" => "Split into", "7" => "Merged with ... to form ...", "8" => "Changed back to"} end end