lib/tr8n/tokenized_label.rb in tr8n-3.0.5 vs lib/tr8n/tokenized_label.rb in tr8n-3.1.1
- old
+ new
@@ -19,123 +19,125 @@
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
-class Tr8n::TokenizedLabel
+module Tr8n
+ class TokenizedLabel
-  # constructs the label
- def initialize(label)
- @label = label
- end
+    # constructs the label
+ def initialize(label)
+ @label = label
+ end
- def label
- @label
- end
+ def label
+ @label
+ end
- # scans for all token types
- def data_tokens
- @data_tokens ||= Tr8n::Token.register_data_tokens(label)
- end
+ # scans for all token types
+ def data_tokens
+ @data_tokens ||= Tr8n::Token.register_data_tokens(label)
+ end
- def data_tokens?
- data_tokens.any?
- end
+ def data_tokens?
+ data_tokens.any?
+ end
- def decoration_tokens
- @decoration_tokens ||= Tr8n::Token.register_decoration_tokens(label)
- end
+ def decoration_tokens
+ @decoration_tokens ||= Tr8n::Token.register_decoration_tokens(label)
+ end
- def decoration_tokens?
- decoration_tokens.any?
- end
+ def decoration_tokens?
+ decoration_tokens.any?
+ end
- def tokens
- @tokens = data_tokens + decoration_tokens
- end
+ def tokens
+ @tokens = data_tokens + decoration_tokens
+ end
- def sanitized_tokens_hash
- @sanitized_tokens_hash ||= begin
- hash = {}
- tokens.each do |token|
- hash[token.sanitized_name] = token
+ def sanitized_tokens_hash
+ @sanitized_tokens_hash ||= begin
+ hash = {}
+ tokens.each do |token|
+ hash[token.sanitized_name] = token
+ end
+ hash
end
- hash
end
- end
- def tokens?
- tokens.any?
- end
+ def tokens?
+ tokens.any?
+ end
- # tokens that can be used by the user in translation
- def translation_tokens
- @translation_tokens ||= tokens.select{|token| token.allowed_in_translation?}
- end
+ # tokens that can be used by the user in translation
+ def translation_tokens
+ @translation_tokens ||= tokens.select{|token| token.allowed_in_translation?}
+ end
- def translation_tokens?
- translation_tokens.any?
- end
+ def translation_tokens?
+ translation_tokens.any?
+ end
- def sanitized_label
- @sanitized_label ||= begin
- lbl = label.clone
- data_tokens.each do |token|
- lbl = token.prepare_label_for_translator(lbl)
- end
- lbl
- end
- end
+ def sanitized_label
+ @sanitized_label ||= begin
+ lbl = label.clone
+ data_tokens.each do |token|
+ lbl = token.prepare_label_for_translator(lbl)
+ end
+ lbl
+ end
+ end
- def tokenless_label
- @tokenless_label ||= begin
- lbl = label.clone
- tokens.each_with_index do |token, index|
- lbl = token.prepare_label_for_suggestion(lbl, index)
+ def tokenless_label
+ @tokenless_label ||= begin
+ lbl = label.clone
+ tokens.each_with_index do |token, index|
+ lbl = token.prepare_label_for_suggestion(lbl, index)
+ end
+ lbl
end
- lbl
- end
- end
+ end
- def suggestion_tokens
- @suggestion_tokens ||= begin
- toks = []
- tokens.each do |token|
- if token.decoration?
- toks << token.name
- else
- toks << token.sanitized_name
+ def suggestion_tokens
+ @suggestion_tokens ||= begin
+ toks = []
+ tokens.each do |token|
+ if token.decoration?
+ toks << token.name
+ else
+ toks << token.sanitized_name
+ end
end
+ toks
end
- toks
- end
- end
+ end
- def words
- return [] if label.blank?
+ def words
+ return [] if label.blank?
- @words ||= begin
- clean_label = sanitized_label
- parts = []
- clean_label = clean_label.gsub(/[\,\.\;\!\-\:\'\"\[\]{}]/, "")
+ @words ||= begin
+ clean_label = sanitized_label
+ parts = []
+ clean_label = clean_label.gsub(/[\,\.\;\!\-\:\'\"\[\]{}]/, "")
- clean_label.split(" ").each do |w|
- parts << w.strip.capitalize if w.length > 3
+ clean_label.split(" ").each do |w|
+ parts << w.strip.capitalize if w.length > 3
+ end
+ parts
end
- parts
end
- end
- def sanitized_tokens_hash
- @sanitized_tokens_hash ||= begin
- hash = {}
- tokens.each do |token|
- hash[token.sanitized_name] = token
+ def sanitized_tokens_hash
+ @sanitized_tokens_hash ||= begin
+ hash = {}
+ tokens.each do |token|
+ hash[token.sanitized_name] = token
+ end
+ hash
end
- hash
end
- end
- def allowed_token?(token)
- not sanitized_tokens_hash[token.sanitized_name].nil?
+ def allowed_token?(token)
+ not sanitized_tokens_hash[token.sanitized_name].nil?
+ end
end
end
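
For reference, a minimal usage sketch of the class after the module nesting change. This is not part of the gem: the "{user}" data-token and "[bold: ...]" decoration-token syntax in the label string is an assumption about how Tr8n::Token registers tokens, and the require path assumes the gem's default entry point; only the method names come from the class shown above.

    # Hypothetical usage sketch -- label syntax and require path are assumptions.
    require "tr8n"

    tl = Tr8n::TokenizedLabel.new("{user} uploaded [bold: 5 new photos]")

    tl.data_tokens?        # true if "{user}" registers as a data token
    tl.decoration_tokens?  # true if "[bold: ...]" registers as a decoration token
    tl.sanitized_label     # label with each data token normalized for translators
    tl.tokenless_label     # label with every token replaced, for suggestion lookups
    tl.suggestion_tokens   # decoration tokens by name, data tokens by sanitized name
    tl.words               # capitalized words longer than 3 characters, punctuation stripped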