lib/qooxview/storages/csv.rb in qooxview-1.9.11 vs lib/qooxview/storages/csv.rb in qooxview-1.9.13.pre.10

- old
+ new

@@ -28,11 +28,11 @@
   end

   # Saves the data stored, optionally takes an index to say
   # which data needs to be saved
   def save(data)
-    #dputs_func
+    # dputs_func
     @add_only ? dputs(5) { "Not saving data for #{@name}" } :
       @mutex.synchronize {
         begin
           dputs(3) { "Saving data for #{@name} to #{@csv_dir} - #{@csv_file}" }
@@ -52,16 +52,20 @@
             FileUtils.cp @csv_file, backup
           end
           tmpfile = "#{@csv_file}_tmp"
           File.open(tmpfile, 'w') { |f|
-            data_each(data) { |d|
-              write_line(f, d)
-              if di = @entity.data_instances[d[@data_field_id]]
-                di.changed = false
-              end
-            }
+            if data.length == 0
+              f << '{}'
+            else
+              data_each(data) { |d|
+                write_line(f, d)
+                if di = @entity.data_instances[d[@data_field_id]]
+                  di.changed = false
+                end
+              }
+            end
           }
           FileUtils.mv tmpfile, @csv_file
           #%x[ sync ]
           dputs(3) { 'Delete oldest file(s)' }
           if (backups = Dir.glob("#{@csv_backup_file}.*").sort).size > @backup_count
@@ -113,19 +117,21 @@
   end

   # loads the data
   def load
     # Go and fetch eventual existing data from the file
+    # dputs_func
     dputs(3) { "Starting to load #{@csv_file}" }
     @mutex.synchronize {
       cleanup if Dir.glob("#{@csv_file}*").size > 1
       ["#{@csv_file}"].concat(Dir.glob("#{@csv_backup_file}*").sort.reverse).each { |file|
         next if (!File.exists?(file) || File.size(file) == 0)
         begin
           dputs(3) { "Loading file #{file}" }
           data = {}
           File.open(file, 'r').readlines().each { |l|
             dputs(5) { "Reading line #{l}" }
+            l == '{}' and next
             # Convert the keys in the lines back to Symbols
             data_parse = JSON.parse(l)
             data_csv = {}
             data_parse.each { |k, v|
               data_csv.merge!({k.to_sym => v})
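Taken together, the two substantive hunks change how an empty data set is persisted: save now writes the literal line '{}' instead of leaving a zero-byte file, and load skips that sentinel line instead of parsing it as a record. Presumably this keeps "no entries" distinguishable from "missing or truncated file", which the File.size(file) == 0 check in load would otherwise treat as a reason to fall back to an older backup. The following is a minimal standalone sketch of that sentinel scheme, not the gem's own code: write_entries, read_entries and the temp file are made-up names, and only the '{}' handling mirrors the diff above.

  require 'json'
  require 'tempfile'

  # Write one JSON object per line; an empty data set becomes the literal '{}'
  # so the file is never zero bytes.
  def write_entries(path, entries)
    File.open(path, 'w') do |f|
      if entries.empty?
        f << '{}'
      else
        entries.each { |e| f.puts(e.to_json) }
      end
    end
  end

  # Read the entries back, skipping missing or zero-byte files and the '{}' sentinel.
  def read_entries(path)
    return [] if !File.exist?(path) || File.size(path) == 0
    File.readlines(path).filter_map do |line|
      line = line.strip
      next if line == '{}'
      JSON.parse(line, symbolize_names: true)   # keys back to Symbols, as in load above
    end
  end

  tmp = Tempfile.new('entries')
  write_entries(tmp.path, [])
  p read_entries(tmp.path)                       # => [] -- the emptied state survives the round-trip
  write_entries(tmp.path, [{ id: 1, name: 'a' }])
  p read_entries(tmp.path)                       # one entry again, with Symbol keys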