lib/onnxruntime/inference_session.rb in onnxruntime-0.4.0 vs lib/onnxruntime/inference_session.rb in onnxruntime-0.5.0
- old
+ new
@@ -122,11 +122,11 @@
      metadata = ::FFI::MemoryPointer.new(:pointer)
      check_status api[:SessionGetModelMetadata].call(read_pointer, metadata)
      custom_metadata_map = {}
-     check_status = api[:ModelMetadataGetCustomMetadataMapKeys].call(metadata.read_pointer, @allocator.read_pointer, keys, num_keys)
+     check_status api[:ModelMetadataGetCustomMetadataMapKeys].call(metadata.read_pointer, @allocator.read_pointer, keys, num_keys)
      num_keys.read(:int64_t).times do |i|
        key = keys.read_pointer[i * ::FFI::Pointer.size].read_pointer.read_string
        value = ::FFI::MemoryPointer.new(:string)
        check_status api[:ModelMetadataLookupCustomMetadataMap].call(metadata.read_pointer, @allocator.read_pointer, key, value)
        custom_metadata_map[key] = value.read_pointer.read_string
@@ -174,11 +174,11 @@
    private
    def create_input_tensor(input_feed)
      allocator_info = ::FFI::MemoryPointer.new(:pointer)
-     check_status = api[:CreateCpuMemoryInfo].call(1, 0, allocator_info)
+     check_status api[:CreateCpuMemoryInfo].call(1, 0, allocator_info)
      input_tensor = ::FFI::MemoryPointer.new(:pointer, input_feed.size)
      input_feed.each_with_index do |(input_name, input), idx|
        input = input.to_a unless input.is_a?(Array)
@@ -234,10 +234,10 @@
      ptr
    end
    def create_from_onnx_value(out_ptr)
      out_type = ::FFI::MemoryPointer.new(:int)
-     check_status = api[:GetValueType].call(out_ptr, out_type)
+     check_status api[:GetValueType].call(out_ptr, out_type)
      type = FFI::OnnxType[out_type.read_int]
      case type
      when :tensor
        typeinfo = ::FFI::MemoryPointer.new(:pointer)
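
Each hunk above makes the same fix: in 0.4.0 the form "check_status = api[...]" assigned the OrtStatus handle returned by the FFI call to a local variable, so the status was never inspected and a failing call went unnoticed; in 0.5.0 the handle is passed to the check_status helper, which can surface the failure. A minimal sketch of such a helper, assuming it raises when the status pointer is non-null and uses the OrtApi GetErrorMessage/ReleaseStatus entries (the actual method in inference_session.rb may differ):

    # Hedged sketch, not the gem's exact code: treat a non-null OrtStatus*
    # as an error, read its message, release the status, and raise.
    def check_status(status)
      unless status.null?
        message = api[:GetErrorMessage].call(status).read_string
        api[:ReleaseStatus].call(status)
        raise OnnxRuntime::Error, message # error class name is an assumption
      end
    end

With a helper along these lines, the 0.5.0 form turns a failing CreateCpuMemoryInfo, GetValueType, or metadata call into a raised exception rather than a silently discarded status pointer.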