public class onnx extends onnx
Nested classes: onnx.BoolIntFn, onnx.BoolIntIntFn, onnx.InferenceFunction, onnx.IntIntFn, onnx.PairBoolIntIntFn, onnx.VoidOpSchemaFn

| Modifier and Type | Field and Description |
|---|---|
static int |
_START_VERSION
enum onnx::Version
|
static int |
AttributeProto_AttributeType_FLOAT
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_FLOATS
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_GRAPH
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_GRAPHS
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_INT
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_INTS
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_SPARSE_TENSOR
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_SPARSE_TENSORS
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_STRING
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_STRINGS
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_TENSOR
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_TENSORS
enum onnx::AttributeProto_AttributeType
|
static int |
AttributeProto_AttributeType_UNDEFINED
enum onnx::AttributeProto_AttributeType
|
static int |
CHECKER
enum onnx::Common::StatusCategory
|
static int |
Complete
enum onnx::optimization::PassEfficiency
|
static int |
Compute
enum onnx::optimization::PassOptimizationType
|
static int |
ComputeMemory
enum onnx::optimization::PassOptimizationType
|
static int |
CountBased
enum onnx::optimization::PassAnalysisType
|
static int |
DestroyOne
enum onnx::optimization::NodeDestroyType
|
static int |
DestroyTwo
enum onnx::optimization::NodeDestroyType
|
static int |
DestroyZero
enum onnx::optimization::NodeDestroyType
|
static int |
Empty
enum onnx::optimization::PassAnalysisType
|
static int |
EXPERIMENTAL
enum onnx::OperatorStatus
|
static int |
f
enum class onnx::AttributeKind
|
static int |
FAIL
enum onnx::Common::StatusCode
|
static int |
fs
enum class onnx::AttributeKind
|
static int |
Fuse
enum onnx::optimization::PassType
|
static int |
g
enum class onnx::AttributeKind
|
static int |
gs
enum class onnx::AttributeKind
|
static int |
i
enum class onnx::AttributeKind
|
static int |
Immutable
enum onnx::optimization::PassType
|
static int |
INVALID_ARGUMENT
enum onnx::Common::StatusCode
|
static int |
INVALID_PROTOBUF
enum onnx::Common::StatusCode
|
static int |
IR_VERSION
enum onnx::Version
|
static int |
IR_VERSION_2017_10_10
enum onnx::Version
|
static int |
IR_VERSION_2017_10_30
enum onnx::Version
|
static int |
IR_VERSION_2017_11_3
enum onnx::Version
|
static int |
IR_VERSION_2019_1_22
enum onnx::Version
|
static int |
IR_VERSION_2019_3_18
enum onnx::Version
|
static int |
is
enum class onnx::AttributeKind
|
static int |
Memory
enum onnx::optimization::PassOptimizationType
|
static int |
None
enum onnx::optimization::PassOptimizationType
|
static int |
NONE
enum onnx::Common::StatusCategory
|
static int |
Nop
enum onnx::optimization::PassType
|
static int |
OK
enum onnx::Common::StatusCode
|
static int |
ONNXIFI_BACKEND_CAPABILITIES
Optional features supported by the backend.
|
static int |
ONNXIFI_BACKEND_CPU_MEMORY_READ_BANDWIDTH
Bandwidth, in bytes per second, of transferring data from cacheable
CPU-allocated memory to the backend device.
|
static int |
ONNXIFI_BACKEND_CPU_MEMORY_WRITE_BANDWIDTH
Bandwidth, in bytes per second, of transferring data to cacheable
CPU-allocated memory from the backend device.
|
static int |
ONNXIFI_BACKEND_CUDA_INDEX
CUDA index of the backend device.
|
static int |
ONNXIFI_BACKEND_CUDA_STREAM
CUDA stream to be used by the backend.
|
static int |
ONNXIFI_BACKEND_DEVICE
Descriptive name of the device (i.e. CPU, GPU, DSP, or NPU model).
|
static int |
ONNXIFI_BACKEND_DEVICE_TYPE
Type of the device.
|
static int |
ONNXIFI_BACKEND_DIRECTX_ID
DirectX ID of the backend device.
|
static int |
ONNXIFI_BACKEND_EXTENSIONS
Space-separated list of vendor- or device-specific extensions supported on
this backend.
|
static int |
ONNXIFI_BACKEND_GRAPH_INIT_PROPERTIES
Auxiliary initialization properties supported by graphs on the backend.
|
static int |
ONNXIFI_BACKEND_INIT_PROPERTIES
Auxiliary initialization properties supported by the backend.
|
static int |
ONNXIFI_BACKEND_MACS_FP16
Number of FP16 multiply-accumulate operations per second delivered by the
backend.
|
static int |
ONNXIFI_BACKEND_MACS_FP32
Number of FP32 multiply-accumulate operations per second delivered by the
backend.
|
static int |
ONNXIFI_BACKEND_MAX_GRAPH_COUNT
Maximum number of independent network graphs supported by the backend.
|
static int |
ONNXIFI_BACKEND_MAX_GRAPH_SIZE
Maximum size of network parameters, in bytes.
|
static int |
ONNXIFI_BACKEND_MEMORY_BANDWIDTH
Bandwidth, in bytes per second, of the global memory specific to the backend
device.
|
static int |
ONNXIFI_BACKEND_MEMORY_SIZE
Maximum amount of memory, in bytes, available for use by the backend.
|
static int |
ONNXIFI_BACKEND_MEMORY_TYPES
Memory types supported for graph inputs and outputs.
|
static int |
ONNXIFI_BACKEND_NAME
Marketing name of the backend (excluding the vendor name).
|
static int |
ONNXIFI_BACKEND_ONNX_IR_VERSION
List of supported ONNX IR versions.
|
static int |
ONNXIFI_BACKEND_ONNXIFI_VERSION
Major and minor version of ONNXIFI specification implemented by the backend.
|
static int |
ONNXIFI_BACKEND_OPENCL_CONTEXT
OpenCL context to be used by the backend.
|
static int |
ONNXIFI_BACKEND_OPENCL_DEVICE_ID
OpenCL device ID for the backend device.
|
static int |
ONNXIFI_BACKEND_OPENCL_PLATFORM_ID
OpenCL platform ID for the backend device.
|
static int |
ONNXIFI_BACKEND_OPSET_VERSION
List of supported operator set domains and maximum supported operator set
version for each domain.
|
static int |
ONNXIFI_BACKEND_PCI_BUS_ID
PCI bus ID of the backend device.
|
static int |
ONNXIFI_BACKEND_PCI_DEVICE_ID
PCI device ID of the backend device.
|
static int |
ONNXIFI_BACKEND_PCI_DOMAIN_ID
PCI domain/function ID of the backend device.
|
static int |
ONNXIFI_BACKEND_PROPERTY_LOG_LEVEL
Logging verbosity level for the backend.
|
static int |
ONNXIFI_BACKEND_PROPERTY_NONE
Terminates the list of auxiliary backend initialization properties passed to
onnxInitBackend.
|
static int |
ONNXIFI_BACKEND_PROPERTY_OPTIMIZATION
Optimization target for graphs initialized on the backend.
|
static int |
ONNXIFI_BACKEND_SYNCHRONIZATION_TYPES
Memory synchronization primitives supported for graph inputs and outputs.
|
static int |
ONNXIFI_BACKEND_VENDOR
Name of the backend vendor.
|
static int |
ONNXIFI_BACKEND_VERSION
Version of the backend software.
|
static int |
ONNXIFI_CAPABILITY_HOT_PLUGGABLE
The backend uses a hot-pluggable device, and can be disconnected at any time.
|
static int |
ONNXIFI_CAPABILITY_SYMBOLIC_BATCH_SIZE
The backend supports ONNX graphs with symbolic variables in the outer
shape dimension (batch size), using TensorShapeProto.dim_param for
ModelProto.graph.input.type.shape or ModelProto.graph.output.type.shape.
|
static int |
ONNXIFI_CAPABILITY_SYMBOLIC_SIZE_TENSORS
The backend supports ONNX graphs with symbolic variables in all
shape dimensions, using TensorShapeProto.dim_param for
ModelProto.graph.input.type.shape or ModelProto.graph.output.type.shape.
|
static int |
ONNXIFI_CAPABILITY_THREAD_SAFE
The backend supports multi-threaded access to ONNXIFI backend, graph, and
event objects.
|
static int |
ONNXIFI_CAPABILITY_VARIABLE_BATCH_SIZE
The backend supports ONNX graphs with data-dependent outer shape dimension
(batch size) of graph outputs.
|
static int |
ONNXIFI_CAPABILITY_VARIABLE_SIZE_OUTPUTS
The backend supports ONNX graphs with data-dependent output shapes.
|
static int |
ONNXIFI_DATATYPE_BFLOAT16 |
static int |
ONNXIFI_DATATYPE_COMPLEX128 |
static int |
ONNXIFI_DATATYPE_COMPLEX64 |
static int |
ONNXIFI_DATATYPE_FLOAT16 |
static int |
ONNXIFI_DATATYPE_FLOAT32 |
static int |
ONNXIFI_DATATYPE_FLOAT64 |
static int |
ONNXIFI_DATATYPE_INT16 |
static int |
ONNXIFI_DATATYPE_INT32 |
static int |
ONNXIFI_DATATYPE_INT64 |
static int |
ONNXIFI_DATATYPE_INT8 |
static int |
ONNXIFI_DATATYPE_UINT16 |
static int |
ONNXIFI_DATATYPE_UINT32 |
static int |
ONNXIFI_DATATYPE_UINT64 |
static int |
ONNXIFI_DATATYPE_UINT8 |
static int |
ONNXIFI_DATATYPE_UNDEFINED |
static int |
ONNXIFI_DEVICE_TYPE_CPU
General-purpose central processor
|
static int |
ONNXIFI_DEVICE_TYPE_DSP
Digital signal processor
|
static int |
ONNXIFI_DEVICE_TYPE_FPGA
Field-programmable gate array
|
static int |
ONNXIFI_DEVICE_TYPE_GPU
Graphics accelerator
|
static int |
ONNXIFI_DEVICE_TYPE_HETEROGENEOUS
Heterogeneous backend which internally arbitrates or distributes work between
multiple device types.
|
static int |
ONNXIFI_DEVICE_TYPE_NPU
Special-purpose accelerator for neural network
|
static int |
ONNXIFI_EVENT_STATE_INVALID
State for an invalid onnxEvent.
|
static int |
ONNXIFI_EVENT_STATE_NONSIGNALLED
Non-signalled onnxEvent state.
|
static int |
ONNXIFI_EVENT_STATE_SIGNALLED
Signalled onnxEvent state.
|
static int |
ONNXIFI_GRAPH_PROPERTY_NONE
Terminates the list of auxiliary graph initialization properties passed to
onnxInitGraph.
|
static int |
ONNXIFI_H |
static int |
ONNXIFI_LOG_LEVEL_DEBUG
Log events in ONNXIFI_LOG_LEVEL_INFO and detailed status information about
operations of a backend.
|
static int |
ONNXIFI_LOG_LEVEL_ERROR
Log events which caused a failure in an ONNXIFI function call.
|
static int |
ONNXIFI_LOG_LEVEL_INFO
Log events in ONNXIFI_LOG_LEVEL_WARNING and high-level status information
about operation of a backend.
|
static int |
ONNXIFI_LOG_LEVEL_WARNING
Log events in ONNXIFI_LOG_LEVEL_ERROR and events which caused
a performance, accuracy, or quality of service degradation in a backend.
|
static int |
ONNXIFI_MEMORY_TYPE_CPU
Cacheable CPU memory
|
static int |
ONNXIFI_MEMORY_TYPE_CUDA_BUFFER
CUDA memory buffer (allocated via cudaMalloc/cuMalloc).
|
static int |
ONNXIFI_MEMORY_TYPE_D3D_RESOURCE
Direct3D resource.
|
static int |
ONNXIFI_MEMORY_TYPE_OPENCL_BUFFER
OpenCL cl_mem object for a buffer or sub-buffer.
|
static int |
ONNXIFI_MEMORY_TYPE_OPENGLES_TEXTURE_2D
OpenGL ES 2.0+ 2D Texture.
|
static int |
ONNXIFI_OPTIMIZATION_HIGH_THROUGHPUT
Optimize graph representation and compilation for highest throughput.
|
static int |
ONNXIFI_OPTIMIZATION_LOW_DELAY
Optimize graph representation and compilation for lowest delay until first
result.
|
static int |
ONNXIFI_OPTIMIZATION_LOW_LATENCY
Optimize graph representation and compilation for lowest latency.
|
static int |
ONNXIFI_OPTIMIZATION_LOW_POWER
Optimize graph representation and compilation for lowest power consumption.
|
static int |
ONNXIFI_STATUS_BACKEND_UNAVAILABLE |
static int |
ONNXIFI_STATUS_FALLBACK |
static int |
ONNXIFI_STATUS_INTERNAL_ERROR |
static int |
ONNXIFI_STATUS_INVALID_BACKEND |
static int |
ONNXIFI_STATUS_INVALID_DATATYPE |
static int |
ONNXIFI_STATUS_INVALID_EVENT |
static int |
ONNXIFI_STATUS_INVALID_FENCE_TYPE |
static int |
ONNXIFI_STATUS_INVALID_GRAPH |
static int |
ONNXIFI_STATUS_INVALID_ID |
static int |
ONNXIFI_STATUS_INVALID_MEMORY_LOCATION |
static int |
ONNXIFI_STATUS_INVALID_MEMORY_TYPE |
static int |
ONNXIFI_STATUS_INVALID_MODEL |
static int |
ONNXIFI_STATUS_INVALID_NAME |
static int |
ONNXIFI_STATUS_INVALID_POINTER |
static int |
ONNXIFI_STATUS_INVALID_PROPERTY |
static int |
ONNXIFI_STATUS_INVALID_PROTOBUF |
static int |
ONNXIFI_STATUS_INVALID_SHAPE |
static int |
ONNXIFI_STATUS_INVALID_SIZE |
static int |
ONNXIFI_STATUS_INVALID_STATE |
static int |
ONNXIFI_STATUS_MISMATCHING_DATATYPE |
static int |
ONNXIFI_STATUS_MISMATCHING_SHAPE |
static int |
ONNXIFI_STATUS_NO_DEVICE_MEMORY |
static int |
ONNXIFI_STATUS_NO_DEVICE_RESOURCES |
static int |
ONNXIFI_STATUS_NO_SYSTEM_MEMORY |
static int |
ONNXIFI_STATUS_NO_SYSTEM_RESOURCES |
static int |
ONNXIFI_STATUS_SUCCESS
The function call succeeded.
|
static int |
ONNXIFI_STATUS_UNIDENTIFIED_NAME |
static int |
ONNXIFI_STATUS_UNSUPPORTED_ATTRIBUTE |
static int |
ONNXIFI_STATUS_UNSUPPORTED_DATATYPE |
static int |
ONNXIFI_STATUS_UNSUPPORTED_FENCE_TYPE |
static int |
ONNXIFI_STATUS_UNSUPPORTED_MEMORY_TYPE |
static int |
ONNXIFI_STATUS_UNSUPPORTED_OPERATOR |
static int |
ONNXIFI_STATUS_UNSUPPORTED_PROPERTY |
static int |
ONNXIFI_STATUS_UNSUPPORTED_SHAPE |
static int |
ONNXIFI_STATUS_UNSUPPORTED_TAG |
static int |
ONNXIFI_STATUS_UNSUPPORTED_VERSION |
static int |
ONNXIFI_SYNCHRONIZATION_EVENT
Synchronization using ONNXIFI event object (onnxEvent).
|
static int |
ONNXIFI_SYNCHRONIZATION_IMPLICIT
Implicit synchronization of inputs and outputs access with the caller.
|
static int |
ONNXIFI_TAG_MEMORY_FENCE_V1
Tag for version 1 of memory fence structure (onnxMemoryFenceV1).
|
static int |
ONNXIFI_TAG_TENSOR_DESCRIPTOR_V1
Tag for version 1 of tensor descriptor structure (onnxTensorDescriptorV1).
|
static int |
OPTIMIZER
enum onnx::Common::StatusCategory
|
static int |
Other
enum onnx::optimization::PassType
|
static int |
Partial
enum onnx::optimization::PassEfficiency
|
static int |
s
enum class onnx::AttributeKind
|
static int |
Seperate
enum onnx::optimization::PassType
|
static int |
ss
enum class onnx::AttributeKind
|
static int |
Stability
enum onnx::optimization::PassOptimizationType
|
static int |
STABLE
enum onnx::OperatorStatus
|
static int |
t
enum class onnx::AttributeKind
|
static int |
TensorProto_DataLocation_DEFAULT
enum onnx::TensorProto_DataLocation
|
static int |
TensorProto_DataLocation_EXTERNAL
enum onnx::TensorProto_DataLocation
|
static int |
TensorProto_DataType_BFLOAT16
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_BOOL
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_COMPLEX128
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_COMPLEX64
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_DOUBLE
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_FLOAT
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_FLOAT16
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_INT16
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_INT32
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_INT64
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_INT8
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_STRING
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_UINT16
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_UINT32
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_UINT64
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_UINT8
enum onnx::TensorProto_DataType
|
static int |
TensorProto_DataType_UNDEFINED
enum onnx::TensorProto_DataType
|
static int |
ts
enum class onnx::AttributeKind
|
| Constructor and Description |
|---|
onnx() |
public static final int EXPERIMENTAL
public static final int STABLE
public static final int AttributeProto_AttributeType_UNDEFINED
public static final int AttributeProto_AttributeType_FLOAT
public static final int AttributeProto_AttributeType_INT
public static final int AttributeProto_AttributeType_STRING
public static final int AttributeProto_AttributeType_TENSOR
public static final int AttributeProto_AttributeType_GRAPH
public static final int AttributeProto_AttributeType_SPARSE_TENSOR
public static final int AttributeProto_AttributeType_FLOATS
public static final int AttributeProto_AttributeType_INTS
public static final int AttributeProto_AttributeType_STRINGS
public static final int AttributeProto_AttributeType_TENSORS
public static final int AttributeProto_AttributeType_GRAPHS
public static final int AttributeProto_AttributeType_SPARSE_TENSORS
public static final int TensorProto_DataType_UNDEFINED
public static final int TensorProto_DataType_FLOAT
public static final int TensorProto_DataType_UINT8
public static final int TensorProto_DataType_INT8
public static final int TensorProto_DataType_UINT16
public static final int TensorProto_DataType_INT16
public static final int TensorProto_DataType_INT32
public static final int TensorProto_DataType_INT64
public static final int TensorProto_DataType_STRING
public static final int TensorProto_DataType_BOOL
public static final int TensorProto_DataType_FLOAT16
public static final int TensorProto_DataType_DOUBLE
public static final int TensorProto_DataType_UINT32
public static final int TensorProto_DataType_UINT64
public static final int TensorProto_DataType_COMPLEX64
public static final int TensorProto_DataType_COMPLEX128
public static final int TensorProto_DataType_BFLOAT16
public static final int TensorProto_DataLocation_DEFAULT
public static final int TensorProto_DataLocation_EXTERNAL
public static final int _START_VERSION
public static final int IR_VERSION_2017_10_10
public static final int IR_VERSION_2017_10_30
public static final int IR_VERSION_2017_11_3
public static final int IR_VERSION_2019_1_22
public static final int IR_VERSION_2019_3_18
public static final int IR_VERSION
public static final int ONNXIFI_H
public static final int ONNXIFI_STATUS_SUCCESS
public static final int ONNXIFI_STATUS_FALLBACK
public static final int ONNXIFI_STATUS_INVALID_ID
public static final int ONNXIFI_STATUS_INVALID_SIZE
public static final int ONNXIFI_STATUS_INVALID_POINTER
public static final int ONNXIFI_STATUS_INVALID_PROTOBUF
public static final int ONNXIFI_STATUS_INVALID_MODEL
public static final int ONNXIFI_STATUS_INVALID_BACKEND
public static final int ONNXIFI_STATUS_INVALID_GRAPH
public static final int ONNXIFI_STATUS_INVALID_EVENT
public static final int ONNXIFI_STATUS_INVALID_STATE
public static final int ONNXIFI_STATUS_INVALID_NAME
public static final int ONNXIFI_STATUS_INVALID_SHAPE
public static final int ONNXIFI_STATUS_INVALID_DATATYPE
public static final int ONNXIFI_STATUS_INVALID_MEMORY_TYPE
public static final int ONNXIFI_STATUS_INVALID_MEMORY_LOCATION
public static final int ONNXIFI_STATUS_INVALID_FENCE_TYPE
public static final int ONNXIFI_STATUS_INVALID_PROPERTY
public static final int ONNXIFI_STATUS_UNSUPPORTED_TAG
public static final int ONNXIFI_STATUS_UNSUPPORTED_VERSION
public static final int ONNXIFI_STATUS_UNSUPPORTED_OPERATOR
public static final int ONNXIFI_STATUS_UNSUPPORTED_ATTRIBUTE
public static final int ONNXIFI_STATUS_UNSUPPORTED_SHAPE
public static final int ONNXIFI_STATUS_UNSUPPORTED_DATATYPE
public static final int ONNXIFI_STATUS_UNSUPPORTED_MEMORY_TYPE
public static final int ONNXIFI_STATUS_UNSUPPORTED_FENCE_TYPE
public static final int ONNXIFI_STATUS_UNSUPPORTED_PROPERTY
public static final int ONNXIFI_STATUS_UNIDENTIFIED_NAME
public static final int ONNXIFI_STATUS_MISMATCHING_SHAPE
public static final int ONNXIFI_STATUS_MISMATCHING_DATATYPE
public static final int ONNXIFI_STATUS_NO_SYSTEM_MEMORY
public static final int ONNXIFI_STATUS_NO_DEVICE_MEMORY
public static final int ONNXIFI_STATUS_NO_SYSTEM_RESOURCES
public static final int ONNXIFI_STATUS_NO_DEVICE_RESOURCES
public static final int ONNXIFI_STATUS_BACKEND_UNAVAILABLE
public static final int ONNXIFI_STATUS_INTERNAL_ERROR
public static final int ONNXIFI_EVENT_STATE_INVALID
public static final int ONNXIFI_EVENT_STATE_NONSIGNALLED
public static final int ONNXIFI_EVENT_STATE_SIGNALLED
public static final int ONNXIFI_DEVICE_TYPE_NPU
public static final int ONNXIFI_DEVICE_TYPE_DSP
public static final int ONNXIFI_DEVICE_TYPE_GPU
public static final int ONNXIFI_DEVICE_TYPE_CPU
public static final int ONNXIFI_DEVICE_TYPE_FPGA
public static final int ONNXIFI_DEVICE_TYPE_HETEROGENEOUS
public static final int ONNXIFI_CAPABILITY_THREAD_SAFE
public static final int ONNXIFI_CAPABILITY_SYMBOLIC_BATCH_SIZE
public static final int ONNXIFI_CAPABILITY_SYMBOLIC_SIZE_TENSORS
public static final int ONNXIFI_CAPABILITY_VARIABLE_BATCH_SIZE
public static final int ONNXIFI_CAPABILITY_VARIABLE_SIZE_OUTPUTS
public static final int ONNXIFI_CAPABILITY_HOT_PLUGGABLE
public static final int ONNXIFI_BACKEND_ONNXIFI_VERSION
public static final int ONNXIFI_BACKEND_NAME
public static final int ONNXIFI_BACKEND_VENDOR
public static final int ONNXIFI_BACKEND_VERSION
public static final int ONNXIFI_BACKEND_EXTENSIONS
public static final int ONNXIFI_BACKEND_DEVICE
public static final int ONNXIFI_BACKEND_DEVICE_TYPE
public static final int ONNXIFI_BACKEND_ONNX_IR_VERSION
public static final int ONNXIFI_BACKEND_OPSET_VERSION
public static final int ONNXIFI_BACKEND_CAPABILITIES
public static final int ONNXIFI_BACKEND_INIT_PROPERTIES
public static final int ONNXIFI_BACKEND_MEMORY_TYPES
public static final int ONNXIFI_BACKEND_GRAPH_INIT_PROPERTIES
public static final int ONNXIFI_BACKEND_SYNCHRONIZATION_TYPES
public static final int ONNXIFI_BACKEND_MEMORY_SIZE
public static final int ONNXIFI_BACKEND_MAX_GRAPH_SIZE
public static final int ONNXIFI_BACKEND_MAX_GRAPH_COUNT
public static final int ONNXIFI_BACKEND_MACS_FP32
public static final int ONNXIFI_BACKEND_MACS_FP16
public static final int ONNXIFI_BACKEND_MEMORY_BANDWIDTH
public static final int ONNXIFI_BACKEND_CPU_MEMORY_READ_BANDWIDTH
public static final int ONNXIFI_BACKEND_CPU_MEMORY_WRITE_BANDWIDTH
public static final int ONNXIFI_BACKEND_PCI_BUS_ID
public static final int ONNXIFI_BACKEND_PCI_DEVICE_ID
public static final int ONNXIFI_BACKEND_PCI_DOMAIN_ID
public static final int ONNXIFI_BACKEND_DIRECTX_ID
public static final int ONNXIFI_BACKEND_CUDA_INDEX
public static final int ONNXIFI_BACKEND_OPENCL_PLATFORM_ID
public static final int ONNXIFI_BACKEND_OPENCL_DEVICE_ID
public static final int ONNXIFI_DATATYPE_UNDEFINED
public static final int ONNXIFI_DATATYPE_FLOAT16
public static final int ONNXIFI_DATATYPE_FLOAT32
public static final int ONNXIFI_DATATYPE_FLOAT64
public static final int ONNXIFI_DATATYPE_INT8
public static final int ONNXIFI_DATATYPE_INT16
public static final int ONNXIFI_DATATYPE_INT32
public static final int ONNXIFI_DATATYPE_INT64
public static final int ONNXIFI_DATATYPE_UINT8
public static final int ONNXIFI_DATATYPE_UINT16
public static final int ONNXIFI_DATATYPE_UINT32
public static final int ONNXIFI_DATATYPE_UINT64
public static final int ONNXIFI_DATATYPE_COMPLEX64
public static final int ONNXIFI_DATATYPE_COMPLEX128
public static final int ONNXIFI_DATATYPE_BFLOAT16
public static final int ONNXIFI_MEMORY_TYPE_CPU
public static final int ONNXIFI_MEMORY_TYPE_CUDA_BUFFER
public static final int ONNXIFI_MEMORY_TYPE_OPENCL_BUFFER
public static final int ONNXIFI_MEMORY_TYPE_OPENGLES_TEXTURE_2D
public static final int ONNXIFI_MEMORY_TYPE_D3D_RESOURCE
public static final int ONNXIFI_BACKEND_PROPERTY_NONE
public static final int ONNXIFI_BACKEND_PROPERTY_OPTIMIZATION
public static final int ONNXIFI_BACKEND_PROPERTY_LOG_LEVEL
public static final int ONNXIFI_BACKEND_CUDA_STREAM
public static final int ONNXIFI_BACKEND_OPENCL_CONTEXT
public static final int ONNXIFI_GRAPH_PROPERTY_NONE
public static final int ONNXIFI_OPTIMIZATION_HIGH_THROUGHPUT
public static final int ONNXIFI_OPTIMIZATION_LOW_LATENCY
public static final int ONNXIFI_OPTIMIZATION_LOW_POWER
public static final int ONNXIFI_OPTIMIZATION_LOW_DELAY
public static final int ONNXIFI_LOG_LEVEL_ERROR
public static final int ONNXIFI_LOG_LEVEL_WARNING
public static final int ONNXIFI_LOG_LEVEL_INFO
public static final int ONNXIFI_LOG_LEVEL_DEBUG
public static final int ONNXIFI_TAG_TENSOR_DESCRIPTOR_V1
public static final int ONNXIFI_SYNCHRONIZATION_EVENT
public static final int ONNXIFI_SYNCHRONIZATION_IMPLICIT
public static final int ONNXIFI_TAG_MEMORY_FENCE_V1
public static final int NONE
public static final int CHECKER
public static final int OPTIMIZER
public static final int OK
public static final int FAIL
public static final int INVALID_ARGUMENT
public static final int INVALID_PROTOBUF
public static final int f
public static final int fs
public static final int i
public static final int is
public static final int s
public static final int ss
public static final int t
public static final int ts
public static final int g
public static final int gs
public static final int Fuse
public static final int Nop
public static final int Seperate
public static final int Immutable
public static final int Other
public static final int Empty
public static final int CountBased
public static final int Partial
public static final int Complete
public static final int None
public static final int Compute
public static final int Memory
public static final int ComputeMemory
public static final int Stability
public static final int DestroyZero
public static final int DestroyOne
public static final int DestroyTwo
@Namespace(value="onnx") @StdString public static BytePointer GenerateOptionalArgumentsDoc()
@Namespace(value="onnx") @StdString public static BytePointer GenerateBroadcastingDocMul()
@Namespace(value="onnx") @StdString public static BytePointer GenerateBroadcastingDocUni(@Cast(value="const char*") BytePointer from, @Cast(value="const char*") BytePointer to)
@Namespace(value="onnx") @StdString public static String GenerateBroadcastingDocUni(String from, String to)
@Namespace(value="onnx") public static void RegisterOnnxOperatorSetSchema()
@Namespace(value="onnx") public static void RegisterOnnxMLOperatorSetSchema()
@Namespace(value="onnx") public static void dummyInferenceFunction(@ByRef InferenceContext arg0)
@Namespace(value="onnx") @Cast(value="int64_t") public static long getAttribute(@ByRef InferenceContext ctx, @StdString BytePointer attributeName, @Cast(value="int64_t") long defaultValue)
@Namespace(value="onnx") @Cast(value="int64_t") public static long getAttribute(@ByRef InferenceContext ctx, @StdString String attributeName, @Cast(value="int64_t") long defaultValue)
@Namespace(value="onnx") @StdString public static BytePointer getAttribute(@ByRef InferenceContext ctx, @StdString BytePointer attributeName, @StdString BytePointer defaultValue)
@Namespace(value="onnx") @StdString public static String getAttribute(@ByRef InferenceContext ctx, @StdString String attributeName, @StdString String defaultValue)
@Namespace(value="onnx") @ByVal @Name(value="operator *") public static Dimension multiply(@ByVal Dimension dim1, @ByVal Dimension dim2)
@Namespace(value="onnx") @ByVal @Name(value="operator *") public static Dimension multiply(@ByVal Dimension dim1, @Cast(value="int64_t") long dim2)
@Namespace(value="onnx") @ByVal @Name(value="operator /") public static Dimension divide(@ByVal Dimension dim1, @Cast(value="int64_t") long dim2)
@Namespace(value="onnx") @ByVal public static Dimension multiplyDims(@Const @ByRef TensorShapeProto shape, int from, int upto_exclusive)
@Namespace(value="onnx") public static void propagateElemTypeWithValidation(@Const TypeProto input_type, TypeProto output_type)
@Namespace(value="onnx") public static void propagateElemTypeFromInputToOutput(@ByRef InferenceContext ctx, @Cast(value="size_t") long inputIndex, @Cast(value="size_t") long outputIndex)
@Namespace(value="onnx") public static void propagateElemTypeFromDtypeToOutput(@ByRef InferenceContext ctx, int data_type, @Cast(value="size_t") long outputIndex)
@Namespace(value="onnx") public static void propagateElemTypeFromDtypeToOutput(@ByRef InferenceContext ctx, @Const AttributeProto attr, @Cast(value="size_t") long outputIndex)
@Namespace(value="onnx") @Cast(value="bool") public static boolean hasShape(@Const @ByRef TypeProto type)
@Namespace(value="onnx") @Cast(value="bool") public static boolean hasInputShape(@ByRef InferenceContext ctx, @Cast(value="size_t") long n)
@Namespace(value="onnx") @Cast(value="bool") public static boolean hasNInputShapes(@ByRef InferenceContext ctx, @Cast(value="size_t") long n)
@Namespace(value="onnx") @Const @ByRef public static TensorShapeProto getInputShape(@ByRef InferenceContext ctx, @Cast(value="size_t") long n)
@Namespace(value="onnx") public static void appendSingleDimCopiedFromInputTypeToOutputType(@ByRef InferenceContext ctx, @Cast(value="size_t") long inputIndex, @Cast(value="size_t") long outputIndex, @Cast(value="size_t") long fromDimIndex)
@Namespace(value="onnx") public static void propagateShapeFromInputToOutput(@ByRef InferenceContext ctx, @Cast(value="size_t") long inputIndex, @Cast(value="size_t") long outputIndex)
@Namespace(value="onnx") public static void propagateShapeAndTypeFromFirstInput(@ByRef InferenceContext ctx)
@Namespace(value="onnx") public static void updateOutputElemType(@ByRef InferenceContext ctx, @Cast(value="size_t") long outputIndex, int elemType)
@Namespace(value="onnx") public static void propagateElemTypeFromAttributeToOutput(@ByRef InferenceContext ctx, @StdString BytePointer attributeName, @Cast(value="size_t") long outputIndex, @Cast(value="onnx::TensorProto_DataType") int default_value)
@Namespace(value="onnx") public static void propagateElemTypeFromAttributeToOutput(@ByRef InferenceContext ctx, @StdString BytePointer attributeName, @Cast(value="size_t") long outputIndex)
@Namespace(value="onnx") public static void propagateElemTypeFromAttributeToOutput(@ByRef InferenceContext ctx, @StdString String attributeName, @Cast(value="size_t") long outputIndex, @Cast(value="onnx::TensorProto_DataType") int default_value)
@Namespace(value="onnx") public static void propagateElemTypeFromAttributeToOutput(@ByRef InferenceContext ctx, @StdString String attributeName, @Cast(value="size_t") long outputIndex)
@Namespace(value="onnx") public static TensorShapeProto getOutputShape(@ByRef InferenceContext ctx, @Cast(value="size_t") long n)
@Namespace(value="onnx") public static void appendDim(TensorShapeProto shape, @Cast(value="int64_t") long dim_value)
@Namespace(value="onnx") public static void updateOutputShape(@ByRef InferenceContext ctx, @Cast(value="size_t") long outputIndex, @Const @ByRef TensorShapeProto shape)
@Namespace(value="onnx") public static void updateOutputShape(@ByRef InferenceContext ctx, @Cast(value="size_t") long outputIndex, @Const @ByRef TensorProto tensorProto)
@Namespace(value="onnx") public static void propagateShapeFromAttributeToOutput(@ByRef InferenceContext ctx, @StdString BytePointer attributeName, @Cast(value="size_t") long outputIndex)
@Namespace(value="onnx") public static void propagateShapeFromAttributeToOutput(@ByRef InferenceContext ctx, @StdString String attributeName, @Cast(value="size_t") long outputIndex)
@Namespace(value="onnx") public static void multidirectionalBroadcastShapeInference(@Const @ByRef TensorShapeProtoVector shapes, @ByRef TensorShapeProto resultShape)
@Namespace(value="onnx") public static void bidirectionalBroadcastShapeInference(@Const @ByRef TensorShapeProto shapeL, @Const @ByRef TensorShapeProto shapeR, @ByRef TensorShapeProto resultShape)
@Namespace(value="onnx") public static void mergeInDimensionInfo(@Const @ByRef Dimension source_dim, @ByRef Dimension target_dim, int dim_index)
@Namespace(value="onnx") public static void mergeInShapeInfo(@Const @ByRef TensorShapeProto source, @ByRef TensorShapeProto target)
@Namespace(value="onnx") public static void mergeInShapeInfo(@Const @ByRef TensorShapeProto source_shape, @ByRef TypeProto_Tensor target_type)
@Namespace(value="onnx") public static void mergeInShapeInfo(@Const @ByRef TypeProto_Tensor source, @ByRef TypeProto_Tensor target)
@Namespace(value="onnx") @ByVal public static TypeProto RemoveIthDimensionFromShape(@Const @ByRef TypeProto proto, int removed_dim)
@Namespace(value="onnx") @ByVal public static TypeProto RemoveDimensionsFromShape(@Const @ByRef TypeProto proto, int num_dimensions)
@Namespace(value="onnx") public static void checkInputRank(@ByRef InferenceContext ctx, @Cast(value="size_t") long input_index, int expected_rank)
@Namespace(value="onnx") public static void checkDimEquality(@Cast(value="int64_t") long value1, @Cast(value="int64_t") long value2)
@Namespace(value="onnx") public static void unifyDim(@Cast(value="const onnx::Dim*") @ByRef Dimension dim1, @Cast(value="const onnx::Dim*") @ByRef Dimension dim2)
@Namespace(value="onnx") public static void unifyInputDim(@ByRef InferenceContext ctx, @Cast(value="size_t") long input_index, int dim_index, @Cast(value="onnx::Dim*") @ByRef Dimension dim)
@Namespace(value="onnx") public static void unifyDim(@Cast(value="onnx::Dim*") @ByRef Dimension dim, @Cast(value="int64_t") long value)
@Namespace(value="onnx") public static void UnionShapeInfo(@Const @ByRef TensorShapeProto source_shape, @ByRef TypeProto_Tensor target_type)
public static void AddDescriptors_onnx_2fonnx_2doperators_2dml_2eproto()
@Namespace(value="onnx") @Cast(value="bool") public static boolean OperatorStatus_IsValid(int value)
@Namespace(value="onnx") @MemberGetter @Cast(value="const onnx::OperatorStatus") public static int OperatorStatus_MIN()
@Namespace(value="onnx") @MemberGetter @Cast(value="const onnx::OperatorStatus") public static int OperatorStatus_MAX()
@Namespace(value="onnx") @MemberGetter public static int OperatorStatus_ARRAYSIZE()
@Namespace(value="onnx") @Const public static EnumDescriptor OperatorStatus_descriptor()
@Namespace(value="onnx") @StdString public static BytePointer OperatorStatus_Name(@Cast(value="onnx::OperatorStatus") int value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean OperatorStatus_Parse(@StdString BytePointer name, @Cast(value="onnx::OperatorStatus*") IntPointer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean OperatorStatus_Parse(@StdString String name, @Cast(value="onnx::OperatorStatus*") IntBuffer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean OperatorStatus_Parse(@StdString BytePointer name, @Cast(value="onnx::OperatorStatus*") int[] value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean OperatorStatus_Parse(@StdString String name, @Cast(value="onnx::OperatorStatus*") IntPointer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean OperatorStatus_Parse(@StdString BytePointer name, @Cast(value="onnx::OperatorStatus*") IntBuffer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean OperatorStatus_Parse(@StdString String name, @Cast(value="onnx::OperatorStatus*") int[] value)
public static void AddDescriptors_onnx_2fonnx_2dml_2eproto()
@Namespace(value="onnx") @Cast(value="bool") public static boolean AttributeProto_AttributeType_IsValid(int value)
@Namespace(value="onnx") @MemberGetter @Cast(value="const onnx::AttributeProto_AttributeType") public static int AttributeProto_AttributeType_AttributeType_MIN()
@Namespace(value="onnx") @MemberGetter @Cast(value="const onnx::AttributeProto_AttributeType") public static int AttributeProto_AttributeType_AttributeType_MAX()
@Namespace(value="onnx") @MemberGetter public static int AttributeProto_AttributeType_AttributeType_ARRAYSIZE()
@Namespace(value="onnx") @Const public static EnumDescriptor AttributeProto_AttributeType_descriptor()
@Namespace(value="onnx") @StdString public static BytePointer AttributeProto_AttributeType_Name(@Cast(value="onnx::AttributeProto_AttributeType") int value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean AttributeProto_AttributeType_Parse(@StdString BytePointer name, @Cast(value="onnx::AttributeProto_AttributeType*") IntPointer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean AttributeProto_AttributeType_Parse(@StdString String name, @Cast(value="onnx::AttributeProto_AttributeType*") IntBuffer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean AttributeProto_AttributeType_Parse(@StdString BytePointer name, @Cast(value="onnx::AttributeProto_AttributeType*") int[] value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean AttributeProto_AttributeType_Parse(@StdString String name, @Cast(value="onnx::AttributeProto_AttributeType*") IntPointer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean AttributeProto_AttributeType_Parse(@StdString BytePointer name, @Cast(value="onnx::AttributeProto_AttributeType*") IntBuffer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean AttributeProto_AttributeType_Parse(@StdString String name, @Cast(value="onnx::AttributeProto_AttributeType*") int[] value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataType_IsValid(int value)
@Namespace(value="onnx") @MemberGetter @Cast(value="const onnx::TensorProto_DataType") public static int TensorProto_DataType_DataType_MIN()
@Namespace(value="onnx") @MemberGetter @Cast(value="const onnx::TensorProto_DataType") public static int TensorProto_DataType_DataType_MAX()
@Namespace(value="onnx") @MemberGetter public static int TensorProto_DataType_DataType_ARRAYSIZE()
@Namespace(value="onnx") @Const public static EnumDescriptor TensorProto_DataType_descriptor()
@Namespace(value="onnx") @StdString public static BytePointer TensorProto_DataType_Name(@Cast(value="onnx::TensorProto_DataType") int value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataType_Parse(@StdString BytePointer name, @Cast(value="onnx::TensorProto_DataType*") IntPointer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataType_Parse(@StdString String name, @Cast(value="onnx::TensorProto_DataType*") IntBuffer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataType_Parse(@StdString BytePointer name, @Cast(value="onnx::TensorProto_DataType*") int... value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataType_Parse(@StdString String name, @Cast(value="onnx::TensorProto_DataType*") IntPointer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataType_Parse(@StdString BytePointer name, @Cast(value="onnx::TensorProto_DataType*") IntBuffer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataType_Parse(@StdString String name, @Cast(value="onnx::TensorProto_DataType*") int... value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataLocation_IsValid(int value)
@Namespace(value="onnx") @MemberGetter @Cast(value="const onnx::TensorProto_DataLocation") public static int TensorProto_DataLocation_DataLocation_MIN()
@Namespace(value="onnx") @MemberGetter @Cast(value="const onnx::TensorProto_DataLocation") public static int TensorProto_DataLocation_DataLocation_MAX()
@Namespace(value="onnx") @MemberGetter public static int TensorProto_DataLocation_DataLocation_ARRAYSIZE()
@Namespace(value="onnx") @Const public static EnumDescriptor TensorProto_DataLocation_descriptor()
@Namespace(value="onnx") @StdString public static BytePointer TensorProto_DataLocation_Name(@Cast(value="onnx::TensorProto_DataLocation") int value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataLocation_Parse(@StdString BytePointer name, @Cast(value="onnx::TensorProto_DataLocation*") IntPointer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataLocation_Parse(@StdString String name, @Cast(value="onnx::TensorProto_DataLocation*") IntBuffer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataLocation_Parse(@StdString BytePointer name, @Cast(value="onnx::TensorProto_DataLocation*") int[] value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataLocation_Parse(@StdString String name, @Cast(value="onnx::TensorProto_DataLocation*") IntPointer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataLocation_Parse(@StdString BytePointer name, @Cast(value="onnx::TensorProto_DataLocation*") IntBuffer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean TensorProto_DataLocation_Parse(@StdString String name, @Cast(value="onnx::TensorProto_DataLocation*") int[] value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean Version_IsValid(int value)
@Namespace(value="onnx") @MemberGetter @Cast(value="const onnx::Version") public static int Version_MIN()
@Namespace(value="onnx") @MemberGetter @Cast(value="const onnx::Version") public static int Version_MAX()
@Namespace(value="onnx") @MemberGetter public static int Version_ARRAYSIZE()
@Namespace(value="onnx") @Const public static EnumDescriptor Version_descriptor()
@Namespace(value="onnx") @StdString public static BytePointer Version_Name(@Cast(value="onnx::Version") int value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean Version_Parse(@StdString BytePointer name, @Cast(value="onnx::Version*") IntPointer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean Version_Parse(@StdString String name, @Cast(value="onnx::Version*") IntBuffer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean Version_Parse(@StdString BytePointer name, @Cast(value="onnx::Version*") int[] value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean Version_Parse(@StdString String name, @Cast(value="onnx::Version*") IntPointer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean Version_Parse(@StdString BytePointer name, @Cast(value="onnx::Version*") IntBuffer value)
@Namespace(value="onnx") @Cast(value="bool") public static boolean Version_Parse(@StdString String name, @Cast(value="onnx::Version*") int[] value)
@Namespace(value="google::protobuf::arena_metrics") public static void EnableArenaMetrics(ArenaOptions options)
@Namespace(value="google::protobuf::internal") public static void arena_free(Pointer object, @Cast(value="size_t") long size)
@Namespace(value="google::protobuf::internal") public static int ToCachedSize(@Cast(value="size_t") long size)
@Namespace(value="google::protobuf::internal") @Cast(value="size_t") public static long FromIntSize(int size)
@Namespace(value="google::protobuf::internal") public static int ToIntSize(@Cast(value="size_t") long size)
@Namespace(value="google::protobuf::internal") @StdString public static BytePointer GetEmptyStringAlreadyInited()
@Namespace(value="google::protobuf::internal") @Cast(value="size_t") public static long StringSpaceUsedExcludingSelfLong(@StdString BytePointer str)
@Namespace(value="google::protobuf::internal") @Cast(value="size_t") public static long StringSpaceUsedExcludingSelfLong(@StdString String str)
@Namespace(value="onnx") @StdString public static BytePointer ProtoDebugString(@Const @ByRef MessageLite proto)
@Namespace(value="onnx") @Cast(value="bool") public static boolean ParseProtoFromBytes(MessageLite proto, @Cast(value="const char*") BytePointer buffer, @Cast(value="size_t") long length)
@Namespace(value="onnx") @Cast(value="bool") public static boolean ParseProtoFromBytes(MessageLite proto, String buffer, @Cast(value="size_t") long length)
@Namespace(value="onnx") @ByVal @Name(value="RetrieveValues<int64_t>") public static LongVector RetrieveValuesLong(@Const @ByRef AttributeProto attr)
@Namespace(value="onnx") @ByVal @Name(value="RetrieveValues<std::string>") public static StringVector RetrieveValuesString(@Const @ByRef AttributeProto attr)
@Namespace(value="onnx::checker") public static void check_value_info(@Const @ByRef ValueInfoProto value_info, @Const @ByRef CheckerContext arg1)
@Namespace(value="onnx::checker") public static void check_tensor(@Const @ByRef TensorProto tensor, @Const @ByRef CheckerContext arg1)
@Namespace(value="onnx::checker") public static void check_sparse_tensor(@Const @ByRef SparseTensorProto sparse_tensor, @Const @ByRef CheckerContext arg1)
@Namespace(value="onnx::checker") public static void check_attribute(@Const @ByRef AttributeProto attr, @Const @ByRef CheckerContext arg1, @Const @ByRef LexicalScopeContext arg2)
@Namespace(value="onnx::checker") public static void check_node(@Const @ByRef NodeProto node, @Const @ByRef CheckerContext arg1, @Const @ByRef LexicalScopeContext arg2)
@Namespace(value="onnx::checker") public static void check_graph(@Const @ByRef GraphProto graph, @Const @ByRef CheckerContext arg1, @Const @ByRef LexicalScopeContext arg2)
@Namespace(value="onnx::checker") public static void check_function(@Const @ByRef FunctionProto function, @Const @ByRef CheckerContext arg1, @Const @ByRef LexicalScopeContext arg2)
@Namespace(value="onnx::checker") public static void check_model(@Const @ByRef ModelProto model)
@Namespace(value="onnx::checker") public static void check_model(@StdString BytePointer model_path)
@Namespace(value="onnx::checker") public static void check_model(@StdString String model_path)
@Namespace(value="onnx::shape_inference") public static void checkShapesAndTypes(@Const @ByRef TypeProto_Tensor inferredType, @Const @ByRef TypeProto_Tensor existingType)
@Namespace(value="onnx::shape_inference") public static void checkShapesAndTypes(@Const @ByRef TypeProto_Sequence inferredType, @Const @ByRef TypeProto_Sequence existingType)
@Namespace(value="onnx::shape_inference") public static void checkShapesAndTypes(@Const @ByRef TypeProto inferredType, @Const @ByRef TypeProto existingType)
@Namespace(value="onnx::shape_inference") public static void mergeShapesAndTypes(@Const @ByRef TypeProto_Tensor inferredType, TypeProto_Tensor existingType)
@Namespace(value="onnx::shape_inference") public static void mergeShapesAndTypes(@Const @ByRef TypeProto_Sequence inferredType, TypeProto_Tensor existingType)
@Namespace(value="onnx::shape_inference") public static void mergeShapesAndTypes(@Const @ByRef TypeProto inferredType, TypeProto existingType)
@Namespace(value="onnx::shape_inference") public static void InferShapes(@ByRef ModelProto m, @Const ISchemaRegistry schema_registry)
@Namespace(value="onnx::shape_inference") public static void InferShapes(@ByRef ModelProto m)
@Namespace(value="onnx::shape_inference") public static void InferShapes(GraphProto g, @Const @ByRef StringIntMap opset_imports, @Const ISchemaRegistry schema_registry)
@Namespace(value="onnx::shape_inference") public static void InferShapes(GraphProto g, @Const @ByRef StringIntMap opset_imports)
@Namespace(value="onnx::shape_inference") public static void InferShapeForFunctionNode(@Const FunctionProto func, @Const ISchemaRegistry schema_registry, @ByRef InferenceContext ctx)
@Cast(value="onnxStatus") public static int onnxGetBackendIDs(@ByPtrPtr onnxBackendID backendIDs, @Cast(value="size_t*") SizeTPointer numBackends)
backendIDs[out] - - pointer to the memory location where the backend IDs
will be returned. If the pointer is NULL, it is
ignored, and the function returns only the number
of backend IDs through the numBackends pointer.numBackends[in,out] - - pointer to a variable specifying the number of
available backends. On function entry, the
variable MUST contain the capacity, in number
of backend IDs, of the memory buffer specified
by backendIDs. For successful completion, this
capacity must be at least as large as the
number of available backends. If the function
completes with either ONNXIFI_STATUS_SUCCESS
or ONNXIFI_STATUS_FALLBACK status codes, the
number of backend IDs written into backendIDs
buffer is stored in the variable specified by
this pointer.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded, and backend IDs
are stored in the location specified by
backendIDs, and the number of the backends
is stored in the location specified by
numBackends.
\retval ONNXIFI_STATUS_FALLBACK The function call completed, but the
backend IDs were not stored in the
location specified by backendIDs, either
because it is NULL, or because the size of
the memory buffer is insufficient to store
all available backend IDs. The number of
available backends is stored in the
location specified by numBackends.
\retval ONNXIFI_STATUS_INVALID_POINTER The function call failed because
numBackends is NULL.
\retval ONNXIFI_STATUS_NO_SYSTEM_MEMORY The function call failed because the
system failed to allocate memory
to store backend ID information.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
implementation experienced an
unrecovered internal error.@Cast(value="onnxStatus") public static int onnxReleaseBackendID(onnxBackendID backendID)
backendID - - backend ID returned by onnxGetBackendIDs.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and the resources
associated to the backend ID were released to
the operating system.
\retval ONNXIFI_STATUS_INVALID_ID The function call failed because backendID
is not an ONNXIFI backend ID.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
implementation experienced an
unrecovered internal error.@Cast(value="onnxStatus") public static int onnxGetBackendInfo(onnxBackendID backendID, @Cast(value="onnxBackendInfo") int infoType, Pointer infoValue, @Cast(value="size_t*") SizeTPointer infoValueSize)
backendID - - ID of the backend to query.infoType - - type of the backend information to query. Must be one of
the ONNXIFI_BACKEND_* constants. If this value is not
supported by the backend, the function will fail with
ONNXIFI_STATUS_UNSUPPORTED_ATTRIBUTE.infoValue[out] - - pointer to the memory location where the backend
information value will be returned. If the pointer is
NULL, it is ignored.infoValueSize[in,out] - - pointer to a variable specifying size, in
bytes, of the information value. On function
entry, the variable MUST contain the size of
the memory buffer specified by infoValue.
For successful completion, this size must be
at least as large as the queried value. If the
function completes with either
ONNXIFI_STATUS_SUCCESS or
ONNXIFI_STATUS_FALLBACK status codes, the
actual size of the value queried in the call
is stored in the variable specified by this
pointer.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded, and requested
value is stored in the location specified by
infoValue, and the actual size of the
requested value is stored in the location
specified by infoValueSize.
\retval ONNXIFI_STATUS_FALLBACK The function call completed, but the
requested value was not stored in the
location specified by infoValue, either
because it is NULL, or because the size of
the memory buffer is insufficient for the
value. The actual size of the requested value
is stored in the location specified by
infoValueSize.
\retval ONNXIFI_STATUS_INVALID_ID The function call failed because backendID
is not an ONNXIFI backend ID.
\retval ONNXIFI_STATUS_INVALID_POINTER The function call failed because
infoValueSize is NULL.
\retval ONNXIFI_STATUS_UNSUPPORTED_ATTRIBUTE The function call failed because
the value of infoType is not
supported by the backend.
\retval ONNXIFI_STATUS_BACKEND_UNAVAILABLE The function call failed because
the backend was disconnected or
uninstalled from the system.@Cast(value="onnxStatus") public static int onnxGetBackendCompatibility(onnxBackendID backendID, @Cast(value="size_t") long onnxModelSize, @Const Pointer onnxModel)
backendID - - ID of the backend to query.onnxModelSize - - size of the serialized ONNX ModelProto message,
in bytes.onnxModel - [in] - pointer to serialized ONNX ModelProto message
representing the model graph.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and the model
graph can efficiently run on the backend.
\retval ONNXIFI_STATUS_FALLBACK The function call succeeded and the model
graph can run on the backend through some
emulation layer with some efficiency loss. If
a backend decomposes this operator into
multiple sub-operators, it should return this
code. E.g. if a backend does not natively
support grouped or depthwise convolution, but
can execute it as multiple unit-group
convolution operators, it must return this
code.
\retval ONNXIFI_STATUS_INVALID_ID The function call failed because backendID
is not an ONNXIFI backend ID.
\retval ONNXIFI_STATUS_INVALID_POINTER The function call failed because
onnxModel is NULL.
\retval ONNXIFI_STATUS_INVALID_SIZE The function call failed because
onnxModelSize is 0.
\retval ONNXIFI_STATUS_INVALID_PROTOBUF The function call failed because it
couldn't parse the serialized
protobuf as an ONNX ModelProto
message.
\retval ONNXIFI_STATUS_INVALID_MODEL The function call failed because the
parsed ModelProto message does not
satisfy ONNX requirements and
constraints.
\retval ONNXIFI_STATUS_UNSUPPORTED_VERSION The function call failed because
the ONNX IR version or operator
version is not supported by the
backend.
\retval ONNXIFI_STATUS_UNSUPPORTED_OPERATOR The function call failed because
one of the operators in the model
graph is not supported by the
backend.
\retval ONNXIFI_STATUS_UNSUPPORTED_ATTRIBUTE The function call failed because
the backend does not support the
particular AttributeProto
values in one of the operators.
\retval ONNXIFI_STATUS_UNSUPPORTED_SHAPE The function call failed because the
backend does not support the
tensor shapes in an input or output
of one of the operators. The
problematic tensor shapes could be
directly specified through
ValueInfoProto in GraphProto.input,
GraphProto.output, or
GraphProto.value_info, through
TensorProto in
GraphProto.initializer, or inferred
from the inputs by the backend.
\retval ONNXIFI_STATUS_UNSUPPORTED_DATATYPE The function call failed because
the backend does not support the
data types in an input or output
of one of the operators. The
problematic data types could be
directly specified through
ValueInfoProto in
GraphProto.input,
GraphProto.output, or
GraphProto.value_info, through
TensorProto in
GraphProto.initializer, or
inferred from the inputs by the
backend.
\retval ONNXIFI_STATUS_MISMATCHING_SHAPE The function call failed because
output or intermediate shapes
specified in the ONNX model graph do
not match the shapes inferred from
input shapes.
\retval ONNXIFI_STATUS_MISMATCHING_DATATYPE The function call failed because
output or intermediate data types
specified in the ONNX model graph
do not match the data types
inferred from graph inputs.
\retval ONNXIFI_STATUS_NO_SYSTEM_MEMORY The function call failed because the
backend could not allocate enough
system memory to parse and analyze
the model graph.
\retval ONNXIFI_STATUS_BACKEND_UNAVAILABLE The function call failed because
the backend was disconnected or
uninstalled from the system.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
backend experienced an unrecovered
internal error.@Cast(value="onnxStatus") public static int onnxInitBackend(onnxBackendID backendID, @Cast(value="const uint64_t*") IntPointer auxPropertiesList, @ByPtrPtr onnxBackend backend)
backendID - - ID of the backend to initialize.auxPropertiesList - [in] - optional list of backend initialization
properties, terminated by
ONNXIFI_BACKEND_PROPERTY_NONE entry. Can be
NULL or empty.backend - [out] - pointer to an opaque handle for the initialized ONNXIFI
backend. If the function fails, the handle is
initialized to NULL.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and the backend
was successfully initialized.
\retval ONNXIFI_STATUS_INVALID_ID The function call failed because backendID
is not an ONNXIFI backend ID.
\retval ONNXIFI_STATUS_INVALID_POINTER The function call failed because
backend pointer is NULL.
\retval ONNXIFI_STATUS_INVALID_PROPERTY The function call failed because one
of the backend initialization
property values is invalid.
\retval ONNXIFI_STATUS_UNSUPPORTED_PROPERTY The function call failed because
backend does not recognize one
of the initialization
property IDs.
\retval ONNXIFI_STATUS_NO_SYSTEM_MEMORY The function call failed due to
insufficient system memory to
initialize backend.
\retval ONNXIFI_STATUS_NO_SYSTEM_RESOURCES The function call failed due to
insufficient non-memory system
resources (e.g. file handles) to
initialize the backend.
\retval ONNXIFI_STATUS_NO_DEVICE_MEMORY The function call failed due to
insufficient backend-specific memory
to initialize the backend.
\retval ONNXIFI_STATUS_NO_DEVICE_RESOURCES The function call failed due to
insufficient non-memory
backend-specific resources (e.g.
command queues) to initialize the
backend.
\retval ONNXIFI_STATUS_BACKEND_UNAVAILABLE The function call failed because
the backend was disconnected or
uninstalled from the system.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
backend experienced an unrecovered
internal error.@Cast(value="onnxStatus") public static int onnxInitBackend(onnxBackendID backendID, @Cast(value="const uint64_t*") IntBuffer auxPropertiesList, @ByPtrPtr onnxBackend backend)
@Cast(value="onnxStatus") public static int onnxInitBackend(onnxBackendID backendID, @Cast(value="const uint64_t*") int[] auxPropertiesList, @ByPtrPtr onnxBackend backend)
@Cast(value="onnxStatus") public static int onnxReleaseBackend(onnxBackend backend)
backend - - ONNXIFI backend handle created by onnxInitBackend.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and the backend
resources were released to the operating
system.
\retval ONNXIFI_STATUS_INVALID_BACKEND The function call failed because
backend is not an ONNXIFI backend
handle.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
backend experienced an unrecovered
internal error.@Cast(value="onnxStatus") public static int onnxInitEvent(onnxBackend backend, @ByPtrPtr onnxEvent event)
backend - - backend handle created by onnxInitBackend. This backend
would be used to initialize the event.event - [out] - pointer to the opaque handle for the created ONNXIFI
event. If the function fails, the handle is initialized
to NULL.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and the event
was successfully initialized.
\retval ONNXIFI_STATUS_INVALID_BACKEND The function call failed because
backend is not an ONNXIFI backend
handle.
\retval ONNXIFI_STATUS_INVALID_POINTER The function call failed because
event pointer is NULL.
\retval ONNXIFI_STATUS_NO_SYSTEM_MEMORY The function call failed due to
insufficient system memory to
initialize the event.
\retval ONNXIFI_STATUS_NO_SYSTEM_RESOURCES The function call failed due to
insufficient non-memory system
resources (e.g. file handles) to
initialize the event.
\retval ONNXIFI_STATUS_NO_DEVICE_MEMORY The function call failed due to
insufficient backend-specific memory
to initialize the event.
\retval ONNXIFI_STATUS_NO_DEVICE_RESOURCES The function call failed due to
insufficient non-memory
backend-specific resources (e.g.
command queues) to initialize the
event.
\retval ONNXIFI_STATUS_BACKEND_UNAVAILABLE The function call failed because
the backend was disconnected or
uninstalled from the system.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
backend experienced an unrecovered
internal error.@Cast(value="onnxStatus") public static int onnxSignalEvent(onnxEvent event)
event - - event handle created by onnxInitEvent. While it is technically
possible to use this function for an output memory fence event
created by onnxRunGraph, users SHOULD NOT do that.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and the event
was changed to signalled state.
\retval ONNXIFI_STATUS_INVALID_EVENT The function call failed because event
is not an ONNXIFI event handle.
\retval ONNXIFI_STATUS_INVALID_STATE The function call failed because event
is already in the signalled state.
\retval ONNXIFI_STATUS_BACKEND_UNAVAILABLE The function call failed because
the backend was disconnected or
uninstalled from the system.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
implementation experienced an
unrecovered internal error.@Cast(value="onnxStatus") public static int onnxGetEventState(onnxEvent event, @Cast(value="onnxEventState*") IntPointer state)
event - - event handle created by onnxRunGraph. While it is technically
possible to use this function for events created by
onnxInitEvent, this is not the intended use-case.state - [out] - pointer to the variable that will store the state of the
event. If the function fails, the variable is initialized
to ONNXIFI_EVENT_STATE_INVALID.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and the state
variable was initialized to either
ONNXIFI_EVENT_STATE_SIGNALLED or
ONNXIFI_EVENT_STATE_NONSIGNALLED according
to the state of the event.
\retval ONNXIFI_STATUS_INVALID_EVENT The function call failed because event
is not an ONNXIFI event handle.
\retval ONNXIFI_STATUS_INVALID_POINTER The function call failed because state
pointer is NULL.
\retval ONNXIFI_STATUS_BACKEND_UNAVAILABLE The function call failed because
the backend was disconnected or
uninstalled from the system.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
implementation experienced an
unrecovered internal error.@Cast(value="onnxStatus") public static int onnxGetEventState(onnxEvent event, @Cast(value="onnxEventState*") IntBuffer state)
@Cast(value="onnxStatus") public static int onnxGetEventState(onnxEvent event, @Cast(value="onnxEventState*") int[] state)
@Cast(value="onnxStatus") public static int onnxWaitEvent(onnxEvent event)
event - - event handle created by onnxRunGraph. While it is technically
possible to use this function with events created by
onnxInitEvent, this is not the intended use-case.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and the function
returned because event transitioned to
signalled state.
\retval ONNXIFI_STATUS_INVALID_EVENT The function call failed because event
is not an ONNXIFI event handle.
\retval ONNXIFI_STATUS_BACKEND_UNAVAILABLE The function call failed because
the backend was disconnected or
uninstalled from the system.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
implementation experienced an
unrecovered internal error.@Cast(value="onnxStatus") public static int onnxReleaseEvent(onnxEvent event)
event - - event handle created by either onnxInitEvent or onnxRunGraph.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and the event
resources were released to the operating
system.
\retval ONNXIFI_STATUS_INVALID_EVENT The function call failed because event
is not an ONNXIFI event handle.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
implementation experienced an
unrecovered internal error.@Cast(value="onnxStatus") public static int onnxInitGraph(onnxBackend backend, @Cast(value="const uint64_t*") IntPointer auxPropertiesList, @Cast(value="size_t") long onnxModelSize, @Const Pointer onnxModel, @Cast(value="uint32_t") int weightsCount, @Const onnxTensorDescriptorV1 weightDescriptors, @ByPtrPtr onnxGraph graph)
backend - - backend handle created by onnxInitBackend. This backend
would be used to setup and run the model graph.auxPropertiesList - [in] - optional list of graph initialization
properties, terminated by
ONNXIFI_GRAPH_PROPERTY_NONE entry. Can be
NULL or empty.onnxModelSize - - size of the serialized ONNX ModelProto message,
in bytes.onnxModel - [in] - pointer to serialized ONNX ModelProto message
representing the model graph. The backend MUST not
assume that the serialized ModelProto message is
present at this address after the function returns.weightsCount - - number of weights specified in this function call
through tensor descriptors. Alternatively, the weights
can be specified in ModelProto.graph.initializer.
If weightsCount is non-zero, weightDescriptors must be
non-NULL.weightDescriptors - [in] - descriptors of static input tensors for the
graph. Elements of this array provide location
for blobs identified by ValueInfoProto.name
listed in ModelProto.graph.input of the ONNX
graph. If this parameter is non-NULL,
all static inputs must be specified through
the tensor descriptors, and the
ModelProto.graph.initializer list must be
empty. The tensor descriptors
must use ONNXIFI_MEMORY_TYPE_CPU memory type,
and the backend must copy the values of the
tensors and all metadata, including shape,
into its own memory before the function
returns.graph - [out] - pointer to the opaque handle for the created ONNXIFI
graph. If the function fails, and this pointer is
non-NULL, the handle is initialized to NULL.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and the model
graph was successfully initialized on the
backend.
\retval ONNXIFI_STATUS_FALLBACK The function call succeeded and the model
graph was initialized for the backend through
an emulation layer with substantial
efficiency loss. If a backend decomposes an
operator into multiple sub-operators, it
MUST return this code. E.g. if a backend
does not natively support grouped or
depthwise convolution, but can execute it as
multiple unit-group convolution operators, it
should return this code.
\retval ONNXIFI_STATUS_INVALID_BACKEND The function call failed because
backend is not an ONNXIFI backend
handle.
\retval ONNXIFI_STATUS_INVALID_PROPERTY The function call failed because one
of the graph initialization property
values is invalid.
\retval ONNXIFI_STATUS_INVALID_POINTER The function call failed because
onnxModel or graph pointer is NULL, or
weightDescriptors pointer is NULL
while weightsCount is non-zero.
\retval ONNXIFI_STATUS_INVALID_SIZE The function call failed because
onnxModelSize is 0.
\retval ONNXIFI_STATUS_INVALID_PROTOBUF The function call failed because it
couldn't parse the serialized
protobuf as an ONNX ModelProto
message.
\retval ONNXIFI_STATUS_INVALID_MODEL The function call failed because the
parsed ModelProto message does not
satisfy ONNX requirements and
constraints.
\retval ONNXIFI_STATUS_INVALID_SHAPE The function call failed because one of
the shape dimensions in
weightDescriptors is 0.
\retval ONNXIFI_STATUS_INVALID_DATATYPE The function call failed because
one of the data types in
weightDescriptors is unknown to the
backend.
\retval ONNXIFI_STATUS_INVALID_MEMORY_TYPE The function call failed because
one of the memory types in
weightDescriptors is unknown to
the backend.
\retval ONNXIFI_STATUS_INVALID_MEMORY_LOCATION The function call failed
because one of the memory
locations in weightDescriptors
is invalid (NULL pointer).
\retval ONNXIFI_STATUS_UNSUPPORTED_PROPERTY The function call failed because
backend does not recognize one
of the graph initialization
property IDs.
\retval ONNXIFI_STATUS_UNSUPPORTED_VERSION The function call failed because
the ONNX IR version or operator
version is not supported by the
backend.
\retval ONNXIFI_STATUS_UNSUPPORTED_OPERATOR The function call failed because
one of the operators in the model
graph is not supported by the
backend.
\retval ONNXIFI_STATUS_UNSUPPORTED_ATTRIBUTE The function call failed because
the backend does not support the
particular AttributeProto
values in one of the operators.
\retval ONNXIFI_STATUS_UNSUPPORTED_SHAPE The function call failed because the
backend does not support the
tensor shapes in an input or
output of one of the operators.
The problematic tensor shapes could
be directly specified through
ValueInfoProto in GraphProto.input,
GraphProto.output, or
GraphProto.value_info, through
TensorProto in
GraphProto.initializer, through
weightDescriptors argument,
or inferred from the inputs by the
backend.
\retval ONNXIFI_STATUS_UNSUPPORTED_DATATYPE The function call failed because
the backend does not support the
data types in an input or output
of one of the operators. The
problematic data types could be
directly specified through
ValueInfoProto in
GraphProto.input,
GraphProto.output, or
GraphProto.value_info, through
TensorProto in
GraphProto.initializer, through
weightDescriptors argument,
or inferred from the inputs by
the backend.
\retval ONNXIFI_STATUS_UNSUPPORTED_MEMORY_TYPE The function call failed
because one of the memory
types in weightDescriptors is
different from
ONNXIFI_MEMORY_TYPE_CPU.
\retval ONNXIFI_STATUS_MISMATCHING_SHAPE The function call failed because
the shapes specified in weight
descriptors do not match the shapes
specified in the ONNX model graph,
or output or intermediate shapes
specified in the ONNX model graph do
not match the shapes inferred from
input shapes.
\retval ONNXIFI_STATUS_MISMATCHING_DATATYPE The function call failed because
data types specified in weight
descriptors do not match the data
types specified in ONNX model
graph, or output or intermediate
data types specified in the ONNX
model graph do not match the data
types inferred from graph inputs.
\retval ONNXIFI_STATUS_NO_SYSTEM_MEMORY The function call failed because the
backend could not allocate enough
system memory to parse, analyze, and
initialize the model graph.
\retval ONNXIFI_STATUS_NO_SYSTEM_RESOURCES The function call failed due to
insufficient non-memory system
resources (e.g. file handles) to
initialize the graph.
\retval ONNXIFI_STATUS_NO_DEVICE_MEMORY The function call failed due to
insufficient backend-specific memory
to initialize the graph.
\retval ONNXIFI_STATUS_NO_DEVICE_RESOURCES The function call failed due to
insufficient non-memory
backend-specific resources (e.g.
command queues) to initialize the
graph.
\retval ONNXIFI_STATUS_BACKEND_UNAVAILABLE The function call failed because
the backend was disconnected or
uninstalled from the system.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
implementation experienced an
unrecovered internal error.@Cast(value="onnxStatus") public static int onnxInitGraph(onnxBackend backend, @Cast(value="const uint64_t*") IntBuffer auxPropertiesList, @Cast(value="size_t") long onnxModelSize, @Const Pointer onnxModel, @Cast(value="uint32_t") int weightsCount, @Const onnxTensorDescriptorV1 weightDescriptors, @ByPtrPtr onnxGraph graph)
@Cast(value="onnxStatus") public static int onnxInitGraph(onnxBackend backend, @Cast(value="const uint64_t*") int[] auxPropertiesList, @Cast(value="size_t") long onnxModelSize, @Const Pointer onnxModel, @Cast(value="uint32_t") int weightsCount, @Const onnxTensorDescriptorV1 weightDescriptors, @ByPtrPtr onnxGraph graph)
@Cast(value="onnxStatus") public static int onnxSetGraphIO(onnxGraph graph, @Cast(value="uint32_t") int inputsCount, @Const onnxTensorDescriptorV1 inputDescriptors, @Cast(value="uint32_t") int outputsCount, @Const onnxTensorDescriptorV1 outputDescriptors)
graph - - graph handle created by onnxInitGraph.inputsCount - - number of elements in the inputDescriptors array.inputDescriptors - [in] - descriptors of input tensors for the graph.
Elements of this array must provide a location
for each ValueInfoProto.name listed in
ModelProto.graph.input of the ONNX graph.
If inputsCount is non-zero, inputDescriptors
pointer must be non-NULL.outputsCount - - number of elements in the outputDescriptors array.
Must be greater than zero.outputDescriptors - [in] - descriptors of output tensors for the graph.
outputDescriptors pointer must be non-NULL.
Elements of this array must provide a location
for each ValueInfoProto.name listed in
ModelProto.graph.output of the ONNX graph.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and all the graph
inputs and outputs were matched to a memory
location.
\retval ONNXIFI_STATUS_INVALID_GRAPH The function call failed because
graph is not an ONNXIFI graph handle.
\retval ONNXIFI_STATUS_INVALID_POINTER The function call failed because
outputDescriptors pointer is NULL or
inputDescriptors pointer is NULL while
inputsCount is non-zero.
\retval ONNXIFI_STATUS_INVALID_NAME The function call failed because one of
the names in tensor descriptors doesn't
match blob name in ModelProto.graph.input
or ModelProto.graph.output, or the same
name appears in more than one tensor
descriptor.
\retval ONNXIFI_STATUS_INVALID_SHAPE The function call failed because one of
the shape dimensions is 0.
\retval ONNXIFI_STATUS_INVALID_DATATYPE The function call failed because
one of the data types in
inputDescriptors or outputDescriptors
is unknown to the backend.
\retval ONNXIFI_STATUS_INVALID_MEMORY_TYPE The function call failed because
one of the memory types in
inputDescriptors or
outputDescriptors is unknown to
the backend.
\retval ONNXIFI_STATUS_INVALID_MEMORY_LOCATION The function call failed
because one of the memory
locations in inputDescriptors
or outputDescriptors is not
valid for the specified
memory type (e.g. NULL pointer
for ONNXIFI_MEMORY_TYPE_CPU).
\retval ONNXIFI_STATUS_UNSUPPORTED_TAG The function call failed because one
of the tags in inputDescriptors or
outputDescriptors is unknown to the
backend (tag does not match
ONNXIFI_TAG_TENSOR_DESCRIPTOR_V1).
\retval ONNXIFI_STATUS_UNSUPPORTED_SHAPE The function call failed because the
backend does not support the
tensor shapes in an input or output
of one of the operators. The
problematic tensor shapes could be
directly specified through
inputDescriptors or
outputDescriptors argument,
or inferred from the inputs by the
backend. This error code can be
returned when the backend supports
variable-size inputs and outputs,
and the problematic tensor shape was
provided in the ValueInfoProto as a
symbolic variable.
\retval ONNXIFI_STATUS_UNSUPPORTED_MEMORY_TYPE The function call failed
because the backend does not
support one of the memory
types in inputDescriptors or
outputDescriptors.
\retval ONNXIFI_STATUS_UNIDENTIFIED_NAME The function call failed because one
of the ValueInfoProto.name value in
ModelProto.graph.input or
ModelProto.graph.output doesn't have
a match in the inputDescriptors or
outputDescriptors.
\retval ONNXIFI_STATUS_MISMATCHING_SHAPE The function call failed because
the shapes specified through
inputDescriptors or
outputDescriptors argument are
inconsistent with the shapes
specified in the ONNX model graph.
\retval ONNXIFI_STATUS_MISMATCHING_DATATYPE The function call failed because
data types specified through
inputDescriptors or
outputDescriptors argument are
inconsistent with the data types
specified in the ONNX model
graph.
\retval ONNXIFI_STATUS_NO_SYSTEM_MEMORY The function call failed because the
backend could not allocate enough
system memory to parse, analyze, and
initialize the tensor locations.
\retval ONNXIFI_STATUS_NO_SYSTEM_RESOURCES The function call failed due to
insufficient non-memory system
resources (e.g. file handles) to
initialize the tensor locations.
\retval ONNXIFI_STATUS_NO_DEVICE_MEMORY The function call failed due to
insufficient backend-specific memory
to initialize the tensor locations.
\retval ONNXIFI_STATUS_NO_DEVICE_RESOURCES The function call failed due to
insufficient non-memory
backend-specific resources (e.g.
command queues) to initialize the
tensor locations.
\retval ONNXIFI_STATUS_BACKEND_UNAVAILABLE The function call failed because
the backend was disconnected or
uninstalled from the system.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
backend experienced an unrecovered
internal error.@Cast(value="onnxStatus") public static int onnxRunGraph(onnxGraph graph, @Const onnxMemoryFenceV1 inputFence, onnxMemoryFenceV1 outputFence)
graph - - graph handle created by onnxInitGraph.inputFence - [in] - synchronization primitive that signals when graph
inputs are ready to use by the backend. The
synchronization primitive always must be initialized
by the caller.outputFence - [out] - synchronization primitive that signals when graph
outputs are ready to use by the caller. The type
of the synchronization primitive always must be
initialized by the caller. The type of the
synchronization primitive determines whether it
is initialized by the user before the call or by
the backend as a result of this call. Single-shot
synchronization objects are initialized as a result
of the call. Reusable synchronization objects are
generally initialized by the user prior to the
call.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and all the graph
inputs and outputs were matched to a memory
location.
\retval ONNXIFI_STATUS_INVALID_POINTER The function call failed because
inputFence or outputFence pointer is
NULL.
\retval ONNXIFI_STATUS_INVALID_GRAPH The function call failed because
graph is not an ONNXIFI graph handle.
\retval ONNXIFI_STATUS_INVALID_FENCE_TYPE The function call failed because
the type of synchronization
primitive specified in inputFence
or outputFence is unknown to the
backend.
\retval ONNXIFI_STATUS_INVALID_EVENT The function call failed because
the memory synchronization primitive
specified in inputFence or outputFence
is not valid (e.g. NULL onnxEvent).
\retval ONNXIFI_STATUS_UNSUPPORTED_TAG The function call failed because a tag
in inputFence or outputFence is
unknown to the backend (tag does not
match ONNXIFI_TAG_MEMORY_FENCE_V1).
\retval ONNXIFI_STATUS_UNSUPPORTED_FENCE_TYPE The function call failed
because the backend does not
support the type of
synchronization primitive
specified in inputFence or
outputFence.
\retval ONNXIFI_STATUS_UNIDENTIFIED_NAME The function call failed because
some of the ValueInfoProto.name
value in ModelProto.graph.input or
ModelProto.graph.output were not
specified in a call to
onnxSetGraphIO.
\retval ONNXIFI_STATUS_NO_SYSTEM_MEMORY The function call failed because the
backend could not allocate enough
system memory to execute the model
graph.
\retval ONNXIFI_STATUS_NO_SYSTEM_RESOURCES The function call failed due to
insufficient non-memory system
resources (e.g. file handles) to
execute the model graph.
\retval ONNXIFI_STATUS_NO_DEVICE_MEMORY The function call failed due to
insufficient backend-specific memory
to execute the graph.
\retval ONNXIFI_STATUS_NO_DEVICE_RESOURCES The function call failed due to
insufficient non-memory
backend-specific resources (e.g.
command queues) to execute the
graph.
\retval ONNXIFI_STATUS_BACKEND_UNAVAILABLE The function call failed because
the backend was disconnected or
uninstalled from the system.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
backend experienced an unrecovered
internal error.@Cast(value="onnxStatus") public static int onnxReleaseGraph(onnxGraph graph)
graph - - graph handle created by onnxInitGraph.
\retval ONNXIFI_STATUS_SUCCESS The function call succeeded and the graph
resources were released to the operating
system.
\retval ONNXIFI_STATUS_INVALID_GRAPH The function call failed because graph
is not an ONNXIFI graph handle.
\retval ONNXIFI_STATUS_INTERNAL_ERROR The function call failed because the
graph backend experienced an
unrecovered internal error.@Namespace(value="onnx::Common") @Cast(value="std::ostream*") @ByRef @Name(value="operator <<") public static Pointer shiftLeft(@Cast(value="std::ostream*") @ByRef Pointer out, @Const @ByRef Status status)
@Namespace(value="onnx") @Cast(value="bool") @Name(value="operator ==") public static boolean equals(@Const @ByRef Use a, @Const @ByRef Use b)
@Namespace(value="onnx") public static void ExportModelProto(ModelProto p_m, @SharedPtr Graph g)
@Namespace(value="onnx") @UniquePtr public static Graph ImportModelProto(@Const @ByRef ModelProto mp)
@Namespace(value="onnx") @ByVal public static ModelProto PrepareOutput(@Const @ByRef ModelProto mp_in)
@Namespace(value="onnx") public static void assertNonNull(@SharedPtr Graph g)
@Namespace(value="onnx::version_conversion") public static int check_numpy_unibroadcastable_and_require_broadcast(@StdVector DimensionIR input1_sizes, @StdVector DimensionIR input2_sizes)
@Namespace(value="onnx::version_conversion") public static void assert_numpy_multibroadcastable(@StdVector DimensionIR input1_sizes, @StdVector DimensionIR input2_sizes)
@Namespace(value="onnx::version_conversion") public static void assertNotParams(@StdVector DimensionIR sizes)
@Namespace(value="onnx::version_conversion") @ByVal public static ModelProto ConvertVersion(@Const @ByRef ModelProto mp_in, int target_version)
@Namespace(value="onnx") public static void FunctionExpandHelper(@Const @ByRef NodeProto node, @Const @ByRef FunctionProto func, @ByRef GraphProto g, @StdString BytePointer node_prefix)
@Namespace(value="onnx") public static void FunctionExpandHelper(@Const @ByRef NodeProto node, @Const @ByRef FunctionProto func, @ByRef GraphProto g)
@Namespace(value="onnx") public static void FunctionExpandHelper(@Const @ByRef NodeProto node, @Const @ByRef FunctionProto func, @ByRef GraphProto g, @StdString String node_prefix)
@Namespace(value="onnx::optimization") @Const @ByVal public static StringVector GetAvailablePasses()
@Namespace(value="onnx::optimization") @ByVal public static ModelProto Optimize(@Const @ByRef ModelProto mp_in, @Const @ByRef StringVector names)
@Namespace(value="onnx::optimization") @ByVal public static ModelProto OptimizeFixed(@Const @ByRef ModelProto mp_in, @Const @ByRef StringVector names)
Copyright © 2020. All rights reserved.