@Namespace(value="dai::node") @NoOffset @Properties(inherit=depthai.class) public class NeuralNetwork extends Node
Node.Connection, Node.DatatypeHierarchy, Node.Input, Node.OutputPointer.CustomDeallocator, Pointer.Deallocator, Pointer.NativeDeallocator, Pointer.ReferenceCounter| Constructor and Description |
|---|
NeuralNetwork(PipelineImpl par,
long nodeId) |
NeuralNetwork(Pointer p)
Pointer cast constructor.
|
| Modifier and Type | Method and Description |
|---|---|
BytePointer |
getName()
Retrieves the node's name
|
int |
getNumInferenceThreads()
How many inference threads will be used to run the network
|
Node.Input |
input()
Input message with data to be inferred upon
Default queue is blocking with size 5
|
Node.Output |
out()
Outputs NNData message that carries inference results
|
Node.Output |
passthrough()
Passthrough message on which the inference was performed.
|
void |
setBlobPath(BytePointer path)
Load network blob into assets and use once pipeline is started.
|
void |
setBlobPath(String path) |
void |
setNumInferenceThreads(int numThreads)
How many threads should the node use to run the network.
|
void |
setNumNCEPerInferenceThread(int numNCEPerThread)
How many Neural Compute Engines should a single thread use for inference
|
void |
setNumPoolFrames(int numFrames)
Specifies how many frames will be available in the pool
|
assetManager, clone, getAssets, getInputRefs, getInputs, getOutputRefs, getOutputs, getParentPipeline, getProperties, getRequiredOpenVINOVersion, id, inputs, outputsaddress, asBuffer, asByteBuffer, availablePhysicalBytes, calloc, capacity, capacity, close, deallocate, deallocate, deallocateReferences, deallocator, deallocator, equals, fill, formatBytes, free, getPointer, getPointer, getPointer, getPointer, hashCode, isNull, isNull, limit, limit, malloc, maxBytes, maxPhysicalBytes, memchr, memcmp, memcpy, memmove, memset, offsetAddress, offsetof, offsetof, parseBytes, physicalBytes, position, position, put, realloc, referenceCount, releaseReference, retainReference, setNull, sizeof, sizeof, toString, totalBytes, totalCount, totalPhysicalBytes, withDeallocator, zeropublic NeuralNetwork(Pointer p)
Pointer(Pointer).public NeuralNetwork(@SharedPtr PipelineImpl par, @Cast(value="int64_t") long nodeId)
@StdString public BytePointer getName()
Node@MemberGetter @ByRef public Node.Input input()
@MemberGetter @ByRef public Node.Output out()
@MemberGetter @ByRef public Node.Output passthrough()
public void setBlobPath(@StdString BytePointer path)
path - Path to network blobpublic void setBlobPath(@StdString String path)
public void setNumPoolFrames(int numFrames)
numFrames - How many frames will pool havepublic void setNumInferenceThreads(int numThreads)
numThreads - Number of threads to dedicate to this nodepublic void setNumNCEPerInferenceThread(int numNCEPerThread)
numNCEPerThread - Number of NCE per threadpublic int getNumInferenceThreads()
Copyright © 2021. All rights reserved.