Retrieve all impulses for a project, including accuracy and performance metrics.

curl --request GET \
  --url https://studio.edgeimpulse.com/v1/api/{projectId}/impulses-detailed \
  --header 'x-api-key: <api-key>'

{
"success": true,
"impulses": [
{
"impulse": {
"id": 123,
"name": "<string>",
"inputBlocks": [
{
"id": 2,
"type": "time-series",
"name": "Time series",
"title": "Time series",
"windowSizeMs": 2004,
"windowIncreaseMs": 123,
"frequencyHz": 60,
"classificationWindowIncreaseMs": 123,
"padZeros": true,
"labelingMethodMultiLabel": {
"type": "end-of-window",
"labels": [
"<string>"
]
},
"imageWidth": 28,
"imageHeight": 28,
"resizeMode": "squash",
"resizeMethod": "squash",
"cropAnchor": "middle-center",
"createdBy": "createImpulse",
"createdAt": "2023-11-07T05:31:56Z",
"datasetSubset": {
"includePercentage": 123,
"seed": 123
}
}
],
"dspBlocks": [
{
"id": 2,
"type": "spectral-analysis",
"name": "Spectral features",
"axes": [
"accX"
],
"title": "Spectral Analysis",
"implementationVersion": 123,
"valuesPerAxis": 11,
"input": 1,
"createdBy": "createImpulse",
"createdAt": "2023-11-07T05:31:56Z",
"organization": {
"id": 123,
"dspId": 123
},
"customUrl": "<string>",
"namedAxes": [
{
"name": "<string>",
"description": "<string>",
"required": true,
"selectedAxis": "<string>"
}
]
}
],
"learnBlocks": [
{
"id": 2,
"type": "anomaly",
"name": "NN Classifier",
"dsp": [
27
],
"title": "Classification (Keras)",
"createdBy": "createImpulse",
"createdAt": "2023-11-07T05:31:56Z"
}
],
"postProcessingBlocks": [
{
"id": 2,
"type": "object-tracking",
"name": "Object tracking",
"title": "Object tracking",
"implementationVersion": 123,
"createdBy": "createImpulse",
"createdAt": "2023-11-07T05:31:56Z"
}
]
},
"metrics": [
{
"name": "<string>",
"type": "core",
"category": "impulseMetrics",
"description": "<string>",
"value": "<string>",
"filteringType": {
"type": "numeric",
"options": [
"<string>"
]
},
"title": "<string>",
"valueForSorting": 123,
"valueHint": "<string>"
}
],
"dspBlockConfigs": [
{
"blockId": 123,
"config": {
"dsp": {
"id": 1,
"name": "Spectral features",
"windowLength": 3000,
"type": "spectral-analysis",
"classes": [
"<string>"
],
"features": {
"generated": true,
"count": 123,
"labels": [
"<string>"
],
"classes": [
"<string>"
]
},
"expectedWindowCount": 123,
"inputAxes": [
"<string>"
],
"canCalculateFeatureImportance": true,
"calculateFeatureImportance": true,
"canNormalizeData": true,
"normalizeData": "none",
"performance": {
"latency": 123,
"ram": 123,
"customDspString": "<string>"
},
"hasAutoTune": true,
"minimumVersionForAutotune": 123,
"hasAutotunerResults": true,
"usesState": true
},
"config": [
{
"group": "Scaling",
"items": [
{
"name": "Scale axes",
"defaultValue": "<string>",
"type": "text",
"param": "scale-axes",
"readonly": true,
"shouldShow": true,
"required": true,
"showClickToSet": true,
"value": "<string>",
"help": "Divide axes by this number",
"selectOptions": [
{
"value": "<string>",
"selected": true,
"optionLabel": "<string>",
"priority": 123,
"romEstimate": 123,
"needsOps": [
"<string>"
],
"needsFeatures": [
"<string>"
]
}
],
"showIf": {
"parameter": "<string>",
"operator": "eq",
"value": "<string>"
},
"invalidText": "<string>",
"section": "advanced",
"multiline": true,
"hint": "<string>",
"placeholder": "<string>",
"valid": [
{}
],
"items": {},
"properties": {}
}
]
}
],
"configError": "<string>"
},
"metadata": {
"created": "2023-11-07T05:31:56Z",
"generated": true,
"dspConfig": {},
"labels": [
"<string>"
],
"windowCount": 123,
"featureCount": 123,
"includedSamples": [
{
"id": 123,
"windowCount": 123
}
],
"windowSizeMs": 123,
"windowIncreaseMs": 123,
"padZeros": true,
"frequency": 123,
"outputConfig": {
"type": "image",
"shape": {
"width": 123,
"height": 123,
"channels": 123,
"frames": 123
}
},
"featureLabels": [
"<string>"
],
"fftUsed": [
123
],
"resamplingAlgorithmVersion": 123,
"featureExplorerJobId": 123,
"featureExplorerJobFailed": true,
"featureImportanceJobId": 123,
"featureImportanceJobFailed": true
}
}
],
"learnBlockKerasConfigs": [
{
"blockId": 123,
"config": {
"dependencies": {
"classes": [
"<string>"
],
"blockNames": [
"<string>"
],
"featureCount": 123,
"sampleCount": 123
},
"trained": true,
"name": "<string>",
"script": "<string>",
"minimumConfidenceRating": 123,
"selectedModelType": "int8",
"mode": "visual",
"visualLayers": [
{
"type": "dense",
"neurons": 123,
"kernelSize": 123,
"dropoutRate": 123,
"columns": 123,
"stack": 123,
"enabled": true,
"organizationModelId": 123
}
],
"trainingCycles": 123,
"learningRate": 123,
"defaultBatchSize": 123,
"shape": "<string>",
"augmentationPolicyImage": "none",
"transferLearningModels": [
{
"name": "<string>",
"shortName": "<string>",
"description": "<string>",
"hasNeurons": true,
"hasDropout": true,
"type": "dense",
"learnBlockType": "anomaly",
"author": "<string>",
"blockType": "official",
"abbreviatedName": "<string>",
"defaultNeurons": 123,
"defaultDropout": 123,
"defaultLearningRate": 123,
"defaultTrainingCycles": 123,
"hasImageAugmentation": true,
"organizationModelId": 123,
"implementationVersion": 123,
"repositoryUrl": "<string>",
"customParameters": [
{
"name": "Scale axes",
"defaultValue": "<string>",
"type": "text",
"param": "scale-axes",
"readonly": true,
"shouldShow": true,
"required": true,
"showClickToSet": true,
"value": "<string>",
"help": "Divide axes by this number",
"selectOptions": [
{
"value": "<string>",
"selected": true,
"optionLabel": "<string>",
"priority": 123,
"romEstimate": 123,
"needsOps": [
"<string>"
],
"needsFeatures": [
"<string>"
]
}
],
"showIf": {
"parameter": "<string>",
"operator": "eq",
"value": "<string>"
},
"invalidText": "<string>",
"section": "advanced",
"multiline": true,
"hint": "<string>",
"placeholder": "<string>",
"valid": [
{}
],
"items": {},
"properties": {}
}
],
"displayCategory": "classical",
"blockNoLongerAvailable": {
"reasonMarkdown": "<string>",
"reasonHtml": "<string>"
}
}
],
"profileInt8": true,
"skipEmbeddingsAndMemory": true,
"showAdvancedTrainingSettings": true,
"showAugmentationTrainingSettings": true,
"thresholds": [
{
"key": "min_score",
"description": "Score threshold",
"helpText": "Threshold score for bounding boxes. If the score for a bounding box is below this the box will be discarded.",
"value": 0.5,
"suggestedValue": 123,
"suggestedValueText": "<string>"
}
],
"type": "anomaly",
"batchSize": 123,
"trainTestSplit": 123,
"autoClassWeights": true,
"useLearnedOptimizer": true,
"augmentationPolicySpectrogram": {
"enabled": true,
"warping": true,
"freqMasking": "none",
"timeMasking": "none",
"gaussianNoise": "none"
},
"akidaEdgeLearningConfig": {
"enabled": true,
"additionalClasses": 123,
"neuronsPerClass": 123
},
"customValidationMetadataKey": "<string>",
"customParameters": {},
"anomalyCapacity": "low",
"lastShownModelVariant": "int8",
"blockParameters": {
"backbone": "<string>",
"numLayers": 123,
"poolSize": 123,
"samplingRatio": 123,
"numNearestNeighbors": 123
}
},
"metadata": {
"created": "2023-11-07T05:31:56Z",
"layers": [
{
"input": {
"shape": 33,
"name": "x_input:0",
"type": "<dtype: 'float32'>"
},
"output": {
"shape": 20,
"name": "dense_1/Relu:0",
"type": "<dtype: 'float32'>"
}
}
],
"classNames": [
"<string>"
],
"labels": [
"<string>"
],
"availableModelTypes": [
"int8"
],
"recommendedModelType": "int8",
"modelValidationMetrics": [
{
"type": "int8",
"loss": 123,
"confusionMatrix": [
[
31,
1,
0
],
[
2,
27,
3
],
[
1,
0,
39
]
],
"report": {},
"onDevicePerformance": [
{
"mcu": "<string>",
"name": "<string>",
"isDefault": true,
"latency": 123,
"tflite": {
"ramRequired": 123,
"romRequired": 123,
"arenaSize": 123,
"modelSize": 123
},
"eon": {
"ramRequired": 123,
"romRequired": 123,
"arenaSize": 123,
"modelSize": 123
},
"hasPerformance": true,
"eon_ram_optimized": {
"ramRequired": 123,
"romRequired": 123,
"arenaSize": 123,
"modelSize": 123
},
"customMetrics": [
{
"name": "<string>",
"value": "<string>"
}
],
"profilingError": "<string>"
}
],
"visualization": "featureExplorer",
"isSupportedOnMcu": true,
"additionalMetrics": [
{
"name": "<string>",
"value": "<string>",
"fullPrecisionValue": 123,
"tooltipText": "<string>",
"link": "<string>"
}
],
"accuracy": 123,
"predictions": [
{
"sampleId": 123,
"startMs": 123,
"endMs": 123,
"prediction": "<string>",
"label": "<string>",
"predictionCorrect": true,
"f1Score": 123,
"anomalyScores": [
[
123
]
],
"boundingBoxes": [
{
"label": "<string>",
"x": 123,
"y": 123,
"width": 123,
"height": 123,
"score": 123
}
]
}
],
"mcuSupportError": "<string>",
"profilingJobId": 123,
"profilingJobFailed": true
}
],
"hasTrainedModel": true,
"mode": "classification",
"imageInputScaling": "0..1",
"thresholds": [
{
"key": "min_score",
"description": "Score threshold",
"helpText": "Threshold score for bounding boxes. If the score for a bounding box is below this the box will be discarded.",
"value": 0.5,
"suggestedValue": 123,
"suggestedValueText": "<string>"
}
],
"objectDetectionLastLayer": "mobilenet-ssd",
"tensorboardGraphs": [
{
"title": "<string>",
"data": [
{
"title": "<string>",
"values": [
123
]
}
],
"xLabel": "<string>",
"yLabel": "<string>",
"description": "<string>",
"hideInUI": true
}
]
}
}
],
"learnBlockAnomalyConfigs": [
{
"blockId": 123,
"config": {
"dependencies": {
"classes": [
"<string>"
],
"blockNames": [
"<string>"
],
"featureCount": 123,
"sampleCount": 123
},
"name": "<string>",
"axes": [
{
"ix": 123,
"label": "<string>",
"selected": true,
"favourite": true
}
],
"trained": true,
"selectedAxes": [
123
],
"minimumConfidenceRating": 123,
"thresholds": [
{
"key": "min_score",
"description": "Score threshold",
"helpText": "Threshold score for bounding boxes. If the score for a bounding box is below this the box will be discarded.",
"value": 0.5,
"suggestedValue": 123,
"suggestedValueText": "<string>"
}
],
"clusterCount": 123
},
"metadata": {
"created": "2023-11-07T05:31:56Z",
"scale": [
123
],
"mean": [
123
],
"clusters": [
{
"center": [
123
],
"maxError": 123
}
],
"axes": "[ 0, 11, 22 ]",
"defaultMinimumConfidenceRating": 123,
"thresholds": [
{
"key": "min_score",
"description": "Score threshold",
"helpText": "Threshold score for bounding boxes. If the score for a bounding box is below this the box will be discarded.",
"value": 0.5,
"suggestedValue": 123,
"suggestedValueText": "<string>"
}
],
"availableModelTypes": [
"int8"
],
"recommendedModelType": "int8",
"modelValidationMetrics": [
{
"type": "int8",
"loss": 123,
"confusionMatrix": [
[
31,
1,
0
],
[
2,
27,
3
],
[
1,
0,
39
]
],
"report": {},
"onDevicePerformance": [
{
"mcu": "<string>",
"name": "<string>",
"isDefault": true,
"latency": 123,
"tflite": {
"ramRequired": 123,
"romRequired": 123,
"arenaSize": 123,
"modelSize": 123
},
"eon": {
"ramRequired": 123,
"romRequired": 123,
"arenaSize": 123,
"modelSize": 123
},
"hasPerformance": true,
"eon_ram_optimized": {
"ramRequired": 123,
"romRequired": 123,
"arenaSize": 123,
"modelSize": 123
},
"customMetrics": [
{
"name": "<string>",
"value": "<string>"
}
],
"profilingError": "<string>"
}
],
"visualization": "featureExplorer",
"isSupportedOnMcu": true,
"additionalMetrics": [
{
"name": "<string>",
"value": "<string>",
"fullPrecisionValue": 123,
"tooltipText": "<string>",
"link": "<string>"
}
],
"accuracy": 123,
"predictions": [
{
"sampleId": 123,
"startMs": 123,
"endMs": 123,
"prediction": "<string>",
"label": "<string>",
"predictionCorrect": true,
"f1Score": 123,
"anomalyScores": [
[
123
]
],
"boundingBoxes": [
{
"label": "<string>",
"x": 123,
"y": 123,
"width": 123,
"height": 123,
"score": 123
}
]
}
],
"mcuSupportError": "<string>",
"profilingJobId": 123,
"profilingJobFailed": true
}
],
"hasTrainedModel": true
},
"gmmMetadata": {
"means": [
[
123
]
],
"covariances": [
[
[
123
]
]
],
"weights": [
123
]
}
}
],
"postProcessingBlockConfigs": [
{
"blockId": 123,
"config": {
"enabled": true,
"parameters": [
{
"name": "Scale axes",
"defaultValue": "<string>",
"type": "text",
"param": "scale-axes",
"readonly": true,
"shouldShow": true,
"required": true,
"showClickToSet": true,
"value": "<string>",
"help": "Divide axes by this number",
"selectOptions": [
{
"value": "<string>",
"selected": true,
"optionLabel": "<string>",
"priority": 123,
"romEstimate": 123,
"needsOps": [
"<string>"
],
"needsFeatures": [
"<string>"
]
}
],
"showIf": {
"parameter": "<string>",
"operator": "eq",
"value": "<string>"
},
"invalidText": "<string>",
"section": "advanced",
"multiline": true,
"hint": "<string>",
"placeholder": "<string>",
"valid": [
{}
],
"items": {},
"properties": {}
}
]
}
}
],
"isStale": true,
"configured": true,
"complete": true,
"tags": [
"<string>"
],
"pretrainedModelInfo": {
"fileName": "<string>"
},
"createdFromTunerTrialId": 123,
"createdByUser": {
"id": 123,
"name": "<string>",
"username": "<string>",
"photo": "<string>"
}
}
],
"metricKeysByCategory": [
{
"category": "impulseMetrics",
"metricKeys": [
{
"name": "<string>",
"description": "<string>",
"type": "core",
"showInTable": true,
"filteringType": {
"type": "numeric",
"options": [
"<string>"
]
}
}
]
}
],
"extraTableColumns": [
"<string>"
],
"error": "<string>"
}
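For scripted access, the same request can be made from Python. The following is a minimal sketch (the project ID and API key are placeholders, and the requests library is assumed); it lists each impulse with its reported metrics from the response shown above.

import requests

PROJECT_ID = 1          # placeholder: your project ID
API_KEY = "ei_..."      # placeholder: a project API key

url = f"https://studio.edgeimpulse.com/v1/api/{PROJECT_ID}/impulses-detailed"
response = requests.get(url, headers={"x-api-key": API_KEY})
response.raise_for_status()
body = response.json()

if not body["success"]:
    raise RuntimeError(body.get("error"))

# Each entry pairs the impulse definition with its metrics and block configurations.
for entry in body["impulses"]:
    impulse = entry["impulse"]
    print(f"Impulse {impulse['id']}: {impulse['name']}")
    for metric in entry.get("metrics", []):
        print(f"  {metric['name']}: {metric['value']}")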
Path parameters
projectId: Project ID

Response 200: OK
Whether the operation succeeded
ID for this impulse.
Name for this impulse.
Input Blocks that are part of this impulse
Identifier for this block. Make sure to increment this number when creating a new block via getNewBlockId, and don't re-use identifiers. If the block hasn't changed, keep the ID as-is. ID must be unique across the project and greater than zero (>0).
x >= 1
Block type (either time-series, image or features)
Options: time-series, image, features
"time-series"
Block name, will be used in menus
"Time series"
Block title, used in the impulse UI
"Time series"
Size of the sliding window in milliseconds
2004
We use a sliding window to go over the raw data. How many milliseconds to increase the sliding window with for each step.
(Input only) Frequency of the input data in Hz
60
We use a sliding window to go over the raw data. How many milliseconds to increase the sliding window with for each step in classification mode.
Whether to zero pad data when a data item is too short
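To make the interaction between these windowing fields concrete, here is a small illustrative helper (not part of the API; a sketch of standard sliding-window arithmetic using windowSizeMs, windowIncreaseMs and padZeros):

# Illustrative only: estimate how many windows a sample of a given length produces.
def estimate_window_count(sample_length_ms, window_size_ms, window_increase_ms, pad_zeros=False):
    if sample_length_ms < window_size_ms:
        # Too short: only usable if zero padding is enabled.
        return 1 if pad_zeros else 0
    return 1 + (sample_length_ms - window_size_ms) // window_increase_ms

print(estimate_window_count(10_000, 2004, 500))  # a 10 s sample -> 16 windows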
How to pick the label for multi-label samples
Options: end-of-window, anywhere-in-window
Required when choosing "anywhere-in-window". The list of classes that should trigger detection (e.g. "interference").
Width all images are resized to before training
28
Height all images are resized to before training
28
Input images are resized automatically before training and testing, to match the impulse input shape. This determines the resize mode used when the aspect ratio of the input data is different to the aspect ratio of the impulse.
Options: squash, fit-short, fit-long, crop
"squash"
Resize method to use when resizing images
Options: lanczos3, nearest
"squash"
If images are resized using a crop, choose where to anchor the crop
Options: top-left, top-center, top-right, middle-left, middle-center, middle-right, bottom-left, bottom-center, bottom-right
"middle-center"
The system component that created the block version (createImpulse | clone | tuner). Cannot be set via API.
"createImpulse"
The datetime that the block version was created. Cannot be set via API.
Only generate features for samples where (sample_id + datasetSubsetSeed) % datasetSubset == 0
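Read literally, that selection rule keeps a sample when its ID plus the seed is divisible by the subset value. A small illustrative sketch (variable names are mine, not API fields):

# Illustrative only: keep a sample if (sample_id + seed) % dataset_subset == 0.
def sample_included(sample_id, dataset_subset, seed):
    return (sample_id + seed) % dataset_subset == 0

included = [s for s in range(20) if sample_included(s, dataset_subset=4, seed=123)]
print(included)  # [1, 5, 9, 13, 17] -- roughly every fourth sample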
DSP Blocks that are part of this impulse
Identifier for this block. Make sure to increment this number when creating a new block via getNewBlockId, and don't re-use identifiers. If the block hasn't changed, keep the ID as-is. ID must be unique across the project and greater than zero (>0).
x >= 1
Block type
"spectral-analysis"
Block name, will be used in menus
"Spectral features"
Input axes, identified by the name of the axis
Block title, used in the impulse UI
"Spectral Analysis"
Implementation version of the block
Number of features this DSP block outputs per axis. This is only set when the DSP block is configured.
11
The ID of the Input block a DSP block is connected to
1
The system component that created the block version (createImpulse | clone | tuner). Cannot be set via API.
"createImpulse"
The datetime that the block version was created. Cannot be set via API.
Required for type 'custom'
Named axes for the block
Name of the axis
Description of the axis
Whether the axis is required
The selected axis for the block
Learning Blocks that are part of this impulse
Identifier for this block. Make sure to increment this number when creating a new block via getNewBlockId, and don't re-use identifiers. If the block hasn't changed, keep the ID as-is. ID must be unique across the project and greater than zero (>0).
x >= 1
The type of learning block (anomaly, keras, keras-transfer-image, keras-transfer-kws, keras-object-detection, keras-regression). Each behaves differently.
Options: anomaly, anomaly-gmm, keras, keras-transfer-image, keras-transfer-kws, keras-object-detection, keras-regression, keras-akida, keras-akida-transfer-image, keras-akida-object-detection, keras-visual-anomaly
Block name, will be used in menus. If a block has a baseBlockId, this field is ignored and the base block's name is used instead.
"NN Classifier"
DSP dependencies, identified by DSP block ID
Block title, used in the impulse UI
"Classification (Keras)"
The system component that created the block version (createImpulse | clone | tuner). Cannot be set via API.
"createImpulse"
The datetime that the block version was created. Cannot be set via API.
Post-processing blocks that are part of this impulse
Identifier for this block. Make sure to increment this number when creating a new block via getNewBlockId, and don't re-use identifiers. If the block hasn't changed, keep the ID as-is. ID must be unique across the project and greater than zero (>0).
x >= 1
Block type
"object-tracking"
Block name, will be used in menus
"Object tracking"
Block title, used in the impulse UI
"Object tracking"
Implementation version of the block
The system component that created the block version (createImpulse | clone | tuner). Cannot be set via API.
"createImpulse"
The datetime that the block version was created. Cannot be set via API.
Options: core, additional
Options: impulseMetrics, inputBlockConfig, dspBlockConfig, learnBlockConfig, learnBlockMetrics, postProcessingBlockConfig
Additional help explaining the value for this metric
This returns a DSPConfig object, but "dsp.classes" and "dsp.features.classes" will be set to an empty array (use getDspConfig to retrieve these).
1
"Spectral features"
3000
"spectral-analysis"
Whether this block has generated features
Number of generated features
Names of the features
Classes that the features were generated on
Expected number of windows that would be generated
Axes that this block depends on.
Whether this DSP block supports data normalization after features were generated. This is true unless "dontAllowDataNormalization" is set to true in the DSP block's parameters.json.
Data normalization that was last selected for this block.
Options: none, normalize-channel-standard-scaler
If the project latencyDevice has custom DSP hardware, this value contains a device specific latency metric (eg. cycles)
Whether this type of DSP block supports autotuning.
For DSP blocks that support autotuning, this value specifies the minimum block implementation version for which autotuning is supported.
Whether autotune results exist for this DSP block.
Whether this DSP block uses state.
"Scaling"
"Scale axes"
"text"
"scale-axes"
If enabled, render a disabled input element with 'Click to set'
"Divide axes by this number"
What is the string that will be set if this option is selected?
What is the label that will be shown to the user for this option?
The following options are optional. See Learn Block Auto Config in Notion. Higher priority will get chosen based on limits below.
Estimated ROM footprint for this choice. Will be tested against ROM budget in Studio.
ML operator needed by this choice.
Feature needed by this choice. (non op related)
Interface section to render parameter in.
Options: advanced, augmentation, modelProfiling
Only valid for type "string". Will render a multiline text area.
If set, shows a hint below the input.
Sets the placeholder text on the input element (for types "string", "int", "float" and "secret")
Valid values for parameter.
Recursive definition for items of a parameter with type 'array'.
Recursive definition for a parameter with type 'object'.
This returns a DSPMetadata object, but "labels" will be set to an empty array (use getDspMetadata to retrieve these).
Date when the features were created
Whether features were generated
Labels in the dataset when generator ran
Number of features for this DSP block
The included samples in this DSP block. Note that these are sorted in the same way as the npy files are laid out. So with the windowCount parameter you can exactly search back to see which file contributed to which windows there.
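A sketch of that lookup (illustrative helper, operating on includedSamples as returned in the metadata above): accumulate windowCount in order to find which sample produced a given window index.

# Illustrative only: map a window index back to the sample that produced it.
def sample_for_window(included_samples, window_index):
    offset = 0
    for sample in included_samples:
        if window_index < offset + sample["windowCount"]:
            return sample["id"], window_index - offset  # (sample id, window within sample)
        offset += sample["windowCount"]
    raise IndexError("window index out of range")

included = [{"id": 10, "windowCount": 3}, {"id": 11, "windowCount": 5}]
print(sample_for_window(included, 4))  # (11, 1): second window of sample 11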
Length of the sliding window when generating features.
Increase of the sliding window when generating features.
Whether data was zero-padded when generating features.
Frequency of the original data in Hz.
Information about the output of the DSP block
Output type of the DSP block
Options: image, spectrogram, flat
The shape of the block output
Available on all types. Denotes the width of an 'image' or 'spectrogram', or the number of features in a 'flat' block.
Only available for type 'image' and 'spectrogram'
Only available for type 'image'
Number of frames, only available for type 'image'
Names of the generated features. Only set if axes have explicit labels.
The version number of the resampling algorithm used (for resampled time series data only)
When specified, a job is running (asynchronously) to generate the feature explorer.
If this is set, then the feature explorer job failed (get the status by getting the job logs for 'featureExplorerJobId').
When specified, a job is running (asynchronously) to generate feature importance.
If this is set, then the feature importance job failed (get the status by getting the job logs for 'featureImportanceJobId').
This returns a KerasConfig object, but "transferLearningModels" and "dependencies.classes" will be set to an empty array (use getKeras to retrieve these).
Whether the block is trained
The Keras script. This script might be empty if the mode is visual.
DEPRECATED, see "thresholds" instead. Minimum confidence rating required for the neural network. Scores below this confidence are tagged as uncertain.
The model type that is currently selected.
Options: int8, float32, akida, requiresRetrain
The mode (visual or expert) to use for editing this network.
Options: visual, expert
The visual layers (if in visual mode) for the neural network. This will be an empty array when in expert mode.
Options: dense, conv1d, conv2d, reshape, flatten, dropout, batchNormalization, transfer_mobilenetv2_a35, transfer_mobilenetv2_a1, transfer_mobilenetv2_a05, transfer_mobilenetv2_160_a1, transfer_mobilenetv2_160_a75, transfer_mobilenetv2_160_a5, transfer_mobilenetv2_160_a35, transfer_mobilenetv1_a25_d100, transfer_mobilenetv1_a2_d100, transfer_mobilenetv1_a1_d100, transfer_kws_mobilenetv1_a1_d100, transfer_kws_mobilenetv2_a35_d100, transfer_kws_syntiant_ndp10x, transfer_kws_conv2d_tiny, object_ssd_mobilenet_v2_fpnlite_320x320, fomo_mobilenet_v2_a01, fomo_mobilenet_v2_a35, transfer_organization, transfer_akidanet_imagenet_160_a100, transfer_akidanet_imagenet_160_a50, transfer_akidanet_imagenet_160_a25, transfer_akidanet_imagenet_224_a100, transfer_akidanet_imagenet_224_a50, transfer_akidanet_imagenet_224_a25, fomo_akidanet_a50, fomo_ad_gmm, fomo_ad_patchcore
Number of neurons or filters in this layer (only for dense, conv1d, conv2d) or in the final conv2d layer (only for transfer layers)
Kernel size for the convolutional layers (only for conv1d, conv2d)
Fraction of input units to drop (only for dropout) or in the final layer dropout (only for transfer layers)
Number of columns for the reshape operation (only for reshape)
Number of convolutional layers before the pooling layer (only for conv1d, conv2d)
Custom transfer learning model ID (when type is set to transfer_organization)
Number of training cycles. If in expert mode this will be 0.
Learning rate (between 0 and 1). If in expert mode this will be 0.
The default batch size if a value is not configured.
Python-formatted tuple of input axes
The data augmentation policy to use with image input
Options: none, all
Options: dense, conv1d, conv2d, reshape, flatten, dropout, batchNormalization, transfer_mobilenetv2_a35, transfer_mobilenetv2_a1, transfer_mobilenetv2_a05, transfer_mobilenetv2_160_a1, transfer_mobilenetv2_160_a75, transfer_mobilenetv2_160_a5, transfer_mobilenetv2_160_a35, transfer_mobilenetv1_a25_d100, transfer_mobilenetv1_a2_d100, transfer_mobilenetv1_a1_d100, transfer_kws_mobilenetv1_a1_d100, transfer_kws_mobilenetv2_a35_d100, transfer_kws_syntiant_ndp10x, transfer_kws_conv2d_tiny, object_ssd_mobilenet_v2_fpnlite_320x320, fomo_mobilenet_v2_a01, fomo_mobilenet_v2_a35, transfer_organization, transfer_akidanet_imagenet_160_a100, transfer_akidanet_imagenet_160_a50, transfer_akidanet_imagenet_160_a25, transfer_akidanet_imagenet_224_a100, transfer_akidanet_imagenet_224_a50, transfer_akidanet_imagenet_224_a25, fomo_akidanet_a50, fomo_ad_gmm, fomo_ad_patchcore
The type of learning block (anomaly, keras, keras-transfer-image, keras-transfer-kws, keras-object-detection, keras-regression). Each behaves differently.
Options: anomaly, anomaly-gmm, keras, keras-transfer-image, keras-transfer-kws, keras-object-detection, keras-regression, keras-akida, keras-akida-transfer-image, keras-akida-object-detection, keras-visual-anomaly
Options: official, personal, enterprise, pro-or-enterprise, community
URL to the source code of this custom learn block.
"Scale axes"
"text"
"scale-axes"
If enabled, render a disabled input element with 'Click to set'
"Divide axes by this number"
What is the string that will be set if this option is selected?
What is the label that will be shown to the user for this option?
The following options are optional. See Learn Block Auto Config in Notion. Higher priority will get chosen based on limits below.
Estimated ROM footprint for this choice. Will be tested against ROM budget in Studio.
ML operator needed by this choice.
Feature needed by this choice. (non op related)
Options: eq, neq
Interface section to render parameter in.
Options: advanced, augmentation, modelProfiling
Only valid for type "string". Will render a multiline text area.
If set, shows a hint below the input.
Sets the placeholder text on the input element (for types "string", "int", "float" and "secret")
Valid values for parameter.
Recursive definition for items of a parameter with type 'array'.
Recursive definition for a parameter with type 'object'.
Category to display this block in the UI.
Options: classical, tao, developer-preview
If this object is set, then you can no longer train this block. The reason (or a migration path) is in the reasonMarkdown and reasonHtml properties.
Reason or migration path for current users of this block, in Markdown format.
Reason or migration path for current users of this block, in HTML format.
Whether to profile the int8 model (might take a very long time)
If set, skips creating embeddings and measuring memory (used in tests)
Whether the 'Advanced training settings' UI element should be expanded.
Whether the 'Augmentation training settings' UI element should be expanded.
List of configurable thresholds for this block.
Identifier to reference the threshold. You'll need to refer to the threshold by this key when you set the threshold.
"min_score"
User-friendly description of the threshold.
"Score threshold"
Additional help text (shown in the UI under a "?" icon)
"Threshold score for bounding boxes. If the score for a bounding box is below this the box will be discarded."
Current value of the threshold
0.5
If the threshold has a suggested value, e.g. a max. absolute error for regression projects; or the min. anomaly score for visual anomaly detection, then this is the numeric value of that threshold.
If the threshold has a suggested value, e.g. a max. absolute error for regression projects; or the min. anomaly score for visual anomaly detection, then this is the stringified value of that threshold.
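Because the same thresholds shape appears on several block configs in this response, a small helper can pull a threshold out by its key (illustrative sketch over the parsed response, e.g. learnBlockKerasConfigs[].config.thresholds):

# Illustrative only: look up a threshold entry by its key in a block config.
def find_threshold(block_config, key):
    for threshold in block_config.get("thresholds", []):
        if threshold["key"] == key:
            return threshold
    return None

config = {"thresholds": [{"key": "min_score", "value": 0.5}]}
print(find_threshold(config, "min_score"))  # {'key': 'min_score', 'value': 0.5}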
The type of learning block (anomaly, keras, keras-transfer-image, keras-transfer-kws, keras-object-detection, keras-regression). Each behaves differently.
Options: anomaly, anomaly-gmm, keras, keras-transfer-image, keras-transfer-kws, keras-object-detection, keras-regression, keras-akida, keras-akida-transfer-image, keras-akida-object-detection, keras-visual-anomaly
The batch size used during training.
Train/test split (between 0 and 1)
Whether to automatically balance class weights; use this for skewed datasets.
Use learned optimizer and ignore learning rate.
True if spectrogram augmentation is enabled. Other properties will be ignored if this is false.
True if warping along the time axis is enabled.
The amount of frequency masking to apply.
Options: none, low, high
The amount of time masking to apply.
Options: none, low, high
The amount of Gaussian noise to add.
Options: none, low, high
True if Akida Edge Learning model creation is enabled. Other properties will be ignored if this is false.
Number of additional classes that will be added to the Edge Learning model.
Number of neurons in each class on the last layer in the Edge Learning model.
This metadata key is used to prevent group data leakage between train and validation datasets.
Capacity level for visual anomaly detection (GMM). Determines which set of default configurations to use. The higher the capacity, the higher the number of (Gaussian) components, and the more adapted the model becomes to the original distribution.
Options: low, medium, high
Last shown variant on the Keras screen. Used to keep the same view after refreshing.
Options: int8, float32, akida
Training parameters specific to the type of the learn block. Parameters may be adjusted depending on the model defined in the visual layers. Used for our built-in blocks.
The backbone to use for feature extraction
The number of layers in the feature extractor (1-3)
The pool size for the feature extractor
The sampling ratio for the coreset, used for anomaly scoring
The number of nearest neighbors to consider, used for anomaly scoring
This returns a KerasModelMetadata object, but 1) non-default "onDevicePerformance", 2) "predictions", 3) "labels"; are omitted (use getKerasMetadata to retrieve these).
Date when the model was trained
Layers of the neural network
Input size
33
TensorFlow name
"x_input:0"
TensorFlow type
"<dtype: 'float32'>"
Output size
20
TensorFlow name
"dense_1/Relu:0"
TensorFlow type
"<dtype: 'float32'>"
Labels for the output layer
Original labels in the dataset when features were generated, e.g. used to render the feature explorer.
The types of model that are available
Options: int8, float32, akida, requiresRetrain
The model type that is recommended for use
Options: int8, float32, akida, requiresRetrain
Metrics for each of the available model types
The type of model
Options: int8, float32, akida, requiresRetrain
The model's loss on the validation set after training
[[31, 1, 0], [2, 27, 3], [1, 0, 39]]
Precision, recall, F1 and support scores
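For orientation, the per-class scores in report can be recomputed from confusionMatrix. The sketch below assumes the usual layout of rows as true labels and columns as predicted labels (this page does not state the orientation explicitly):

# Sketch: accuracy and per-class precision/recall from a confusion matrix.
matrix = [[31, 1, 0], [2, 27, 3], [1, 0, 39]]

total = sum(sum(row) for row in matrix)
accuracy = sum(matrix[i][i] for i in range(len(matrix))) / total

for i in range(len(matrix)):
    tp = matrix[i][i]
    recall = tp / sum(matrix[i])                    # row i = samples whose true label is i
    precision = tp / sum(row[i] for row in matrix)  # column i = samples predicted as i
    print(f"class {i}: precision={precision:.2f} recall={recall:.2f}")

print(f"accuracy={accuracy:.2f}")  # 0.93 for the example matrix above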
If false, then no metrics are available for this target
Custom, device-specific performance metrics
The name of the metric
The value of this metric for this model type
Specific error during profiling (e.g. model not supported)
Options: featureExplorer, dataExplorer, none
The model's accuracy on the validation set after training
Only set for object detection projects
Only set for visual anomaly projects. 2D array of shape (n, n) with raw anomaly scores, where n varies based on the image input size and the specific visual anomaly algorithm used. The scores correspond to each grid cell in the image's spatial matrix.
Only set for object detection projects. Coordinates are scaled 0..1, not absolute values.
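Since box coordinates are scaled 0..1, converting to pixels is just a multiplication by the image dimensions (illustrative sketch; the image size is whatever your input resolution is):

# Illustrative only: convert a scaled bounding box (0..1) to pixel coordinates.
def to_pixels(box, image_width, image_height):
    return {
        "label": box["label"],
        "x": round(box["x"] * image_width),
        "y": round(box["y"] * image_height),
        "width": round(box["width"] * image_width),
        "height": round(box["height"] * image_height),
        "score": box["score"],
    }

box = {"label": "person", "x": 0.25, "y": 0.1, "width": 0.5, "height": 0.4, "score": 0.9}
print(to_pixels(box, 320, 320))  # x=80, y=32, width=160, height=128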
If this is set, then we're still profiling this model. Subscribe to job updates to see when it's done (afterward the metadata will be updated).
If this is set, then the profiling job failed (get the status by getting the job logs for 'profilingJobId').
Options: classification, regression, object-detection, visual-anomaly, anomaly-gmm
Normalization that is applied to images. If this is not set then 0..1 is used. "0..1" gives you non-normalized pixels between 0 and 1. "-1..1" gives you non-normalized pixels between -1 and 1. "0..255" gives you non-normalized pixels between 0 and 255. "-128..127" gives you non-normalized pixels between -128 and 127. "torch" first scales pixels between 0 and 1, then applies normalization using the ImageNet dataset (same as torchvision.transforms.Normalize()). "bgr-subtract-imagenet-mean" scales to 0..255, reorders pixels to BGR, and subtracts the ImageNet mean from each channel.
Options: 0..1, -1..1, -128..127, 0..255, torch, bgr-subtract-imagenet-mean
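A rough sketch of a few of those scaling modes (my own illustration, assuming a uint8 RGB image in a NumPy array; the ImageNet mean/std below are the usual torchvision constants, not values from this page):

import numpy as np

def scale_image(img_uint8, mode="0..1"):
    x = img_uint8.astype(np.float32)
    if mode == "0..1":
        return x / 255.0
    if mode == "-1..1":
        return x / 127.5 - 1.0
    if mode == "-128..127":
        return x - 128.0
    if mode == "torch":
        # scale to 0..1, then normalize per channel with ImageNet statistics
        x = x / 255.0
        mean = np.array([0.485, 0.456, 0.406], dtype=np.float32)
        std = np.array([0.229, 0.224, 0.225], dtype=np.float32)
        return (x - mean) / std
    return x  # "0..255": leave pixel values unchanged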
List of configurable thresholds for this block.
Identifier to reference the threshold. You'll need to refer to the threshold by this key when you set the threshold.
"min_score"
User-friendly description of the threshold.
"Score threshold"
Additional help text (shown in the UI under a "?" icon)
"Threshold score for bounding boxes. If the score for a bounding box is below this the box will be discarded."
Current value of the threshold
0.5
If the threshold has a suggested value, e.g. a max. absolute error for regression projects; or the min. anomaly score for visual anomaly detection, then this is the numeric value of that threshold.
If the threshold has a suggested value, e.g. a max. absolute error for regression projects; or the min. anomaly score for visual anomaly detection, then this is the stringified value of that threshold.
Options: mobilenet-ssd, fomo, yolov2-akida, yolov5, yolov5v5-drpai, yolox, yolov7, yolo-pro, tao-retinanet, tao-ssd, tao-yolov3, tao-yolov4, yolov11, yolov11-abs
This is experimental and may change in the future.
Graph title
X-axis title
Y-axis title
A description for the graph
Whether this graph should be hidden by default in the Studio UI
Selectable axes for the anomaly detection block
Whether the block is trained
Selected clusters (in config)
DEPRECATED, see "thresholds" instead. Minimum confidence rating for this block, scores above this number will be flagged as anomaly.
List of configurable thresholds for this block.
Identifier to reference the threshold. You'll need to refer to the threshold by this key when you set the threshold.
"min_score"
User-friendly description of the threshold.
"Score threshold"
Additional help text (shown in the UI under a "?" icon)
"Threshold score for bounding boxes. If the score for a bounding box is below this the box will be discarded."
Current value of the threshold
0.5
If the threshold has a suggested value, e.g. a max. absolute error for regression projects; or the min. anomaly score for visual anomaly detection, then this is the numeric value of that threshold.
If the threshold has a suggested value, e.g. a max. absolute error for regression projects; or the min. anomaly score for visual anomaly detection, then this is the stringified value of that threshold.
Number of clusters for K-means, or number of components for GMM (in config)
This returns an AnomalyModelMetadata object, but 1) non-default "onDevicePerformance", 2) "predictions" are omitted (use getAnomalyMetadata to retrieve these).
Date when the model was trained
Scale input for StandardScaler. Values are scaled like this (where ix is axis index): input[ix] = (input[ix] - mean[ix]) / scale[ix];
Mean input for StandardScaler. Values are scaled like this (where ix is axis index): input[ix] = (input[ix] - mean[ix]) / scale[ix];
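A short sketch of that transform (illustrative; scale and mean come from the metadata above, indexed in the same axis order):

# Illustrative only: apply the StandardScaler step input[ix] = (input[ix] - mean[ix]) / scale[ix].
def standardize(window, mean, scale):
    return [(value - m) / s for value, m, s in zip(window, mean, scale)]

print(standardize([3.0, -1.0], mean=[1.0, 0.0], scale=[2.0, 0.5]))  # [1.0, -2.0]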
Which axes were included during training (by index)
"[ 0, 11, 22 ]"
DEPRECATED, see "thresholds" instead. Default minimum confidence rating required before tagging as anomaly, based on scores of training data (GMM only).
List of configurable thresholds for this block.
Identifier to reference the threshold. You'll need to refer to the threshold by this key when you set the threshold.
"min_score"
User-friendly description of the threshold.
"Score threshold"
Additional help text (shown in the UI under a "?" icon)
"Threshold score for bounding boxes. If the score for a bounding box is below this the box will be discarded."
Current value of the threshold
0.5
If the threshold has a suggested value, e.g. a max. absolute error for regression projects; or the min. anomaly score for visual anomaly detection, then this is the numeric value of that threshold.
If the threshold has a suggested value, e.g. a max. absolute error for regression projects; or the min. anomaly score for visual anomaly detection, then this is the stringified value of that threshold.
The types of model that are available
Options: int8, float32, akida, requiresRetrain
The model type that is recommended for use
Options: int8, float32, akida, requiresRetrain
Metrics for each of the available model types
The type of model
Options: int8, float32, akida, requiresRetrain
The model's loss on the validation set after training
[[31, 1, 0], [2, 27, 3], [1, 0, 39]]
Precision, recall, F1 and support scores
If false, then no metrics are available for this target
Custom, device-specific performance metrics
The name of the metric
The value of this metric for this model type
Specific error during profiling (e.g. model not supported)
Options: featureExplorer, dataExplorer, none
The model's accuracy on the validation set after training
Only set for object detection projects
Only set for visual anomaly projects. 2D array of shape (n, n) with raw anomaly scores, where n varies based on the image input size and the specific visual anomaly algorithm used. The scores correspond to each grid cell in the image's spatial matrix.
Only set for object detection projects. Coordinates are scaled 0..1, not absolute values.
If this is set, then we're still profiling this model. Subscribe to job updates to see when it's done (afterward the metadata will be updated).
If this is set, then the profiling job failed (get the status by getting the job logs for 'profilingJobId').
"Scale axes"
"text"
"scale-axes"
If enabled, render a disabled input element with 'Click to set'
"Divide axes by this number"
What is the string that will be set if this option is selected?
What is the label that will be shown to the user for this option?
The following options are optional. See Learn Block Auto Config in Notion. Higher priority will get chosen based on limits below.
Estimated ROM footprint for this choice. Will be tested against ROM budget in Studio.
ML operator needed by this choice.
Feature needed by this choice. (non op related)
Interface section to render parameter in.
Options: advanced, augmentation, modelProfiling
Only valid for type "string". Will render a multiline text area.
If set, shows a hint below the input.
Sets the placeholder text on the input element (for types "string", "int", "float" and "secret")
Valid values for parameter.
Recursive definition for items of a parameter with type 'array'.
Recursive definition for a parameter with type 'object'.
Whether this impulse contains blocks with "stale" features (i.e. the dataset has changed since features were generated)
Whether this impulse is configured
Whether this impulse is fully trained and configured
Tags associated with this impulse
The source EON Tuner trial ID for impulses created from the EON Tuner
Options: impulseMetrics, inputBlockConfig, dspBlockConfig, learnBlockConfig, learnBlockMetrics, postProcessingBlockConfig
Options: core, additional
Which extra impulse information should be shown in the impulses table.
Optional error description (set if 'success' was false)