GET /api/{projectId}/pretrained-model

Get pretrained model
curl --request GET \
  --url https://studio.edgeimpulse.com/v1/api/{projectId}/pretrained-model \
  --header 'x-api-key: <api-key>'
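
The same request can be issued from Python. The sketch below is illustrative only: it assumes the `requests` package, a placeholder API key, and a hypothetical project ID.

import requests

API_KEY = "ei_xxxxxxxx"   # placeholder: your Edge Impulse API key
PROJECT_ID = 1            # hypothetical project ID

# GET /api/{projectId}/pretrained-model
url = f"https://studio.edgeimpulse.com/v1/api/{PROJECT_ID}/pretrained-model"
response = requests.get(url, headers={"x-api-key": API_KEY})
response.raise_for_status()

body = response.json()
if not body["success"]:
    raise RuntimeError(body.get("error"))
print(body["availableModelTypes"])

A successful response has the shape shown below.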
{
  "success": true,
  "error": "<string>",
  "specificDeviceSelected": true,
  "availableModelTypes": [
    "int8"
  ],
  "model": {
    "fileName": "<string>",
    "profileInfo": {
      "float32": {
        "variant": "int8",
        "device": "<string>",
        "tfliteFileSizeBytes": 123,
        "isSupportedOnMcu": true,
        "memory": {
          "tflite": {
            "ram": 123,
            "rom": 123,
            "arenaSize": 123
          },
          "eon": {
            "ram": 123,
            "rom": 123,
            "arenaSize": 123
          },
          "eonRamOptimized": {
            "ram": 123,
            "rom": 123,
            "arenaSize": 123
          }
        },
        "timePerInferenceMs": 123,
        "mcuSupportError": "<string>",
        "customMetrics": [
          {
            "name": "<string>",
            "value": "<string>"
          }
        ],
        "hasPerformance": true,
        "profilingError": "<string>"
      },
      "int8": {
        "variant": "int8",
        "device": "<string>",
        "tfliteFileSizeBytes": 123,
        "isSupportedOnMcu": true,
        "memory": {
          "tflite": {
            "ram": 123,
            "rom": 123,
            "arenaSize": 123
          },
          "eon": {
            "ram": 123,
            "rom": 123,
            "arenaSize": 123
          },
          "eonRamOptimized": {
            "ram": 123,
            "rom": 123,
            "arenaSize": 123
          }
        },
        "timePerInferenceMs": 123,
        "mcuSupportError": "<string>",
        "customMetrics": [
          {
            "name": "<string>",
            "value": "<string>"
          }
        ],
        "hasPerformance": true,
        "profilingError": "<string>"
      },
      "table": {
        "variant": "int8",
        "lowEndMcu": {
          "description": "<string>",
          "timePerInferenceMs": 123,
          "memory": {
            "tflite": {
              "ram": 123,
              "rom": 123
            },
            "eon": {
              "ram": 123,
              "rom": 123
            },
            "eonRamOptimized": {
              "ram": 123,
              "rom": 123
            }
          },
          "supported": true,
          "mcuSupportError": "<string>"
        },
        "highEndMcu": {
          "description": "<string>",
          "timePerInferenceMs": 123,
          "memory": {
            "tflite": {
              "ram": 123,
              "rom": 123
            },
            "eon": {
              "ram": 123,
              "rom": 123
            },
            "eonRamOptimized": {
              "ram": 123,
              "rom": 123
            }
          },
          "supported": true,
          "mcuSupportError": "<string>"
        },
        "highEndMcuPlusAccelerator": {
          "description": "<string>",
          "timePerInferenceMs": 123,
          "memory": {
            "tflite": {
              "ram": 123,
              "rom": 123
            },
            "eon": {
              "ram": 123,
              "rom": 123
            },
            "eonRamOptimized": {
              "ram": 123,
              "rom": 123
            }
          },
          "supported": true,
          "mcuSupportError": "<string>"
        },
        "mpu": {
          "description": "<string>",
          "timePerInferenceMs": 123,
          "rom": 123,
          "supported": true
        },
        "gpuOrMpuAccelerator": {
          "description": "<string>",
          "timePerInferenceMs": 123,
          "rom": 123,
          "supported": true
        }
      }
    },
    "inputs": [
      {
        "dataType": "int8",
        "name": "<string>",
        "shape": [
          123
        ],
        "quantizationScale": 123,
        "quantizationZeroPoint": 123
      }
    ],
    "outputs": [
      {
        "dataType": "int8",
        "name": "<string>",
        "shape": [
          123
        ],
        "quantizationScale": 123,
        "quantizationZeroPoint": 123
      }
    ],
    "profileJobId": 123,
    "profileJobFailed": true,
    "supportsTFLite": true
  },
  "modelInfo": {
    "input": {
      "inputType": "time-series",
      "frequencyHz": 123,
      "windowLengthMs": 123
    },
    "model": {
      "modelType": "classification",
      "labels": [
        "<string>"
      ]
    }
  }
}
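
The per-variant profiling data is nested several levels deep. Continuing from the Python sketch above (and assuming an int8 variant was profiled), this snippet pulls out the EON memory estimates and inference time for the int8 model:

# `body` holds the parsed JSON response shown above.
model = body["model"]
info = model["profileInfo"].get("int8")
if info is not None and info["hasPerformance"]:
    eon = info["memory"]["eon"]
    print(f"device:            {info['device']}")
    print(f"EON RAM (bytes):   {eon['ram']}")
    print(f"EON ROM (bytes):   {eon['rom']}")
    print(f"time/inference ms: {info['timePerInferenceMs']}")
elif info is not None and info.get("profilingError"):
    print("Profiling failed:", info["profilingError"])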

Authorizations

x-api-key
string, header, required

Path Parameters

projectId
integer, required

Project ID

Query Parameters

impulseId
integer

Impulse ID. If this is unset then the default impulse is used.
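
To target a specific impulse rather than the default one, pass impulseId as a query parameter. Continuing the Python sketch above (the impulse ID of 2 is hypothetical):

# Request the pretrained model for a specific impulse (hypothetical impulseId = 2).
response = requests.get(url, headers={"x-api-key": API_KEY}, params={"impulseId": 2})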

Response

200 - application/json

OK

The response is of type object.