Grove Vision AI V2 (WE2) without Arduino SSCMA library

@PJ_Glasso I hope you can help guide me in using the Grove Vision AI V2 module with an STM32WL MCU. This MCU is not supported by the Arduino SSCMA library that lets a XIAO ESP32C3 communicate with the SSCMA-Micro firmware on the WE2, so I am trying to follow the at_protocol documented in the SSCMA-Micro GitHub repository. I can establish communication from my STM32WL MCU to the WE2, but I cannot get the WE2 to properly INVOKE image capture and inferencing. Using SenseCraft, I confirmed the model is loaded: SenseCraft displays the preview video and lists results in its "Device Logger". A test program I wrote to decipher the WE2's responses reports the model size as "0", perhaps suggesting the model is not being loaded into WE2 memory from flash? To show the communications, here is a snippet of the Serial Monitor listing from my STM32WL (a RAK3172 module) program, connected to the WE2 via Tx/Rx. I reset the STM32WL at time points 20:23:52:833 and 20:25:06:668.

20:23:30:459 ---- Sent utf8 encoded message: "i\r\n" ----
20:23:33:383 ---- Sent utf8 encoded message: "q\r\n" ----
20:23:52:833 → �RAK3172 ↔ Grove Vision AI V2 UART/AT/JSON Test
20:24:00:935 → Type 'i' for inference, 'q' for query, 's' for sample
20:24:00:938 → [AT > WE2] AT+ID?[WE2 > RAW]
20:24:02:941 →
20:24:02:941 → [AT > WE2] AT+NAME?[WE2 > RAW]
20:24:04:944 →
20:24:04:944 → [AT > WE2] AT+STAT?[WE2 > RAW]
20:24:06:947 →
20:24:06:947 → [AT > WE2] AT+VER?[WE2 > RAW]
20:24:08:950 →
20:24:08:950 → [AT > WE2] AT+MODEL?[WE2 > RAW]
20:24:10:953 →
20:24:10:953 → [AT > WE2] AT+ALGOS?[WE2 > RAW]
20:24:12:956 →
20:24:12:956 → [AT > WE2] AT+SENSORS?[WE2 > RAW]
20:24:14:958 →
20:24:14:958 → Current Work Mode: LoRaWAN.
20:24:35:341 → [WE2 > RAW]
20:24:35:341 → {"type": 0, "name": "INIT@STAT?", "code": 0, "data": {"boot_count": 61, "is_ready": 1}}
20:24:35:349 → [WE2 > PARSED]
20:24:35:349 → {
20:24:35:349 →   "type": 0,
20:24:35:353 →   "name": "INIT@STAT?",
20:24:35:355 →   "code": 0,
20:24:35:355 →   "data": {
20:24:35:355 →     "boot_count": 61,
20:24:35:358 →     "is_ready": 1
20:24:35:361 →   }
20:24:35:361 → }
20:25:06:668 → �RAK3172 ↔ Grove Vision AI V2 UART/AT/JSON Test
20:25:14:770 → Type 'i' for inference, 'q' for query, 's' for sample
20:25:14:772 → [AT > WE2] AT+ID?[WE2 > RAW]
20:25:14:799 → {"type": 0, "name": "ID?", "code": 0, "data": "79344b8a"}
20:25:14:805 → [AT > WE2] AT+NAME?[WE2 > RAW]
20:25:14:819 → {"type": 0, "name": "NAME?", "code": 0, "data": "Grove Vision AI V2"}
20:25:14:825 → [AT > WE2] AT+STAT?[WE2 > RAW]
20:25:14:839 → {"type": 0, "name": "STAT?", "code": 0, "data": {"boot_count": 61, "is_ready": 1}}
20:25:14:862 → [AT > WE2] AT+VER?[WE2 > RAW]
20:25:14:862 → {"type": 0, "name": "VER?", "code": 0, "data": {"at_api": "v0", "software": "2025.01.02", "hardware": "1"}}
20:25:14:868 → [AT > WE2] AT+MODEL?[WE2 > RAW]
20:25:14:900 → {"type": 0, "name": "MODEL?", "code": 0, "data": {"id": 1, "type": 0, "address": 4194304, "size": 0}}
20:25:14:908 → [AT > WE2] AT+ALGOS?[WE2 > RAW]
20:25:16:912 → {"type": 0, "name": "ALGOS?", "code": 0, "data": [{"type": 7, "categroy": 1, "input_from": 1}, {"type": 6, "categroy": 1, "input
20:25:16:931 → [AT > WE2] AT+SENSORS?[WE2 > RAW]
20:25:18:925 →
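One thing I notice in the first pass (20:24) is that none of my queries got a reply until the WE2 emitted its INIT@STAT boot event at 20:24:35, so if the WE2 is also rebooting when I reset the RAK3172, I may simply be sending commands before the module is ready. Something like the following gate is what I have in mind; it is an untested sketch that assumes the event arrives as a single newline-terminated JSON line as in the log above (waitForWE2Ready is my own name):

#include <Arduino.h>

// Hypothetical helper: block until the WE2 reports is_ready via its
// INIT@STAT boot event before sending any AT queries. The timeout is a
// guess based on the ~40 s gap seen after reset in the log above.
bool waitForWE2Ready(Stream& we2, unsigned long timeoutMs) {
  String line;
  unsigned long start = millis();
  while (millis() - start < timeoutMs) {
    while (we2.available()) {
      char c = we2.read();
      if (c == '\n') {
        // Crude substring check, enough for a quick gate
        if (line.indexOf("INIT@STAT") >= 0 && line.indexOf("\"is_ready\": 1") >= 0) {
          return true;
        }
        line = "";
      } else if (c != '\r') {
        line += c;
      }
    }
  }
  return false; // never saw the ready event
}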

The following is my test program to invoke image capture and inferencing, and then decipher the JSON response from the WE2.

#include <Arduino.h>
#include <ArduinoJson.h>

// Serial port for Grove Vision AI V2
#define WE2_SERIAL Serial1
#define WE2_BAUD 921600
#define SERIAL_MON Serial

// AT commands
// Use protocol-compliant carriage return terminator
#define AT_INVOKE "AT+INVOKE=1,0,1\r" // Invoke once (N_TIMES=1, DIFFERED=0, RESULT_ONLY=1)
#define AT_QUERY "AT+QUERY\r"         // Query last result
#define AT_SAMPLE "AT+SAMPLE=1\r"     // Capture image
#define AT_TIMEOUT 2000
#define JSON_BUF_SIZE 4096

char atResponse[JSON_BUF_SIZE];
size_t atResponseLen = 0;
bool jsonReady = false;

void sendATCommand(const char* cmd) {
  WE2_SERIAL.print(cmd);
  SERIAL_MON.print("[AT > WE2] ");
  SERIAL_MON.print(cmd);
}

// Read response into buffer, look for JSON (ends with '}')
void readATResponse() {
  atResponseLen = 0;
  jsonReady = false;
  int braceCount = 0;
  bool started = false;
  unsigned long start = millis();
  while (millis() - start < AT_TIMEOUT) {
    while (WE2_SERIAL.available()) {
      char c = WE2_SERIAL.read();
      if (!started && c == '{') {
        started = true;
        braceCount = 1;
        atResponse[atResponseLen++] = c;
      } else if (started) {
        if (atResponseLen < JSON_BUF_SIZE - 1) {
          atResponse[atResponseLen++] = c;
        }
        if (c == '{') braceCount++;
        if (c == '}') braceCount--;
        if (braceCount == 0) {
          jsonReady = true;
          break;
        }
      }
    }
    if (jsonReady) break;
  }
  atResponse[atResponseLen] = '\0';
}

void printRawResponse() {
  SERIAL_MON.println("[WE2 > RAW]");
  SERIAL_MON.println(atResponse);
}

void parseAndPrintJSON() {
  StaticJsonDocument<JSON_BUF_SIZE> doc;
  DeserializationError err = deserializeJson(doc, atResponse);
  if (err) {
    SERIAL_MON.print("[JSON ERROR] ");
    SERIAL_MON.println(err.c_str());
    return;
  }
  SERIAL_MON.println("[WE2 > PARSED]");
  serializeJsonPretty(doc, SERIAL_MON);
  SERIAL_MON.println();
  // Results are nested under "data" in the WE2 replies (see logs above)
  JsonObject data = doc["data"];
  // Print performance metrics if present
  if (data.containsKey("perf")) {
    SERIAL_MON.print("Preprocess: ");
    SERIAL_MON.println(data["perf"][0].as<float>(), 3);
    SERIAL_MON.print("Inference: ");
    SERIAL_MON.println(data["perf"][1].as<float>(), 3);
    SERIAL_MON.print("Postprocess: ");
    SERIAL_MON.println(data["perf"][2].as<float>(), 3);
  }
  // Print detection/class info if present
  if (data.containsKey("boxes")) {
    JsonArray boxes = data["boxes"].as<JsonArray>();
    for (JsonObject box : boxes) {
      SERIAL_MON.print("Box: ");
      SERIAL_MON.print("target=");
      SERIAL_MON.print(box["target"].as<int>());
      SERIAL_MON.print(", score=");
      SERIAL_MON.print(box["score"].as<float>(), 3);
      SERIAL_MON.print(", x=");
      SERIAL_MON.print(box["x"].as<int>());
      SERIAL_MON.print(", y=");
      SERIAL_MON.print(box["y"].as<int>());
      SERIAL_MON.print(", w=");
      SERIAL_MON.print(box["w"].as<int>());
      SERIAL_MON.print(", h=");
      SERIAL_MON.println(box["h"].as<int>());
    }
  }
  if (data.containsKey("classes")) {
    JsonArray classes = data["classes"].as<JsonArray>();
    for (JsonObject cls : classes) {
      SERIAL_MON.print("Class: target=");
      SERIAL_MON.print(cls["target"].as<int>());
      SERIAL_MON.print(", score=");
      SERIAL_MON.println(cls["score"].as<float>(), 3);
    }
  }
  if (data.containsKey("points")) {
    JsonArray points = data["points"].as<JsonArray>();
    for (JsonObject pt : points) {
      SERIAL_MON.print("Point: x=");
      SERIAL_MON.print(pt["x"].as<int>());
      SERIAL_MON.print(", y=");
      SERIAL_MON.print(pt["y"].as<int>());
      SERIAL_MON.print(", score=");
      SERIAL_MON.print(pt["score"].as<float>(), 3);
      SERIAL_MON.print(", target=");
      SERIAL_MON.println(pt["target"].as<int>());
    }
  }
  if (data.containsKey("keypoints")) {
    JsonArray keypoints = data["keypoints"].as<JsonArray>();
    for (JsonObject kp : keypoints) {
      JsonObject box = kp["box"];
      SERIAL_MON.print("Keypoint: box target=");
      SERIAL_MON.print(box["target"].as<int>());
      SERIAL_MON.print(", score=");
      SERIAL_MON.print(box["score"].as<float>(), 3);
      SERIAL_MON.print(", x=");
      SERIAL_MON.print(box["x"].as<int>());
      SERIAL_MON.print(", y=");
      SERIAL_MON.print(box["y"].as<int>());
      SERIAL_MON.print(", w=");
      SERIAL_MON.print(box["w"].as<int>());
      SERIAL_MON.print(", h=");
      SERIAL_MON.print(box["h"].as<int>());
      SERIAL_MON.print(", points=[");
      JsonArray pts = kp["points"].as<JsonArray>();
      for (JsonObject pt : pts) {
        SERIAL_MON.print("[x=");
        SERIAL_MON.print(pt["x"].as<int>());
        SERIAL_MON.print(", y=");
        SERIAL_MON.print(pt["y"].as<int>());
        SERIAL_MON.print(", score=");
        SERIAL_MON.print(pt["score"].as<float>(), 3);
        SERIAL_MON.print(", target=");
        SERIAL_MON.print(pt["target"].as<int>());
        SERIAL_MON.print("] ");
      }
      SERIAL_MON.println("]");
    }
  }
  // Do not print image data to save RAM
}

void setup() {
  SERIAL_MON.begin(115200);
  WE2_SERIAL.begin(WE2_BAUD);
  delay(5000);
  SERIAL_MON.println("RAK3172 ↔ Grove Vision AI V2 UART/AT/JSON Test");
  delay(3000);
  SERIAL_MON.println("Type 'i' for inference, 'q' for query, 's' for sample");
  // Optional: Confirm communication with Grove Vision AI V2
  sendATCommand("AT+ID?\r");
  readATResponse();
  printRawResponse();
  sendATCommand("AT+NAME?\r");
  readATResponse();
  printRawResponse();
  sendATCommand("AT+STAT?\r");
  readATResponse();
  printRawResponse();
  sendATCommand("AT+VER?\r");
  readATResponse();
  printRawResponse();
  sendATCommand("AT+MODEL?\r");
  readATResponse();
  printRawResponse();
  sendATCommand("AT+ALGOS?\r");
  readATResponse();
  printRawResponse();
  sendATCommand("AT+SENSORS?\r");
  readATResponse();
  printRawResponse();
}

void loop() {
  // Simple serial menu for manual testing
  if (SERIAL_MON.available()) {
    char c = SERIAL_MON.read();
    if (c == 'i') sendATCommand(AT_INVOKE);
    else if (c == 'q') sendATCommand(AT_QUERY);
    else if (c == 's') sendATCommand(AT_SAMPLE);
    // Ignore CR and LF
    else if (c == '\r' || c == '\n') { /* do nothing */ }
  }
  // Read and process response
  readATResponse();
  if (atResponseLen > 0) {
    printRawResponse();
    if (jsonReady) parseAndPrintJSON();
  }
  delay(100); // avoid busy loop
}
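As an alternative to counting braces, which would miscount if a string value ever contained '{' and can also split replies across the 2-second timeout, I am considering collecting whole newline-terminated lines instead, since each WE2 reply in the logs above arrives as a single JSON line. A rough, untested sketch (readJSONLine is my own name; buffer handling mirrors my program above):

#include <Arduino.h>

// Alternative reader: collect one newline-terminated line and report
// whether it looks like a JSON reply. Avoids brace counting entirely.
bool readJSONLine(Stream& we2, char* buf, size_t bufSize, unsigned long timeoutMs) {
  size_t len = 0;
  unsigned long start = millis();
  while (millis() - start < timeoutMs) {
    while (we2.available()) {
      char c = we2.read();
      if (c == '\n') {            // end of one reply line
        buf[len] = '\0';
        return len > 0 && buf[0] == '{';
      }
      if (c != '\r' && len < bufSize - 1) buf[len++] = c;
    }
  }
  buf[len] = '\0';                // timed out; return what we have
  return false;
}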

Hi there,

So I can look, and you probably saw the posts on this as well. You have it working in the standard setup (not using the STM32)? Can you make it work that way first, to verify the overall setup? Also, which SenseCraft web site did you use (link)?

Time permitting I can check it out, but it looks like you have the right methodology; maybe there is a step missing. You'll get it, it works for others so :+1:

HTH
GL :slight_smile: PJ :v:

@PJ_Glasso Thanks for the prompt reply. The SenseCraft site that works fine with the WE2 directly is the Seeed SenseCraft Vision Workspace. I have yet to find a command, or any explanation, for how to trigger the WE2 to load the model into memory and connect it with the INVOKE command. I shall continue my search for the proper commands, timing, and communications to work successfully with the WE2's SSCMA-Micro firmware when I am unable to use the Seeed Arduino SSCMA library. This has become a struggle, so any fresh troubleshooting suggestions are most welcome! A lighter, more generic version of the Arduino SSCMA library would be nice…
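To make concrete what I mean by "lighter": something that just sends INVOKE and dispatches on the "type" field of each JSON line coming back (0 = command reply, 1 = event, as I read the at_protocol doc). An untested sketch under those assumptions (invokeOnce is my own name; the timeout is a guess):

#include <Arduino.h>
#include <ArduinoJson.h>

// Hypothetical minimal "SSCMA-lite" client: send one INVOKE, then read
// newline-terminated JSON lines and dispatch on "type".
void invokeOnce(Stream& we2) {
  we2.print("AT+INVOKE=1,0,1\r"); // invoke once, results only
  String line;
  unsigned long start = millis();
  while (millis() - start < 5000) {
    while (we2.available()) {
      char c = we2.read();
      if (c != '\n') { if (c != '\r') line += c; continue; }
      StaticJsonDocument<2048> doc;
      if (deserializeJson(doc, line) == DeserializationError::Ok) {
        int type = doc["type"] | -1;
        if (type == 0) {
          Serial.println("INVOKE accepted");
        } else if (type == 1 && doc["name"] == "INVOKE") {
          serializeJsonPretty(doc["data"], Serial); // perf/boxes/classes/points
          Serial.println();
          return; // got one result event, done
        }
      }
      line = "";
    }
  }
  Serial.println("INVOKE timed out");
}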
