ESP32-S3 Advanced Feature Development
Based on hands-on practice from the XiaoZhi AI (小智AI) project, this guide walks through advanced feature development on the ESP32-S3, covering the concrete implementation of 4G connectivity, on-device AI inference, multimodal interaction, and other forward-looking capabilities.
1. 4G Communication Module Integration
1.1 ML307R Cat.1 Module Integration
Applicable scenarios: outdoor environments, mobile devices, and IoT applications without Wi-Fi coverage
Hardware Connections
ESP32-S3 → ML307R Cat.1
GPIO11 → TXD (data transmit)
GPIO12 → RXD (data receive)
3.3V → VCC (power)
GND → GND (ground)
LDO output (~5V) → BAT (battery supply; short EN to enable)
AT Command Communication
#include "driver/uart.h"
#include "string.h"
#define ML307R_UART_PORT UART_NUM_1
#define ML307R_TXD_PIN GPIO_NUM_11
#define ML307R_RXD_PIN GPIO_NUM_12
#define ML307R_BUFFER_SIZE 1024
typedef enum {
ML307R_STATE_INIT,
ML307R_STATE_READY,
ML307R_STATE_CONNECTED,
ML307R_STATE_ERROR
} ml307r_state_t;
static ml307r_state_t ml307r_state = ML307R_STATE_INIT;
// Initialize the UART used to talk to the ML307R
void ml307r_uart_init() {
uart_config_t uart_config = {
.baud_rate = 115200,
.data_bits = UART_DATA_8_BITS,
.parity = UART_PARITY_DISABLE,
.stop_bits = UART_STOP_BITS_1,
.flow_ctrl = UART_HW_FLOWCTRL_DISABLE,
.source_clk = UART_SCLK_DEFAULT,
};
ESP_ERROR_CHECK(uart_driver_install(ML307R_UART_PORT, ML307R_BUFFER_SIZE * 2, 0, 0, NULL, 0));
ESP_ERROR_CHECK(uart_param_config(ML307R_UART_PORT, &uart_config));
ESP_ERROR_CHECK(uart_set_pin(ML307R_UART_PORT, ML307R_TXD_PIN, ML307R_RXD_PIN,
UART_PIN_NO_CHANGE, UART_PIN_NO_CHANGE));
}
// Send an AT command and check for the expected response
bool ml307r_send_command(const char* command, const char* expected_response, uint32_t timeout_ms) {
char response[512];
// Send the command
uart_write_bytes(ML307R_UART_PORT, command, strlen(command));
uart_write_bytes(ML307R_UART_PORT, "\r\n", 2);
// Wait for the response
int len = uart_read_bytes(ML307R_UART_PORT, response, sizeof(response) - 1,
pdMS_TO_TICKS(timeout_ms));
if (len > 0) {
response[len] = '\0';
ESP_LOGI("ML307R", "命令: %s, 響應: %s", command, response);
if (expected_response && strstr(response, expected_response)) {
return true;
}
}
return false;
}
// Initialize the 4G network connection
esp_err_t ml307r_network_init(const char* apn) {
ESP_LOGI("ML307R", "Initializing 4G network...");
// Check that the module responds
if (!ml307r_send_command("AT", "OK", 3000)) {
ESP_LOGE("ML307R", "模組無響應");
return ESP_FAIL;
}
// Check SIM card status
if (!ml307r_send_command("AT+CPIN?", "+CPIN: READY", 5000)) {
ESP_LOGE("ML307R", "SIM card not ready");
return ESP_FAIL;
}
// Configure the APN
char apn_cmd[128];
snprintf(apn_cmd, sizeof(apn_cmd), "AT+CGDCONT=1,\"IP\",\"%s\"", apn);
if (!ml307r_send_command(apn_cmd, "OK", 3000)) {
ESP_LOGE("ML307R", "APN設定失敗");
return ESP_FAIL;
}
// Activate the PDP context
if (!ml307r_send_command("AT+CGACT=1,1", "OK", 10000)) {
ESP_LOGE("ML307R", "Network attach failed");
return ESP_FAIL;
}
// Query the assigned IP address
if (ml307r_send_command("AT+CGPADDR=1", "+CGPADDR", 5000)) {
ESP_LOGI("ML307R", "4G network connected");
ml307r_state = ML307R_STATE_CONNECTED;
return ESP_OK;
}
return ESP_FAIL;
}
1.2 HTTP Communication
// HTTP POST request through the module's AT interface
esp_err_t ml307r_http_post(const char* url, const char* data, char* response, size_t response_size) {
if (ml307r_state != ML307R_STATE_CONNECTED) {
return ESP_ERR_INVALID_STATE;
}
// Configure HTTP parameters
char cmd[256];
snprintf(cmd, sizeof(cmd), "AT+HTTPSET=\"URL\",\"%s\"", url);
if (!ml307r_send_command(cmd, "OK", 3000)) {
return ESP_FAIL;
}
ml307r_send_command("AT+HTTPSET=\"CONTENT\",\"application/json\"", "OK", 3000);
// Send the POST body
snprintf(cmd, sizeof(cmd), "AT+HTTPDATA=%d", strlen(data));
uart_write_bytes(ML307R_UART_PORT, cmd, strlen(cmd));
uart_write_bytes(ML307R_UART_PORT, "\r\n", 2);
vTaskDelay(pdMS_TO_TICKS(100));
uart_write_bytes(ML307R_UART_PORT, data, strlen(data));
// Execute the HTTP POST
if (ml307r_send_command("AT+HTTPPOST", "+HTTPPOST:", 10000)) {
// Read the response data
ml307r_send_command("AT+HTTPREAD", "+HTTPREAD:", 5000);
return ESP_OK;
}
return ESP_FAIL;
}
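To tie the pieces above together, here is a minimal bring-up sketch; the task wrapper, the "cmnet" APN, and the report URL are placeholder assumptions for illustration, not part of the original firmware:
// Example 4G bring-up task (sketch; APN and URL are placeholders)
void cellular_task(void *arg) {
    ml307r_uart_init();
    if (ml307r_network_init("cmnet") == ESP_OK) {
        const char* payload = "{\"device\":\"xiaozhi\",\"status\":\"online\"}";
        char http_response[512];
        if (ml307r_http_post("http://example.com/api/report", payload,
                             http_response, sizeof(http_response)) == ESP_OK) {
            ESP_LOGI("APP", "Report uploaded over 4G");
        }
    }
    vTaskDelete(NULL);
}
// From app_main(): xTaskCreate(cellular_task, "cellular", 4096, NULL, 5, NULL);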
2. On-Device AI Inference Engine
2.1 TensorFlow Lite Micro Integration
Memory requirement: TensorFlow Lite model inference needs at least 2 MB of PSRAM; the N16R8 module configuration is recommended.
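Before loading any model it helps to confirm that PSRAM is actually mapped at runtime (this assumes SPIRAM support has been enabled in menuconfig); a minimal check:
#include "esp_heap_caps.h"
#include "esp_log.h"

// Log the amount of free PSRAM before allocating model buffers
void check_psram(void) {
    size_t psram_free = heap_caps_get_free_size(MALLOC_CAP_SPIRAM);
    ESP_LOGI("AI", "Free PSRAM: %u bytes", (unsigned)psram_free);
    if (psram_free < 2 * 1024 * 1024) {
        ESP_LOGW("AI", "Less than 2 MB of PSRAM free; large models may not fit");
    }
}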
Environment Setup
#include "tensorflow/lite/micro/all_ops_resolver.h"
#include "tensorflow/lite/micro/micro_error_reporter.h"
#include "tensorflow/lite/micro/micro_interpreter.h"
#include "tensorflow/lite/schema/schema_generated.h"
#include "model_data.h" // 轉換後的模型資料
#define TENSOR_ARENA_SIZE (200 * 1024) // 200KB推理記憶體
class EdgeAIEngine {
private:
tflite::MicroErrorReporter error_reporter;
tflite::AllOpsResolver resolver;
const tflite::Model* model;
tflite::MicroInterpreter* interpreter;
TfLiteTensor* input;
TfLiteTensor* output;
// Tensor arena (working memory for inference)
uint8_t tensor_arena[TENSOR_ARENA_SIZE];
public:
// Initialize the AI engine
bool init() {
// Load the model
model = tflite::GetModel(g_model_data);
if (model->version() != TFLITE_SCHEMA_VERSION) {
ESP_LOGE("AI", "模型版本不符");
return false;
}
// Create the interpreter
interpreter = new tflite::MicroInterpreter(
model, resolver, tensor_arena, TENSOR_ARENA_SIZE, &error_reporter);
// Allocate tensors
TfLiteStatus allocate_status = interpreter->AllocateTensors();
if (allocate_status != kTfLiteOk) {
ESP_LOGE("AI", "張量分配失敗");
return false;
}
// Get the input and output tensors
input = interpreter->input(0);
output = interpreter->output(0);
ESP_LOGI("AI", "AI引擎初始化成功");
ESP_LOGI("AI", "輸入形狀: [%d, %d, %d, %d]",
input->dims->data[0], input->dims->data[1],
input->dims->data[2], input->dims->data[3]);
return true;
}
// Voice command classification
int classify_voice_command(const float* audio_features, size_t feature_count) {
// Copy the features into the input tensor
for (size_t i = 0; i < feature_count; i++) {
input->data.f[i] = audio_features[i];
}
// Run inference
uint64_t start_time = esp_timer_get_time();
TfLiteStatus invoke_status = interpreter->Invoke();
uint64_t inference_time = esp_timer_get_time() - start_time;
if (invoke_status != kTfLiteOk) {
ESP_LOGE("AI", "推理執行失敗");
return -1;
}
// Pick the class with the highest score
float max_score = 0;
int predicted_class = -1;
for (int i = 0; i < output->dims->data[1]; i++) {
if (output->data.f[i] > max_score) {
max_score = output->data.f[i];
predicted_class = i;
}
}
ESP_LOGI("AI", "推理完成,耗時:%lldμs,分類:%d,置信度:%.2f",
inference_time, predicted_class, max_score);
return predicted_class;
}
// Emotion detection
float detect_emotion(const float* audio_features) {
// Same inference flow as above;
// returns an emotion score from -1.0 (negative) to 1.0 (positive)
for (size_t i = 0; i < input->bytes / sizeof(float); i++) {
input->data.f[i] = audio_features[i];
}
interpreter->Invoke();
return output->data.f[0]; // Assumes a single-output emotion score
}
};
// Global AI engine instance
static EdgeAIEngine ai_engine;
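A usage sketch for the engine above follows; the label table and the assumption that the model is a small keyword classifier are illustrative only, since the real model defines its own input size and class set. Note also that the 200 KB tensor_arena is a class member, so the global instance sits in internal RAM; a common variant allocates the arena from PSRAM with heap_caps_malloc(TENSOR_ARENA_SIZE, MALLOC_CAP_SPIRAM) and hands that pointer to the interpreter instead.
// Hypothetical label table for a small keyword-spotting model
static const char* kCommandLabels[] = {"unknown", "lights_on", "lights_off", "play_music"};

void run_voice_inference(const float* mfcc_features, size_t feature_count) {
    static bool engine_ready = false;
    if (!engine_ready) {
        engine_ready = ai_engine.init();
        if (!engine_ready) return;
    }
    int cls = ai_engine.classify_voice_command(mfcc_features, feature_count);
    if (cls >= 0 && cls < (int)(sizeof(kCommandLabels) / sizeof(kCommandLabels[0]))) {
        ESP_LOGI("AI", "Recognized command: %s", kCommandLabels[cls]);
    }
}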
2.2 Audio Feature Extraction
#include <math.h>
// Simplified MFCC feature extraction
class AudioFeatureExtractor {
private:
static constexpr int SAMPLE_RATE = 16000;
static constexpr int FFT_SIZE = 512;
static constexpr int MFCC_COEFF = 13;
public:
// Extract MFCC features
void extract_mfcc(const int16_t* audio_data, size_t samples, float* mfcc_features) {
// 1. Pre-emphasis
float pre_emphasized[samples]; // NOTE: variable-length array on the stack; allocate from the heap for large frames
pre_emphasized[0] = audio_data[0];
for (size_t i = 1; i < samples; i++) {
pre_emphasized[i] = audio_data[i] - 0.97 * audio_data[i-1];
}
// 2. Windowing (Hamming window)
float windowed[FFT_SIZE];
for (int i = 0; i < FFT_SIZE; i++) {
float window = 0.54 - 0.46 * cos(2 * M_PI * i / (FFT_SIZE - 1));
windowed[i] = (i < samples) ? pre_emphasized[i] * window : 0;
}
// 3. FFT (simplified; use an optimized FFT library in practice)
float magnitude_spectrum[FFT_SIZE/2];
simple_fft_magnitude(windowed, magnitude_spectrum, FFT_SIZE);
// 4. Mel filter bank
float mel_energies[26]; // 26 Mel filters
apply_mel_filters(magnitude_spectrum, mel_energies);
// 5. Log transform and DCT
for (int i = 0; i < 26; i++) {
mel_energies[i] = log(mel_energies[i] + 1e-10);
}
// DCT to obtain the MFCC coefficients
for (int i = 0; i < MFCC_COEFF; i++) {
mfcc_features[i] = 0;
for (int j = 0; j < 26; j++) {
mfcc_features[i] += mel_energies[j] * cos(M_PI * i * (j + 0.5) / 26);
}
}
}
private:
void simple_fft_magnitude(const float* input, float* magnitude, int size) {
// Naive DFT for clarity (the ESP-DSP library is far more efficient in practice)
for (int k = 0; k < size/2; k++) {
float real = 0, imag = 0;
for (int n = 0; n < size; n++) {
float angle = -2 * M_PI * k * n / size;
real += input[n] * cos(angle);
imag += input[n] * sin(angle);
}
magnitude[k] = sqrt(real*real + imag*imag);
}
}
void apply_mel_filters(const float* spectrum, float* mel_energies) {
// Mel filter bank
// Simplified version; a real implementation needs a proper Mel-scale mapping
for (int i = 0; i < 26; i++) {
mel_energies[i] = 0;
int start = i * (FFT_SIZE/2) / 26;
int end = (i + 1) * (FFT_SIZE/2) / 26;
for (int j = start; j < end; j++) {
mel_energies[i] += spectrum[j];
}
}
}
};
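The simple_fft_magnitude() above is a naive O(N²) DFT and far too slow for real-time audio; the sketch below shows the same magnitude-spectrum step using the ESP-DSP radix-2 FFT, assuming the esp-dsp component has been added to the project (the twiddle-table init must be called once before use):
#include <math.h>
#include "esp_dsp.h"

// One-time twiddle-table initialization (CONFIG_DSP_MAX_FFT_SIZE comes from the esp-dsp Kconfig)
void fft_init_once(void) {
    dsps_fft2r_init_fc32(NULL, CONFIG_DSP_MAX_FFT_SIZE);
}

// Magnitude spectrum of one real, windowed frame (size must be a power of two, e.g. 512)
void fast_fft_magnitude(const float* input, float* magnitude, int size) {
    static float work[2 * 512];             // interleaved re/im buffer sized for FFT_SIZE = 512
    for (int i = 0; i < size; i++) {
        work[2 * i] = input[i];             // real part
        work[2 * i + 1] = 0.0f;             // imaginary part
    }
    dsps_fft2r_fc32(work, size);            // radix-2 complex FFT
    dsps_bit_rev_fc32(work, size);          // restore natural bin order
    for (int k = 0; k < size / 2; k++) {
        magnitude[k] = sqrtf(work[2 * k] * work[2 * k] + work[2 * k + 1] * work[2 * k + 1]);
    }
}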
3. Multimodal Interaction System
3.1 Vision + Voice Multimodality
Hardware requirement: an ESP32-CAM module or a compatible camera module, working alongside the XiaoZhi voice system
Vision Processing Integration
#include "esp_camera.h"
#include "img_converters.h"
#include "esp_log.h"
#include <string.h>
// Camera configuration (ESP32-CAM pin mapping)
camera_config_t camera_config = {
.pin_pwdn = 32,
.pin_reset = -1,
.pin_xclk = 0,
.pin_sscb_sda = 26,
.pin_sscb_scl = 27,
.pin_d7 = 35,
.pin_d6 = 34,
.pin_d5 = 39,
.pin_d4 = 36,
.pin_d3 = 21,
.pin_d2 = 19,
.pin_d1 = 18,
.pin_d0 = 5,
.pin_vsync = 25,
.pin_href = 23,
.pin_pclk = 22,
.xclk_freq_hz = 20000000,
.ledc_timer = LEDC_TIMER_0,
.ledc_channel = LEDC_CHANNEL_0,
.pixel_format = PIXFORMAT_JPEG,
.frame_size = FRAMESIZE_VGA, // 640x480
.jpeg_quality = 10,
.fb_count = 1
};
class MultiModalProcessor {
private:
bool camera_initialized;
public:
MultiModalProcessor() {
camera_initialized = false;
}
// Initialize the camera
bool init_camera() {
esp_err_t err = esp_camera_init(&camera_config);
if (err != ESP_OK) {
ESP_LOGE("CAMERA", "攝影機初始化失敗: %s", esp_err_to_name(err));
return false;
}
camera_initialized = true;
ESP_LOGI("CAMERA", "攝影機初始化成功");
return true;
}
// Capture a photo and run object recognition
char* capture_and_recognize() {
if (!camera_initialized) {
return NULL;
}
// Capture a frame
camera_fb_t* fb = esp_camera_fb_get();
if (!fb) {
ESP_LOGE("CAMERA", "拍照失敗");
return NULL;
}
ESP_LOGI("CAMERA", "拍照成功: %dx%d, 大小: %d bytes",
fb->width, fb->height, fb->len);
// Image recognition (simplified)
char* recognition_result = simple_object_recognition(fb->buf, fb->len);
esp_camera_fb_return(fb);
return recognition_result;
}
// Multimodal command handling (the Chinese literals are the spoken trigger words)
void process_multimodal_command(const char* voice_text) {
if (strstr(voice_text, "看看") || strstr(voice_text, "拍照") || strstr(voice_text, "識別")) { // "look", "take a photo", "recognize"
ESP_LOGI("MULTIMODAL", "Triggering visual recognition");
char* result = capture_and_recognize();
if (result) {
// Speak the recognition result back to the user
char response[256];
snprintf(response, sizeof(response), "我看到了%s", result); // "I can see %s"
text_to_speech_and_play(response);
free(result);
} else {
text_to_speech_and_play("抱歉,我沒有看清楚");
}
}
else if (strstr(voice_text, "拍張照片")) {
// 僅拍照儲存
camera_fb_t* fb = esp_camera_fb_get();
if (fb) {
save_photo_to_sd(fb->buf, fb->len);
text_to_speech_and_play("照片已儲存");
esp_camera_fb_return(fb);
}
}
}
private:
// Simple object recognition (can be hooked up to a cloud vision API)
char* simple_object_recognition(uint8_t* image_data, size_t size) {
// Options here include:
// 1. A local TensorFlow Lite image classification model
// 2. A cloud AI vision API (Baidu, Alibaba Cloud, Tencent Cloud)
// 3. Simple color/shape detection algorithms
// Example: send the image to a cloud API
return call_cloud_vision_api(image_data, size);
}
char* call_cloud_vision_api(uint8_t* image_data, size_t size) {
// Base64-encode the image
char* base64_image = base64_encode(image_data, size);
// Build the JSON request
char json_request[4096]; // NOTE: a Base64-encoded VGA JPEG will not fit in 4 KB; enlarge or stream in practice
snprintf(json_request, sizeof(json_request),
"{\"image\":\"%s\",\"features\":[\"object_detection\"]}",
base64_image);
// HTTP POST to the vision API
char response[1024];
if (http_post_json("https://vision-api.example.com/detect",
json_request, response, sizeof(response)) == ESP_OK) {
// Parse the JSON response and extract the recognition result
char* result = parse_vision_result(response);
free(base64_image);
return result;
}
free(base64_image);
return NULL;
}
};
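A sketch of wiring the processor into the voice pipeline; the global instance and the on_asr_text() hook are illustrative assumptions, since in the real firmware this call sits wherever the recognized text becomes available:
static MultiModalProcessor multimodal;

// Called once at startup, after the camera power rail is up
void multimodal_init(void) {
    if (!multimodal.init_camera()) {
        ESP_LOGW("MULTIMODAL", "Camera unavailable, running in voice-only mode");
    }
}

// Called with the text returned by ASR for each utterance
void on_asr_text(const char* text) {
    multimodal.process_multimodal_command(text);
}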
4. IoT Device Control
4.1 MQTT Smart Home Integration
#include "mqtt_client.h"
#include "esp_log.h"
#include <string.h>
#include <algorithm>
class SmartHomeController {
private:
esp_mqtt_client_handle_t mqtt_client;
bool mqtt_connected;
// Device state
typedef struct {
bool light_on;
uint8_t brightness;
uint8_t temperature;
bool fan_on;
uint8_t fan_speed;
} home_status_t;
home_status_t home_status;
public:
SmartHomeController() : mqtt_connected(false) {
memset(&home_status, 0, sizeof(home_status));
}
// MQTT event handler
static void mqtt_event_handler(void *handler_args, esp_event_base_t base,
int32_t event_id, void *event_data) {
SmartHomeController* controller = (SmartHomeController*)handler_args;
esp_mqtt_event_handle_t event = (esp_mqtt_event_handle_t)event_data;
switch (event->event_id) {
case MQTT_EVENT_CONNECTED:
ESP_LOGI("MQTT", "連接成功");
controller->mqtt_connected = true;
// 訂閱設備狀態主題
esp_mqtt_client_subscribe(event->client, "home/+/status", 1);
esp_mqtt_client_subscribe(event->client, "home/xiaozhi/command", 1);
break;
case MQTT_EVENT_DATA:
controller->handle_mqtt_message(event->topic, event->topic_len,
event->data, event->data_len);
break;
case MQTT_EVENT_DISCONNECTED:
ESP_LOGI("MQTT", "斷開連接");
controller->mqtt_connected = false;
break;
default:
break;
}
}
// Initialize the MQTT connection
bool init_mqtt(const char* broker_uri) {
esp_mqtt_client_config_t mqtt_cfg = {
.broker.address.uri = broker_uri,
.session.keepalive = 60,
.session.disable_clean_session = 0,
.credentials.client_id = "xiaozhi_esp32",
.credentials.username = "xiaozhi",
.credentials.authentication.password = "xiaozhi123",
};
mqtt_client = esp_mqtt_client_init(&mqtt_cfg);
if (mqtt_client == NULL) {
ESP_LOGE("MQTT", "客戶端初始化失敗");
return false;
}
esp_mqtt_client_register_event(mqtt_client, MQTT_EVENT_ANY,
mqtt_event_handler, this);
esp_err_t err = esp_mqtt_client_start(mqtt_client);
if (err != ESP_OK) {
ESP_LOGE("MQTT", "客戶端啟動失敗: %s", esp_err_to_name(err));
return false;
}
return true;
}
// Control a smart light
void control_light(bool on, uint8_t brightness = 100) {
if (!mqtt_connected) return;
char payload[128];
snprintf(payload, sizeof(payload),
"{\"state\":\"%s\",\"brightness\":%d}",
on ? "ON" : "OFF", brightness);
esp_mqtt_client_publish(mqtt_client, "home/light/command",
payload, 0, 1, 0);
home_status.light_on = on;
home_status.brightness = brightness;
ESP_LOGI("SMART_HOME", "燈光控制: %s, 亮度: %d%%",
on ? "開啟" : "關閉", brightness);
}
// Parse and execute voice commands (the Chinese literals are the spoken trigger phrases)
void execute_voice_command(const char* command) {
if (strstr(command, "開燈") || strstr(command, "打開燈")) { // "turn on the light"
control_light(true);
text_to_speech_and_play("燈已打開"); // "The light is on"
}
else if (strstr(command, "關燈") || strstr(command, "關閉燈")) { // "turn off the light"
control_light(false);
text_to_speech_and_play("燈已關閉"); // "The light is off"
}
else if (strstr(command, "調亮") || strstr(command, "亮一點")) { // "brighter"
uint8_t new_brightness = std::min(100, home_status.brightness + 20);
control_light(true, new_brightness);
text_to_speech_and_play("亮度已調高"); // "Brightness increased"
}
else if (strstr(command, "調暗") || strstr(command, "暗一點")) { // "dimmer"
uint8_t new_brightness = std::max(10, home_status.brightness - 20);
control_light(true, new_brightness);
text_to_speech_and_play("亮度已調低"); // "Brightness decreased"
}
}
private:
// Handle incoming MQTT messages
void handle_mqtt_message(const char* topic, int topic_len,
const char* data, int data_len) {
char topic_str[64], data_str[256];
int t_copy = std::min(topic_len, (int)sizeof(topic_str) - 1);
int d_copy = std::min(data_len, (int)sizeof(data_str) - 1);
strncpy(topic_str, topic, t_copy);
topic_str[t_copy] = '\0';
strncpy(data_str, data, d_copy);
data_str[d_copy] = '\0';
ESP_LOGI("MQTT", "Message received - topic: %s, data: %s", topic_str, data_str);
// Parse device status updates
if (strstr(topic_str, "/status")) {
parse_device_status(topic_str, data_str);
}
// Handle remote commands
else if (strstr(topic_str, "xiaozhi/command")) {
execute_voice_command(data_str);
}
}
// Parse device status
void parse_device_status(const char* topic, const char* data) {
// Simplified JSON parsing
if (strstr(topic, "light")) {
if (strstr(data, "\"state\":\"ON\"")) {
home_status.light_on = true;
} else if (strstr(data, "\"state\":\"OFF\"")) {
home_status.light_on = false;
}
}
// Update the status display
update_home_status_display();
}
};
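A minimal usage sketch for the controller; the broker URI is a placeholder (the client ID and credentials are hard-coded in init_mqtt() above), and the sample command string is just one of the phrases the parser recognizes:
static SmartHomeController smart_home;

void smart_home_start(void) {
    // "mqtt://192.168.1.10:1883" is a placeholder local-broker address
    if (!smart_home.init_mqtt("mqtt://192.168.1.10:1883")) {
        ESP_LOGE("APP", "Smart home controller failed to start");
        return;
    }
    // Whenever ASR produces text, hand it to the controller
    smart_home.execute_voice_command("開燈");  // "turn on the light"
}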
5. Cloud Service Integration
5.1 AI Voice Service Integration
#include "cJSON.h"
#include "esp_http_client.h"
#include "esp_log.h"
#include <string.h>
#include <stdlib.h>
class CloudAIService {
private:
char api_key[64];
char base_url[128];
public:
CloudAIService(const char* key, const char* url) {
// Copy with explicit bounds to avoid overflowing the fixed buffers
snprintf(api_key, sizeof(api_key), "%s", key);
snprintf(base_url, sizeof(base_url), "%s", url);
}
// Speech to text (ASR)
char* speech_to_text(const uint8_t* audio_data, size_t audio_size) {
// Base64-encode the audio data
char* base64_audio = base64_encode(audio_data, audio_size);
// Build the request JSON
cJSON *json = cJSON_CreateObject();
cJSON *format = cJSON_CreateString("wav");
cJSON *rate = cJSON_CreateNumber(16000);
cJSON *channel = cJSON_CreateNumber(1);
cJSON *speech = cJSON_CreateString(base64_audio);
cJSON_AddItemToObject(json, "format", format);
cJSON_AddItemToObject(json, "rate", rate);
cJSON_AddItemToObject(json, "channel", channel);
cJSON_AddItemToObject(json, "speech", speech);
char* json_string = cJSON_Print(json);
// HTTP POST request
char response[1024];
char full_url[256];
snprintf(full_url, sizeof(full_url), "%s/speech/v1/asr", base_url);
esp_err_t ret = http_post_with_auth(full_url, api_key, json_string,
response, sizeof(response));
// Free temporary buffers
free(base64_audio);
free(json_string);
cJSON_Delete(json);
if (ret == ESP_OK) {
return parse_asr_response(response);
}
return NULL;
}
// Chat with a large language model (LLM)
char* chat_with_llm(const char* message, const char* system_prompt = NULL) {
cJSON *json = cJSON_CreateObject();
cJSON *messages = cJSON_CreateArray();
// System prompt
if (system_prompt) {
cJSON *system_msg = cJSON_CreateObject();
cJSON_AddStringToObject(system_msg, "role", "system");
cJSON_AddStringToObject(system_msg, "content", system_prompt);
cJSON_AddItemToArray(messages, system_msg);
}
// User message
cJSON *user_msg = cJSON_CreateObject();
cJSON_AddStringToObject(user_msg, "role", "user");
cJSON_AddStringToObject(user_msg, "content", message);
cJSON_AddItemToArray(messages, user_msg);
cJSON_AddItemToObject(json, "messages", messages);
cJSON_AddStringToObject(json, "model", "deepseek-chat");
cJSON_AddNumberToObject(json, "max_tokens", 500);
cJSON_AddNumberToObject(json, "temperature", 0.7);
char* json_string = cJSON_Print(json);
// HTTP POST request
char response[2048];
char full_url[256];
snprintf(full_url, sizeof(full_url), "%s/chat/completions", base_url);
esp_err_t ret = http_post_with_auth(full_url, api_key, json_string,
response, sizeof(response));
cJSON_Delete(json);
free(json_string);
if (ret == ESP_OK) {
return parse_chat_response(response);
}
return NULL;
}
private:
esp_err_t http_post_with_auth(const char* url, const char* api_key,
const char* data, char* response, size_t response_size) {
esp_http_client_config_t config = {
.url = url,
.method = HTTP_METHOD_POST,
};
esp_http_client_handle_t client = esp_http_client_init(&config);
// Set the request headers
esp_http_client_set_header(client, "Content-Type", "application/json");
char auth_header[128];
snprintf(auth_header, sizeof(auth_header), "Bearer %s", api_key);
esp_http_client_set_header(client, "Authorization", auth_header);
// Open the connection, send the body, then read the response into the caller's buffer
// (esp_http_client_read_response() requires the open/write/fetch_headers flow)
esp_err_t err = esp_http_client_open(client, strlen(data));
if (err == ESP_OK) {
esp_http_client_write(client, data, strlen(data));
esp_http_client_fetch_headers(client);
int data_read = esp_http_client_read_response(client, response, response_size - 1);
if (data_read < 0) data_read = 0;
response[data_read] = '\0';
}
esp_http_client_cleanup(client);
return err;
}
char* parse_asr_response(const char* response) {
cJSON *json = cJSON_Parse(response);
if (!json) return NULL;
cJSON *result = cJSON_GetObjectItem(json, "result");
if (!result || !cJSON_IsString(result)) { // Guard against a missing or non-string "result" field
cJSON_Delete(json);
return NULL;
}
char* text = strdup(result->valuestring);
cJSON_Delete(json);
return text;
}
char* parse_chat_response(const char* response) {
cJSON *json = cJSON_Parse(response);
if (!json) return NULL;
cJSON *choices = cJSON_GetObjectItem(json, "choices");
if (!choices || !cJSON_IsArray(choices)) {
cJSON_Delete(json);
return NULL;
}
cJSON *first_choice = cJSON_GetArrayItem(choices, 0);
if (!first_choice) {
cJSON_Delete(json);
return NULL;
}
cJSON *message = cJSON_GetObjectItem(first_choice, "message");
cJSON *content = message ? cJSON_GetObjectItem(message, "content") : NULL;
if (!content || !cJSON_IsString(content)) { // Guard against an unexpected response shape
cJSON_Delete(json);
return NULL;
}
char* reply = strdup(content->valuestring);
cJSON_Delete(json);
return reply;
}
};
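A sketch of the end-to-end voice round trip using the class above; the API key, base URL, audio source, and the Chinese system prompt are placeholders, and text_to_speech_and_play() is the playback helper already referenced earlier in this guide:
static CloudAIService cloud_ai("YOUR_API_KEY", "https://api.example.com");

// audio_buf holds one utterance of 16 kHz / 16-bit mono PCM captured from the microphone
void handle_utterance(const uint8_t* audio_buf, size_t audio_len) {
    char* text = cloud_ai.speech_to_text(audio_buf, audio_len);
    if (!text) return;

    // System prompt: "You are XiaoZhi, a friendly voice assistant"
    char* reply = cloud_ai.chat_with_llm(text, "你是小智,一個友善的語音助理");
    if (reply) {
        text_to_speech_and_play(reply);
        free(reply);
    }
    free(text);
}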
Recommended learning path:
- 🔧 ESP32 Technical Specifications - a deep dive into the hardware's capabilities
- 💻 ESP32 Programming and Development Guide - core programming skills
- 🎯 AI Feature Integration Documentation - developing AI capabilities
- ⚠️ FAQ - troubleshooting guide