TensorFlow Lite 是一種用于低運算能力終端的開源深度學習框架。它適用于微控制器和其他一些僅有數(shù)千字節(jié)內(nèi)存的設(shè)備。它可以直接在“裸機”上運行，不需要操作系統(tǒng)支持、任何標準 C/C++ 庫和動態(tài)內(nèi)存分配。核心運行時（core runtime）在 Cortex M3 上運行時僅需 16KB，加上足以用來運行語音關(guān)鍵字檢測模型的操作，也只需 22KB 的空間。
微控制器通常是小型、低能耗的計算設(shè)備，經(jīng)常嵌入在只需要進行基本運算的硬件中，包括家用電器和物聯(lián)網(wǎng)設(shè)備等。每年都有數(shù)十億個微控制器被生產(chǎn)出來。微控制器通常針對低能耗和小尺寸進行優(yōu)化，但代價是降低了處理能力、內(nèi)存和存儲。一些微控制器具有用來優(yōu)化機器學習任務(wù)性能的功能。
通過在微控制器上運行機器學習推斷，開發(fā)人員可以在不依賴于網(wǎng)絡(luò)連接的情況下將 AI 添加到各種各樣的硬件設(shè)備中，這經(jīng)常用來克服帶寬、功率以及由它們所導(dǎo)致的高延遲而造成的約束。在設(shè)備上運行推斷也可以幫助保護隱私，因為沒有數(shù)據(jù)從設(shè)備中發(fā)送出去。開發(fā)人員通過在大型設(shè)備上生成模型，使用專用程序轉(zhuǎn)換模型，然后部署模型和處理程序在低運算能力的終端上實現(xiàn)智能應(yīng)用。下面介紹我的實現(xiàn)過程。我的開發(fā)環(huán)境如下:
? ? 硬件:ESP-CAM
? ? 開發(fā)框架:Arduino 1.8.10
? ? 在arduino的庫管理界面添加tensorflow lite和JPEGdecoder。然后在libraries/Arduino_TensorFlowLite/src/tensorflow/lite/experimental/micro/arduino/debug_log.cpp中查看波特率參數(shù)9600，你可以修改參數(shù)或者修改串口監(jiān)視器的參數(shù)，總之要保持一致。
? ? 我打開tensorflow lite自帶的案例person_detect，它的案例主要是在 SparkFun Edge（Apollo3 Blue）、Arduino MKRZERO、
? ? STM32F746G 探索板（Discovery Board）實現(xiàn)。我手頭有ESP-CAM開發(fā)板，主要修改了攝像頭采集部分的程序。
? ? 修改arduino_image_provider.cpp
? ? #include <JPEGDecoder.h>
? ? #include “Arduino.h”
// The size of our temporary buffer for holding
// JPEG data received from the Arducam module
#define MAX_JPEG_BYTES 8182
// Camera library instance
// Temporary buffer for holding JPEG data from camera
uint8_t jpeg_buffer;
// Length of the JPEG data currently in the buffer
uint32_t jpeg_length = 0;
// iamge buffer
camera_fb_t * fb = NULL;
// Get the camera module ready
// Get the camera module ready.
// Configures the ESP-CAM pins and pixel format, initializes the driver, and
// applies per-sensor tweaks. Returns kTfLiteOk on success, kTfLiteError if
// the driver fails to initialize.
// Fixes vs. original: `error_reporter` must be a pointer (it is used with
// `->Report`, and the sibling functions take `tflite::ErrorReporter*`), and
// the curly “smart quotes” around string literals are replaced with plain
// double quotes so the code compiles.
TfLiteStatus InitCamera(tflite::ErrorReporter* error_reporter) {
  error_reporter->Report("Attempting to start Arducam");
  camera_config_t config;
  config.ledc_channel = LEDC_CHANNEL_0;
  config.ledc_timer = LEDC_TIMER_0;
  config.pin_d0 = Y2_GPIO_NUM;
  config.pin_d1 = Y3_GPIO_NUM;
  config.pin_d2 = Y4_GPIO_NUM;
  config.pin_d3 = Y5_GPIO_NUM;
  config.pin_d4 = Y6_GPIO_NUM;
  config.pin_d5 = Y7_GPIO_NUM;
  config.pin_d6 = Y8_GPIO_NUM;
  config.pin_d7 = Y9_GPIO_NUM;
  config.pin_xclk = XCLK_GPIO_NUM;
  config.pin_pclk = PCLK_GPIO_NUM;
  config.pin_vsync = VSYNC_GPIO_NUM;
  config.pin_href = HREF_GPIO_NUM;
  config.pin_sscb_sda = SIOD_GPIO_NUM;
  config.pin_sscb_scl = SIOC_GPIO_NUM;
  config.pin_pwdn = PWDN_GPIO_NUM;
  config.pin_reset = RESET_GPIO_NUM;
  config.xclk_freq_hz = 20000000;
  config.pixel_format = PIXFORMAT_JPEG;
  // Init with high specs to pre-allocate larger buffers when PSRAM is
  // available; fall back to a smaller single buffer otherwise.
  if (psramFound()) {
    config.frame_size = FRAMESIZE_UXGA;
    config.jpeg_quality = 10;
    config.fb_count = 2;
  } else {
    config.frame_size = FRAMESIZE_SVGA;
    config.jpeg_quality = 12;
    config.fb_count = 1;
  }
  // Camera init.
  esp_err_t err = esp_camera_init(&config);
  if (err != ESP_OK) {
    Serial.printf("Camera init failed with error 0x%x", err);
    return kTfLiteError;
  }
  sensor_t* s = esp_camera_sensor_get();
  // Initial sensors are flipped vertically and colors are a bit saturated.
  if (s->id.PID == OV3660_PID) {
    s->set_vflip(s, 1);        // flip it back
    s->set_brightness(s, 1);   // up the brightness just a bit
    s->set_saturation(s, -2);  // lower the saturation
  }
  // Drop down frame size for a higher initial frame rate.
  s->set_framesize(s, FRAMESIZE_QVGA);
  delay(100);
  return kTfLiteOk;
}
// Begin the capture and wait for it to finish
// Begin the capture and wait for it to finish.
// Grabs one frame from the camera driver into the global `fb`.
// Returns kTfLiteOk on success, kTfLiteError if no frame was produced.
// (Fix vs. original: curly “smart quotes” replaced with plain double quotes
// so the string literals compile.)
TfLiteStatus PerformCapture(tflite::ErrorReporter* error_reporter) {
  error_reporter->Report("Starting capture");
  fb = esp_camera_fb_get();
  if (!fb) {
    Serial.println("Camera capture failed");
    error_reporter->Report("Camera capture failed");
    return kTfLiteError;
  }
  return kTfLiteOk;
}
// Read data from the camera module into a local buffer
// Read data from the camera module into a local buffer.
// Validates the size of the JPEG frame captured by PerformCapture() and
// publishes it through the globals `jpeg_buffer`/`jpeg_length`.
// Returns kTfLiteError if the frame is empty or larger than MAX_JPEG_BYTES.
// Fixes vs. original: curly quotes replaced with plain double quotes; the
// "too many bytes" message now reports the actual offending length instead
// of the constant MAX_JPEG_BYTES; uint32_t is cast for the %d format.
// NOTE(review): `fb` is never handed back with esp_camera_fb_return(), so
// the driver's frame buffers are not recycled between captures — confirm a
// return happens after the JPEG is consumed.
TfLiteStatus ReadData(tflite::ErrorReporter* error_reporter) {
  // This represents the total length of the JPEG data.
  jpeg_length = fb->len;
  error_reporter->Report("Reading %d bytes from Arducam",
                         static_cast<int>(jpeg_length));
  // Ensure there's not too much data for our buffer.
  if (jpeg_length > MAX_JPEG_BYTES) {
    error_reporter->Report("Too many bytes in buffer (%d)",
                           static_cast<int>(jpeg_length));
    return kTfLiteError;
  }
  if (jpeg_length == 0) {
    error_reporter->Report("No data in esp-cam buffer");
    return kTfLiteError;
  }
  // Point at the driver-owned JPEG data; no copy is made.
  jpeg_buffer = fb->buf;
  delayMicroseconds(15);  // kept from the original; purpose unclear — verify
  error_reporter->Report("Finished reading");
  return kTfLiteOk;
}
? ? 修改arduino_detection_responder.cpp
? ? #include “detection_responder.h”
#include “Arduino.h”
// Flash the blue LED after each inference
void RespondToDetection(tflite::ErrorReporter* error_reporter,
uint8_t person_score, uint8_t no_person_score) {
if(person_score>no_person_score){
Serial.println(“有人”);
}
error_reporter->Report(“Person score: %d No person score: %d”, person_score,
no_person_score);
}
運行結(jié)果如下: