My hands-on ESP32 book is out: 《ESP32 物聯網專題》 (ESP32 IoT projects).
Books.com.tw (博客來) link: https://www.books.com.tw/products/0010901195
When teaching the ESP32-CAM, I would occasionally have a student ask why only one person can watch the stream. My usual answer was that this is a limitation of the microcontroller, while thinking to myself: a chip costing two or three hundred NT dollars that can run a streaming web server is already impressive; expecting multi-client streaming on top of that seems like asking too much.
Then one very studious student asked: "Teacher, you said the ESP32 has two cores and can even run multiple threads. Isn't it a bit hard to justify that it can't stream video to more than one client?"
He had a point. I stress in every class that this chip can run multiple threads, so I really should look into rewriting the single-client program for multiple clients.
Before starting, though, I followed the philosophers' advice ("don't reinvent the wheel") and searched Google first, which turned up this article: Multi-Client MJPEG Streaming From ESP32. Someone had already built the wheel, so I immediately sent the link to that inquisitive student to test (that's what you get for asking so many questions), and it worked on the first try. Excellent.
After several rounds of rewriting and trimming on our side, here is the resulting program for everyone.

Three clients connected at the same time (two PCs and one phone): the connections were very stable, and there was almost no lag even with multiple viewers.
#include <WiFi.h>
#include <WebServer.h>
#include <WiFiClient.h>
#include <Arduino.h>
#include <pgmspace.h>
#include <stdio.h>
#include "esp_log.h"
#include "esp_attr.h"
#include "esp_camera.h"
#include <esp_bt.h>
#include <esp_wifi.h>
#include <esp_sleep.h>
#include <driver/rtc_io.h>
// Set up your WiFi credentials
const char* ssid = "You";
const char* password = "0933932774";
// Define AI-Thinker ESP32-CAM camera pins
#define PWDN_GPIO_NUM 32
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 0
#define SIOD_GPIO_NUM 26
#define SIOC_GPIO_NUM 27
#define Y9_GPIO_NUM 35
#define Y8_GPIO_NUM 34
#define Y7_GPIO_NUM 39
#define Y6_GPIO_NUM 36
#define Y5_GPIO_NUM 21
#define Y4_GPIO_NUM 19
#define Y3_GPIO_NUM 18
#define Y2_GPIO_NUM 5
#define VSYNC_GPIO_NUM 25
#define HREF_GPIO_NUM 23
#define PCLK_GPIO_NUM 22
#define APP_CPU 1
#define PRO_CPU 0
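// Note: all three RTOS tasks created below are pinned to APP_CPU (core 1);
// on the Arduino-ESP32 core the WiFi/network stack normally runs on PRO_CPU
// (core 0), so camera/streaming work and networking stay on separate cores.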
class OV2640
{
public:
OV2640() {
fb = NULL;
};
~OV2640() {
};
esp_err_t init(camera_config_t config);
void run(void);
size_t getSize(void);
uint8_t *getfb(void);
int getWidth(void);
int getHeight(void);
framesize_t getFrameSize(void);
pixformat_t getPixelFormat(void);
void setFrameSize(framesize_t size);
void setPixelFormat(pixformat_t format);
private:
void runIfNeeded(); // grab a frame if we don't already have one
// camera_framesize_t _frame_size;
// camera_pixelformat_t _pixel_format;
camera_config_t _cam_config;
camera_fb_t *fb;
};
void OV2640::run(void)
{
if (fb)
//return the frame buffer back to the driver for reuse
esp_camera_fb_return(fb);
fb = esp_camera_fb_get();
}
void OV2640::runIfNeeded(void)
{
if (!fb)
run();
}
int OV2640::getWidth(void)
{
runIfNeeded();
return fb->width;
}
int OV2640::getHeight(void)
{
runIfNeeded();
return fb->height;
}
size_t OV2640::getSize(void)
{
runIfNeeded();
if (!fb)
return 0; // FIXME - this shouldn't be possible but apparently the new cam board returns null sometimes?
return fb->len;
}
uint8_t *OV2640::getfb(void)
{
runIfNeeded();
if (!fb)
return NULL; // FIXME - this shouldn't be possible but apparently the new cam board returns null sometimes?
return fb->buf;
}
framesize_t OV2640::getFrameSize(void)
{
return _cam_config.frame_size;
}
void OV2640::setFrameSize(framesize_t size)
{
_cam_config.frame_size = size;
}
pixformat_t OV2640::getPixelFormat(void)
{
return _cam_config.pixel_format;
}
void OV2640::setPixelFormat(pixformat_t format)
{
switch (format)
{
case PIXFORMAT_RGB565:
case PIXFORMAT_YUV422:
case PIXFORMAT_GRAYSCALE:
case PIXFORMAT_JPEG:
_cam_config.pixel_format = format;
break;
default:
_cam_config.pixel_format = PIXFORMAT_GRAYSCALE;
break;
}
}
esp_err_t OV2640::init(camera_config_t config)
{
memset(&_cam_config, 0, sizeof(_cam_config));
memcpy(&_cam_config, &config, sizeof(config));
esp_err_t err = esp_camera_init(&_cam_config);
if (err != ESP_OK)
{
printf("Camera probe failed with error 0x%x", err);
return err;
}
// ESP_ERROR_CHECK(gpio_install_isr_service(0));
return ESP_OK;
}
OV2640 cam;
WebServer server(80);
// ===== rtos task handles =========================
// Streaming is implemented with 3 tasks:
TaskHandle_t tMjpeg; // handles client connections to the webserver
TaskHandle_t tCam; // handles getting picture frames from the camera and storing them locally
TaskHandle_t tStream; // actually streaming frames to all connected clients
// The frameSync semaphore prevents the streaming buffer from being read while it is being replaced with the next frame
SemaphoreHandle_t frameSync = NULL;
// Queue stores currently connected clients to whom we are streaming
QueueHandle_t streamingClients;
// We will try to achieve 25 FPS frame rate
const int FPS = 25;
// We will handle web client requests every 50 ms (20 Hz)
const int WSINTERVAL = 50;
// ======== Server Connection Handler Task ==========================
void mjpegCB(void* pvParameters) {
TickType_t xLastWakeTime;
const TickType_t xFrequency = pdMS_TO_TICKS(WSINTERVAL);
// Creating frame synchronization semaphore and initializing it
frameSync = xSemaphoreCreateBinary();
xSemaphoreGive( frameSync );
// Creating a queue to track all connected clients
streamingClients = xQueueCreate( 10, sizeof(WiFiClient*) );
//=== setup section ==================
// Creating RTOS task for grabbing frames from the camera
xTaskCreatePinnedToCore(
camCB, // callback
"cam", // name
4096, // stack size
NULL, // parameters
2, // priority
&tCam, // RTOS task handle
APP_CPU); // core
// Creating task to push the stream to all connected clients
xTaskCreatePinnedToCore(
streamCB,
"strmCB",
4096,
NULL, //(void*) handler,
2,
&tStream,
APP_CPU);
// Registering webserver handling routines
server.on("/stream", HTTP_GET, handleJPGSstream);
server.on("/jpg", HTTP_GET, handleJPG);
server.onNotFound(handleNotFound);
// Starting webserver
server.begin();
//=== loop() section ===================
xLastWakeTime = xTaskGetTickCount();
for (;;) {
server.handleClient();
// After every server client handling request, we let other tasks run and then pause
taskYIELD();
vTaskDelayUntil(&xLastWakeTime, xFrequency);
}
}
// Commonly used variables:
volatile size_t camSize; // size of the current frame, in bytes
volatile char* camBuf; // pointer to the current frame
// ==== RTOS task to grab frames from the camera =========================
void camCB(void* pvParameters) {
TickType_t xLastWakeTime;
// A running interval associated with currently desired frame rate
const TickType_t xFrequency = pdMS_TO_TICKS(1000 / FPS);
// Mutex for the critical section of switching the active frames around
portMUX_TYPE xSemaphore = portMUX_INITIALIZER_UNLOCKED;
// Pointers to the 2 frames, their respective sizes and index of the current frame
char* fbs[2] = { NULL, NULL };
size_t fSize[2] = { 0, 0 };
int ifb = 0;
//=== loop() section ===================
xLastWakeTime = xTaskGetTickCount();
for (;;) {
// Grab a frame from the camera and query its size
cam.run();
size_t s = cam.getSize();
// If the frame is larger than the previously allocated space, allocate one third more than the current frame size
if (s > fSize[ifb]) {
fSize[ifb] = s * 4 / 3;
fbs[ifb] = allocateMemory(fbs[ifb], fSize[ifb]);
}
// Copy current frame into local buffer
char* b = (char*) cam.getfb();
memcpy(fbs[ifb], b, s);
// Let other tasks run and wait until the end of the current frame rate interval (if any time left)
taskYIELD();
vTaskDelayUntil(&xLastWakeTime, xFrequency);
// Only switch frames around if no frame is currently being streamed to a client
// Wait on a semaphore until client operation completes
xSemaphoreTake( frameSync, portMAX_DELAY );
// Do not allow interrupts while switching the current frame
taskENTER_CRITICAL(&xSemaphore);
camBuf = fbs[ifb];
camSize = s;
ifb = (ifb + 1) & 1; // this should produce 1, 0, 1, 0, 1 ... sequence
taskEXIT_CRITICAL(&xSemaphore);
// Let anyone waiting for a frame know that the frame is ready
xSemaphoreGive( frameSync );
// Technically only needed once: let the streaming task know that we have at least one frame
// and it could start sending frames to the clients, if any
xTaskNotifyGive( tStream );
// Immediately let other (streaming) tasks run
taskYIELD();
// If streaming task has suspended itself (no active clients to stream to)
// there is no need to grab frames from the camera. We can save some juice
// by suspending this task as well
if ( eTaskGetState( tStream ) == eSuspended ) {
vTaskSuspend(NULL); // passing NULL means "suspend yourself"
}
}
}
// ==== Memory allocator that takes advantage of PSRAM if present =======================
char* allocateMemory(char* aPtr, size_t aSize) {
// Since the current buffer is too small, free it
if (aPtr != NULL) free(aPtr);
size_t freeHeap = ESP.getFreeHeap();
char* ptr = NULL;
// If memory requested is more than 2/3 of the currently free heap, try PSRAM immediately
if ( aSize > freeHeap * 2 / 3 ) {
if ( psramFound() && ESP.getFreePsram() > aSize ) {
ptr = (char*) ps_malloc(aSize);
}
}
else {
// Enough free heap - let's try allocating fast RAM as a buffer
ptr = (char*) malloc(aSize);
// If allocation on the heap failed, let's give PSRAM one more chance:
if ( ptr == NULL && psramFound() && ESP.getFreePsram() > aSize) {
ptr = (char*) ps_malloc(aSize);
}
}
// Finally, if the memory pointer is NULL, we were not able to allocate any memory, and that is a terminal condition.
if (ptr == NULL) {
ESP.restart();
}
return ptr;
}
// ==== STREAMING ======================================================
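// Each client first receives HEADER, announcing a multipart/x-mixed-replace
// response. Every JPEG frame is then sent as its own part: BOUNDARY, followed
// by a Content-Type/Content-Length header and the JPEG data, so the browser
// keeps replacing the displayed image with the newest frame.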
const char HEADER[] = "HTTP/1.1 200 OK\r\n" \
"Access-Control-Allow-Origin: *\r\n" \
"Content-Type: multipart/x-mixed-replace; boundary=123456789000000000000987654321\r\n";
const char BOUNDARY[] = "\r\n--123456789000000000000987654321\r\n";
const char CTNTTYPE[] = "Content-Type: image/jpeg\r\nContent-Length: ";
const int hdrLen = strlen(HEADER);
const int bdrLen = strlen(BOUNDARY);
const int cntLen = strlen(CTNTTYPE);
// Handle a new streaming-client connection
void handleJPGSstream(void)
{
// Can only accommodate 10 clients. The limit is a default for WiFi connections
if ( !uxQueueSpacesAvailable(streamingClients) ) return;
// Create a new WiFi Client object to keep track of this one
WiFiClient* client = new WiFiClient();
*client = server.client();
// Immediately send this client a header
client->write(HEADER, hdrLen);
client->write(BOUNDARY, bdrLen);
// Push the client to the streaming queue
xQueueSend(streamingClients, (void *) &client, 0);
// Wake up streaming tasks, if they were previously suspended:
if ( eTaskGetState( tCam ) == eSuspended ) vTaskResume( tCam );
if ( eTaskGetState( tStream ) == eSuspended ) vTaskResume( tStream );
}
// RTOS task that streams frames to all connected clients
void streamCB(void * pvParameters) {
char buf[16];
TickType_t xLastWakeTime;
TickType_t xFrequency;
// Wait until the first frame is captured and there is something to send
// to clients
ulTaskNotifyTake( pdTRUE, /* Clear the notification value before exiting. */
portMAX_DELAY ); /* Block indefinitely. */
xLastWakeTime = xTaskGetTickCount();
for (;;) {
// Default assumption: we are running at the configured FPS
xFrequency = pdMS_TO_TICKS(1000 / FPS);
// Only bother to send anything if there is someone watching
UBaseType_t activeClients = uxQueueMessagesWaiting(streamingClients);
if ( activeClients ) {
// Adjust the period to the number of connected clients
xFrequency /= activeClients;
// Since we are sending the same frame to everyone,
// pop a client from the front of the queue
WiFiClient *client;
xQueueReceive (streamingClients, (void*) &client, 0);
// Check if this client is still connected.
if (!client->connected()) {
// delete this client reference if s/he has disconnected
// and don't put it back on the queue anymore. Bye!
delete client;
}
else {
// Ok. This is an actively connected client.
// Let's grab a semaphore to prevent frame changes while we
// are serving this frame
xSemaphoreTake( frameSync, portMAX_DELAY );
client->write(CTNTTYPE, cntLen);
sprintf(buf, "%u\r\n\r\n", (unsigned int) camSize);
client->write(buf, strlen(buf));
client->write((char*) camBuf, (size_t)camSize);
client->write(BOUNDARY, bdrLen);
// Since this client is still connected, push it to the end
// of the queue for further processing
xQueueSend(streamingClients, (void *) &client, 0);
// The frame has been served. Release the semaphore and let other tasks run.
// If there is a frame switch ready, it will happen now in between frames
xSemaphoreGive( frameSync );
taskYIELD();
}
}
else {
// Since there are no connected clients, there is no reason to waste battery running
vTaskSuspend(NULL);
}
// Let other tasks run after serving every client
taskYIELD();
vTaskDelayUntil(&xLastWakeTime, xFrequency);
}
}
const char JHEADER[] = "HTTP/1.1 200 OK\r\n" \
"Content-disposition: inline; filename=capture.jpg\r\n" \
"Content-type: image/jpeg\r\n\r\n";
const int jhdLen = strlen(JHEADER);
// Serve a single JPEG still image
void handleJPG(void)
{
WiFiClient client = server.client();
if (!client.connected()) return;
client.write(JHEADER, jhdLen);
client.write((char*)cam.getfb(), cam.getSize());
}
// Handle requests for URLs that are not registered
void handleNotFound()
{
String message = "Server is running!\n\n";
message += "URI: ";
message += server.uri();
message += "\nMethod: ";
message += (server.method() == HTTP_GET) ? "GET" : "POST";
message += "\nArguments: ";
message += server.args();
message += "\n";
server.send(200, "text/plain", message);
}
void setup()
{
// Setup Serial connection:
Serial.begin(115200);
delay(1000); // wait for a second to let Serial connect
// Configure the camera
camera_config_t config;
config.ledc_channel = LEDC_CHANNEL_0;
config.ledc_timer = LEDC_TIMER_0;
config.pin_d0 = Y2_GPIO_NUM;
config.pin_d1 = Y3_GPIO_NUM;
config.pin_d2 = Y4_GPIO_NUM;
config.pin_d3 = Y5_GPIO_NUM;
config.pin_d4 = Y6_GPIO_NUM;
config.pin_d5 = Y7_GPIO_NUM;
config.pin_d6 = Y8_GPIO_NUM;
config.pin_d7 = Y9_GPIO_NUM;
config.pin_xclk = XCLK_GPIO_NUM;
config.pin_pclk = PCLK_GPIO_NUM;
config.pin_vsync = VSYNC_GPIO_NUM;
config.pin_href = HREF_GPIO_NUM;
config.pin_sscb_sda = SIOD_GPIO_NUM;
config.pin_sscb_scl = SIOC_GPIO_NUM;
config.pin_pwdn = PWDN_GPIO_NUM;
config.pin_reset = RESET_GPIO_NUM;
config.xclk_freq_hz = 20000000;
config.pixel_format = PIXFORMAT_JPEG;
config.frame_size = FRAMESIZE_VGA; //FRAMESIZE_UXGA, SVGA, QVGA
config.jpeg_quality = 12; // 0-63, lower value means higher quality
config.fb_count = 2;
if (cam.init(config) != ESP_OK) {
Serial.println("Error initializing the camera");
delay(10000);
ESP.restart();
}
// Configure and connect to WiFi
IPAddress ip;
WiFi.mode(WIFI_STA);
WiFi.begin(ssid, password);
Serial.print("Connecting to WiFi");
while (WiFi.status() != WL_CONNECTED)
{
delay(500);
Serial.print(F("."));
}
ip = WiFi.localIP();
Serial.println(F("WiFi connected"));
Serial.println("");
Serial.print("Stream Link: http://");
Serial.print(ip);
Serial.println("/stream");
Serial.print("Still Jpg Link: http://");
Serial.print(ip);
Serial.println("/jpg");
// Start mainstreaming RTOS task
xTaskCreatePinnedToCore(
mjpegCB,
"mjpeg",
4096,
NULL,
2,
&tMjpeg,
APP_CPU);
}
void loop() {
//put your code here
}
May I ask whether this example can also be used with CAMERA_MODEL_WROVER_KIT? Does anything need to be changed?
It should all work~
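For reference, switching boards mostly comes down to replacing the camera pin definitions. Below is a sketch of the pin map for CAMERA_MODEL_WROVER_KIT as it appears in the camera_pins.h shipped with the esp32-camera examples; please verify it against your own board before flashing. The rest of the sketch should not need changes, and like the AI-Thinker module the WROVER-KIT has PSRAM available for the frame buffers.
// Define ESP-WROVER-KIT camera pins (replace the AI-Thinker block above)
// Pin map taken from the esp32-camera example camera_pins.h; verify against your board
#define PWDN_GPIO_NUM -1
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 21
#define SIOD_GPIO_NUM 26
#define SIOC_GPIO_NUM 27
#define Y9_GPIO_NUM 35
#define Y8_GPIO_NUM 34
#define Y7_GPIO_NUM 39
#define Y6_GPIO_NUM 36
#define Y5_GPIO_NUM 19
#define Y4_GPIO_NUM 18
#define Y3_GPIO_NUM 5
#define Y2_GPIO_NUM 4
#define VSYNC_GPIO_NUM 25
#define HREF_GPIO_NUM 23
#define PCLK_GPIO_NUM 22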