Add files via upload

This commit is contained in:
Linkenelis
2021-02-26 17:43:16 +01:00
committed by GitHub
parent fa0402608c
commit e72d6841e1
55 changed files with 6081 additions and 2 deletions

BIN  3D/Case+Stand.PNG  (new file, 30 KiB)
BIN  3D/ESP32-CAM-Back.stl  (new file)
BIN  3D/ESP32-CAM-Case.f3z  (new file)
BIN  3D/ESP32-CAM-Foot.stl  (new file)
BIN  3D/ESP32-CAM-Front.stl  (new file)
BIN  ArduinoIDE/Micro-RTSP.zip  (new file)
BIN  Docs/20200309_131750.mp4  (new file)
BIN  Docs/Breadboard-s.jpg  (new file, 1023 KiB)
BIN  Docs/Breadboard.jpg  (new file, 2.7 MiB)
BIN  Docs/ESP32-CAM.avi  (new file)
BIN  Docs/ESP32-CAM.mkv  (new file)
BIN  Docs/ESP32-CAM.mp4  (new file)
BIN  Docs/VLC-Windows-1.PNG  (new file, 19 KiB)
BIN  Docs/VLC-Windows.PNG  (new file, 8.2 KiB)
BIN  (image, name not shown)  (new file, 175 KiB)
BIN  HW/ESP32-CAM-Breadboard.fzz  (new file)
BIN  HW/ESP32-CAM-schematic.png  (new file, 199 KiB)

README.md (modified)

@@ -1,2 +1,18 @@
# ESP32-cam-Octoprint
Cheap ESP32-CAM firmware that serves two web streams, so it can stream video and still take a snapshot (for Octolapse).
It starts two web servers: one for the video stream on port 81, and one on the standard port 80 for the JPG snapshot, the LED and OTA.
Just type the IP address of your cam into a browser; the links are listed there.
If you happen to use the wrong web server (e.g. port 81 instead of the standard 80), you will be redirected automatically.
Yes, this makes it possible to take a snapshot while watching the live stream ;-) but you will see a glitch in the stream, as there is only one camera chip.
First-time setup has to be done via the serial port. After that you can use OTA: follow the link to put the ESP32-CAM into OTA mode, and change the upload port from COM to the IP address in platformio.ini.
You can also switch to OTA mode manually by connecting IO12 to ground for a second, then disconnecting it.
This repository is forked from the [Circuitrocks@Learn](https://learn.circuit.rocks/?p=2245) blog on how to stream video from the ESP32-CAM to an RTSP client or web browser.
The source code for PlatformIO is in the _**src**_ subfolder.
## Compiling with PlatformIO
Just open the folder containing the files with PlatformIO and adjust your settings in platformio.ini. Required libraries are either installed automatically (OneButton) or included in the lib folder (Micro-RTSP).
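For illustration, switching from serial to over-the-air uploads in platformio.ini looks roughly like this. The IP address is a placeholder for your camera's address, and the `upload_protocol = espota` line is an assumption based on PlatformIO's usual ESP32 OTA workflow; the platformio.ini in this repository only shows the `upload_port` swap.
```
;upload_port = COM6          ; first flash: serial COM port of the ESP32-CAM
upload_port = 192.168.1.50   ; later flashes: IP address of the ESP32-CAM (placeholder)
upload_protocol = espota     ; assumed: PlatformIO's over-the-air upload protocol
```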

customparts.csv (new file, 6 lines)

@@ -0,0 +1,6 @@
# Name, Type, SubType, Offset, Size, Flags
nvs, data, nvs, 0x9000, 0x5000,
otadata, data, ota, 0xe000, 0x2000,
app0, app, ota_0, 0x10000, 0x1E0000,
app1, app, ota_1, 0x1F0000,0x1E0000,
spiffs, data, spiffs, 0x3F0000,0x10000,

lib/Micro-RTSP/LICENSE (new file, 7 lines)

@@ -0,0 +1,7 @@
Copyright 2018 S. Kevin Hester-Chow, kevinh@geeksville.com (MIT License)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

lib/Micro-RTSP/README.md (new file, 93 lines)

@@ -0,0 +1,93 @@
# Micro-RTSP
This is a small library which can be used to serve up RTSP streams from
resource constrained MCUs. It lets you trivially make a $10 open source
RTSP video stream camera.
# Usage
This library works for ESP32/Arduino targets, but also for almost any POSIX-ish platform.
## Example arduino/ESP32 usage
This library will work standalone, but it is _super_ easy to use if your app is PlatformIO-based.
Just run "pio lib install Micro-RTSP" to pull the latest version from the PlatformIO library server. If you want to use the OV2640
camera support you'll need to be targeting the espressif32 platform in your project.
See the [example platform.io app](/examples). It should build and run on virtually any of the $10
ESP32-CAM boards (such as M5CAM). The relevant bit of the code is included below. In short:
1. Listen for a TCP connection on the RTSP port with accept()
2. When a connection comes in, create a CRtspSession and OV2640Streamer camera streamer objects.
3. While the connection remains, call session->handleRequests(0) to handle any incoming client requests.
4. Every 100ms or so call session->broadcastCurrentFrame() to send new frames to any clients.
```
void loop()
{
uint32_t msecPerFrame = 100;
static uint32_t lastimage = millis();
// If we have an active client connection, just service that until gone
// (FIXME - support multiple simultaneous clients)
if(session) {
session->handleRequests(0); // we don't use a timeout here,
// instead we send only if we have new enough frames
uint32_t now = millis();
if(now > lastimage + msecPerFrame || now < lastimage) { // handle clock rollover
session->broadcastCurrentFrame(now);
lastimage = now;
// check if we are overrunning our max frame rate
now = millis();
if(now > lastimage + msecPerFrame)
printf("warning exceeding max frame rate of %d ms\n", now - lastimage);
}
if(session->m_stopped) {
delete session;
delete streamer;
session = NULL;
streamer = NULL;
}
}
else {
client = rtspServer.accept();
if(client) {
//streamer = new SimStreamer(&client, true); // our streamer for UDP/TCP based RTP transport
streamer = new OV2640Streamer(&client, cam); // our streamer for UDP/TCP based RTP transport
session = new CRtspSession(&client, streamer); // our threads RTSP session and state
}
}
}
```
## Example posix/linux usage
There is a small standalone example [here](/test/RTSPTestServer.cpp). You can build it by following [these](/test/README.md) directions. The usage of the two key classes (CRtspSession and SimStreamer) is very similar to the ESP32 usage.
## Supporting new camera devices
Supporting new camera devices is quite simple. See OV2640Streamer for an example and implement streamImage()
by reading a frame from your camera.
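A rough sketch of that pattern is shown below; the `MyCamera` type and its `capture()` / `frameBuffer()` / `frameLength()` methods are hypothetical placeholders for your own camera driver, while the `CStreamer` interface is the one declared in CStreamer.h.
```
#include "CStreamer.h"

// Hypothetical stand-in for your own camera driver: it only has to hand over a complete JPEG frame.
struct MyCamera {
    void capture() { /* trigger a new JPEG capture on your hardware */ }
    unsigned const char *frameBuffer() { return nullptr; } // pointer to the JPEG data
    uint32_t frameLength() { return 0; }                    // JPEG length in bytes
};

class MyCameraStreamer : public CStreamer
{
    MyCamera &m_cam;
public:
    // width/height are passed to the base class so the RTP/JPEG payload header can be filled in
    MyCameraStreamer(SOCKET aClient, MyCamera &cam) : CStreamer(aClient, 640, 480), m_cam(cam) {}

    // called via CRtspSession::broadcastCurrentFrame() whenever a frame should go out
    virtual void streamImage(uint32_t curMsec)
    {
        m_cam.capture();                                                 // grab a fresh JPEG frame
        streamFrame(m_cam.frameBuffer(), m_cam.frameLength(), curMsec);  // fragment and send it as RTP
    }
};
```
OV2640Streamer in this library follows exactly this pattern, backed by the ESP32 camera driver.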
# Structure and design notes
# Issues and sending pull requests
Please report issues and send pull requests. I'll happily reply. ;-)
# Credits
The server code was initially based on a great 2013 [tutorial](https://www.medialan.de/usecase0001.html) by Medialan.
# License
Copyright 2018 S. Kevin Hester-Chow, kevinh@geeksville.com (MIT License)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

lib/Micro-RTSP/TODO.md (new file, 18 lines)

@@ -0,0 +1,18 @@
* add instructions for example app
* push RTSP streams to other servers ( https://github.com/ant-media/Ant-Media-Server/wiki/Getting-Started )
* make stack larger so that the various scratch buffers (currently in bss) can be shared
* cleanup code to a less ugly unified coding standard
* support multiple simultaneous clients on the device
* make octocat test image work again (by changing encoding type from 1 to 0 (422 vs 420))
DONE:
* serve real jpegs (use correct quantization & huffman tables)
* test that both TCP and UDP clients work
* change framerate to something slow
* test remote access
* select a licence and put license into github
* find cause of new mystery pause when starting up in sim mode
* split sim code from real code via inheritance
* use device camera
* package the ESP32-CAM stuff as a library so I can depend on it
* package as a library https://docs.platformio.org/en/latest/librarymanager/creating.html#library-creating-examples


@@ -0,0 +1,226 @@
#include "OV2640.h"
#include <WiFi.h>
#include <WebServer.h>
#include <WiFiClient.h>
#include "SimStreamer.h"
#include "OV2640Streamer.h"
#include "CRtspSession.h"
#define ENABLE_OLED // if you want to use the OLED, turn on this macro
// #define SOFTAP_MODE // if you want to run your own softAP, turn this on
#define ENABLE_WEBSERVER
#define ENABLE_RTSPSERVER
#ifdef ENABLE_OLED
#include "SSD1306.h"
#define OLED_ADDRESS 0x3c
#define I2C_SDA 14
#define I2C_SCL 13
SSD1306Wire display(OLED_ADDRESS, I2C_SDA, I2C_SCL, GEOMETRY_128_32);
bool hasDisplay; // we probe for the device at runtime
#endif
OV2640 cam;
#ifdef ENABLE_WEBSERVER
WebServer server(80);
#endif
#ifdef ENABLE_RTSPSERVER
WiFiServer rtspServer(8554);
#endif
#ifdef SOFTAP_MODE
IPAddress apIP = IPAddress(192, 168, 1, 1);
#else
#include "wifikeys.h"
#endif
#ifdef ENABLE_WEBSERVER
void handle_jpg_stream(void)
{
WiFiClient client = server.client();
String response = "HTTP/1.1 200 OK\r\n";
response += "Content-Type: multipart/x-mixed-replace; boundary=frame\r\n\r\n";
server.sendContent(response);
while (1)
{
cam.run();
if (!client.connected())
break;
response = "--frame\r\n";
response += "Content-Type: image/jpeg\r\n\r\n";
server.sendContent(response);
client.write((char *)cam.getfb(), cam.getSize());
server.sendContent("\r\n");
if (!client.connected())
break;
}
}
void handle_jpg(void)
{
WiFiClient client = server.client();
cam.run();
if (!client.connected())
{
return;
}
String response = "HTTP/1.1 200 OK\r\n";
response += "Content-disposition: inline; filename=capture.jpg\r\n";
response += "Content-type: image/jpeg\r\n\r\n";
server.sendContent(response);
client.write((char *)cam.getfb(), cam.getSize());
}
void handleNotFound()
{
String message = "Server is running!\n\n";
message += "URI: ";
message += server.uri();
message += "\nMethod: ";
message += (server.method() == HTTP_GET) ? "GET" : "POST";
message += "\nArguments: ";
message += server.args();
message += "\n";
server.send(200, "text/plain", message);
}
#endif
void lcdMessage(String msg)
{
#ifdef ENABLE_OLED
if(hasDisplay) {
display.clear();
display.drawString(128 / 2, 32 / 2, msg);
display.display();
}
#endif
}
void setup()
{
#ifdef ENABLE_OLED
hasDisplay = display.init();
if(hasDisplay) {
display.flipScreenVertically();
display.setFont(ArialMT_Plain_16);
display.setTextAlignment(TEXT_ALIGN_CENTER);
}
#endif
lcdMessage("booting");
Serial.begin(115200);
while (!Serial)
{
;
}
cam.init(esp32cam_config);
IPAddress ip;
#ifdef SOFTAP_MODE
const char *hostname = "devcam";
// WiFi.hostname(hostname); // FIXME - find out why undefined
lcdMessage("starting softAP");
WiFi.mode(WIFI_AP);
WiFi.softAPConfig(apIP, apIP, IPAddress(255, 255, 255, 0));
bool result = WiFi.softAP(hostname, "12345678", 1, 0);
if (!result)
{
Serial.println("AP Config failed.");
return;
}
else
{
Serial.println("AP Config Success.");
Serial.print("AP MAC: ");
Serial.println(WiFi.softAPmacAddress());
ip = WiFi.softAPIP();
}
#else
lcdMessage(String("join ") + ssid);
WiFi.mode(WIFI_STA);
WiFi.begin(ssid, password);
while (WiFi.status() != WL_CONNECTED)
{
delay(500);
Serial.print(F("."));
}
ip = WiFi.localIP();
Serial.println(F("WiFi connected"));
Serial.println("");
Serial.println(ip);
#endif
lcdMessage(ip.toString());
#ifdef ENABLE_WEBSERVER
server.on("/", HTTP_GET, handle_jpg_stream);
server.on("/jpg", HTTP_GET, handle_jpg);
server.onNotFound(handleNotFound);
server.begin();
#endif
#ifdef ENABLE_RTSPSERVER
rtspServer.begin();
#endif
}
CStreamer *streamer;
CRtspSession *session;
WiFiClient client; // FIXME, support multiple clients
void loop()
{
#ifdef ENABLE_WEBSERVER
server.handleClient();
#endif
#ifdef ENABLE_RTSPSERVER
uint32_t msecPerFrame = 100;
static uint32_t lastimage = millis();
// If we have an active client connection, just service that until gone
// (FIXME - support multiple simultaneous clients)
if(session) {
session->handleRequests(0); // we don't use a timeout here,
// instead we send only if we have new enough frames
uint32_t now = millis();
if(now > lastimage + msecPerFrame || now < lastimage) { // handle clock rollover
session->broadcastCurrentFrame(now);
lastimage = now;
// check if we are overrunning our max frame rate
now = millis();
if(now > lastimage + msecPerFrame)
printf("warning exceeding max frame rate of %d ms\n", now - lastimage);
}
if(session->m_stopped) {
delete session;
delete streamer;
session = NULL;
streamer = NULL;
}
}
else {
client = rtspServer.accept();
if(client) {
//streamer = new SimStreamer(&client, true); // our streamer for UDP/TCP based RTP transport
streamer = new OV2640Streamer(&client, cam); // our streamer for UDP/TCP based RTP transport
session = new CRtspSession(&client, streamer); // our threads RTSP session and state
}
}
#endif
}


@@ -0,0 +1,15 @@
; PlatformIO Project Configuration File
;
; Build options: build flags, source filter
; Upload options: custom upload port, speed and extra flags
; Library options: dependencies, extra library storages
; Advanced options: extra scripting
;
; Please visit documentation for the other options and examples
; https://docs.platformio.org/page/projectconf.html
[env:m5stack-core-esp32]
platform = espressif32@>=1.6.0
board = m5stack-core-esp32
framework = arduino
lib_deps = Micro-RTSP


@@ -0,0 +1,3 @@
// copy this file to wifikeys.h and edit
const char *ssid = "YOURNETHERE"; // Put your SSID here
const char *password = "YOURPASSWORDHERE"; // Put your PASSWORD here


@@ -0,0 +1,22 @@
{
"name": "Micro-RTSP",
"keywords": "esp32, camera, esp32-cam, rtsp",
"description": "A small/efficient RTSP server for ESP32 and other micros",
"repository":
{
"type": "git",
"url": "https://github.com/geeksville/Micro-RTSP.git"
},
"authors":
[
{
"name": "Kevin Hester",
"email": "kevinh@geeksville.com",
"url": "https://github.com/geeksville",
"maintainer": true
}
],
"version": "0.1.6",
"frameworks": "arduino",
"platforms": "*"
}


@@ -0,0 +1,9 @@
name=Micro-RTSP
version=0.1.6
author=Kevin Hester
maintainer=Kevin Hester <kevinh@geeksville.com>
sentence=Micro RTSP server for micros
paragraph=A small/efficient RTSP server for ESP32 and other micros
category=Data Storage
url=https://github.com/geeksville/Micro-RTSP.git
architectures=*


@@ -0,0 +1,412 @@
#include "CRtspSession.h"
#include <stdio.h>
#include <time.h>
CRtspSession::CRtspSession(SOCKET aRtspClient, CStreamer * aStreamer) : m_RtspClient(aRtspClient),m_Streamer(aStreamer)
{
printf("Creating RTSP session\n");
Init();
m_RtspSessionID = getRandom(); // create a session ID
m_RtspSessionID |= 0x80000000;
m_StreamID = -1;
m_ClientRTPPort = 0;
m_ClientRTCPPort = 0;
m_TcpTransport = false;
m_streaming = false;
m_stopped = false;
};
CRtspSession::~CRtspSession()
{
closesocket(m_RtspClient);
};
void CRtspSession::Init()
{
m_RtspCmdType = RTSP_UNKNOWN;
memset(m_URLPreSuffix, 0x00, sizeof(m_URLPreSuffix));
memset(m_URLSuffix, 0x00, sizeof(m_URLSuffix));
memset(m_CSeq, 0x00, sizeof(m_CSeq));
memset(m_URLHostPort, 0x00, sizeof(m_URLHostPort));
m_ContentLength = 0;
};
bool CRtspSession::ParseRtspRequest(char const * aRequest, unsigned aRequestSize)
{
char CmdName[RTSP_PARAM_STRING_MAX];
static char CurRequest[RTSP_BUFFER_SIZE]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
unsigned CurRequestSize;
Init();
CurRequestSize = aRequestSize;
memcpy(CurRequest,aRequest,aRequestSize);
// check whether the request contains information about the RTP/RTCP UDP client ports (SETUP command)
char * ClientPortPtr;
char * TmpPtr;
static char CP[1024];
char * pCP;
ClientPortPtr = strstr(CurRequest,"client_port");
if (ClientPortPtr != nullptr)
{
TmpPtr = strstr(ClientPortPtr,"\r\n");
if (TmpPtr != nullptr)
{
TmpPtr[0] = 0x00;
strcpy(CP,ClientPortPtr);
pCP = strstr(CP,"=");
if (pCP != nullptr)
{
pCP++;
strcpy(CP,pCP);
pCP = strstr(CP,"-");
if (pCP != nullptr)
{
pCP[0] = 0x00;
m_ClientRTPPort = atoi(CP);
m_ClientRTCPPort = m_ClientRTPPort + 1;
};
};
};
};
// Read everything up to the first space as the command name
bool parseSucceeded = false;
unsigned i;
for (i = 0; i < sizeof(CmdName)-1 && i < CurRequestSize; ++i)
{
char c = CurRequest[i];
if (c == ' ' || c == '\t')
{
parseSucceeded = true;
break;
}
CmdName[i] = c;
}
CmdName[i] = '\0';
if (!parseSucceeded) {
printf("failed to parse RTSP\n");
return false;
}
printf("RTSP received %s\n", CmdName);
// find out the command type
if (strstr(CmdName,"OPTIONS") != nullptr) m_RtspCmdType = RTSP_OPTIONS; else
if (strstr(CmdName,"DESCRIBE") != nullptr) m_RtspCmdType = RTSP_DESCRIBE; else
if (strstr(CmdName,"SETUP") != nullptr) m_RtspCmdType = RTSP_SETUP; else
if (strstr(CmdName,"PLAY") != nullptr) m_RtspCmdType = RTSP_PLAY; else
if (strstr(CmdName,"TEARDOWN") != nullptr) m_RtspCmdType = RTSP_TEARDOWN;
// check whether the request contains transport information (UDP or TCP)
if (m_RtspCmdType == RTSP_SETUP)
{
TmpPtr = strstr(CurRequest,"RTP/AVP/TCP");
if (TmpPtr != nullptr) m_TcpTransport = true; else m_TcpTransport = false;
};
// Skip over the prefix of any "rtsp://" or "rtsp:/" URL that follows:
unsigned j = i+1;
while (j < CurRequestSize && (CurRequest[j] == ' ' || CurRequest[j] == '\t')) ++j; // skip over any additional white space
for (; (int)j < (int)(CurRequestSize-8); ++j)
{
if ((CurRequest[j] == 'r' || CurRequest[j] == 'R') &&
(CurRequest[j+1] == 't' || CurRequest[j+1] == 'T') &&
(CurRequest[j+2] == 's' || CurRequest[j+2] == 'S') &&
(CurRequest[j+3] == 'p' || CurRequest[j+3] == 'P') &&
CurRequest[j+4] == ':' && CurRequest[j+5] == '/')
{
j += 6;
if (CurRequest[j] == '/')
{ // This is a "rtsp://" URL; skip over the host:port part that follows:
++j;
unsigned uidx = 0;
while (j < CurRequestSize && CurRequest[j] != '/' && CurRequest[j] != ' ' && uidx < sizeof(m_URLHostPort) - 1)
{ // extract the host:port part of the URL here
m_URLHostPort[uidx] = CurRequest[j];
uidx++;
++j;
};
}
else --j;
i = j;
break;
}
}
// Look for the URL suffix (before the following "RTSP/"):
parseSucceeded = false;
for (unsigned k = i+1; (int)k < (int)(CurRequestSize-5); ++k)
{
if (CurRequest[k] == 'R' && CurRequest[k+1] == 'T' &&
CurRequest[k+2] == 'S' && CurRequest[k+3] == 'P' &&
CurRequest[k+4] == '/')
{
while (--k >= i && CurRequest[k] == ' ') {}
unsigned k1 = k;
while (k1 > i && CurRequest[k1] != '/') --k1;
if (k - k1 + 1 > sizeof(m_URLSuffix)) return false;
unsigned n = 0, k2 = k1+1;
while (k2 <= k) m_URLSuffix[n++] = CurRequest[k2++];
m_URLSuffix[n] = '\0';
if (k1 - i > sizeof(m_URLPreSuffix)) return false;
n = 0; k2 = i + 1;
while (k2 <= k1 - 1) m_URLPreSuffix[n++] = CurRequest[k2++];
m_URLPreSuffix[n] = '\0';
i = k + 7;
parseSucceeded = true;
break;
}
}
if (!parseSucceeded) return false;
// Look for "CSeq:", skip whitespace, then read everything up to the next \r or \n as 'CSeq':
parseSucceeded = false;
for (j = i; (int)j < (int)(CurRequestSize-5); ++j)
{
if (CurRequest[j] == 'C' && CurRequest[j+1] == 'S' &&
CurRequest[j+2] == 'e' && CurRequest[j+3] == 'q' &&
CurRequest[j+4] == ':')
{
j += 5;
while (j < CurRequestSize && (CurRequest[j] == ' ' || CurRequest[j] == '\t')) ++j;
unsigned n;
for (n = 0; n < sizeof(m_CSeq)-1 && j < CurRequestSize; ++n,++j)
{
char c = CurRequest[j];
if (c == '\r' || c == '\n')
{
parseSucceeded = true;
break;
}
m_CSeq[n] = c;
}
m_CSeq[n] = '\0';
break;
}
}
if (!parseSucceeded) return false;
// Also: Look for "Content-Length:" (optional)
for (j = i; (int)j < (int)(CurRequestSize-15); ++j)
{
if (CurRequest[j] == 'C' && CurRequest[j+1] == 'o' &&
CurRequest[j+2] == 'n' && CurRequest[j+3] == 't' &&
CurRequest[j+4] == 'e' && CurRequest[j+5] == 'n' &&
CurRequest[j+6] == 't' && CurRequest[j+7] == '-' &&
(CurRequest[j+8] == 'L' || CurRequest[j+8] == 'l') &&
CurRequest[j+9] == 'e' && CurRequest[j+10] == 'n' &&
CurRequest[j+11] == 'g' && CurRequest[j+12] == 't' &&
CurRequest[j+13] == 'h' && CurRequest[j+14] == ':')
{
j += 15;
while (j < CurRequestSize && (CurRequest[j] == ' ' || CurRequest[j] == '\t')) ++j;
unsigned num;
if (sscanf(&CurRequest[j], "%u", &num) == 1) m_ContentLength = num;
}
}
return true;
};
RTSP_CMD_TYPES CRtspSession::Handle_RtspRequest(char const * aRequest, unsigned aRequestSize)
{
if (ParseRtspRequest(aRequest,aRequestSize))
{
switch (m_RtspCmdType)
{
case RTSP_OPTIONS: { Handle_RtspOPTION(); break; };
case RTSP_DESCRIBE: { Handle_RtspDESCRIBE(); break; };
case RTSP_SETUP: { Handle_RtspSETUP(); break; };
case RTSP_PLAY: { Handle_RtspPLAY(); break; };
default: {};
};
};
return m_RtspCmdType;
};
void CRtspSession::Handle_RtspOPTION()
{
static char Response[1024]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
snprintf(Response,sizeof(Response),
"RTSP/1.0 200 OK\r\nCSeq: %s\r\n"
"Public: DESCRIBE, SETUP, TEARDOWN, PLAY, PAUSE\r\n\r\n",m_CSeq);
socketsend(m_RtspClient,Response,strlen(Response));
}
void CRtspSession::Handle_RtspDESCRIBE()
{
static char Response[1024]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
static char SDPBuf[1024];
static char URLBuf[1024];
// check whether we know a stream with the URL which is requested
m_StreamID = -1; // invalid URL
if ((strcmp(m_URLPreSuffix,"mjpeg") == 0) && (strcmp(m_URLSuffix,"1") == 0)) m_StreamID = 0; else
if ((strcmp(m_URLPreSuffix,"mjpeg") == 0) && (strcmp(m_URLSuffix,"2") == 0)) m_StreamID = 1;
if (m_StreamID == -1)
{ // Stream not available
snprintf(Response,sizeof(Response),
"RTSP/1.0 404 Stream Not Found\r\nCSeq: %s\r\n%s\r\n",
m_CSeq,
DateHeader());
socketsend(m_RtspClient,Response,strlen(Response));
return;
};
// simulate DESCRIBE server response
static char OBuf[256];
char * ColonPtr;
strcpy(OBuf,m_URLHostPort);
ColonPtr = strstr(OBuf,":");
if (ColonPtr != nullptr) ColonPtr[0] = 0x00;
snprintf(SDPBuf,sizeof(SDPBuf),
"v=0\r\n"
"o=- %d 1 IN IP4 %s\r\n"
"s=\r\n"
"t=0 0\r\n" // start / stop - 0 -> unbounded and permanent session
"m=video 0 RTP/AVP 26\r\n" // currently we just handle UDP sessions
// "a=x-dimensions: 640,480\r\n"
"c=IN IP4 0.0.0.0\r\n",
rand(),
OBuf);
char StreamName[64];
switch (m_StreamID)
{
case 0: strcpy(StreamName,"mjpeg/1"); break;
case 1: strcpy(StreamName,"mjpeg/2"); break;
};
snprintf(URLBuf,sizeof(URLBuf),
"rtsp://%s/%s",
m_URLHostPort,
StreamName);
snprintf(Response,sizeof(Response),
"RTSP/1.0 200 OK\r\nCSeq: %s\r\n"
"%s\r\n"
"Content-Base: %s/\r\n"
"Content-Type: application/sdp\r\n"
"Content-Length: %d\r\n\r\n"
"%s",
m_CSeq,
DateHeader(),
URLBuf,
(int) strlen(SDPBuf),
SDPBuf);
socketsend(m_RtspClient,Response,strlen(Response));
}
void CRtspSession::Handle_RtspSETUP()
{
static char Response[1024];
static char Transport[255];
// init RTP streamer transport type (UDP or TCP) and ports for UDP transport
m_Streamer->InitTransport(m_ClientRTPPort,m_ClientRTCPPort,m_TcpTransport);
// simulate SETUP server response
if (m_TcpTransport)
snprintf(Transport,sizeof(Transport),"RTP/AVP/TCP;unicast;interleaved=0-1");
else
snprintf(Transport,sizeof(Transport),
"RTP/AVP;unicast;destination=127.0.0.1;source=127.0.0.1;client_port=%i-%i;server_port=%i-%i",
m_ClientRTPPort,
m_ClientRTCPPort,
m_Streamer->GetRtpServerPort(),
m_Streamer->GetRtcpServerPort());
snprintf(Response,sizeof(Response),
"RTSP/1.0 200 OK\r\nCSeq: %s\r\n"
"%s\r\n"
"Transport: %s\r\n"
"Session: %i\r\n\r\n",
m_CSeq,
DateHeader(),
Transport,
m_RtspSessionID);
socketsend(m_RtspClient,Response,strlen(Response));
}
void CRtspSession::Handle_RtspPLAY()
{
static char Response[1024];
// simulate SETUP server response
snprintf(Response,sizeof(Response),
"RTSP/1.0 200 OK\r\nCSeq: %s\r\n"
"%s\r\n"
"Range: npt=0.000-\r\n"
"Session: %i\r\n"
"RTP-Info: url=rtsp://127.0.0.1:8554/mjpeg/1/track1\r\n\r\n",
m_CSeq,
DateHeader(),
m_RtspSessionID);
socketsend(m_RtspClient,Response,strlen(Response));
}
char const * CRtspSession::DateHeader()
{
static char buf[200];
time_t tt = time(NULL);
strftime(buf, sizeof buf, "Date: %a, %b %d %Y %H:%M:%S GMT", gmtime(&tt));
return buf;
}
int CRtspSession::GetStreamID()
{
return m_StreamID;
};
/**
Read from our socket, parsing commands as possible.
*/
bool CRtspSession::handleRequests(uint32_t readTimeoutMs)
{
if(m_stopped)
return false; // Already closed down
static char RecvBuf[RTSP_BUFFER_SIZE]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
memset(RecvBuf,0x00,sizeof(RecvBuf));
int res = socketread(m_RtspClient,RecvBuf,sizeof(RecvBuf), readTimeoutMs);
if(res > 0) {
// we filter away everything which seems not to be an RTSP command: O-ption, D-escribe, S-etup, P-lay, T-eardown
if ((RecvBuf[0] == 'O') || (RecvBuf[0] == 'D') || (RecvBuf[0] == 'S') || (RecvBuf[0] == 'P') || (RecvBuf[0] == 'T'))
{
RTSP_CMD_TYPES C = Handle_RtspRequest(RecvBuf,res);
if (C == RTSP_PLAY)
m_streaming = true;
else if (C == RTSP_TEARDOWN)
m_stopped = true;
}
return true;
}
else if(res == 0) {
printf("client closed socket, exiting\n");
m_stopped = true;
return true;
}
else {
// Timeout on read
return false;
}
}
void CRtspSession::broadcastCurrentFrame(uint32_t curMsec) {
// Send a frame
if (m_streaming && !m_stopped) {
// printf("serving a frame\n");
m_Streamer->streamImage(curMsec);
}
}


@@ -0,0 +1,73 @@
#pragma once
#include "CStreamer.h"
#include "platglue.h"
// supported command types
enum RTSP_CMD_TYPES
{
RTSP_OPTIONS,
RTSP_DESCRIBE,
RTSP_SETUP,
RTSP_PLAY,
RTSP_TEARDOWN,
RTSP_UNKNOWN
};
#define RTSP_BUFFER_SIZE 10000 // for incoming requests, and outgoing responses
#define RTSP_PARAM_STRING_MAX 200
#define MAX_HOSTNAME_LEN 256
class CRtspSession
{
public:
CRtspSession(SOCKET aRtspClient, CStreamer * aStreamer);
~CRtspSession();
RTSP_CMD_TYPES Handle_RtspRequest(char const * aRequest, unsigned aRequestSize);
int GetStreamID();
/**
Read from our socket, parsing commands as possible.
return false if the read timed out
*/
bool handleRequests(uint32_t readTimeoutMs);
/**
broadcast a current frame
*/
void broadcastCurrentFrame(uint32_t curMsec);
bool m_streaming;
bool m_stopped;
private:
void Init();
bool ParseRtspRequest(char const * aRequest, unsigned aRequestSize);
char const * DateHeader();
// RTSP request command handlers
void Handle_RtspOPTION();
void Handle_RtspDESCRIBE();
void Handle_RtspSETUP();
void Handle_RtspPLAY();
// global session state parameters
int m_RtspSessionID;
SOCKET m_RtspClient; // RTSP socket of that session
int m_StreamID; // number of simulated stream of that session
IPPORT m_ClientRTPPort; // client port for UDP based RTP transport
IPPORT m_ClientRTCPPort; // client port for UDP based RTCP transport
bool m_TcpTransport; // if Tcp based streaming was activated
CStreamer * m_Streamer; // the UDP or TCP streamer of that session
// parameters of the last received RTSP request
RTSP_CMD_TYPES m_RtspCmdType; // command type (if any) of the current request
char m_URLPreSuffix[RTSP_PARAM_STRING_MAX]; // stream name pre suffix
char m_URLSuffix[RTSP_PARAM_STRING_MAX]; // stream name suffix
char m_CSeq[RTSP_PARAM_STRING_MAX]; // RTSP command sequence number
char m_URLHostPort[MAX_HOSTNAME_LEN]; // host:port part of the URL
unsigned m_ContentLength; // SDP string size
};


@@ -0,0 +1,343 @@
#include "CStreamer.h"
#include <stdio.h>
CStreamer::CStreamer(SOCKET aClient, u_short width, u_short height) : m_Client(aClient)
{
printf("Creating TSP streamer\n");
m_RtpServerPort = 0;
m_RtcpServerPort = 0;
m_RtpClientPort = 0;
m_RtcpClientPort = 0;
m_SequenceNumber = 0;
m_Timestamp = 0;
m_SendIdx = 0;
m_TCPTransport = false;
m_RtpSocket = NULLSOCKET;
m_RtcpSocket = NULLSOCKET;
m_width = width;
m_height = height;
m_prevMsec = 0;
};
CStreamer::~CStreamer()
{
udpsocketclose(m_RtpSocket);
udpsocketclose(m_RtcpSocket);
};
int CStreamer::SendRtpPacket(unsigned const char * jpeg, int jpegLen, int fragmentOffset, BufPtr quant0tbl, BufPtr quant1tbl)
{
#define KRtpHeaderSize 12 // size of the RTP header
#define KJpegHeaderSize 8 // size of the special JPEG payload header
#define MAX_FRAGMENT_SIZE 1100 // FIXME, pick more carefully
int fragmentLen = MAX_FRAGMENT_SIZE;
if(fragmentLen + fragmentOffset > jpegLen) // Shrink last fragment if needed
fragmentLen = jpegLen - fragmentOffset;
bool isLastFragment = (fragmentOffset + fragmentLen) == jpegLen;
// Do we have custom quant tables? If so include them per RFC
bool includeQuantTbl = quant0tbl && quant1tbl && fragmentOffset == 0;
uint8_t q = includeQuantTbl ? 128 : 0x5e;
static char RtpBuf[2048]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
int RtpPacketSize = fragmentLen + KRtpHeaderSize + KJpegHeaderSize + (includeQuantTbl ? (4 + 64 * 2) : 0);
memset(RtpBuf,0x00,sizeof(RtpBuf));
// Prepare the first 4 byte of the packet. This is the Rtp over Rtsp header in case of TCP based transport
RtpBuf[0] = '$'; // magic number
RtpBuf[1] = 0; // number of the multiplexed subchannel on the RTSP connection - here the RTP channel
RtpBuf[2] = (RtpPacketSize & 0x0000FF00) >> 8;
RtpBuf[3] = (RtpPacketSize & 0x000000FF);
// Prepare the 12 byte RTP header
RtpBuf[4] = 0x80; // RTP version
RtpBuf[5] = 0x1a | (isLastFragment ? 0x80 : 0x00); // JPEG payload (26) and marker bit
RtpBuf[7] = m_SequenceNumber & 0x0FF; // each packet is counted with a sequence counter
RtpBuf[6] = m_SequenceNumber >> 8;
RtpBuf[8] = (m_Timestamp & 0xFF000000) >> 24; // each image gets a timestamp
RtpBuf[9] = (m_Timestamp & 0x00FF0000) >> 16;
RtpBuf[10] = (m_Timestamp & 0x0000FF00) >> 8;
RtpBuf[11] = (m_Timestamp & 0x000000FF);
RtpBuf[12] = 0x13; // 4 byte SSRC (synchronization source identifier)
RtpBuf[13] = 0xf9; // we just use an arbitrary number here to keep it simple
RtpBuf[14] = 0x7e;
RtpBuf[15] = 0x67;
// Prepare the 8 byte payload JPEG header
RtpBuf[16] = 0x00; // type specific
RtpBuf[17] = (fragmentOffset & 0x00FF0000) >> 16; // 3 byte fragmentation offset for fragmented images
RtpBuf[18] = (fragmentOffset & 0x0000FF00) >> 8;
RtpBuf[19] = (fragmentOffset & 0x000000FF);
/* These sampling factors indicate that the chrominance components of
type 0 video is downsampled horizontally by 2 (often called 4:2:2)
while the chrominance components of type 1 video are downsampled both
horizontally and vertically by 2 (often called 4:2:0). */
RtpBuf[20] = 0x00; // type (fixme might be wrong for camera data) https://tools.ietf.org/html/rfc2435
RtpBuf[21] = q; // quality scale factor was 0x5e
RtpBuf[22] = m_width / 8; // width / 8
RtpBuf[23] = m_height / 8; // height / 8
int headerLen = 24; // Including the JPEG header but not the quant table header
if(includeQuantTbl) { // we need a quant header - but only in first packet of the frame
//printf("inserting quanttbl\n");
RtpBuf[24] = 0; // MBZ
RtpBuf[25] = 0; // 8 bit precision
RtpBuf[26] = 0; // MSB of length
int numQantBytes = 64; // Two 64 byte tables
RtpBuf[27] = 2 * numQantBytes; // LSB of length
headerLen += 4;
memcpy(RtpBuf + headerLen, quant0tbl, numQantBytes);
headerLen += numQantBytes;
memcpy(RtpBuf + headerLen, quant1tbl, numQantBytes);
headerLen += numQantBytes;
}
// printf("Sending timestamp %d, seq %d, fragoff %d, fraglen %d, jpegLen %d\n", m_Timestamp, m_SequenceNumber, fragmentOffset, fragmentLen, jpegLen);
// append the JPEG scan data to the RTP buffer
memcpy(RtpBuf + headerLen,jpeg + fragmentOffset, fragmentLen);
fragmentOffset += fragmentLen;
m_SequenceNumber++; // prepare the packet counter for the next packet
IPADDRESS otherip;
IPPORT otherport;
socketpeeraddr(m_Client, &otherip, &otherport);
// RTP marker bit must be set on last fragment
if (m_TCPTransport) // RTP over RTSP - we send the buffer + 4 byte additional header
socketsend(m_Client,RtpBuf,RtpPacketSize + 4);
else // UDP - we send just the buffer by skipping the 4 byte RTP over RTSP header
udpsocketsend(m_RtpSocket,&RtpBuf[4],RtpPacketSize, otherip, m_RtpClientPort);
return isLastFragment ? 0 : fragmentOffset;
};
void CStreamer::InitTransport(u_short aRtpPort, u_short aRtcpPort, bool TCP)
{
m_RtpClientPort = aRtpPort;
m_RtcpClientPort = aRtcpPort;
m_TCPTransport = TCP;
if (!m_TCPTransport)
{ // allocate port pairs for RTP/RTCP ports in UDP transport mode
for (u_short P = 6970; P < 0xFFFE; P += 2)
{
m_RtpSocket = udpsocketcreate(P);
if (m_RtpSocket)
{ // Rtp socket was bound successfully. Lets try to bind the consecutive Rtsp socket
m_RtcpSocket = udpsocketcreate(P + 1);
if (m_RtcpSocket)
{
m_RtpServerPort = P;
m_RtcpServerPort = P+1;
break;
}
else
{
udpsocketclose(m_RtpSocket);
udpsocketclose(m_RtcpSocket);
};
}
};
};
};
u_short CStreamer::GetRtpServerPort()
{
return m_RtpServerPort;
};
u_short CStreamer::GetRtcpServerPort()
{
return m_RtcpServerPort;
};
void CStreamer::streamFrame(unsigned const char *data, uint32_t dataLen, uint32_t curMsec)
{
if(m_prevMsec == 0) // first frame init our timestamp
m_prevMsec = curMsec;
// compute deltat (being careful to handle clock rollover with a little lie)
uint32_t deltams = (curMsec >= m_prevMsec) ? curMsec - m_prevMsec : 100;
m_prevMsec = curMsec;
// locate quant tables if possible
BufPtr qtable0, qtable1;
if(!decodeJPEGfile(&data, &dataLen, &qtable0, &qtable1)) {
printf("can't decode jpeg data\n");
return;
}
int offset = 0;
do {
offset = SendRtpPacket(data, dataLen, offset, qtable0, qtable1);
delay(15);
} while(offset != 0);
// Increment ONLY after a full frame
uint32_t units = 90000; // Hz per RFC 2435
m_Timestamp += (units * deltams / 1000); // fixed timestamp increment for a frame rate of 25fps
m_SendIdx++;
if (m_SendIdx > 1) m_SendIdx = 0;
};
#include <assert.h>
// search for a particular JPEG marker, moves *start to just after that marker
// This function fixes up the provided start ptr to point to the
// actual JPEG stream data and returns the number of bytes skipped
// APP0 e0
// DQT db
// DQT db
// DHT c4
// DHT c4
// DHT c4
// DHT c4
// SOF0 c0 baseline (not progressive) 3 color 0x01 Y, 0x21 2h1v, 0x00 tbl0
// - 0x02 Cb, 0x11 1h1v, 0x01 tbl1 - 0x03 Cr, 0x11 1h1v, 0x01 tbl1
// therefore 4:2:2, with two separate quant tables (0 and 1)
// SOS da
// EOI d9 (no need to strip data after this RFC says client will discard)
bool findJPEGheader(BufPtr *start, uint32_t *len, uint8_t marker) {
// per https://en.wikipedia.org/wiki/JPEG_File_Interchange_Format
unsigned const char *bytes = *start;
// kinda skanky, will break if unlucky and the headers include 0xffda
// might fall off array if jpeg is invalid
// FIXME - return false instead
while(bytes - *start < *len) {
uint8_t framing = *bytes++; // better be 0xff
if(framing != 0xff) {
printf("malformed jpeg, framing=%x\n", framing);
return false;
}
uint8_t typecode = *bytes++;
if(typecode == marker) {
unsigned skipped = bytes - *start;
//printf("found marker 0x%x, skipped %d\n", marker, skipped);
*start = bytes;
// shrink len for the bytes we just skipped
*len -= skipped;
return true;
}
else {
// not the section we were looking for, skip the entire section
switch(typecode) {
case 0xd8: // start of image
{
break; // no data to skip
}
case 0xe0: // app0
case 0xdb: // dqt
case 0xc4: // dht
case 0xc0: // sof0
case 0xda: // sos
{
// standard format section with 2 bytes for len. skip that many bytes
uint32_t len = bytes[0] * 256 + bytes[1];
//printf("skipping section 0x%x, %d bytes\n", typecode, len);
bytes += len;
break;
}
default:
printf("unexpected jpeg typecode 0x%x\n", typecode);
break;
}
}
}
printf("failed to find jpeg marker 0x%x", marker);
return false;
}
// the scan data uses byte stuffing to guarantee anything that starts with 0xff
// followed by something not zero, is a new section. Look for that marker and return the ptr
// pointing there
void skipScanBytes(BufPtr *start) {
BufPtr bytes = *start;
while(true) { // FIXME, check against length
while(*bytes++ != 0xff);
if(*bytes++ != 0) {
*start = bytes - 2; // back up to the 0xff marker we just found
return;
}
}
}
void nextJpegBlock(BufPtr *bytes) {
uint32_t len = (*bytes)[0] * 256 + (*bytes)[1];
//printf("going to next jpeg block %d bytes\n", len);
*bytes += len;
}
// When JPEG is stored as a file it is wrapped in a container
// This function fixes up the provided start ptr to point to the
// actual JPEG stream data and returns the number of bytes skipped
bool decodeJPEGfile(BufPtr *start, uint32_t *len, BufPtr *qtable0, BufPtr *qtable1) {
// per https://en.wikipedia.org/wiki/JPEG_File_Interchange_Format
unsigned const char *bytes = *start;
if(!findJPEGheader(&bytes, len, 0xd8)) // better at least look like a jpeg file
return false; // FAILED!
// Look for quant tables if they are present
*qtable0 = NULL;
*qtable1 = NULL;
BufPtr quantstart = *start;
uint32_t quantlen = *len;
if(!findJPEGheader(&quantstart, &quantlen, 0xdb)) {
printf("error can't find quant table 0\n");
}
else {
// printf("found quant table %x\n", quantstart[2]);
*qtable0 = quantstart + 3; // 3 bytes of header skipped
nextJpegBlock(&quantstart);
if(!findJPEGheader(&quantstart, &quantlen, 0xdb)) {
printf("error can't find quant table 1\n");
}
else {
// printf("found quant table %x\n", quantstart[2]);
}
*qtable1 = quantstart + 3;
nextJpegBlock(&quantstart);
}
if(!findJPEGheader(start, len, 0xda))
return false; // FAILED!
// Skip the header bytes of the SOS marker FIXME why doesn't this work?
uint32_t soslen = (*start)[0] * 256 + (*start)[1];
*start += soslen;
*len -= soslen;
// start scanning the data portion of the scan to find the end marker
BufPtr endmarkerptr = *start;
uint32_t endlen = *len;
skipScanBytes(&endmarkerptr);
if(!findJPEGheader(&endmarkerptr, &endlen, 0xd9))
return false; // FAILED!
// endlen must now be the # of bytes between the start of our scan and
// the end marker, tell the caller to ignore bytes afterwards
*len = endmarkerptr - *start;
return true;
}


@@ -0,0 +1,56 @@
#pragma once
#include "platglue.h"
typedef unsigned const char *BufPtr;
class CStreamer
{
public:
CStreamer(SOCKET aClient, u_short width, u_short height);
virtual ~CStreamer();
void InitTransport(u_short aRtpPort, u_short aRtcpPort, bool TCP);
u_short GetRtpServerPort();
u_short GetRtcpServerPort();
virtual void streamImage(uint32_t curMsec) = 0; // send a new image to the client
protected:
void streamFrame(unsigned const char *data, uint32_t dataLen, uint32_t curMsec);
private:
int SendRtpPacket(unsigned const char *jpeg, int jpegLen, int fragmentOffset, BufPtr quant0tbl = NULL, BufPtr quant1tbl = NULL);// returns new fragmentOffset or 0 if finished with frame
UDPSOCKET m_RtpSocket; // RTP socket for streaming RTP packets to client
UDPSOCKET m_RtcpSocket; // RTCP socket for sending/receiving RTCP packages
uint16_t m_RtpClientPort; // RTP receiver port on client (in host byte order!)
uint16_t m_RtcpClientPort; // RTCP receiver port on client (in host byte order!)
IPPORT m_RtpServerPort; // RTP sender port on server
IPPORT m_RtcpServerPort; // RTCP sender port on server
u_short m_SequenceNumber;
uint32_t m_Timestamp;
int m_SendIdx;
bool m_TCPTransport;
SOCKET m_Client;
uint32_t m_prevMsec;
u_short m_width; // image data info
u_short m_height;
};
// When JPEG is stored as a file it is wrapped in a container
// This function fixes up the provided start ptr to point to the
// actual JPEG stream data and returns the number of bytes skipped
// returns true if the file seems to be valid jpeg
// If quant tables can be found they will be stored in qtable0/1
bool decodeJPEGfile(BufPtr *start, uint32_t *len, BufPtr *qtable0, BufPtr *qtable1);
bool findJPEGheader(BufPtr *start, uint32_t *len, uint8_t marker);
// Given a jpeg ptr pointing to a pair of length bytes, advance the pointer to
// the next 0xff marker byte
void nextJpegBlock(BufPtr *start);

(new file, name not shown: diff suppressed because it is too large)


@@ -0,0 +1,11 @@
#pragma once
#ifndef ARDUINO_ARCH_ESP32
#define INCLUDE_SIMDATA
#endif
#ifdef INCLUDE_SIMDATA
extern unsigned const char capture_jpg[];
extern unsigned const char octo_jpg[];
extern unsigned int octo_jpg_len, capture_jpg_len;
#endif


@@ -0,0 +1,192 @@
#include "OV2640.h"
#define TAG "OV2640"
// definitions appropriate for the ESP32-CAM devboard (and most clones)
camera_config_t esp32cam_config {
.pin_pwdn = -1,
.pin_reset = 15,
.pin_xclk = 27,
.pin_sscb_sda = 25,
.pin_sscb_scl = 23,
.pin_d7 = 19,
.pin_d6 = 36,
.pin_d5 = 18,
.pin_d4 = 39,
.pin_d3 = 5,
.pin_d2 = 34,
.pin_d1 = 35,
.pin_d0 = 17,
.pin_vsync = 22,
.pin_href = 26,
.pin_pclk = 21,
.xclk_freq_hz = 20000000,
.ledc_timer = LEDC_TIMER_0,
.ledc_channel = LEDC_CHANNEL_0,
.pixel_format = PIXFORMAT_JPEG,
// .frame_size = FRAMESIZE_UXGA, // needs 234K of framebuffer space
// .frame_size = FRAMESIZE_SXGA, // needs 160K for framebuffer
// .frame_size = FRAMESIZE_XGA, // needs 96K or even smaller FRAMESIZE_SVGA - can work if using only 1 fb
.frame_size = FRAMESIZE_SVGA,
.jpeg_quality = 12, //0-63 lower numbers are higher quality
.fb_count = 2 // if more than one, i2s runs in continuous mode. Use only with jpeg
};
camera_config_t esp32cam_aithinker_config {
.pin_pwdn = 32,
.pin_reset = -1,
.pin_xclk = 0,
.pin_sscb_sda = 26,
.pin_sscb_scl = 27,
// Note: LED GPIO is apparently 4 not sure where that goes
// per https://github.com/donny681/ESP32_CAMERA_QR/blob/e4ef44549876457cd841f33a0892c82a71f35358/main/led.c
.pin_d7 = 35,
.pin_d6 = 34,
.pin_d5 = 39,
.pin_d4 = 36,
.pin_d3 = 21,
.pin_d2 = 19,
.pin_d1 = 18,
.pin_d0 = 5,
.pin_vsync = 25,
.pin_href = 23,
.pin_pclk = 22,
.xclk_freq_hz = 20000000,
.ledc_timer = LEDC_TIMER_1,
.ledc_channel = LEDC_CHANNEL_1,
.pixel_format = PIXFORMAT_JPEG,
// .frame_size = FRAMESIZE_UXGA, // needs 234K of framebuffer space
// .frame_size = FRAMESIZE_SXGA, // needs 160K for framebuffer
// .frame_size = FRAMESIZE_XGA, // needs 96K or even smaller FRAMESIZE_SVGA - can work if using only 1 fb
.frame_size = FRAMESIZE_SVGA,
.jpeg_quality = 10, //0-63 lower numbers are higher quality
.fb_count = 2 // if more than one, i2s runs in continuous mode. Use only with jpeg
};
camera_config_t esp32cam_ttgo_t_config {
.pin_pwdn = 26,
.pin_reset = -1,
.pin_xclk = 32,
.pin_sscb_sda = 13,
.pin_sscb_scl = 12,
.pin_d7 = 39,
.pin_d6 = 36,
.pin_d5 = 23,
.pin_d4 = 18,
.pin_d3 = 15,
.pin_d2 = 4,
.pin_d1 = 14,
.pin_d0 = 5,
.pin_vsync = 27,
.pin_href = 25,
.pin_pclk = 19,
.xclk_freq_hz = 20000000,
.ledc_timer = LEDC_TIMER_0,
.ledc_channel = LEDC_CHANNEL_0,
.pixel_format = PIXFORMAT_JPEG,
.frame_size = FRAMESIZE_SVGA,
.jpeg_quality = 12, //0-63 lower numbers are higher quality
.fb_count = 2 // if more than one, i2s runs in continuous mode. Use only with jpeg
};
void OV2640::run(void)
{
if(fb)
//return the frame buffer back to the driver for reuse
esp_camera_fb_return(fb);
fb = esp_camera_fb_get();
}
void OV2640::runIfNeeded(void)
{
if(!fb)
run();
}
int OV2640::getWidth(void)
{
runIfNeeded();
return fb->width;
}
int OV2640::getHeight(void)
{
runIfNeeded();
return fb->height;
}
size_t OV2640::getSize(void)
{
runIfNeeded();
return fb->len;
}
uint8_t *OV2640::getfb(void)
{
runIfNeeded();
return fb->buf;
}
framesize_t OV2640::getFrameSize(void)
{
return _cam_config.frame_size;
}
void OV2640::setFrameSize(framesize_t size)
{
_cam_config.frame_size = size;
}
pixformat_t OV2640::getPixelFormat(void)
{
return _cam_config.pixel_format;
}
void OV2640::setPixelFormat(pixformat_t format)
{
switch (format)
{
case PIXFORMAT_RGB565:
case PIXFORMAT_YUV422:
case PIXFORMAT_GRAYSCALE:
case PIXFORMAT_JPEG:
_cam_config.pixel_format = format;
break;
default:
_cam_config.pixel_format = PIXFORMAT_GRAYSCALE;
break;
}
}
esp_err_t OV2640::init(camera_config_t config)
{
memset(&_cam_config, 0, sizeof(_cam_config));
memcpy(&_cam_config, &config, sizeof(config));
esp_err_t err = esp_camera_init(&_cam_config);
if (err != ESP_OK)
{
printf("Camera probe failed with error 0x%x", err);
return err;
}
// ESP_ERROR_CHECK(gpio_install_isr_service(0));
return ESP_OK;
}


@@ -0,0 +1,43 @@
#ifndef OV2640_H_
#define OV2640_H_
#include <Arduino.h>
#include <pgmspace.h>
#include <stdio.h>
#include "esp_log.h"
#include "esp_attr.h"
#include "esp_camera.h"
extern camera_config_t esp32cam_config, esp32cam_aithinker_config, esp32cam_ttgo_t_config;
class OV2640
{
public:
OV2640(){
fb = NULL;
};
~OV2640(){
};
esp_err_t init(camera_config_t config);
void run(void);
size_t getSize(void);
uint8_t *getfb(void);
int getWidth(void);
int getHeight(void);
framesize_t getFrameSize(void);
pixformat_t getPixelFormat(void);
void setFrameSize(framesize_t size);
void setPixelFormat(pixformat_t format);
private:
void runIfNeeded(); // grab a frame if we don't already have one
// camera_framesize_t _frame_size;
// camera_pixelformat_t _pixel_format;
camera_config_t _cam_config;
camera_fb_t *fb;
};
#endif //OV2640_H_


@@ -0,0 +1,18 @@
#include "OV2640Streamer.h"
#include <assert.h>
OV2640Streamer::OV2640Streamer(SOCKET aClient, OV2640 &cam) : CStreamer(aClient, cam.getWidth(), cam.getHeight()), m_cam(cam)
{
printf("Created streamer width=%d, height=%d\n", cam.getWidth(), cam.getHeight());
}
void OV2640Streamer::streamImage(uint32_t curMsec)
{
m_cam.run();// queue up a read for next time
BufPtr bytes = m_cam.getfb();
streamFrame(bytes, m_cam.getSize(), curMsec);
}


@@ -0,0 +1,15 @@
#pragma once
#include "CStreamer.h"
#include "OV2640.h"
class OV2640Streamer : public CStreamer
{
bool m_showBig;
OV2640 &m_cam;
public:
OV2640Streamer(SOCKET aClient, OV2640 &cam);
virtual void streamImage(uint32_t curMsec);
};


@@ -0,0 +1,28 @@
#include "SimStreamer.h"
#include "JPEGSamples.h"
#ifdef INCLUDE_SIMDATA
SimStreamer::SimStreamer(SOCKET aClient, bool showBig) : CStreamer(aClient, showBig ? 800 : 640, showBig ? 600 : 480)
{
m_showBig = showBig;
}
void SimStreamer::streamImage(uint32_t curMsec)
{
if(m_showBig) {
BufPtr bytes = capture_jpg;
uint32_t len = capture_jpg_len;
streamFrame(bytes, len, curMsec);
}
else {
BufPtr bytes = octo_jpg;
uint32_t len = octo_jpg_len;
streamFrame(bytes, len, curMsec);
}
}
#endif


@@ -0,0 +1,15 @@
#pragma once
#include "JPEGSamples.h"
#include "CStreamer.h"
#ifdef INCLUDE_SIMDATA
class SimStreamer : public CStreamer
{
bool m_showBig;
public:
SimStreamer(SOCKET aClient, bool showBig);
virtual void streamImage(uint32_t curMsec);
};
#endif


@@ -0,0 +1,107 @@
#pragma once
#include <Arduino.h>
#include <WiFiClient.h>
#include <WiFiUdp.h>
#include <sys/socket.h>
#include <netinet/in.h>
//#include <arpa/inet.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <errno.h>
typedef WiFiClient *SOCKET;
typedef WiFiUDP *UDPSOCKET;
typedef IPAddress IPADDRESS; // On linux use uint32_t in network byte order (per getpeername)
typedef uint16_t IPPORT; // on linux use network byte order
#define NULLSOCKET NULL
inline void closesocket(SOCKET s) {
printf("closing TCP socket\n");
if(s) {
s->stop();
// delete s; TCP WiFiClients are never on the heap in arduino land?
}
}
#define getRandom() random(65536)
inline void socketpeeraddr(SOCKET s, IPADDRESS *addr, IPPORT *port) {
*addr = s->remoteIP();
*port = s->remotePort();
}
inline void udpsocketclose(UDPSOCKET s) {
printf("closing UDP socket\n");
if(s) {
s->stop();
delete s;
}
}
inline UDPSOCKET udpsocketcreate(unsigned short portNum)
{
UDPSOCKET s = new WiFiUDP();
if(!s->begin(portNum)) {
printf("Can't bind port %d\n", portNum);
delete s;
return NULL;
}
return s;
}
// TCP sending
inline ssize_t socketsend(SOCKET sockfd, const void *buf, size_t len)
{
return sockfd->write((uint8_t *) buf, len);
}
inline ssize_t udpsocketsend(UDPSOCKET sockfd, const void *buf, size_t len,
IPADDRESS destaddr, IPPORT destport)
{
sockfd->beginPacket(destaddr, destport);
sockfd->write((const uint8_t *) buf, len);
if(!sockfd->endPacket())
printf("error sending udp packet\n");
return len;
}
/**
Read from a socket with a timeout.
Return 0=socket was closed by client, -1=timeout, >0 number of bytes read
*/
inline int socketread(SOCKET sock, char *buf, size_t buflen, int timeoutmsec)
{
if(!sock->connected()) {
printf("client has closed the socket\n");
return 0;
}
int numAvail = sock->available();
if(numAvail == 0 && timeoutmsec != 0) {
// sleep and hope for more
delay(timeoutmsec);
numAvail = sock->available();
}
if(numAvail == 0) {
// printf("timeout on read\n");
return -1;
}
else {
// int numRead = sock->readBytesUntil('\n', buf, buflen);
int numRead = sock->readBytes(buf, buflen);
// printf("bytes avail %d, read %d: %s", numAvail, numRead, buf);
return numRead;
}
}


@@ -0,0 +1,111 @@
#pragma once
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <errno.h>
typedef int SOCKET;
typedef int UDPSOCKET;
typedef uint32_t IPADDRESS; // On linux use uint32_t in network byte order (per getpeername)
typedef uint16_t IPPORT; // on linux use network byte order
#define NULLSOCKET 0
inline void closesocket(SOCKET s) {
close(s);
}
#define getRandom() rand()
inline void socketpeeraddr(SOCKET s, IPADDRESS *addr, IPPORT *port) {
sockaddr_in r;
socklen_t len = sizeof(r);
if(getpeername(s,(struct sockaddr*)&r,&len) < 0) {
printf("getpeername failed\n");
*addr = 0;
*port = 0;
}
else {
//htons
*port = r.sin_port;
*addr = r.sin_addr.s_addr;
}
}
inline void udpsocketclose(UDPSOCKET s) {
close(s);
}
inline UDPSOCKET udpsocketcreate(unsigned short portNum)
{
sockaddr_in addr;
addr.sin_family = AF_INET;
addr.sin_addr.s_addr = INADDR_ANY;
int s = socket(AF_INET, SOCK_DGRAM, 0);
addr.sin_port = htons(portNum);
if (bind(s,(sockaddr*)&addr,sizeof(addr)) != 0) {
printf("Error, can't bind\n");
close(s);
s = 0;
}
return s;
}
// TCP sending
inline ssize_t socketsend(SOCKET sockfd, const void *buf, size_t len)
{
// printf("TCP send\n");
return send(sockfd, buf, len, 0);
}
inline ssize_t udpsocketsend(UDPSOCKET sockfd, const void *buf, size_t len,
IPADDRESS destaddr, uint16_t destport)
{
sockaddr_in addr;
addr.sin_family = AF_INET;
addr.sin_addr.s_addr = destaddr;
addr.sin_port = htons(destport);
//printf("UDP send to 0x%0x:%0x\n", destaddr, destport);
return sendto(sockfd, buf, len, 0, (sockaddr *) &addr, sizeof(addr));
}
/**
Read from a socket with a timeout.
Return 0=socket was closed by client, -1=timeout, >0 number of bytes read
*/
inline int socketread(SOCKET sock, char *buf, size_t buflen, int timeoutmsec)
{
// Use a timeout on our socket read to instead serve frames
struct timeval tv;
tv.tv_sec = 0;
tv.tv_usec = timeoutmsec * 1000; // send a new frame ever
setsockopt(sock, SOL_SOCKET, SO_RCVTIMEO, &tv, sizeof tv);
int res = recv(sock,buf,buflen,0);
if(res > 0) {
return res;
}
else if(res == 0) {
return 0; // client dropped connection
}
else {
if (errno == EWOULDBLOCK || errno == EAGAIN)
return -1;
else
return 0; // unknown error, just claim client dropped it
};
}


@@ -0,0 +1,7 @@
#pragma once
#ifdef ARDUINO_ARCH_ESP32
#include "platglue-esp32.h"
#else
#include "platglue-posix.h"
#endif


@@ -0,0 +1,7 @@
SRCS = ../src/CRtspSession.cpp ../src/CStreamer.cpp ../src/JPEGSamples.cpp ../src/SimStreamer.cpp
run: *.cpp ../src/*
skill testserver
g++ -o testserver -I ../src -I . *.cpp $(SRCS)
./testserver


@@ -0,0 +1,15 @@
# Testserver
This is a standalone Linux test application to allow development of this
library without going through the slow process of always testing on the ESP32.
Almost all of the code is the same - only platglue-posix.h differs from
platglue-esp32.h (thus serving as a crude HAL).
RTSPTestServer.cpp also serves as a small example of how this library could
be used on POSIX systems.
# Usage
Run "make" to build and run the server. Run "runvlc.sh" to fire up a VLC client
that talks to that server. If all is working you should see a static image
of my office that I captured using an ESP32-CAM.
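For concreteness, a typical session looks roughly like this (run from the test directory; the RTSP URL baked into runvlc.sh is whatever address your server is reachable at):
```
cd test
make           # compiles testserver from this directory plus ../src, then starts it
./runvlc.sh    # in a second terminal: launch VLC against the running test server
```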


@@ -0,0 +1,69 @@
#include "platglue.h"
#include "SimStreamer.h"
#include "CRtspSession.h"
#include "JPEGSamples.h"
#include <assert.h>
#include <sys/time.h>
void workerThread(SOCKET s)
{
SimStreamer streamer(s, true); // our streamer for UDP/TCP based RTP transport
CRtspSession rtsp(s, &streamer); // our threads RTSP session and state
while (!rtsp.m_stopped)
{
uint32_t timeout = 400;
if(!rtsp.handleRequests(timeout)) {
struct timeval now;
gettimeofday(&now, NULL); // crufty msecish timer
uint32_t msec = now.tv_sec * 1000 + now.tv_usec / 1000;
rtsp.broadcastCurrentFrame(msec);
}
}
}
int main()
{
SOCKET MasterSocket; // our masterSocket(socket that listens for RTSP client connections)
SOCKET ClientSocket; // RTSP socket to handle an client
sockaddr_in ServerAddr; // server address parameters
sockaddr_in ClientAddr; // address parameters of a new RTSP client
socklen_t ClientAddrLen = sizeof(ClientAddr);
printf("running RTSP server\n");
ServerAddr.sin_family = AF_INET;
ServerAddr.sin_addr.s_addr = INADDR_ANY;
ServerAddr.sin_port = htons(8554); // listen on RTSP port 8554
MasterSocket = socket(AF_INET,SOCK_STREAM,0);
int enable = 1;
if (setsockopt(MasterSocket, SOL_SOCKET, SO_REUSEADDR, &enable, sizeof(int)) < 0) {
printf("setsockopt(SO_REUSEADDR) failed");
return 0;
}
// bind our master socket to the RTSP port and listen for a client connection
if (bind(MasterSocket,(sockaddr*)&ServerAddr,sizeof(ServerAddr)) != 0) {
printf("error can't bind port errno=%d\n", errno);
return 0;
}
if (listen(MasterSocket,5) != 0) return 0;
while (true)
{ // loop forever to accept client connections
ClientSocket = accept(MasterSocket,(struct sockaddr*)&ClientAddr,&ClientAddrLen);
printf("Client connected. Client address: %s\r\n",inet_ntoa(ClientAddr.sin_addr));
if(fork() == 0)
workerThread(ClientSocket);
}
closesocket(MasterSocket);
return 0;
}


@@ -0,0 +1,2 @@
# for testing
vlc -v rtsp://192.168.86.215:8554/mjpeg/1


@@ -0,0 +1,111 @@
#include "platglue.h"
#include "SimStreamer.h"
#include "CRtspSession.h"
#include "JPEGSamples.h"
// From RFC2435 generates standard quantization tables
/*
* Table K.1 from JPEG spec.
*/
static const int jpeg_luma_quantizer[64] = {
16, 11, 10, 16, 24, 40, 51, 61,
12, 12, 14, 19, 26, 58, 60, 55,
14, 13, 16, 24, 40, 57, 69, 56,
14, 17, 22, 29, 51, 87, 80, 62,
18, 22, 37, 56, 68, 109, 103, 77,
24, 35, 55, 64, 81, 104, 113, 92,
49, 64, 78, 87, 103, 121, 120, 101,
72, 92, 95, 98, 112, 100, 103, 99
};
/*
* Table K.2 from JPEG spec.
*/
static const int jpeg_chroma_quantizer[64] = {
17, 18, 24, 47, 99, 99, 99, 99,
18, 21, 26, 66, 99, 99, 99, 99,
24, 26, 56, 99, 99, 99, 99, 99,
47, 66, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99
};
/*
* Call MakeTables with the Q factor and two u_char[64] return arrays
*/
void
MakeTables(int q, u_char *lqt, u_char *cqt)
{
int i;
int factor = q;
if (q < 1) factor = 1;
if (q > 99) factor = 99;
if (q < 50)
q = 5000 / factor;
else
q = 200 - factor*2;
for (i=0; i < 64; i++) {
int lq = (jpeg_luma_quantizer[i] * q + 50) / 100;
int cq = (jpeg_chroma_quantizer[i] * q + 50) / 100;
/* Limit the quantizers to 1 <= q <= 255 */
if (lq < 1) lq = 1;
else if (lq > 255) lq = 255;
lqt[i] = lq;
if (cq < 1) cq = 1;
else if (cq > 255) cq = 255;
cqt[i] = cq;
}
}
// analyze an image from our camera to find which quant table it is using...
// Used to see if our camera is spitting out standard RTP tables (it isn't)
// So we have to use a Q of 255 to indicate that each frame carries its own quant tables
// use 0 for precision in the quant header, 64 for length
void findCameraQuant()
{
BufPtr bytes = capture_jpg;
uint32_t len = capture_jpg_len;
if(!findJPEGheader(&bytes, &len, 0xdb)) {
printf("error can't find quant table 0\n");
return;
}
else {
printf("found quant table %x (len %d)\n", bytes[2], bytes[1]);
}
BufPtr qtable0 = bytes + 3; // 3 bytes of header skipped
nextJpegBlock(&bytes);
if(!findJPEGheader(&bytes, &len, 0xdb)) {
printf("error can't find quant table 1\n");
return;
}
else {
printf("found quant table %x\n", bytes[2]);
}
BufPtr qtable1 = bytes + 3;
nextJpegBlock(&bytes);
    bool found = false;
    for(int q = 0; q < 128; q++) {
        uint8_t lqt[64], cqt[64];
        MakeTables(q, lqt, cqt);
        if(memcmp(qtable0, lqt, sizeof(lqt)) == 0 && memcmp(qtable1, cqt, sizeof(cqt)) == 0) {
            printf("Found matching quant table %d\n", q);
            found = true;
        }
    }
    if(!found)
        printf("No matching quant table found!\n");
}

View File

@@ -0,0 +1,2 @@
# for testing
vlc -v rtsp://127.0.0.1:8554/mjpeg/1

25
platformio.ini Normal file
View File

@@ -0,0 +1,25 @@
; PlatformIO Project Configuration File
;
; Build options: build flags, source filter
; Upload options: custom upload port, speed and extra flags
; Library options: dependencies, extra library storages
; Advanced options: extra scripting
;
; Please visit documentation for the other options and examples
; https://docs.platformio.org/page/projectconf.html
[env:esp32cam]
platform = https://github.com/platformio/platform-espressif32.git ;espressif32
board = esp32cam
framework = arduino
board_build.partitions = customparts.csv
;upload_port = COM6 ;COM port of the ESP32-CAM for the initial serial upload
upload_port = x.x.x.x ;IP address of the ESP32-CAM for OTA uploads
build_flags =
; -DBOARD_HAS_PSRAM=TRUE
-DLED_BUILTIN=4
-mfix-esp32-psram-cache-issue
lib_deps =
; Micro-RTSP
; https://github.com/beegee-tokyo/Micro-RTSP.git
OneButton
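; OTA upload (sketch, assumption - verify against your PlatformIO setup): once
; a firmware with OTA support is on the board, uploads can go over WiFi by
; enabling the espota protocol and keeping upload_port set to the camera's IP.
;upload_protocol = espota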

144
src/main.cpp Normal file
View File

@@ -0,0 +1,144 @@
#include "main.h"
/** Put your WiFi credentials into this file **/
#include "wifikeys.h"
/** Camera class */
OV2640 cam;
/** GPIO for OTA request button */
int otaButton = 12;
/** Button class */
OneButton pushBt(otaButton, true, true);
/** Function declarations */
void enableOTA(void);
void resetDevice(void);
/**
* Called once after reboot/powerup
*/
void setup()
{
// Start the serial connection
Serial.begin(115200);
Serial.println("\n\n##################################");
Serial.printf("Internal Total heap %d, internal Free Heap %d\n", ESP.getHeapSize(), ESP.getFreeHeap());
Serial.printf("SPIRam Total heap %d, SPIRam Free Heap %d\n", ESP.getPsramSize(), ESP.getFreePsram());
Serial.printf("ChipRevision %d, Cpu Freq %d, SDK Version %s\n", ESP.getChipRevision(), ESP.getCpuFreqMHz(), ESP.getSdkVersion());
Serial.printf("Flash Size %d, Flash Speed %d\n", ESP.getFlashChipSize(), ESP.getFlashChipSpeed());
Serial.println("##################################\n\n");
	// Initialize the camera, here we use the AI-Thinker ESP32-CAM
delay(100);
cam.init(esp32cam_aithinker_config);
delay(100);
//LED settings
pinMode(LED_BUILTIN, OUTPUT);
digitalWrite(LED_BUILTIN, false);
// Connect the WiFi
WiFi.mode(WIFI_STA);
WiFi.begin(ssid, password);
while (WiFi.status() != WL_CONNECTED)
{
delay(500);
Serial.print(".");
}
	// Print information on how to reach the camera server
IPAddress ip = WiFi.localIP();
Serial.print("\nWiFi connected with IP ");
Serial.println(ip);
#ifdef ENABLE_WEBSERVER1
Serial.print("Browser Stream Link: http://");
Serial.print(ip);
Serial.println(":81/stream\n");
Serial.print("Browser Single Picture Link: http//");
Serial.print(ip);
Serial.println("/jpg\n");
Serial.print("Switch LED ON: http//");
Serial.print(ip);
Serial.println("/ledon\n");
Serial.print("Switch LED OFF: http//");
Serial.print(ip);
Serial.println("/ledoff\n");
Serial.print("Start OTA: http//");
Serial.print(ip);
Serial.println("/start_ota\n");
#endif
#ifdef ENABLE_WEBSERVER1
// Initialize the HTTP web stream server
initWebStream1();
#endif
#ifdef ENABLE_WEBSERVER2
// Initialize the HTTP web stream server
initWebStream2();
#endif
// Attach the button functions
pushBt.attachClick(enableOTA);
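	// Hypothetical extension (not part of the original sketch): resetDevice()
	// defined below is never attached to a button event; with the OneButton
	// library it could be triggered by a long press, for example:
	//   pushBt.attachLongPressStart(resetDevice);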
}
void loop()
{
// Check the button
pushBt.tick();
if (otaStarted)
{
ArduinoOTA.handle();
}
//Nothing else to do here
delay(100);
}
/**
 * Enable or disable OTA mode (toggled by the button)
*/
void enableOTA(void)
{
// If OTA is not enabled
if (!otaStarted)
{
		// Stop the camera servers
#ifdef ENABLE_WEBSERVER1
		stopWebStream();
#endif
delay(100);
Serial.println("OTA enabled");
// Start the OTA server
startOTA();
otaStarted = true;
}
else
{
// If OTA was enabled
otaStarted = false;
// Stop the OTA server
stopOTA();
// Restart the camera servers
#ifdef ENABLE_WEBSERVER1
initWebStream1();
#endif
#ifdef ENABLE_WEBSERVER2
initWebStream2();
#endif
}
}
/**
 * Disconnect WiFi and restart the device
*/
void resetDevice(void)
{
delay(100);
WiFi.disconnect();
esp_restart();
}

40
src/main.h Normal file
View File

@@ -0,0 +1,40 @@
#pragma once
#include <Arduino.h>
// WiFi stuff
#include <WiFi.h>
#include <WebServer.h>
#include <WiFiClient.h>
// OTA stuff
#include <ESPmDNS.h>
#include <WiFiUdp.h>
#include <ArduinoOTA.h>
// Camera stuff
#include "OV2640.h"
#include "OV2640Streamer.h"
// Button stuff
#include <OneButton.h>
#define ENABLE_WEBSERVER1
#define ENABLE_WEBSERVER2
// Camera class
extern OV2640 cam;
// Web server stuff
void initWebStream1(void);
void initWebStream2(void);
void stopWebStream(void);
void stopWebStream2(void);
void handleWebServer(void);
// OTA stuff
void startOTA(void);
void stopOTA(void);
extern boolean otaStarted;

80
src/ota.cpp Normal file
View File

@@ -0,0 +1,80 @@
#include "main.h"
/** Flag if OTA is enabled */
boolean otaStarted = false;
/** Limit the progress output on serial */
unsigned int lastProgress = 0;
/**
* Initialize OTA server
* and start waiting for OTA requests
*/
void startOTA(void)
{
ArduinoOTA
// OTA request received
.onStart([]() {
String type;
if (ArduinoOTA.getCommand() == U_FLASH)
type = "sketch";
else // U_SPIFFS
type = "filesystem";
// NOTE: if updating SPIFFS this would be the place to unmount SPIFFS using SPIFFS.end()
Serial.println("Start updating " + type);
lastProgress = 0;
otaStarted = true;
})
.onEnd([]() {
// OTA is finished
Serial.println("\nEnd");
})
.onProgress([](unsigned int progress, unsigned int total) {
// Status report during OTA
if ((lastProgress == 0) || ((progress / (total / 100)) >= lastProgress + 5))
{
Serial.printf("Progress: %u%%\r", (progress / (total / 100)));
lastProgress = (progress / (total / 100));
if (lastProgress == 0)
{
lastProgress = 1;
}
}
})
.onError([](ota_error_t error) {
			// Error occurred during OTA, report it
Serial.printf("Error[%u]: ", error);
if (error == OTA_AUTH_ERROR)
Serial.println("Auth Failed");
else if (error == OTA_BEGIN_ERROR)
Serial.println("Begin Failed");
else if (error == OTA_CONNECT_ERROR)
Serial.println("Connect Failed");
else if (error == OTA_RECEIVE_ERROR)
Serial.println("Receive Failed");
else if (error == OTA_END_ERROR)
Serial.println("End Failed");
});
// Enable MDNS so device can be seen
ArduinoOTA.setMdnsEnabled(true);
	// Optionally create a unique name from the IP address (currently a fixed name is used)
// IPAddress ip = WiFi.localIP();
// String hostName = "ESP32-CAM" + ip.toString();
char hostName[] = "ESP32-CAM";
Serial.printf("Device is advertising as ESP32-CAM\n");
// Set the MDNS advertising name
ArduinoOTA.setHostname(hostName);
// Start the OTA server
ArduinoOTA.begin();
}
/**
* Stop the OTA server
*/
void stopOTA(void)
{
ArduinoOTA.end();
}

206
src/webstream.cpp Normal file
View File

@@ -0,0 +1,206 @@
#include "main.h"
/** Web server class */
WebServer server1(81);
/** Forward declaration of the task handling browser requests */
void webTask1(void *pvParameters);
/** Task handle of the web task */
TaskHandle_t webTaskHandler;
/** Flag to stop the web server1 */
boolean stopWeb = false;
/** Web request function forward declarations */
void handle_jpg_stream(void);
void handle_jpg(void);
void handle_ledon(void);
void handle_ledoff(void);
void handle_start_ota(void);
void handleNotFound();
/**
* Initialize the web stream server1 by starting the handler task
*/
void initWebStream1(void)
{
#ifdef ENABLE_WEBSERVER1
	// Create the task for the web server1 and run it in core 0 (other tasks use core 1)
xTaskCreatePinnedToCore(webTask1, "WEB", 4096, NULL, 1, &webTaskHandler,0);
if (webTaskHandler == NULL)
{
Serial.println("Create Webstream task failed");
}
else
{
Serial.println("Webstream task up and running");
}
#endif
}
/**
* Called to stop the web server1 task, needed for OTA
* to avoid OTA timeout error
*/
void stopWebStream(void)
{
stopWeb = true;
}
/**
* The task that handles web server1 connections
* Starts the web server1
* Handles requests in an endless loop
* until a stop request is received because OTA
* starts
*/
void webTask1(void *pvParameters)
{
// Set the function to handle stream requests
server1.on("/stream", HTTP_GET, handle_jpg_stream);
// Set the function to handle single picture requests
server1.on("/jpg", HTTP_GET, handle_jpg);
server1.on("/ledon", HTTP_GET, handle_ledon);
server1.on("/ledoff", HTTP_GET, handle_ledoff);
server1.on("/start_ota", HTTP_GET, handle_start_ota);
// Set the function to handle other requests
server1.onNotFound(handleNotFound);
// Start the web server1
server1.begin();
	while (!stopWeb)
	{
#ifdef ENABLE_WEBSERVER1
		// Check if the server1 has clients
		server1.handleClient();
#endif
		delay(1);
	}
	// User requested web server1 stop
	server1.close();
	// Delete this task
	vTaskDelete(NULL);
}
#ifdef ENABLE_WEBSERVER1
/**
* Handle web stream requests
* Gives a first response to prepare the streaming
* Then runs in a loop to update the web content
* every time a new frame is available
*/
void handle_jpg_stream(void)
{
WiFiClient thisClient = server1.client();
String response = "HTTP/1.1 200 OK\r\n";
response += "Content-Type: multipart/x-mixed-replace; boundary=frame\r\n\r\n";
server1.sendContent(response);
while (1)
{
cam.run();
if (!thisClient.connected())
{
break;
}
response = "--frame\r\n";
response += "Content-Type: image/jpeg\r\n\r\n";
server1.sendContent(response);
thisClient.write((char *)cam.getfb(), cam.getSize());
server1.sendContent("\r\n");
delay(20);
}
}
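// What handle_jpg_stream() above puts on the wire (sketch of the multipart
// MJPEG exchange, for reference): after the initial "200 OK" with
// "Content-Type: multipart/x-mixed-replace; boundary=frame", each frame is sent as
//   --frame\r\n
//   Content-Type: image/jpeg\r\n\r\n
//   <JPEG bytes>\r\n
// and the browser replaces the displayed image whenever a new part arrives.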
/**
* Handle single picture requests
 * redirects to the snapshot handler on port 80
*/
void handle_jpg(void)
{
IPAddress ip = WiFi.localIP();
server1.sendHeader("Location", String("http://")+ ip.toString()+ String(":80/jpg"));
server1.send(302, "text/plain", "");
server1.client().stop();
}
void handle_ledon(void)
{
IPAddress ip = WiFi.localIP();
server1.sendHeader("Location", String("http://")+ ip.toString()+ String(":80/ledon"));
server1.send(302, "text/plain", "");
server1.client().stop();
}
void handle_ledoff(void)
{
IPAddress ip = WiFi.localIP();
server1.sendHeader("Location", String("http://")+ ip.toString()+ String(":80/ledoff"));
server1.send(302, "text/plain", "");
server1.client().stop();
}
void handle_start_ota(void)
{
IPAddress ip = WiFi.localIP();
server1.sendHeader("Location", String("http://")+ ip.toString()+ String(":80/start_ota"));
server1.send(302, "text/plain", "");
server1.client().stop();
// If OTA is not enabled
if (!otaStarted)
{
		// Stop the camera servers
#ifdef ENABLE_WEBSERVER1
stopWebStream();
#endif
delay(100);
Serial.println("OTA enabled");
		// Start the OTA server
startOTA();
otaStarted = true;
}
else
{
// If OTA was enabled
otaStarted = false;
		// Stop the OTA server
		stopOTA();
		// Restart the camera servers
#ifdef ENABLE_WEBSERVER1
initWebStream1();
#endif
}
}
/**
* Handle any other request from the web client
*/
void handleNotFound()
{
IPAddress ip = WiFi.localIP();
String message = "Browser Stream Link: http://";
message += ip.toString();
message += "81/stream\n";
message += "Browser Single Picture Link: http://";
message += ip.toString();
message += "/jpg\n";
message += "Switch LED ON: http://";
message += ip.toString();
message += "/ledon\n";
message += "Switch LED OFF: http://";
message += ip.toString();
message += "/ledoff\n";
message += "Start OTA mode: http://";
message += ip.toString();
message += "/start_ota\n";
message += "\n";
server1.send(200, "text/plain", message);
}
#endif

201
src/webstream2.cpp Normal file
View File

@@ -0,0 +1,201 @@
#include "main.h"
/** Web server class */
WebServer server2(80);
/** Forward declaration of the task handling browser requests */
void webTask2(void *pvParameters);
/** Task handle of the web task */
TaskHandle_t webTaskHandler2;
/** Flag to stop the web server2 */
boolean stopWeb2 = false;
/** Web request function forward declarations */
void handle_jpg_stream2(void);
void handle_jpg2(void);
void handle_ledon2(void);
void handle_ledoff2(void);
void handle_start_ota2(void);
void handleNotFound2();
/**
* Initialize the web stream server2 by starting the handler task
*/
void initWebStream2(void)
{
#ifdef ENABLE_WEBSERVER2
	// Create the task for the web server2 and pin it to core 1 (web server1 runs on core 0)
	xTaskCreatePinnedToCore(webTask2, "WEB2", 4096, NULL, 1, &webTaskHandler2, 1);
if (webTaskHandler2 == NULL)
{
Serial.println("Create Webstream2 task failed");
}
else
{
Serial.println("Webstream2 task up and running");
}
#endif
}
/**
* Called to stop the web server2 task, needed for OTA
* to avoid OTA timeout error
*/
void stopWebStream2(void)
{
stopWeb2 = true;
}
/**
* The task that handles web server2 connections
* Starts the web server2
* Handles requests in an endless loop
* until a stop request is received because OTA
* starts
*/
void webTask2(void *pvParameters)
{
// Set the function to handle stream requests
server2.on("/stream", HTTP_GET, handle_jpg_stream2);
// Set the function to handle single picture requests
server2.on("/jpg", HTTP_GET, handle_jpg2);
server2.on("/ledon", HTTP_GET, handle_ledon2);
server2.on("/ledoff", HTTP_GET, handle_ledoff2);
server2.on("/start_ota", HTTP_GET, handle_start_ota2);
// Set the function to handle other requests
server2.onNotFound(handleNotFound2);
// Start the web server2
server2.begin();
	while (!stopWeb2)
	{
#ifdef ENABLE_WEBSERVER2
		// Check if the server2 has clients
		server2.handleClient();
#endif
		delay(1);
	}
	// User requested web server2 stop
	server2.close();
	// Delete this task
	vTaskDelete(NULL);
}
#ifdef ENABLE_WEBSERVER2
/**
* Handle web stream requests
* redirect to ip:81/stream
*/
void handle_jpg_stream2(void)
{
IPAddress ip = WiFi.localIP();
server2.sendHeader("Location", String("http://")+ ip.toString()+ String(":81/stream"));
server2.send(302, "text/plain", "");
server2.client().stop();
}
/**
* Handle single picture requests
* Gets the latest picture from the camera
* and sends it to the web client
*/
void handle_jpg2(void)
{
WiFiClient thisClient = server2.client();
cam.run();
if (!thisClient.connected())
{
return;
}
String response = "HTTP/1.1 200 OK\r\n";
response += "Content-disposition: inline; filename=capture.jpg\r\n";
response += "Content-type: image/jpeg\r\n\r\n";
server2.sendContent(response);
thisClient.write((char *)cam.getfb(), cam.getSize());
}
void handle_ledon2(void)
{
digitalWrite(LED_BUILTIN, true);
IPAddress ip = WiFi.localIP();
String message = "Done";
message += "\n";
server2.send(200, "text/plain", message);
}
void handle_ledoff2(void)
{
digitalWrite(LED_BUILTIN, false);
IPAddress ip = WiFi.localIP();
String message = "Done";
message += "\n";
server2.send(200, "text/plain", message);
}
void handle_start_ota2(void)
{
IPAddress ip = WiFi.localIP();
String message = "Going into OTA mode";
message += "\n";
server2.send(200, "text/plain", message);
// If OTA is not enabled
if (!otaStarted)
{
		// Stop the camera servers
#ifdef ENABLE_WEBSERVER2
stopWebStream2();
#endif
delay(100);
Serial.println("OTA enabled");
		// Start the OTA server
startOTA();
otaStarted = true;
}
else
{
// If OTA was enabled
otaStarted = false;
		// Stop the OTA server
		stopOTA();
		// Restart the camera servers
#ifdef ENABLE_WEBSERVER2
initWebStream2();
#endif
}
}
/**
* Handle any other request from the web client
*/
void handleNotFound2()
{
IPAddress ip = WiFi.localIP();
String message = "Browser Stream Link: http://";
message += ip.toString();
message += ":81/stream\n";
message += "Browser Single Picture Link: http://";
message += ip.toString();
message += "/jpg\n";
message += "Switch LED ON: http://";
message += ip.toString();
message += "/ledon\n";
message += "Switch LED OFF: http://";
message += ip.toString();
message += "/ledoff\n";
message += "Start OTA mode: http://";
message += ip.toString();
message += "/start_ota\n";
message += "\n";
server2.send(200, "text/plain", message);
}
#endif

4
src/wifikeys.h Normal file
View File

@@ -0,0 +1,4 @@
#include <Arduino.h>
// Change YOUR_SSID and YOUR_PASSWORD to your WiFi credentials
const char *ssid = "YOUR_SSID"; // Put your SSID here
const char *password = "YOUR_PASSWORD"; // Put your PASSWORD here