Part 3: Turning It into a Web Camera!
Overview
In this installment of the feature, we introduce an example that detects a person with the HVC-P2 and then displays the image in a browser, turning the board into a web camera. The page appears in the browser as shown in the image below, and you can try out the HVC-P2's detection functions through the on-screen controls. Incidentally, the author really is a 39-year-old man. I'm not sad, but it seems I need to practice smiling so I don't look like I am!
Preparation
Referring to the connection photo below, prepare two USB cables (micro-B type) and a USB (Type A) conversion adapter in addition to the GR-PEACH and HVC-P2.
Also prepare a microSD card for storing the HTML file used by the browser. The HTML file to save is explained later.
Short the jumper (JP3) in the center of the GR-PEACH. This allows power to be supplied to VBUS when the board is connected as a USB host.
Finally, connect a LAN cable to the GR-PEACH.
Human Detection Web Camera Sample
First, save the "index.htm" and "mbedrpc.js" files used for the browser display to the microSD card.
Right-click the links below and select "Save target as..." to download them.
- index.htm GR-PEACH Web Camera Sample (HTM)
- mbedrpc.js (JS)
After saving the files, insert the microSD card into the SD socket on the back of the GR-PEACH. Then build the following sample with the Web Compiler and write the resulting bin file to the GR-PEACH. Double-clicking on the source code selects all of it.
/* GR-PEACH Sketch Template for using HVC-P2
* based on library version V1.06
*/
// GR-PEACH API
#include <Arduino.h>
#include <SD.h>
#include <EthernetInterface.h>
#include <HTTPServer.h>
#include <mbed_rpc.h>
#include <RomRamFileSystem.h>
#include <USBHostSerial.h>
#include <JPEG_Converter.h>
// HVC-P2 API
#include "HVCApi.h" // Omron sample
#include "HVCDef.h" // Omron sample
#include "HVCExtraUartFunc.h" // Omron sample
#define USB_SETTING_TIMEOUT 1000
#define USB_EXECUTE_TIMEOUT 2000
//#define ALL_DETECTION
#define SENSOR_ROLL_ANGLE_DEFAULT 0 /* Camera angle setting (0°) */
#define BODY_THRESHOLD_DEFAULT 500 /* Threshold for Human Body Detection */
#define FACE_THRESHOLD_DEFAULT 500 /* Threshold for Face Detection */
#define HAND_THRESHOLD_DEFAULT 500 /* Threshold for Hand Detection */
#define REC_THRESHOLD_DEFAULT 500 /* Threshold for Face Recognition */
#define BODY_SIZE_RANGE_MIN_DEFAULT 30 /* Human Body Detection minimum detection size */
#define BODY_SIZE_RANGE_MAX_DEFAULT 8192 /* Human Body Detection maximum detection size */
#define HAND_SIZE_RANGE_MIN_DEFAULT 40 /* Hand Detection minimum detection size */
#define HAND_SIZE_RANGE_MAX_DEFAULT 8192 /* Hand Detection maximum detection size */
#define FACE_SIZE_RANGE_MIN_DEFAULT 64 /* Face Detection minimum detection size */
#define FACE_SIZE_RANGE_MAX_DEFAULT 8192 /* Face Detection maximum detection size */
#define FACE_POSE_DEFAULT 0 /* Face Detection facial pose (frontal face)*/
#define FACE_ANGLE_DEFAULT 0 /* Face Detection roll angle (±15°)*/
#define PIXEL_WIDTH (320)
#define PIXEL_HEIGHT (240)
#define BYTE_PER_PIXEL_YUV (2u)
#define BYTE_PER_PIXEL_RGB (3u)
#define FRAME_STRIDE_YUV (((PIXEL_WIDTH * BYTE_PER_PIXEL_YUV) + 31u) & ~31u)
#define FRAME_STRIDE_RGB (((PIXEL_WIDTH * BYTE_PER_PIXEL_RGB) + 31u) & ~31u)
#define NUM_WEBFILES 2
/*! Frame buffer stride: Frame buffer stride should be set to a multiple of 32 or 128
in accordance with the frame buffer burst transfer mode. */
static uint8_t frame_buffer_YUV422[FRAME_STRIDE_YUV * PIXEL_HEIGHT]__attribute((section("NC_BSS"),aligned(32)));
static uint8_t frame_buffer_RGB888[FRAME_STRIDE_RGB * PIXEL_HEIGHT]__attribute((section("NC_BSS"),aligned(32)));
static uint8_t JpegBuffer[1024 * 200]__attribute((aligned(8))); //8 bytes aligned!;
static volatile int jcu_encoding = 1;
static volatile int jcu_encode_size = 1;
static JPEG_Converter Jcu;
static EthernetInterface network;
static RomRamFileSystem romramfs("romram");
const char mount_files[NUM_WEBFILES][13] = { //name should be 8.3 format
"index.htm", "mbedrpc.js" };
File myFile;
void RPC_HVC_Read(Arguments* arg, Reply* r);
void RPC_HVC_Write(Arguments* arg, Reply* r);
int char_to_int(char* c, int len);
static USBHostSerial * p_cdc;
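// UART glue for the Omron HVC API: HVCApi calls UART_SendData/UART_ReceiveData,
// which are forwarded here to the USBHostSerial connection to the HVC-P2.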
extern "C" int UART_SendData(int inDataSize, UINT8 *inData) {
return p_cdc->writeBuf((char *) inData, inDataSize);
}
extern "C" int UART_ReceiveData(int inTimeOutTime, int inDataSize,
UINT8 *outResult) {
return p_cdc->readBuf((char *) outResult, inDataSize, inTimeOutTime);
}
static bool hvc_rec_face = true;
static bool hvc_rec_age = false;
static bool hvc_rec_gender = false;
static bool hvc_rec_gaze = false;
static bool hvc_rec_blink = false;
static bool hvc_rec_expression = false;
static bool hvc_rec_body = false;
static bool hvc_rec_hand = false;
static int hvc_age;
static int hvc_gender;
static int hvc_gaze_lr;
static int hvc_gaze_ud;
static int hvc_blink_l;
static int hvc_blink_r;
static int hvc_expression;
static int hvc_resolution = HVC_EXECUTE_IMAGE_QVGA;
static int hvc_pixel_width = PIXEL_WIDTH;
static int hvc_pixel_height = PIXEL_HEIGHT;
void led_blink(int pin, int num) {
for (int i = 0; i < num; i++) {
digitalWrite(pin, HIGH);
delay(50);
digitalWrite(pin, LOW);
delay(50);
}
}
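// Fill the YUV422 buffer with black (Y = 0x10, Cb/Cr = 0x80).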
static void EraseImage(uint8_t* buf, uint32_t size) {
uint32_t i = 0;
while (i < size) {
buf[i++] = 0x10;
buf[i++] = 0x80;
}
}
static void dcache_invalid(void * p_buf, uint32_t size) {
uint32_t start_addr = (uint32_t) p_buf & 0xFFFFFFE0;
uint32_t end_addr = (uint32_t) p_buf + size;
uint32_t addr;
// Data cache invalid
for (addr = start_addr; addr < end_addr; addr += 0x20) {
__v7_inv_dcache_mva((void *) addr);
}
}
void JcuEncodeCallBackFunc(JPEG_Converter::jpeg_conv_error_t err_code) {
jcu_encoding = 0;
}
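// JPEG-encode the YUV422 buffer with the JCU hardware encoder via the
// JPEG_Converter library, waiting for the encode callback before returning
// the encoded size.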
size_t JpegEncode(uint8_t* buf, int len, int width, int height, uint8_t* inbuf) {
size_t encode_size;
JPEG_Converter::bitmap_buff_info_t bitmap_buff_info;
JPEG_Converter::encode_options_t encode_options;
bitmap_buff_info.width = width;
bitmap_buff_info.height = height;
bitmap_buff_info.format = JPEG_Converter::WR_RD_YCbCr422;
bitmap_buff_info.buffer_address = (void *) inbuf;
encode_options.encode_buff_size = len; // use the caller-supplied buffer size, not sizeof() of a pointer
encode_options.p_EncodeCallBackFunc = &JcuEncodeCallBackFunc;
// encode_options.input_swapsetting = JPEG_Converter::WR_RD_WRSWA_32_16_8BIT;
jcu_encoding = 1; // flag for waiting callback function of JCU
jcu_encode_size = 0;
dcache_invalid(buf, len);
if (Jcu.encode(&bitmap_buff_info, buf, &encode_size, &encode_options)
!= JPEG_Converter::JPEG_CONV_OK) {
jcu_encode_size = 0;
jcu_encoding = 0;
}
while (jcu_encoding == 1) {
delay(1);
}
return encode_size;
}
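// Draw a hollow rectangle into the RGB888 frame buffer. The HVC-P2 reports
// positions and sizes in its 1600 x 1200 camera coordinate system, so the
// values are clipped to the image and scaled down to the capture resolution
// (by 5 for QVGA, by 10 for the half-size image) before drawing.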
static void DrawSquare(int x, int y, int size, uint32_t const colour, uint8_t* inbuf) {
int wk_x;
int wk_y;
int wk_w = 0;
int wk_h = 0;
int idx_base;
int wk_idx;
int i;
int j;
uint8_t coller_pix[BYTE_PER_PIXEL_RGB];
bool l_draw = true;
bool r_draw = true;
bool t_draw = true;
bool b_draw = true;
if ((x - (size / 2)) < 0) {
l_draw = false;
wk_w += x;
wk_x = 0;
} else {
wk_w += (size / 2);
wk_x = x - (size / 2);
}
if ((x + (size / 2)) >= 1600) {
r_draw = false;
wk_w += (1600 - x);
} else {
wk_w += (size / 2);
}
if ((y - (size / 2)) < 0) {
t_draw = false;
wk_h += y;
wk_y = 0;
} else {
wk_h += (size / 2);
wk_y = y - (size / 2);
}
if ((y + (size / 2)) >= 1200) {
b_draw = false;
wk_h += (1200 - y);
} else {
wk_h += (size / 2);
}
wk_x = wk_x / 5 / hvc_resolution;
wk_y = wk_y / 5 / hvc_resolution;
wk_w = wk_w / 5 / hvc_resolution;
wk_h = wk_h / 5 / hvc_resolution;
idx_base = (wk_x + (hvc_pixel_width * wk_y)) * BYTE_PER_PIXEL_RGB;
/* Select color */
coller_pix[0] = (colour >> 16) & 0xff;
coller_pix[1] = (colour >> 8) & 0xff;
coller_pix[2] = colour & 0xff;
/* top */
if (t_draw) {
wk_idx = idx_base;
for (j = 0; j < wk_w; j++) {
inbuf[wk_idx++] = coller_pix[0];
inbuf[wk_idx++] = coller_pix[1];
inbuf[wk_idx++] = coller_pix[2];
}
}
/* middle */
for (i = 1; i < (wk_h - 1); i++) {
wk_idx = idx_base + (hvc_pixel_width * BYTE_PER_PIXEL_RGB * i);
if (l_draw) {
inbuf[wk_idx + 0] = coller_pix[0];
inbuf[wk_idx + 1] = coller_pix[1];
inbuf[wk_idx + 2] = coller_pix[2];
}
wk_idx += (wk_w - 1) * 3;
if (r_draw) {
inbuf[wk_idx + 0] = coller_pix[0];
inbuf[wk_idx + 1] = coller_pix[1];
inbuf[wk_idx + 2] = coller_pix[2];
}
}
/* bottom */
if (b_draw) {
wk_idx = idx_base + (hvc_pixel_width * BYTE_PER_PIXEL_RGB * (wk_h - 1));
for (j = 0; j < wk_w; j++) {
inbuf[wk_idx++] = coller_pix[0];
inbuf[wk_idx++] = coller_pix[1];
inbuf[wk_idx++] = coller_pix[2];
}
}
}
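// Called by SnapshotHandler for each /camera request: runs one HVC-P2
// detection cycle, copies the returned grayscale image into the RGB888
// buffer, draws rectangles for the detection results, converts the image to
// YUV422, JPEG-encodes it and returns the encoded size.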
static int snapshot_req(const char ** pp_data) {
INT32 timeOutTime;
int encode_size;
INT32 execFlag;
timeOutTime = USB_EXECUTE_TIMEOUT;
INT32 ret = 0; /* Return code */
UINT8 status;
HVC_RESULT *pHVCResult = NULL;
HVC_IMAGE *pImage = NULL;
/*********************************/
/* Result Structure Allocation */
/*********************************/
pHVCResult = (HVC_RESULT *) malloc(sizeof(HVC_RESULT));
if (pHVCResult == NULL) { /* Error processing */
Serial.println("Memory Allocation Error");
mbed_die();
}
/* Image Structure allocation */
pImage = (HVC_IMAGE *) malloc(sizeof(HVC_IMAGE));
if (pImage == NULL) {
printf("Memory Allocation Error : %08x\n", sizeof(HVC_RESULT));
mbed_die();
}
execFlag = 0;
if(hvc_rec_face == true){
execFlag |= (HVC_ACTIV_FACE_DETECTION | HVC_ACTIV_FACE_DIRECTION);
}
if(hvc_rec_age == true){
execFlag |= HVC_ACTIV_AGE_ESTIMATION;
}
if(hvc_rec_gender == true){
execFlag |= HVC_ACTIV_GENDER_ESTIMATION;
}
if(hvc_rec_gaze == true){
execFlag |= HVC_ACTIV_GAZE_ESTIMATION;
}
if(hvc_rec_blink == true){
execFlag |= HVC_ACTIV_BLINK_ESTIMATION;
}
if(hvc_rec_expression == true){
execFlag |= HVC_ACTIV_EXPRESSION_ESTIMATION;
}
if(hvc_rec_body == true){
execFlag |= HVC_ACTIV_BODY_DETECTION;
}
if(hvc_rec_hand == true){
execFlag |= HVC_ACTIV_HAND_DETECTION;
}
ret = HVC_ExecuteEx(timeOutTime, execFlag, hvc_resolution,
pHVCResult, &status);
if ((ret != 0) || (status != 0)) {
Serial.println("HVC_ExecuteEx Error");
}
EraseImage(frame_buffer_YUV422, sizeof(frame_buffer_YUV422));
// Prepare image data
// HVC-P2 outputs Y in order 4-3-2-1
// To simplify data processing, convert from 4-3-2-1 to 1-2-3-4
for (int i = 0; i < hvc_pixel_height; i++){
for (int j = 0; j < (hvc_pixel_width / 4); j++){
uint8_t temp[4];
temp[0] = pHVCResult->image.image[i * hvc_pixel_width + j * 4 + 3];
temp[1] = pHVCResult->image.image[i * hvc_pixel_width + j * 4 + 2];
temp[2] = pHVCResult->image.image[i * hvc_pixel_width + j * 4 + 1];
temp[3] = pHVCResult->image.image[i * hvc_pixel_width + j * 4 + 0];
pHVCResult->image.image[i * hvc_pixel_width + j * 4 + 0] = temp[0];
pHVCResult->image.image[i * hvc_pixel_width + j * 4 + 1] = temp[1];
pHVCResult->image.image[i * hvc_pixel_width + j * 4 + 2] = temp[2];
pHVCResult->image.image[i * hvc_pixel_width + j * 4 + 3] = temp[3];
}
}
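// Expand the grayscale image into the RGB888 buffer (R = G = B = Y) so that
// colored rectangles can be drawn on top of it.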
for (int i = 0; i < hvc_pixel_height; i++) {
for (int j = 0; j < hvc_pixel_width; j++) {
uint8_t Y = pHVCResult->image.image[i * hvc_pixel_width + j];
for (int k = 0; k < 3; k++){
frame_buffer_RGB888[(i * hvc_pixel_width * BYTE_PER_PIXEL_RGB)
+ BYTE_PER_PIXEL_RGB * j + k] = Y;
}
}
}
if(pHVCResult->bdResult.num || pHVCResult->hdResult.num || pHVCResult->fdResult.num){
led_blink(PIN_LED_BLUE, 2);
#if 0 // Y to YUV422
for (int i = 0; i < hvc_pixel_height; i++) {
for (int j = 0; j < (hvc_pixel_width); j++) {
frame_buffer_YUV422[(i * hvc_pixel_width * BYTE_PER_PIXEL_YUV)
+ BYTE_PER_PIXEL_YUV * j] = pHVCResult->image.image[i * hvc_pixel_width + j];
}
}
#endif
}
if(pHVCResult->executedFunc & HVC_ACTIV_BODY_DETECTION){
/* Body Detection result string */
Serial.print("Body result count:"); Serial.println(pHVCResult->bdResult.num);
for(int i = 0; i < pHVCResult->bdResult.num; i++){
// Draw red square
DrawSquare(pHVCResult->bdResult.bdResult[i].posX,
pHVCResult->bdResult.bdResult[i].posY,
pHVCResult->bdResult.bdResult[i].size,
0x00f00000, frame_buffer_RGB888);
}
}
if(pHVCResult->executedFunc & HVC_ACTIV_HAND_DETECTION){
/* Hand Detection result string */
Serial.print("Hand result count:"); Serial.println(pHVCResult->hdResult.num);
for(int i = 0; i < pHVCResult->hdResult.num; i++){
// Draw yellow square
DrawSquare(pHVCResult->hdResult.hdResult[i].posX,
pHVCResult->hdResult.hdResult[i].posY,
pHVCResult->hdResult.hdResult[i].size,
0x00f0f000, frame_buffer_RGB888);
}
}
/* Face Detection result string */
if(pHVCResult->executedFunc & HVC_ACTIV_FACE_DETECTION){
Serial.print("Face result count:"); Serial.println(pHVCResult->fdResult.num);
for(int i = 0; i < pHVCResult->fdResult.num; i++){
if(pHVCResult->executedFunc & HVC_ACTIV_FACE_DETECTION){
/* Detection */
// Draw sky blue square
DrawSquare(pHVCResult->fdResult.fcResult[i].dtResult.posX,
pHVCResult->fdResult.fcResult[i].dtResult.posY,
pHVCResult->fdResult.fcResult[i].dtResult.size,
0x00ffffff, frame_buffer_RGB888);
}
if(pHVCResult->executedFunc & HVC_ACTIV_FACE_DIRECTION){
/* Face Direction */
}
if(pHVCResult->executedFunc & HVC_ACTIV_AGE_ESTIMATION){
/* Age */
hvc_age = pHVCResult->fdResult.fcResult[0].ageResult.age;
}
if(pHVCResult->executedFunc & HVC_ACTIV_GENDER_ESTIMATION){
/* Gender */
if(-128 == pHVCResult->fdResult.fcResult[i].genderResult.gender){
// do nothing
}
else{
hvc_gender = pHVCResult->fdResult.fcResult[0].genderResult.gender;
if(1 == pHVCResult->fdResult.fcResult[i].genderResult.gender){
// Draw sky blue square
DrawSquare(pHVCResult->fdResult.fcResult[i].dtResult.posX,
pHVCResult->fdResult.fcResult[i].dtResult.posY,
pHVCResult->fdResult.fcResult[i].dtResult.size,
0x0000f0f0, frame_buffer_RGB888);
}
else{
// Draw pink square
DrawSquare(pHVCResult->fdResult.fcResult[i].dtResult.posX,
pHVCResult->fdResult.fcResult[i].dtResult.posY,
pHVCResult->fdResult.fcResult[i].dtResult.size,
0x00f000f0, frame_buffer_RGB888);
}
}
}
if(pHVCResult->executedFunc & HVC_ACTIV_GAZE_ESTIMATION){
/* Gaze */
if((-128 == pHVCResult->fdResult.fcResult[i].gazeResult.gazeLR) ||
(-128 == pHVCResult->fdResult.fcResult[i].gazeResult.gazeUD)){
// do nothing
}
else{
hvc_gaze_lr = pHVCResult->fdResult.fcResult[0].gazeResult.gazeLR;
hvc_gaze_ud = pHVCResult->fdResult.fcResult[0].gazeResult.gazeUD;
}
}
if(pHVCResult->executedFunc & HVC_ACTIV_BLINK_ESTIMATION){
/* Blink */
hvc_blink_l = pHVCResult->fdResult.fcResult[0].blinkResult.ratioL;
hvc_blink_r = pHVCResult->fdResult.fcResult[0].blinkResult.ratioR;
}
if(pHVCResult->executedFunc & HVC_ACTIV_EXPRESSION_ESTIMATION){
/* Expression */
hvc_expression = pHVCResult->fdResult.fcResult[0].expressionResult.topExpression;
}
}
}
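// Convert RGB888 back to YUV422 for the JPEG encoder, packing two pixels at
// a time as Y0 U Y1 V with ITU-R BT.601 coefficients. If either pixel of the
// pair is colored (R != G, i.e. part of a drawn rectangle), its U/V values
// are used so the rectangle color is preserved.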
for (int i = 0; i < hvc_pixel_height; i++) {
for (int j = 0; j < hvc_pixel_width; j+=2) { // processing 2 pixels
uint8_t r0,g0,b0,r1,g1,b1,y0,y1,u,v;
r1 = frame_buffer_RGB888[(i * hvc_pixel_width * BYTE_PER_PIXEL_RGB)
+ BYTE_PER_PIXEL_RGB * j + 5];
g1 = frame_buffer_RGB888[(i * hvc_pixel_width * BYTE_PER_PIXEL_RGB)
+ BYTE_PER_PIXEL_RGB * j + 4];
b1 = frame_buffer_RGB888[(i * hvc_pixel_width * BYTE_PER_PIXEL_RGB)
+ BYTE_PER_PIXEL_RGB * j + 3];
r0 = frame_buffer_RGB888[(i * hvc_pixel_width * BYTE_PER_PIXEL_RGB)
+ BYTE_PER_PIXEL_RGB * j + 2];
g0 = frame_buffer_RGB888[(i * hvc_pixel_width * BYTE_PER_PIXEL_RGB)
+ BYTE_PER_PIXEL_RGB * j + 1];
b0 = frame_buffer_RGB888[(i * hvc_pixel_width * BYTE_PER_PIXEL_RGB)
+ BYTE_PER_PIXEL_RGB * j + 0];
y0 = round( 0.256788 * r0 + 0.504129 * g0 + 0.097906 * b0) + 16;
y1 = round( 0.256788 * r1 + 0.504129 * g1 + 0.097906 * b1) + 16;
if(r0!=g0){ // pixel 0 is colored (part of a drawn rectangle), use its U/V
u = round(-0.148223 * r0 - 0.290993 * g0 + 0.439216 * b0) + 128;
v = round( 0.439216 * r0 - 0.367788 * g0 - 0.071427 * b0) + 128;
} else if (r1!=g1){ // pixel 1 is colored, use its U/V
u = round(-0.148223 * r1 - 0.290993 * g1 + 0.439216 * b1) + 128;
v = round( 0.439216 * r1 - 0.367788 * g1 - 0.071427 * b1) + 128;
} else { // both pixels are gray
u = round(-0.148223 * r0 - 0.290993 * g0 + 0.439216 * b0) + 128;
v = round( 0.439216 * r0 - 0.367788 * g0 - 0.071427 * b0) + 128;
}
frame_buffer_YUV422[(i * hvc_pixel_width * BYTE_PER_PIXEL_YUV)
+ BYTE_PER_PIXEL_YUV * j + 0] = y0;
frame_buffer_YUV422[(i * hvc_pixel_width * BYTE_PER_PIXEL_YUV)
+ BYTE_PER_PIXEL_YUV * j + 1] = u;
frame_buffer_YUV422[(i * hvc_pixel_width * BYTE_PER_PIXEL_YUV)
+ BYTE_PER_PIXEL_YUV * j + 2] = y1;
frame_buffer_YUV422[(i * hvc_pixel_width * BYTE_PER_PIXEL_YUV)
+ BYTE_PER_PIXEL_YUV * j + 3] = v;
}
}
encode_size = JpegEncode(JpegBuffer, sizeof(JpegBuffer), hvc_pixel_width, hvc_pixel_height, frame_buffer_YUV422);
*pp_data = (const char *) JpegBuffer;
free(pHVCResult);
free(pImage);
return encode_size;
}
int char_to_int(char* c, int len){
int r = 0;
for (int i = 0; i < len; i++) {
if (c[i] == 0) break;
r = r * 10 + c[i] - '0';
}
return r;
}
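// RPC handler: returns the most recent estimation result selected by argv[0]
// (0: age, 1: gender, 2: gaze LR, 3: gaze UD, 4: blink L, 5: blink R,
// 6: expression).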
void RPC_HVC_Read(Arguments* arg, Reply* r){
int ret;
char rpc_hvc_read_buf[10];
const char *pExStr[] = { "?", "Neutral", "Happiness", "Surprise", "Anger",
"Sadness" };
if (arg != NULL) {
switch(char_to_int(arg->argv[0], 2)){
case 0:
sprintf(rpc_hvc_read_buf, "%d", hvc_age);
break;
case 1:
if(hvc_gender == 1){
sprintf(rpc_hvc_read_buf, "Male");
} else {
sprintf(rpc_hvc_read_buf, "Female");
}
break;
case 2:
ret = hvc_gaze_lr;
sprintf(rpc_hvc_read_buf, "%d", ret);
break;
case 3:
ret = hvc_gaze_ud;
sprintf(rpc_hvc_read_buf, "%d", ret);
break;
case 4:
ret = hvc_blink_l;
sprintf(rpc_hvc_read_buf, "%d", ret);
break;
case 5:
ret = hvc_blink_r;
sprintf(rpc_hvc_read_buf, "%d", ret);
break;
case 6:
ret = hvc_expression;
sprintf(rpc_hvc_read_buf, "%s", pExStr[hvc_expression]);
break;
}
/* command analysis and execute */
r->putData<const char*>(rpc_hvc_read_buf);
//delay(1);
}
}
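// RPC handler: argv[0] selects the command (0: change resolution, 1: enable
// a detection/estimation function, 2: disable one); argv[1] selects the
// resolution or the function to toggle.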
void RPC_HVC_Write(Arguments* arg, Reply* r) {
Serial.println(char_to_int(arg->argv[0], 1));
Serial.println(char_to_int(arg->argv[1], 1));
if (arg != NULL) {
/* command analysis and execute */
if(char_to_int(arg->argv[0], 1) == 0){ // modify resolution
if(char_to_int(arg->argv[1], 1) == 1){
hvc_resolution = HVC_EXECUTE_IMAGE_QVGA;
hvc_pixel_width = PIXEL_WIDTH;
hvc_pixel_height = PIXEL_HEIGHT;
} else {
hvc_resolution = HVC_EXECUTE_IMAGE_QVGA_HALF;
hvc_pixel_width = PIXEL_WIDTH / 2;
hvc_pixel_height = PIXEL_HEIGHT / 2;
}
} else if(char_to_int(arg->argv[0], 1) == 1){ // enable each estimation
switch(char_to_int(arg->argv[1], 1)){
case 0:
hvc_rec_face = true;
break;
case 1:
hvc_rec_age = true;
break;
case 2:
hvc_rec_gender = true;
break;
case 3:
hvc_rec_gaze = true;
break;
case 4:
hvc_rec_blink = true;
break;
case 5:
hvc_rec_expression = true;
break;
case 6:
hvc_rec_body = true;
break;
case 7:
hvc_rec_hand = true;
break;
}
} else if(char_to_int(arg->argv[0], 1) == 2){ // disable each estimation
switch(char_to_int(arg->argv[1], 1)){
case 0:
hvc_rec_face = false;
break;
case 1:
hvc_rec_age = false;
break;
case 2:
hvc_rec_gender = false;
break;
case 3:
hvc_rec_gaze = false;
break;
case 4:
hvc_rec_blink = false;
break;
case 5:
hvc_rec_expression = false;
break;
case 6:
hvc_rec_body = false;
break;
case 7:
hvc_rec_hand = false;
break;
}
}
led_blink(PIN_LED_RED, 1);
}
}
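// Copy the web files (index.htm, mbedrpc.js) from the SD card into the
// RomRam file system, which the HTTP server uses as its document root.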
static void mount_romramfs(void) {
FILE * fp;
romramfs.format();
char write_path[20];
for (int i = 0; i < NUM_WEBFILES; i++) {
myFile = SD.open(mount_files[i], FILE_READ);
if (myFile) {
sprintf(write_path, "/romram/%s", mount_files[i]);
fp = fopen(write_path, "w");
// read from the file until there's nothing else in it:
while (myFile.available()) {
putc(myFile.read(), fp);
}
// close the file:
myFile.close();
fclose(fp);
}
}
}
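// setup() configures the HVC-P2 and starts the HTTP server, then never
// returns (it ends in an infinite loop), so local objects such as the
// USBHostSerial instance and the RPCFunction objects remain alive.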
void setup() {
INT32 ret = 0; /* Return code */
UINT8 status;
INT32 angleNo;
HVC_THRESHOLD threshold;
HVC_SIZERANGE sizeRange;
INT32 pose;
INT32 angle;
pinMode(PIN_LED_RED, OUTPUT);
pinMode(PIN_LED_GREEN, OUTPUT);
pinMode(PIN_LED_BLUE, OUTPUT);
Serial.begin(9600);
Serial.println("Program starts");
USBHostSerial cdc;
p_cdc = &cdc; // for HVC P2 API
Serial.println("SD Setting up...");
if (!SD.begin()) {
Serial.println("initialization failed!");
return;
}
mount_romramfs(); //RomRamFileSystem Mount
RPCFunction rpc_hvc_read(&RPC_HVC_Read, "RPC_HVC_Read");
RPCFunction rpc_hvc_write(&RPC_HVC_Write, "RPC_HVC_Write");
Serial.println("done.");
Serial.println("Network Setting up...");
if (network.init() != 0) { //for DHCP Server
//if (network.init(IP_ADDRESS, SUBNET_MASK, DEFAULT_GATEWAY) != 0) { //for Static IP Address (IPAddress, NetMasks, Gateway)
return;
}
if (network.connect() != 0) {
return;
}
Serial.print("MAC Address is ");
Serial.println(network.getMACAddress());
Serial.print("IP Address is ");
Serial.println(network.getIPAddress());
Serial.print("NetMask is ");
Serial.println(network.getNetworkMask());
Serial.print("Gateway Address is ");
Serial.println(network.getGateway());
Serial.println("Network Setup OK");
Serial.print("Waiting USB device...");
while (!p_cdc->connect()) { // wait for connecting HVC
led_blink(PIN_LED_RED, 1);
Thread::wait(500);
}
Serial.println("found.");
led_blink(PIN_LED_GREEN, 2);
p_cdc->baud(921600);
/*********************************/
/* Set Camera Angle */
/*********************************/
angleNo = SENSOR_ROLL_ANGLE_DEFAULT;
ret = HVC_SetCameraAngle(USB_SETTING_TIMEOUT, angleNo, &status);
/*********************************/
/* Set Threshold Values */
/*********************************/
threshold.bdThreshold = BODY_THRESHOLD_DEFAULT;
threshold.hdThreshold = HAND_THRESHOLD_DEFAULT;
threshold.dtThreshold = FACE_THRESHOLD_DEFAULT;
threshold.rsThreshold = REC_THRESHOLD_DEFAULT;
ret = HVC_SetThreshold(USB_SETTING_TIMEOUT, &threshold, &status);
/*********************************/
/* Set Detection Size */
/*********************************/
sizeRange.bdMinSize = BODY_SIZE_RANGE_MIN_DEFAULT;
sizeRange.bdMaxSize = BODY_SIZE_RANGE_MAX_DEFAULT;
sizeRange.hdMinSize = HAND_SIZE_RANGE_MIN_DEFAULT;
sizeRange.hdMaxSize = HAND_SIZE_RANGE_MAX_DEFAULT;
sizeRange.dtMinSize = FACE_SIZE_RANGE_MIN_DEFAULT;
sizeRange.dtMaxSize = FACE_SIZE_RANGE_MAX_DEFAULT;
ret = HVC_SetSizeRange(USB_SETTING_TIMEOUT, &sizeRange, &status);
/*********************************/
/* Set Face Angle */
/*********************************/
pose = FACE_POSE_DEFAULT;
angle = FACE_ANGLE_DEFAULT;
ret = HVC_SetFaceDetectionAngle(USB_SETTING_TIMEOUT, pose, angle, &status);
SnapshotHandler::attach_req(&snapshot_req);
HTTPServerAddHandler<SnapshotHandler>("/camera"); //Camera
FSHandler::mount("/romram", "/");
HTTPServerAddHandler<FSHandler>("/");
HTTPServerAddHandler<RPCHandler>("/rpc");
HTTPServerStart(80);
while (1)
;
}
void loop() {
}
Checking the Web Camera's Operation
After writing the program to the GR-PEACH, launch a serial monitor such as Tera Term and press the reset button on the GR-PEACH.
The program starts, reads the SD card, recognizes the USB device (HVC-P2) and sets up the network, then displays the LAN IP address obtained via DHCP as shown below.
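If everything goes well, the serial output should look roughly like the following sketch, based on the Serial prints in setup(); the MAC and IP addresses shown here are placeholder example values and will differ in your environment:
Program starts
SD Setting up...
done.
Network Setting up...
MAC Address is 00:xx:xx:xx:xx:xx
IP Address is 192.168.x.x
NetMask is 255.255.255.0
Gateway Address is 192.168.x.x
Network Setup OK
Waiting USB device...found.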
Enter the displayed IP address into your web browser's address bar. The web camera screen appears as shown below.
Only face detection is selected by default, so try enabling the other detection functions as well.
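For reference, the handlers registered in setup() correspond to the following URLs (replace <board IP address> with the address shown on the serial monitor):
- http://<board IP address>/ serves index.htm from the RomRam file system (FSHandler)
- http://<board IP address>/camera returns the latest JPEG snapshot generated by snapshot_req() (SnapshotHandler)
- http://<board IP address>/rpc is used by mbedrpc.js in index.htm to call RPC_HVC_Read and RPC_HVC_Write (RPCHandler)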