AI Challenge Camp (Advanced) Part 6: Implementing a Simple Face-Recognition Access-Control Program

This post was last edited by zhuxirui on 2025-1-11 09:56
In this installment we combine the Luckfox's GPIO resources to build a simple access-control (door lock) system.
Based on the face-recognition result, a green LED means the person is allowed through, while a red LED signals an anomaly and the face is captured and saved on the board.
We need to add stranger recognition to the code. The logic is straightforward: define an additional stranger threshold; when the feature distance exceeds it, the person is treated as a stranger, a snapshot is taken and the red LED lights up; once the stranger has left, the green LED comes back on a few seconds later.
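Before the full listing, here is a minimal, self-contained sketch of just that decision path. The distance value is a hard-coded placeholder (in the real program it comes from comparing ArcFace embeddings), and the LEDs are driven through the same gpio.py script introduced later in this post:

// decision_sketch.cpp -- distilled access-control logic only (illustrative sketch)
#include <cstdio>
#include <cstdlib>
#include <ctime>
#include <string>

int main() {
    const float stranger_dis = 1.4f;       // above this distance the face is treated as a stranger
    float distance = 1.6f;                 // placeholder; the real program computes this from ArcFace embeddings
    time_t last_snapshot = 0;              // 0 so the first stranger triggers a snapshot immediately
    int stranger_num = 0;

    if (distance > stranger_dis) {
        std::system("python ./gpio.py 1"); // red LED, via the gpio.py script shown later
        if (difftime(time(nullptr), last_snapshot) >= 10) {   // snapshot at most once every 10 s
            std::string path = "./strangers/" + std::to_string(stranger_num++) + ".jpg";
            std::printf("would save a snapshot to %s\n", path.c_str());
            last_snapshot = time(nullptr);
        }
    } else {
        std::system("python ./gpio.py 0"); // green LED: nobody unknown in front of the camera
    }
    return 0;
}

The complete program below wires this logic into the RetinaFace detection and ArcFace feature-extraction loop: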
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <unistd.h>   
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/fb.h>

#include <iomanip>
#include <cstring>
#include <dirent.h> // POSIX directory API
#include <libgen.h> // For basename() and dirname()
#include <stdlib.h> // for malloc and free
#include <vector>   // for std::vector
#include <utility>  // for std::pair
#include <algorithm>// for std::sort
#include <iostream> // for printf (std::cout is preferred in C++)
#include <string>   // for std::string / std::to_string
#include <sstream>  // for std::ostringstream
#include <unordered_set>
#include "retinaface_facenet.h"
#include <time.h>
#include <sys/time.h>


#include <ctime>
#include <chrono>
#include "dma_alloc.cpp"

#define USE_DMA    0


/*-------------------------------------------
                  Main Function
-------------------------------------------*/
int main(int argc, char **argv)
{
    time_t last_record_time = time(NULL);
    time_t last_record_time2 = time(NULL);

    //if (argc != 4)
    //{
    //    printf("%s <retinaface model_path> <facenet model_path> <reference pic_path> \n", argv[0]);
    //    return -1;
    //}
    system("RkLunch-stop.sh");
    
    //const char *model_path  = argv[1];
    //const char *model_path2 = argv[2];
    //const char *image_path  = argv[3]; 
    if (argc != 4) {
        std::cerr << "Usage: " << argv[0] << " <face folder> <match threshold> <stranger threshold>\n";
        return -1;
    }
    float stranger_dis = atof(argv[3]);
    float set_dis = atof(argv[2]);
    // Folder with the registered face images, taken from the command line
    char* folder_path = argv[1];

    struct stat path_stat;
    if (stat(folder_path, &path_stat) != 0 || !S_ISDIR(path_stat.st_mode)) {
        std::cerr << "The provided path is not a valid directory." << std::endl;
        return -1;
    }

    // Open the directory
    DIR* dir = opendir(folder_path);
    if (dir == nullptr) {
        std::cerr << "Failed to open directory." << std::endl;
        return -1;
    }

    // Walk the folder and collect file paths and file names (without extension)
    std::vector<std::string> file_paths;
    std::vector<std::string> file_names_without_extension;
    struct dirent* entry;
    while ((entry = readdir(dir)) != nullptr) {
        if (entry->d_type == DT_REG) { // regular file
            std::string file_path = std::string(folder_path) + "/" + entry->d_name;
            file_paths.push_back(file_path);

            // File name without the extension
            char* base_name = basename(entry->d_name);
            std::string name_without_extension = base_name;
            size_t last_dot_pos = name_without_extension.find_last_of('.');
            if (last_dot_pos != std::string::npos) {
                name_without_extension = name_without_extension.substr(0, last_dot_pos);
            }
            file_names_without_extension.push_back(name_without_extension);
        }
    }
    closedir(dir);

    // Number of registered face images
    size_t face_target = file_paths.size();

    // char** array holding the image file paths
    char** image_path_collection = new char*[face_target];
    for (size_t i = 0; i < face_target; ++i) {
        image_path_collection[i] = new char[file_paths[i].size() + 1];
        std::strcpy(image_path_collection[i], file_paths[i].c_str());
    }

    // char** array holding the face names (file names without extension)
    char** name_collection = new char*[face_target];
    for (size_t i = 0; i < face_target; ++i) {
        name_collection[i] = new char[file_names_without_extension[i].size() + 1];
        std::strcpy(name_collection[i], file_names_without_extension[i].c_str());
    }
    const char *model_path  = "./model/retinaface.rknn";
    const char *model_path2 = "./model/arcface.rknn";
    const char *image_path  = "./model/test.jpg";
    float* out_fp32 = (float*)malloc(sizeof(float) * 512); // each embedding holds 512 floats
    float** out_fp32_collection = (float**)malloc(sizeof(float*) * face_target); // one float* per registered face
    clock_t start_time;
    clock_t end_time;
 
    //Model Input
    //Retinaface
    int retina_width    = 640;
    int retina_height   = 640;
    //Facenet
    int facenet_width   = 112;
    int facenet_height  = 112;
    int channels = 3;

    int ret;
    rknn_app_context_t app_retinaface_ctx;
    rknn_app_context_t app_facenet_ctx; 
    object_detect_result_list od_results;

    memset(&app_retinaface_ctx, 0, sizeof(rknn_app_context_t));
    memset(&app_facenet_ctx, 0, sizeof(rknn_app_context_t));

    //Init Model
    init_retinaface_facenet_model(model_path, model_path2, &app_retinaface_ctx, &app_facenet_ctx);

    //Init fb
    int disp_flag = 0;
    int pixel_size = 0;
    size_t screensize = 0;
    int disp_width  = 0;
    int disp_height = 0;
    void* framebuffer = NULL; 
    struct fb_fix_screeninfo fb_fix;
    struct fb_var_screeninfo fb_var;
    
    int framebuffer_fd = 0; //for DMA
    cv::Mat disp;
  
    int fb = open("/dev/fb0", O_RDWR);
    if(fb == -1)
        printf("Screen OFF!\n");
    else
        disp_flag = 1;


    if(disp_flag){
        ioctl(fb, FBIOGET_VSCREENINFO, &fb_var);
        ioctl(fb, FBIOGET_FSCREENINFO, &fb_fix);

        disp_width = fb_var.xres;
        disp_height = fb_var.yres;  
        pixel_size = fb_var.bits_per_pixel / 8;
        printf("Screen width = %d, Screen height = %d, Pixel_size = %d\n",disp_width, disp_height, pixel_size);
        
        screensize = disp_width * disp_height * pixel_size;
        framebuffer = (uint8_t*)mmap(NULL, screensize, PROT_READ | PROT_WRITE, MAP_SHARED, fb, 0);
        
        if( pixel_size == 4 )//ARGB8888
            disp = cv::Mat(disp_height, disp_width, CV_8UC3);
        else if ( pixel_size == 2 ) //RGB565
            disp = cv::Mat(disp_height, disp_width, CV_16UC1); 

#if USE_DMA
        dma_buf_alloc(RV1106_CMA_HEAP_PATH,
                      disp_width * disp_height * pixel_size,  
                      &framebuffer_fd, 
                      (void **) & (disp.data)); 
#endif
    }
    else{
        disp_height = 240;
        disp_width = 240;
    }

    //Init Opencv-mobile
    cv::VideoCapture cap;
    cv::Mat bgr(disp_height, disp_width, CV_8UC3);
    cv::Mat retina_input(retina_height, retina_width, CV_8UC3, app_retinaface_ctx.input_mems[0]->virt_addr);
    cap.set(cv::CAP_PROP_FRAME_WIDTH,  disp_width);
    cap.set(cv::CAP_PROP_FRAME_HEIGHT, disp_height);
    cap.open(0); 

    //Get reference img feature
    cv::Mat image = cv::imread(image_path);
    cv::Mat facenet_input(facenet_height, facenet_width, CV_8UC3, app_facenet_ctx.input_mems[0]->virt_addr);
    letterbox(image,facenet_input); 
    ret = rknn_run(app_facenet_ctx.rknn_ctx, nullptr);
    if (ret < 0) {
        printf("rknn_run fail! ret=%d\n", ret);
        return -1;
    }
    uint8_t  *output = (uint8_t *)(app_facenet_ctx.output_mems[0]->virt_addr);
    float* reference_out_fp32 = (float*)malloc(sizeof(float) * 512); 
    //output_normalization(&app_facenet_ctx,output,reference_out_fp32);
    //memset(facenet_input.data, 0, facenet_width * facenet_height * channels);

    //float* out_fp32 = (float*)malloc(sizeof(float) * 128); 
    



    // Compute and cache the normalized embedding of every registered face
    for(int i = 0; i < face_target; i++){
        cv::Mat image = cv::imread(image_path_collection[i]);
        cv::Mat facenet_input(facenet_height, facenet_width, CV_8UC3, app_facenet_ctx.input_mems[0]->virt_addr);
        letterbox(image, facenet_input);
        ret = rknn_run(app_facenet_ctx.rknn_ctx, nullptr);
        if (ret < 0) {
            printf("rknn_run fail! ret=%d\n", ret);
            return -1;
        }
        uint8_t  *output = (uint8_t *)(app_facenet_ctx.output_mems[0]->virt_addr);
        float* reference_out_fp32 = (float*)malloc(sizeof(float) * 512);
        output_normalization(&app_facenet_ctx, output, reference_out_fp32);
        out_fp32_collection[i] = reference_out_fp32;
    }

    //


    char show_text[12]; 
    char fps_text[32]; 
    float fps = 0;
    
    while(1)
    {   
        start_time = clock();
        //opencv get photo
        cap >> bgr;
        
        cv::resize(bgr, retina_input, cv::Size(retina_width,retina_height), 0, 0, cv::INTER_LINEAR);
        ret = inference_retinaface_model(&app_retinaface_ctx, &od_results);
        if (ret != 0)
        {
            printf("init_retinaface_model fail! ret=%d\n", ret);
            return -1;
        }
        //printf("running------------------\n");
        for (int i = 0; i < od_results.count; i++)
        {       
            //Get det 
            object_detect_result *det_result = &(od_results.results[i]);
            mapCoordinates(bgr, retina_input, &det_result->box.left , &det_result->box.top);
            mapCoordinates(bgr, retina_input, &det_result->box.right, &det_result->box.bottom);

            cv::rectangle(bgr,cv::Point(det_result->box.left ,det_result->box.top),
                          cv::Point(det_result->box.right,det_result->box.bottom),cv::Scalar(0,255,0),3);
            cv::Mat temp = bgr;
            //Face capture
            cv::Rect roi(det_result->box.left,det_result->box.top, 
                         (det_result->box.right - det_result->box.left),
                         (det_result->box.bottom - det_result->box.top));
            cv::Mat face_img = bgr(roi);

            //Give five key points
            // for(int j = 0; j < 5;j ++)
            // {
            //     //printf("point_x = %d point_y = %d\n",det_result->point[j].x,
            //     //                                     det_result->point[j].y);
            //     cv::circle(bgr,cv::Point(det_result->point[j].x,det_result->point[j].y),10,cv::Scalar(0,255,0),3);
            // }
            
            letterbox(face_img,facenet_input); 
            ret = rknn_run(app_facenet_ctx.rknn_ctx, nullptr);
            if (ret < 0) {
                printf("rknn_run fail! ret=%d\n", ret);
                return -1;
            }
            output = (uint8_t *)(app_facenet_ctx.output_mems[0]->virt_addr);
            float *norm_list = (float*)malloc(sizeof(float) * face_target);
            output_normalization(&app_facenet_ctx, output, out_fp32);

            std::vector<std::pair<float, int>> distances; // distance and the corresponding label index
            std::vector<std::pair<std::string, float>> detected_names_with_scores; // matched names and confidence scores
            std::unordered_set<int> matched_indices; // label indices that have already been matched
            static int stranger_num = 0;
            
            for (int i = 0; i < face_target; i++) {
                float distance = get_duclidean_distance(out_fp32, out_fp32_collection[i]);
                distances.push_back(std::make_pair(distance, i)); // store the distance together with its label index
                time_t current_time = time(NULL);
                time_t time_diff = difftime(current_time, last_record_time);
                time_t time_diff2 = difftime(current_time, last_record_time2);
                if (distance > stranger_dis) {
                    system("python ./gpio.py 1");   // red LED: stranger detected
                    printf("Stranger distance: %f\n", distance);

                    if (time_diff >= 10) { // take a snapshot at most once every 10 seconds
                        // Save the snapshot
                        std::string a = "./strangers/";
                        std::string b = ".jpg";
                        std::string path = a + std::to_string(stranger_num) + b;
                        stranger_num++;
                        printf("Saving photo at: %s\n", path.c_str());
                        cv::imwrite(path, temp); // write the full frame to disk

                        // Update the last snapshot time
                        last_record_time = current_time;
                    }
                }
                else if (distance < stranger_dis && time_diff2 >= 4) {
                    system("python ./gpio.py 0");   // green LED
                    last_record_time2 = current_time;
                }
            }


            // Sort the labels by distance, closest first
            std::sort(distances.begin(), distances.end());

            // Walk the sorted list and collect every label whose distance is below the threshold
            for (const auto& pair : distances) {
                if (pair.first < set_dis) {
                    // Make sure each label is matched only once
                    if (matched_indices.find(pair.second) == matched_indices.end()) {
                        // Confidence score: the smaller the distance, the higher the score
                        float score = 1.0f / (1.0f + pair.first);
                        detected_names_with_scores.emplace_back(name_collection[pair.second], score);
                        matched_indices.insert(pair.second);        // remember the matched label
                    }
                }
            }

            // Build the final string of detected names and confidence scores
            std::ostringstream name_detected_stream;
            for (size_t i = 0; i < detected_names_with_scores.size(); i++) {
                if (i > 0) {
                    name_detected_stream << ", ";
                }
                // std::fixed and std::setprecision keep the score to two decimal places
                name_detected_stream << detected_names_with_scores[i].first
                                    << " (" << std::fixed << std::setprecision(2) << detected_names_with_scores[i].second << ")";
            }
            std::string name_detected = name_detected_stream.str();

            // Print the detected names and confidence scores
            if (!name_detected.empty()) {
                printf("detected: %s\n", name_detected.c_str());
            }

            // Free the per-frame buffer
            free(norm_list);


            //sprintf(show_text,"norm=%f",norm);
            //cv::putText(bgr, show_text, cv::Point(det_result->box.left, det_result->box.top - 8),
            //                               cv::FONT_HERSHEY_SIMPLEX,0.5,
            //                               cv::Scalar(0,255,0),
            //                              1);


        } 
        
        if(disp_flag){
            //Fps Show
            sprintf(fps_text,"fps=%.1f",fps); 
            cv::putText(bgr,fps_text,cv::Point(0, 20),
                        cv::FONT_HERSHEY_SIMPLEX,0.5,
                        cv::Scalar(0,255,0),1);

            //LCD Show 
            if( pixel_size == 4 ) 
                cv::cvtColor(bgr, disp, cv::COLOR_BGR2BGRA);
            else if( pixel_size == 2 )
                cv::cvtColor(bgr, disp, cv::COLOR_BGR2BGR565);
            memcpy(framebuffer, disp.data, disp_width * disp_height * pixel_size);
#if USE_DMA
            dma_sync_cpu_to_device(framebuffer_fd);
#endif  
        }
        //Update Fps
        end_time = clock();
        fps = ((float)CLOCKS_PER_SEC / (end_time - start_time)) ;
    } 
 
    free(reference_out_fp32);
    free(out_fp32);

    if(disp_flag){
        close(fb);
        munmap(framebuffer, screensize);
#if USE_DMA 
        dma_buf_free(disp_width*disp_height*2,
                     &framebuffer_fd, 
                     bgr.data);
#endif
    }

    release_facenet_model(&app_facenet_ctx);
    release_retinaface_model(&app_retinaface_ctx);
    return 0;
}

The previous installment did not explain the deployment process clearly, so here it is again from the start.

git clone https://github.com/LuckfoxTECH/luckfox_pico_rknn_example.git  # mind your LuckFox kernel version

First set up the luckfox rknn build environment on the PC, then copy the converted .rknn files into luckfox_pico_rknn_example/example/luckfox_pico_retinaface_facenet/model/.

cd luckfox_pico_rknn_example/
export LUCKFOX_SDK_PATH=/home/user/luckfox-pico # replace with your own LUCKFOX_SDK path
./build.sh

When prompted, enter 1 and press Enter.

Then upload the install/luckfox_pico_retinaface_facenet_demo folder to the board. I simply used MobaXterm here; uploading with adb works just as well.
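If you go the adb route, a push along these lines should work (the /root/ target directory is just an assumption; use whichever directory you keep your demos in on the board):

adb push install/luckfox_pico_retinaface_facenet_demo /root/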

cd luckfox_pico_retinaface_facenet_demo/
mkdir strangers       # captured stranger faces are saved here
mkdir -p test/person  # put the face images you want to register in here

To control the LEDs through the Luckfox GPIOs, you also need to write gpio.py yourself:

touch gpio.py

 

#gpio.py
from periphery import GPIO
import sys
import os

# GPIO pin numbers
A_Pin = 55  
B_Pin = 54

# Check the command-line argument
if len(sys.argv) != 2:
    print("Usage: python script.py <1 or 0>")
    sys.exit(1)

# Read the command-line argument
output_pin = int(sys.argv[1])

# Initialise the GPIO objects
if output_pin == 1:
    Write_GPIO_A = GPIO(A_Pin, 'out')
    Write_GPIO_B = GPIO(B_Pin, 'out')
    Write_GPIO_A.write(True)   # assume 1 means high level
    Write_GPIO_B.write(False)  # assume 0 means low level
else:
    Write_GPIO_A = GPIO(A_Pin, 'out')
    Write_GPIO_B = GPIO(B_Pin, 'out')
    Write_GPIO_A.write(False)
    Write_GPIO_B.write(True)

# Release the GPIO objects
Write_GPIO_A.close()
Write_GPIO_B.close()

Once the script is in place, you can run the demo:

chmod 777 luckfox_pico_retinaface_facenet
./luckfox_pico_retinaface_facenet ./test/person/ 1.18 1.4  # arg 1: folder containing the registered faces (the image file name is used as the person's name); arg 2: threshold for recognising a registered face; arg 3: threshold for flagging a stranger

[Demo video: WIN_20250111_02_09_48_Pro]

 
