LeoCar: Raspberry Pi Smart Car (Integration)

📡Wiring Diagram

  • The wiring barely changes; ultrasonic obstacle avoidance is not tested here. The only change is that the pins the ESP32 used to drive the TB6612FNG are moved to the corresponding Raspberry Pi pins.
    Overall wiring

📡Car Motion

  • We first port the basic car control from the ESP32 to the Raspberry Pi. The RPi.GPIO library lets Python drive the Pi's pins almost as simply as on a microcontroller. Pin initialization, PWM setup, and duty-cycle control are wrapped in a class for easy reuse; the main block is for testing, and running it makes the car cycle through all of its moves. Unlike the earlier ESP32 code, the duty-cycle range here is 0-100.
  • Save this file as device.py (it is imported later).
import RPi.GPIO as GPIO
import time

class MotorControl:
    def __init__(self, pwm_channel_1, pwm_channel_2, in1, in2, in3, in4, standby=22):
        # Initialize GPIO
        GPIO.setmode(GPIO.BCM)
        GPIO.setwarnings(False)

        # Setup pins
        self.in1 = in1
        self.in2 = in2
        self.in3 = in3
        self.in4 = in4
        self.standby = standby

        GPIO.setup(self.in1, GPIO.OUT)
        GPIO.setup(self.in2, GPIO.OUT)
        GPIO.setup(self.in3, GPIO.OUT)
        GPIO.setup(self.in4, GPIO.OUT)
        GPIO.setup(self.standby, GPIO.OUT)

        # Initialize PWM
        GPIO.setup(pwm_channel_1, GPIO.OUT)
        GPIO.setup(pwm_channel_2, GPIO.OUT)
        self.pwm_channel_1 = GPIO.PWM(pwm_channel_1, 5000)
        self.pwm_channel_2 = GPIO.PWM(pwm_channel_2, 5000)
        self.pwm_channel_1.start(0)
        self.pwm_channel_2.start(0)
        # WORK
        GPIO.output(self.standby, GPIO.HIGH)

    def set_pwm(self, moto1, moto2, amplitude=100):
        # Motor 1
        GPIO.output(self.in1, GPIO.HIGH if moto1 > 0 else GPIO.LOW)
        GPIO.output(self.in2, GPIO.LOW if moto1 > 0 else GPIO.HIGH)
        # Motor 2
        GPIO.output(self.in3, GPIO.LOW if moto2 > 0 else GPIO.HIGH)
        GPIO.output(self.in4, GPIO.HIGH if moto2 > 0 else GPIO.LOW)
        # Limit PWM values
        moto1 = max(-amplitude, min(amplitude, moto1))
        moto2 = max(-amplitude, min(amplitude, moto2))
        # Set PWM values
        self.pwm_channel_1.ChangeDutyCycle(abs(moto1))
        self.pwm_channel_2.ChangeDutyCycle(abs(moto2))

    def stop_motors(self):
        self.pwm_channel_1.ChangeDutyCycle(0)
        self.pwm_channel_2.ChangeDutyCycle(0)

    def cleanup(self):
        GPIO.cleanup()


if __name__ == "__main__":
    # Define GPIO pins for left and right motors (change these pins according to your setup)
    left_motor_pins = {
        'pwm_channel_1': 12,
        'in1': 17,
        'in2': 27
    }
    right_motor_pins = {
        'pwm_channel_2': 13,
        'in3': 19,
        'in4': 26
    }
    # Create an instance of MotorControl
    motor_control = MotorControl(**left_motor_pins, **right_motor_pins, standby=22)
    # Run the robot in different directions
    try:
        while True:
            motor_control.set_pwm(50, 50)  # Forward
            time.sleep(2)
            motor_control.set_pwm(-50, -50)  # Backward
            time.sleep(2)
            motor_control.set_pwm(30, 50)  # Left
            time.sleep(2)
            motor_control.set_pwm(50, 30)  # Right
            time.sleep(2)
            motor_control.set_pwm(0, 0)  # Stop
            time.sleep(1)
            motor_control.set_pwm(25, -25)  # Pivot Turn
            time.sleep(2)
            motor_control.stop_motors()
            time.sleep(1)

    except KeyboardInterrupt:
        motor_control.cleanup()
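  • Since later scripts import this class from device.py, a minimal reuse sketch looks like the following (pin numbers are the same as in the test block above; treat it as an illustration, not one of the project files):
from device import MotorControl
import time

# drive forward briefly, then stop and release the GPIO pins
motors = MotorControl(pwm_channel_1=12, pwm_channel_2=13,
                      in1=17, in2=27, in3=19, in4=26, standby=22)
try:
    motors.set_pwm(40, 40)    # forward at 40% duty cycle
    time.sleep(1)
finally:
    motors.stop_motors()
    motors.cleanup()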

📡Remote Control

  • We reuse the earlier web app to control the car: the interactive joystick component makes it pivot in place (other motions are not implemented here; they would just need a few more buttons sending dedicated commands).
  • First, make the following small changes to the frontend code (index.html) from the networking article: find the touchmove and touchend callbacks in the touch-handling section, overwrite them as shown below, and add a url variable. Instead of the raw angle, we now send the sine of the change in angle, and when the drag ends we send 0, meaning stop turning.
  • url points to the Raspberry Pi running the Flask server (for example http://<pi-ip>:5000/position).
    canvas.addEventListener('touchmove', (e) => {
      e.preventDefault();
      if (isDragging) {
        const x = e.touches[0].clientX;
        const y = e.touches[0].clientY;
        const rect = canvas.getBoundingClientRect();
        const mouseX = x - rect.left;
        const mouseY = y - rect.top;
        angle = updateSmallCircle(mouseX, mouseY);
        if(Math.abs(last_angle - angle) > 5){
          // send the AJAX request
          $.ajax({
            url: url,
            type: "POST",
            data: JSON.stringify({ angle: Math.sin((angle - last_angle) * Math.PI / 180) }),
            contentType: "application/json",
            dataType: 'json',
            success: function (data) {
              console.log(data);
            }
          });
          last_angle = angle;
          label.textContent = `Angle: ${angle}°`;
        }
      }
    });

    canvas.addEventListener('touchend', () => {
      isDragging = false;
      $.ajax({
        url: url,
        type: "POST",
        data: JSON.stringify({ angle: 0 }),
        contentType: "application/json",
        dataType: 'json',
        success: function (data) {
          console.log(data);
        }
      });
    });
  • Run the following program on the Raspberry Pi; device is the motor-control module above. The code is simple: it combines the earlier Flask app with motor control, turning left when the received sine value is positive and right when it is negative.
from flask import Flask, jsonify, request, make_response
import time
from flask_cors import CORS

from device import MotorControl

app = Flask(__name__, static_url_path="")
CORS(app, resources={r"/position/*": {"origins": "*"}})

angle = 0  # sine of the angle change (positive = turn left, negative = turn right)

@app.route("/")
def index():
    return app.send_static_file("index.html")


@app.route('/position', methods=['POST'])
def send_button_position():  # put application's code here
    global angle
    args = request.json
    try:
        angle = round(args["angle"], 2)
        print(f'angle = {angle:.2f}')
        if angle > 0:
            motor_control.set_pwm(-15, 15)
        elif angle < 0:
            motor_control.set_pwm(15, -15)
        else:
            motor_control.set_pwm(0, 0)
        time.sleep(0.01)
        return jsonify({"passed": True, "message": "成功发送位置", "data": f'angle={args["angle"]}'})
    except:
        return jsonify({"passed": False, "message": "错误", "data": None})


def motor_init():
    # Define GPIO pins for left and right motors (change these pins according to your setup)
    left_motor_pins = {
        'pwm_channel_1': 12,
        'in1': 17,
        'in2': 27
    }
    right_motor_pins = {
        'pwm_channel_2': 13,
        'in3': 19,
        'in4': 26
    }
    # Create an instance of MotorControl
    return MotorControl(**left_motor_pins, **right_motor_pins, standby=22)

if __name__ == '__main__':
    motor_control = motor_init()
    app.run(debug=True, host='0.0.0.0', port=5000)
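  • Before wiring up the joystick page, the /position endpoint can be sanity-checked with a tiny client script (a hypothetical test snippet, not part of the project; replace the address with your Pi's IP):
import requests

# positive sine value -> pivot left, 0 -> stop (matches the server logic above)
print(requests.post("http://<raspberry-pi-ip>:5000/position", json={"angle": 0.5}).json())
print(requests.post("http://<raspberry-pi-ip>:5000/position", json={"angle": 0}).json())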
  • The result:
    Web remote control of the car

📡Face Tracking

  • Now we add face detection: each camera frame is scanned for a face, and the horizontal offset between the face center (column) and the image center (column) drives the car to turn left or right, keeping the face centered and thus tracking it. To keep the turning smooth, neither too sluggish nor overshooting, a PID controller is used. Here is the complete program.
from flask import Flask, jsonify, request, make_response
import os, time
import threading
from flask_cors import CORS
import cv2
import numpy as np
from urllib import request as urlrequest

from device import MotorControl

app = Flask(__name__, static_url_path="")
CORS(app, resources={r"/position/*": {"origins": "*"}})

angle = 0			# sin delta angle
lock = threading.Lock()

@app.route("/")
def index():
    return app.send_static_file("index.html")

@app.route('/position',methods=['POST'])
def send_button_position():  # put application's code here
    global angle
    args = request.json
    try:
        angle = round(args["angle"], 2)
        print(f'angle = {angle:.2f}')
        time.sleep(0.01)
        return jsonify({"passed": True, "message": "成功发送位置", "data": f'angle={args["angle"]}'})
    except:
        return jsonify({"passed": False, "message": "错误", "data": None})

def motor_init():
    left_motor_pins = {
        'pwm_channel_1': 12,
        'in1': 17,
        'in2': 27
    }
    right_motor_pins = {
        'pwm_channel_2': 13,
        'in3': 19,
        'in4': 26
    }
    return MotorControl(**left_motor_pins, **right_motor_pins, standby=22)

class PIDController:
    def __init__(self, kp, ki, kd):
        self.kp = kp
        self.ki = ki
        self.kd = kd
        self.prev_error = 0
        self.integral = 0
        self.i_limit = 10

    def calculate_output(self, error):
        self.integral += error
        self.integral = max(min(self.integral, self.i_limit), -self.i_limit)
        derivative = error - self.prev_error
        output = self.kp * error + self.ki * self.integral + self.kd * derivative
        # ~ print(f'{self.kp * error},{self.ki * self.integral},{self.kd * derivative}')
        self.prev_error = error
        return output

motor_control = motor_init()
pid_controller = PIDController(kp=0.02, ki=0.005, kd=0.005)

def downloadImg(url):           # fetch one video frame from the network camera
    with urlrequest.urlopen(url) as f:
        data = f.read()
        img1 = np.frombuffer(data, np.uint8)
        # print("img1 shape ", img1.shape) # (83653,)
        img_cv = cv2.imdecode(img1, cv2.IMREAD_ANYCOLOR)
        return img_cv
    
def face_detect():              # detect faces and steer the car toward them
    global motor_control, pid_controller
    url = "http://<video-stream-ip>/?action=snapshot"
    # load the pre-trained face-detection model
    face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_alt2.xml')
    while True:
        # grab one frame from the video stream
        frame = downloadImg(url)
        # convert the frame to grayscale
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        # detect faces
        faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
        if len(faces) > 0:
            x, y, w, h = faces[0]
            target = x + w / 2
            offset = target - gray.shape[1] / 2         # horizontal offset between face center and image center
            pwm_output = int(pid_controller.calculate_output(offset))       # PID calculation
            print(f'face: {offset}, pwm: {pwm_output}')
            if abs(offset) > 8:
                if offset < 0:
                    motor_control.set_pwm(-abs(pwm_output), abs(pwm_output))
                else:
                    motor_control.set_pwm(abs(pwm_output), -abs(pwm_output))
                time.sleep(0.15)
                motor_control.set_pwm(0, 0)
            else:
                motor_control.set_pwm(0, 0)
        else:
            motor_control.set_pwm(0, 0)
    
thread1 = threading.Thread(name='t1', target=face_detect)       # face-detection thread
thread1.daemon = True
thread1.start()

if __name__ == '__main__':
    # app.run()
    app.run(debug=True, host='0.0.0.0', port=5000, threaded=True)
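  • A side note on the model file: the script above expects haarcascade_frontalface_alt2.xml to sit next to it. If you would rather load the copy bundled with opencv-python, a minimal alternative sketch is:
import cv2

# cv2.data.haarcascades is the directory where pip installs the bundled cascade XML files
cascade_path = cv2.data.haarcascades + "haarcascade_frontalface_alt2.xml"
face_cascade = cv2.CascadeClassifier(cascade_path)
assert not face_cascade.empty(), "cascade failed to load"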
  • A quick walkthrough: the Flask part at the top is the same as before, and the motor initialization is also identical to what we already implemented.

  • A PID controller is added; its input is the offset error and its output is the PWM duty cycle that drives the motors.

  • Then comes the face-detection code: frames are read from the network video stream in a loop and run through face detection using OpenCV's haarcascade_frontalface_alt2.xml, a face-detection model trained with Haar features and a cascade classifier. When a face is found, the offset is computed, fed through the PID, and the output drives the motors. The detection function runs in its own thread.

  • I use jog-style control here: even when the car needs to rotate, each cycle only turns it for a fixed slice of time. Otherwise my motors spin fast even at very low duty cycles and easily overshoot. Jogging sacrifices smoothness but tracks fairly accurately; if your motors have a wide enough controllable range, it is unnecessary (a minimal sketch of the pattern follows at the end of this list).

  • The result: the car turns to follow the face (the camera and the face move together), the console prints the detection results, and the joystick can still send data to the Pi (I did not hook that up to the motors here; it just tests reception).
    Console output

    Car tracking a face
  • There is room for improvement: a higher face-detection frame rate and a wider controllable motor range would make the control much smoother.

  • With that, all of the features planned at the start are complete. More ideas may follow in later posts.
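  • The jog pattern mentioned above, pulled out of face_detect() as a standalone helper for clarity (jog_turn is a hypothetical name, not used in the project code):
import time

def jog_turn(motor_control, pwm_output, offset, pulse=0.15):
    # pulse the motors for a fixed slice of time, then stop and wait for the next detection
    if offset < 0:                                   # face left of center -> turn left
        motor_control.set_pwm(-abs(pwm_output), abs(pwm_output))
    else:                                            # face right of center -> turn right
        motor_control.set_pwm(abs(pwm_output), -abs(pwm_output))
    time.sleep(pulse)
    motor_control.set_pwm(0, 0)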
