
OAK Cameras: Setting Camera Parameters Automatically or Manually


Hardware

The hardware used is as follows:

  • 4× OV9782 camera modules
  • OAK-FFC-4P baseboard

For the hardware wiring, refer to the author's earlier post: OAK Cameras: Hardware-Synchronized Multi-Camera Capture.

Software

The author uses Ubuntu 18.04. First, set up the required Python environment:
1. Download the SDK package:

git clone https://gitee.com/oakchina/depthai.git

2. Install the dependencies:

python3 -m pip install -r depthai/requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple

3. Note: on Linux, the udev rules below must be configured the first time an OAK device is used:

echo 'SUBSYSTEM=="usb", ATTRS{idVendor}=="03e7", MODE="0666"' | sudo tee /etc/udev/rules.d/80-movidius.rules
sudo udevadm control --reload-rules && sudo udevadm trigger
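
After reloading the rules, re-plugging the device and listing it from Python is a quick way to confirm that the OAK is visible over USB (a minimal check, assuming the depthai package from step 2 is installed):

import depthai as dai

# Print the MXID and connection state of every OAK device currently reachable
for dev in dai.Device.getAllAvailableDevices():
    print(dev.getMxId(), dev.state)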

The relevant Python API is described in the official documentation: https://docs.luxonis.com/projects/api/en/latest/references/python/#
Here the author provides an example: the four cameras are synchronized by a hardware trigger, the images are published as ROS messages, and the camera parameters can be set either automatically or manually. Further settings can be added by consulting the API in the official documentation. The complete program is as follows:

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import depthai as dai
import yaml
import cv2
assert cv2.__version__[0] == '4', 'This script requires opencv version >= 4.0.0'
import numpy as np
import glob

NAME_LIST = ['cama', 'camb', 'camc', 'camd']
FPS = 20
AUTOSET = True


def clamp(num, v0, v1):
    return max(v0, min(num, v1))


class CameraArray:
    def __init__(self, fps=20):
        self.FPS = fps
        self.RESOLUTION = dai.ColorCameraProperties.SensorResolution.THE_800_P
        self.cam_list = ['cam_a', 'cam_b', 'cam_c', 'cam_d']
        self.cam_socket_opts = {
            'cam_a': dai.CameraBoardSocket.CAM_A,
            'cam_b': dai.CameraBoardSocket.CAM_B,
            'cam_c': dai.CameraBoardSocket.CAM_C,
            'cam_d': dai.CameraBoardSocket.CAM_D,
        }
        self.pipeline = dai.Pipeline()
        self.cam = {}
        self.xout = {}
        # control stream for setting exposure / white balance
        self.controlIn = self.pipeline.create(dai.node.XLinkIn)
        self.controlIn.setStreamName('control')
        for camera_name in self.cam_list:
            self.cam[camera_name] = self.pipeline.createColorCamera()
            self.cam[camera_name].setResolution(self.RESOLUTION)
            if camera_name == 'cam_a':  # reference camera outputs the trigger
                self.cam[camera_name].initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.OUTPUT)
            else:  # the other cameras follow the trigger
                self.cam[camera_name].initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.INPUT)
            self.cam[camera_name].setBoardSocket(self.cam_socket_opts[camera_name])
            # link the control stream so exposure / white-balance commands reach this camera
            self.controlIn.out.link(self.cam[camera_name].inputControl)
            self.xout[camera_name] = self.pipeline.createXLinkOut()
            self.xout[camera_name].setStreamName(camera_name)
            self.cam[camera_name].isp.link(self.xout[camera_name].input)
            self.cam[camera_name].setFps(self.FPS)
        self.config = dai.Device.Config()
        self.config.board.gpio[6] = dai.BoardConfig.GPIO(dai.BoardConfig.GPIO.OUTPUT, dai.BoardConfig.GPIO.Level.HIGH)
        self.device = dai.Device(self.config)

    def start(self):
        self.device.startPipeline(self.pipeline)
        self.output_queue_dict = {}
        for camera_name in self.cam_list:
            self.output_queue_dict[camera_name] = self.device.getOutputQueue(name=camera_name, maxSize=1, blocking=False)

    def read_data(self):
        output_img = {}
        output_ts = {}
        for camera_name in self.cam_list:
            output_data = self.output_queue_dict[camera_name].tryGet()
            if output_data is not None:
                timestamp = output_data.getTimestampDevice()
                img = output_data.getCvFrame()
                # img = cv2.rotate(img, cv2.ROTATE_180)
                output_img[camera_name] = img
                output_ts[camera_name] = timestamp.total_seconds()
                # print(camera_name, timestamp, timestamp.microseconds, img.shape)
            else:
                # print(camera_name, 'No output')
                output_img[camera_name] = None
                output_ts[camera_name] = None
        return output_img, output_ts


if __name__ == '__main__':
    import rospy
    from sensor_msgs.msg import Image
    from std_msgs.msg import Header

    class CvBridge():
        def __init__(self):
            self.numpy_type_to_cvtype = {'uint8': '8U', 'int8': '8S', 'uint16': '16U',
                                         'int16': '16S', 'int32': '32S', 'float32': '32F',
                                         'float64': '64F'}
            self.numpy_type_to_cvtype.update(dict((v, k) for (k, v) in self.numpy_type_to_cvtype.items()))

        def dtype_with_channels_to_cvtype2(self, dtype, n_channels):
            return '%sC%d' % (self.numpy_type_to_cvtype[dtype.name], n_channels)

        def cv2_to_imgmsg(self, cvim, encoding="passthrough"):
            img_msg = Image()
            img_msg.height = cvim.shape[0]
            img_msg.width = cvim.shape[1]
            if len(cvim.shape) < 3:
                cv_type = self.dtype_with_channels_to_cvtype2(cvim.dtype, 1)
            else:
                cv_type = self.dtype_with_channels_to_cvtype2(cvim.dtype, cvim.shape[2])
            if encoding == "passthrough":
                img_msg.encoding = cv_type
            else:
                img_msg.encoding = encoding
            if cvim.dtype.byteorder == '>':
                img_msg.is_bigendian = True
            img_msg.data = cvim.tobytes()
            img_msg.step = len(img_msg.data) // img_msg.height
            return img_msg

    bridge = CvBridge()
    img_pub_dict = {}
    rospy.init_node('camera_array', anonymous=True)
    rate = rospy.Rate(20)
    for camera_name in ['cam_a', 'cam_b', 'cam_c', 'cam_d']:
        img_pub_dict[camera_name] = rospy.Publisher('/img/' + str(camera_name), Image, queue_size=0)
    img_cnt_dict = {'cam_a': 0, 'cam_b': 0, 'cam_c': 0, 'cam_d': 0}

    camera_array = CameraArray(FPS)
    camera_array.start()
    controlQueue = camera_array.device.getInputQueue(camera_array.controlIn.getStreamName())
    if AUTOSET:
        ctrl = dai.CameraControl()
        ctrl.setAutoExposureEnable()
        ctrl.setAutoWhiteBalanceMode(dai.CameraControl.AutoWhiteBalanceMode.AUTO)
        controlQueue.send(ctrl)
    else:
        # Defaults and limits for manual exposure / white-balance controls
        expTime = 10000
        expMin = 1
        expMax = 33000
        sensIso = 100
        sensMin = 100
        sensMax = 1600
        wbManual = 3500
        expTime = clamp(expTime, expMin, expMax)
        sensIso = clamp(sensIso, sensMin, sensMax)
        print("Setting manual exposure, time:", expTime, "iso:", sensIso)
        ctrl = dai.CameraControl()
        ctrl.setManualExposure(expTime, sensIso)
        ctrl.setManualWhiteBalance(wbManual)
        controlQueue.send(ctrl)

    first_time_cam = None
    first_time_local = None
    while not rospy.is_shutdown():
        output_img, output_ts = camera_array.read_data()
        if first_time_cam is None and output_ts['cam_a'] is not None:
            first_time_cam = output_ts['cam_a']
            first_time_local = rospy.Time.now().to_sec()
        for key in output_img.keys():
            if output_img[key] is None:
                continue
            frame = output_img[key]
            # convert to a ROS Image message and publish it
            img = bridge.cv2_to_imgmsg(frame, encoding="bgr8")
            img.header = Header()
            if first_time_cam is not None:
                ts = output_ts[key] - first_time_cam + first_time_local
                img.header.stamp = rospy.Time.from_sec(ts)
            else:
                img.header.stamp = rospy.Time.now()
            img_pub_dict[key].publish(img)
        rate.sleep()
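
The AUTOSET flag above only selects the mode once at startup. Because the control stream stays open, exposure and white balance can also be changed while the program is running by sending another CameraControl message on the same queue. Below is a minimal sketch that reuses the controlQueue created in the script; the helper names and example values are illustrative, not part of the original program:

import depthai as dai

def set_manual(control_queue, exp_us=2000, iso=400, wb_kelvin=4500):
    # Switch the cameras to a fixed exposure time (microseconds), ISO and white balance
    ctrl = dai.CameraControl()
    ctrl.setManualExposure(exp_us, iso)
    ctrl.setManualWhiteBalance(wb_kelvin)
    control_queue.send(ctrl)

def set_auto(control_queue):
    # Hand exposure and white balance back to the on-device auto algorithms
    ctrl = dai.CameraControl()
    ctrl.setAutoExposureEnable()
    ctrl.setAutoWhiteBalanceMode(dai.CameraControl.AutoWhiteBalanceMode.AUTO)
    control_queue.send(ctrl)

For example, calling set_manual(controlQueue, 8000, 200, 4000) inside the main loop would lock all four cameras to the same exposure and white balance, since the control stream is linked to every camera in the pipeline.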

Copy the program to your machine and run it with python camera.py; use rostopic list to check the topic names, then open RViz to view the image output.
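
A typical session, assuming the script was saved as camera.py and a roscore is already running, looks like:

python camera.py
rostopic list      # should list /img/cam_a, /img/cam_b, /img/cam_c and /img/cam_d
rviz               # add an Image display for each of the topics above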
