HAL stream configuration starts in Camera3Device::configureStreamsLocked(), which first calls configure_streams(…) to perform the configuration.
frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
status_t Camera3Device::configureStreamsLocked() {
......
// Do the HAL configuration
ATRACE_BEGIN("camera3->configure_streams");
res = mHal3Device->ops->configure_streams(mHal3Device, &config);
ATRACE_END();
......
// Finish all stream configuration immediately
if (mInputStream != NULL && mInputStream->isConfiguring()) {
res = mInputStream->finishConfiguration(mHal3Device);
if (res != OK) {
SET_ERR_L("Can't finish configuring input stream %d: %s (%d)",
mInputStream->getId(), strerror(-res), res);
return res;
}
}
for (size_t i = 0; i < mOutputStreams.size(); i++) {
sp<Camera3OutputStreamInterface> outputStream =
mOutputStreams.editValueAt(i);
if (outputStream->isConfiguring()) {
res = outputStream->finishConfiguration(mHal3Device);
if (res != OK) {
SET_ERR_L("Can't finish configuring output stream %d: %s (%d)",
outputStream->getId(), strerror(-res), res);
return res;
}
}
}
// The request thread needs to know, so it avoids the repeat-last-settings protocol across configure_streams() calls
mRequestThread->configurationComplete();
......
return OK;
}
The HAL-layer configure_streams(…) entry point simply casts the device back to QCamera3HardwareInterface and delegates to configureStreams(…).
device/moto/shamu/camera/QCamera2/HAL3/QCamera3HWI.cpp
int QCamera3HardwareInterface::configure_streams(
const struct camera3_device *device,
camera3_stream_configuration_t *stream_list)
{
CDBG("%s: E", __func__);
QCamera3HardwareInterface *hw =
reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("%s: NULL camera device", __func__);
return -ENODEV;
}
int rc = hw->configureStreams(stream_list);
CDBG("%s: X", __func__);
return rc;
}
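For reference, the stream_list received here is a camera3_stream_configuration_t defined in hardware/libhardware/include/hardware/camera3.h. Trimmed to the fields this walkthrough actually touches, it looks roughly like the following sketch (paraphrased, not the complete header):
typedef struct camera3_stream {
    int      stream_type;   /* CAMERA3_STREAM_OUTPUT / INPUT / BIDIRECTIONAL */
    uint32_t width;
    uint32_t height;
    int      format;        /* e.g. HAL_PIXEL_FORMAT_YCbCr_420_888 */
    uint32_t usage;         /* gralloc usage flags, filled in by the HAL for output streams */
    uint32_t max_buffers;   /* filled in by the HAL */
    void     *priv;         /* HAL-private pointer; the QCamera3Channel is stored here */
    /* ... */
} camera3_stream_t;
typedef struct camera3_stream_configuration {
    uint32_t num_streams;
    camera3_stream_t **streams;
} camera3_stream_configuration_t;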
configureStreams(…) resets the HAL camera device's processing pipeline and sets up new input and output streams. We mainly follow the YUV_420_888 output stream format, for which a QCamera3RegularChannel is created.
device/moto/shamu/camera/QCamera2/HAL3/QCamera3HWI.cpp
int QCamera3HardwareInterface::configureStreams(
camera3_stream_configuration_t *streamList)
{
......
// Create the metadata channel and initialize it
mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
mCameraHandle->ops, captureResultCb,
&gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
......
rc = mMetadataChannel->initialize(IS_TYPE_NONE);
......
bool isRawStreamRequested = false;
/* Allocate channel objects for the requested streams */
for (size_t i = 0; i < streamList->num_streams; i++) {
camera3_stream_t *newStream = streamList->streams[i];
uint32_t stream_usage = newStream->usage;
stream_config_info.stream_sizes[stream_config_info.num_streams].width = newStream->width;
stream_config_info.stream_sizes[stream_config_info.num_streams].height = newStream->height;
if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
|| newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL)
&& newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
&& jpegStream){
......
} else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
......
} else {
//for non zsl streams find out the format
switch (newStream->format) {
......
case HAL_PIXEL_FORMAT_YCbCr_420_888:
stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_CALLBACK;
stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_PP_SUPERSET;
break;
......
}
}
if (newStream->priv == NULL) {
// New stream: construct a channel for it
switch (newStream->stream_type) {
......
case CAMERA3_STREAM_OUTPUT:
/* For video encoding streams, set the read/write-rarely flags so the buffers can be allocated uncached */
if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
newStream->usage =
(GRALLOC_USAGE_SW_READ_RARELY |
GRALLOC_USAGE_SW_WRITE_RARELY |
GRALLOC_USAGE_HW_CAMERA_WRITE);
else if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL)
CDBG("%s: ZSL usage flag skipping", __func__);
else
newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
break;
default:
ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
break;
}
if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
QCamera3Channel *channel = NULL;
switch (newStream->format) {
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
case HAL_PIXEL_FORMAT_YCbCr_420_888:
newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
mCameraHandle->ops, captureResultCb,
&gCamCapability[mCameraId]->padding_info,
this,
newStream,
(cam_stream_type_t) stream_config_info.type[stream_config_info.num_streams],
stream_config_info.postprocess_mask[stream_config_info.num_streams]);
if (channel == NULL) {
ALOGE("%s: allocation of channel failed", __func__);
pthread_mutex_unlock(&mMutex);
return -ENOMEM;
}
newStream->priv = channel;
break;
......
}
} else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
} else {
ALOGE("%s: Error, Unknown stream type", __func__);
return -EINVAL;
}
......
} else {
// A channel already exists for this stream; nothing to do here for now
}
/* Do not add an entry to the meta stream info for the input stream, since there is no actual stream associated with it */
if (newStream->stream_type != CAMERA3_STREAM_INPUT)
stream_config_info.num_streams++;
}
......
// settings/parameters don't carry over for a new configureStreams
int32_t hal_version = CAM_HAL_V3;
memset(mParameters, 0, sizeof(metadata_buffer_t));
AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
sizeof(hal_version), &hal_version);
AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
sizeof(cam_stream_size_info_t), &stream_config_info);
int32_t tintless_value = 1;
AddSetParmEntryToBatch(mParameters,CAM_INTF_PARM_TINTLESS,
sizeof(tintless_value), &tintless_value);
mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
/* Initialize mPendingRequestInfo and mPendingBuffersMap */
mPendingRequestsList.clear();
mPendingFrameDropList.clear();
// Initialize/reset the pending buffers list
mPendingBuffersMap.num_buffers = 0;
mPendingBuffersMap.mPendingBufferList.clear();
mPendingReprocessResultList.clear();
mFirstRequest = true;
// Derive the minimum frame duration for this stream configuration
deriveMinFrameDuration();
pthread_mutex_unlock(&mMutex);
return rc;
}
Let's first look at the metadata channel and its initialization.
device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp
QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
mm_camera_ops_t *cam_ops,
channel_cb_routine cb_routine,
cam_padding_info_t *paddingInfo,
uint32_t postprocess_mask,
void *userData) :
QCamera3Channel(cam_handle, cam_ops,
cb_routine, paddingInfo, postprocess_mask, userData),
mMemory(NULL)
{
}
The constructor only assigns members; QCamera3MetadataChannel's constructor delegates to the QCamera3Channel base constructor. Next, the initialization.
device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp
QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
mm_camera_ops_t *cam_ops,
channel_cb_routine cb_routine,
cam_padding_info_t *paddingInfo,
uint32_t postprocess_mask,
void *userData)
{
m_camHandle = cam_handle;
m_camOps = cam_ops;
m_bIsActive = false;
m_handle = 0;
m_numStreams = 0;
memset(mStreams, 0, sizeof(mStreams));
mUserData = userData;
mStreamInfoBuf = NULL;
// Note that cb_routine is assigned to mChannelCB here
mChannelCB = cb_routine;
mPaddingInfo = paddingInfo;
mPostProcMask = postprocess_mask;
char prop[PROPERTY_VALUE_MAX];
property_get("persist.camera.yuv.dump", prop, "0");
mYUVDump = atoi(prop);
mIsType = IS_TYPE_NONE;
}
QCamera3MetadataChannel::initialize(…) does two things:
- Call init(…) for further initialization
- Call QCamera3Channel::addStream(…) to add the stream
device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp
int32_t QCamera3MetadataChannel::initialize(cam_is_type_t isType)
{
ATRACE_CALL();
int32_t rc;
cam_dimension_t streamDim;
if (mMemory || m_numStreams > 0) {
ALOGE("%s: metadata channel already initialized", __func__);
return -EINVAL;
}
rc = init(NULL, NULL);
if (rc < 0) {
ALOGE("%s: init failed", __func__);
return rc;
}
mIsType = isType;
streamDim.width = sizeof(metadata_buffer_t),
streamDim.height = 1;
rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX,
streamDim, MIN_STREAMING_BUFFER_NUM, mPostProcMask, mIsType);
if (rc < 0) {
ALOGE("%s: addStream failed", __func__);
}
return rc;
}
QCamera3Channel::init(…) internally invokes the concrete implementation behind the add_channel function pointer in the camera ops table.
device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp
int32_t QCamera3Channel::init(mm_camera_channel_attr_t *attr,
mm_camera_buf_notify_t dataCB)
{
m_handle = m_camOps->add_channel(m_camHandle,
attr,
dataCB,
this);
if (m_handle == 0) {
ALOGE("%s: Add channel failed", __func__);
return UNKNOWN_ERROR;
}
return NO_ERROR;
}
camera_handle: camera handle
attr : bundle attribute of the channel (if needed)
channel_cb : callback for bundle data notifications
userdata : user data pointer
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
static uint32_t mm_camera_intf_add_channel(uint32_t camera_handle,
mm_camera_channel_attr_t *attr,
mm_camera_buf_notify_t channel_cb,
void *userdata)
{
uint32_t ch_id = 0;
mm_camera_obj_t * my_obj = NULL;
CDBG("%s :E camera_handler = %d", __func__, camera_handle);
pthread_mutex_lock(&g_intf_lock);
my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
if(my_obj) {
pthread_mutex_lock(&my_obj->cam_lock);
pthread_mutex_unlock(&g_intf_lock);
ch_id = mm_camera_add_channel(my_obj, attr, channel_cb, userdata);
} else {
pthread_mutex_unlock(&g_intf_lock);
}
CDBG("%s :X ch_id = %d", __func__, ch_id);
return ch_id;
}
The camera object is looked up from the camera handle. g_cam_ctrl is a global variable of type mm_camera_ctrl_t.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handle)
{
mm_camera_obj_t *cam_obj = NULL;
uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);
if (cam_idx < MM_CAMERA_MAX_NUM_SENSORS &&
(NULL != g_cam_ctrl.cam_obj[cam_idx]) &&
(cam_handle == g_cam_ctrl.cam_obj[cam_idx]->my_hdl)) {
cam_obj = g_cam_ctrl.cam_obj[cam_idx];
}
return cam_obj;
}
cam_obj is an array of pointers to mm_camera_obj_t.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
typedef struct {
int8_t num_cam;
char video_dev_name[MM_CAMERA_MAX_NUM_SENSORS][MM_CAMERA_DEV_NAME_LEN];
mm_camera_obj_t *cam_obj[MM_CAMERA_MAX_NUM_SENSORS];
struct camera_info info[MM_CAMERA_MAX_NUM_SENSORS];
} mm_camera_ctrl_t;
The index is obtained from the handle by taking the low 8 bits of the 32-bit uint32_t value.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
uint8_t mm_camera_util_get_index_by_handler(uint32_t handler)
{
return (handler&0x000000ff);
}
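mm_camera_util_generate_handler(…), which appears later when channel and stream objects are created, is not listed in this walkthrough. Since the low 8 bits are the index, it presumably packs the slot index into the low byte and a global incrementing counter into the upper bits, roughly like the following sketch (an assumption about the idea, not the verified source; the counter name is made up):
static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
static uint32_t g_handler_count = 0;   /* hypothetical counter name */
uint32_t mm_camera_util_generate_handler(uint8_t index)
{
    uint32_t handler = 0;
    pthread_mutex_lock(&g_handler_lock);
    /* upper 24 bits: monotonically increasing count; low 8 bits: array index */
    handler = (++g_handler_count << 8) | index;
    pthread_mutex_unlock(&g_handler_lock);
    return handler;
}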
Once the camera object is obtained, mm_camera_add_channel(…) is called to add the channel.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
mm_camera_channel_attr_t *attr,
mm_camera_buf_notify_t channel_cb,
void *userdata)
{
mm_channel_t *ch_obj = NULL;
uint8_t ch_idx = 0;
uint32_t ch_hdl = 0;
for(ch_idx = 0; ch_idx < MM_CAMERA_CHANNEL_MAX; ch_idx++) {
if (MM_CHANNEL_STATE_NOTUSED == my_obj->ch[ch_idx].state) {
ch_obj = &my_obj->ch[ch_idx];
break;
}
}
if (NULL != ch_obj) {
/* Initialize the channel object */
memset(ch_obj, 0, sizeof(mm_channel_t));
ch_hdl = mm_camera_util_generate_handler(ch_idx);
ch_obj->my_hdl = ch_hdl;
ch_obj->state = MM_CHANNEL_STATE_STOPPED;
ch_obj->cam_obj = my_obj;
pthread_mutex_init(&ch_obj->ch_lock, NULL);
mm_channel_init(ch_obj, attr, channel_cb, userdata);
}
pthread_mutex_unlock(&my_obj->cam_lock);
return ch_hdl;
}
This is where the channel is actually initialized: a data poll thread is launched as part of opening the channel.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
int32_t mm_channel_init(mm_channel_t *my_obj,
mm_camera_channel_attr_t *attr,
mm_camera_buf_notify_t channel_cb,
void *userdata)
{
int32_t rc = 0;
my_obj->bundle.super_buf_notify_cb = channel_cb;
my_obj->bundle.user_data = userdata;
if (NULL != attr) {
my_obj->bundle.superbuf_queue.attr = *attr;
}
CDBG("%s : Launch data poll thread in channel open", __func__);
mm_camera_poll_thread_launch(&my_obj->poll_thread[0],
MM_CAMERA_POLL_TYPE_DATA);
/* Change the state to stopped */
my_obj->state = MM_CHANNEL_STATE_STOPPED;
return rc;
}
The thread is launched and starts executing mm_camera_poll_thread().
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
int32_t mm_camera_poll_thread_launch(mm_camera_poll_thread_t * poll_cb,
mm_camera_poll_thread_type_t poll_type)
{
int32_t rc = 0;
poll_cb->poll_type = poll_type;
poll_cb->pfds[0] = -1;
poll_cb->pfds[1] = -1;
rc = pipe(poll_cb->pfds);
if(rc < 0) {
CDBG_ERROR("%s: pipe open rc=%d\n", __func__, rc);
return -1;
}
poll_cb->timeoutms = -1; /* Infinite seconds */
CDBG("%s: poll_type = %d, read fd = %d, write fd = %d timeout = %d",
__func__, poll_cb->poll_type,
poll_cb->pfds[0], poll_cb->pfds[1],poll_cb->timeoutms);
pthread_mutex_init(&poll_cb->mutex, NULL);
pthread_cond_init(&poll_cb->cond_v, NULL);
/* launch the thread */
pthread_mutex_lock(&poll_cb->mutex);
poll_cb->status = 0;
pthread_create(&poll_cb->pid, NULL, mm_camera_poll_thread, (void *)poll_cb);
if(!poll_cb->status) {
pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
}
pthread_mutex_unlock(&poll_cb->mutex);
CDBG("%s: End",__func__);
return rc;
}
mm_camera_poll_thread() does the following:
- Cast data back to mm_camera_poll_thread_t
- Add the pipe's read fd to the poll list
- Set the poll_cb state to MM_CAMERA_POLL_TASK_STATE_POLL
- Call mm_camera_poll_fn(…)
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
static void *mm_camera_poll_thread(void *data)
{
prctl(PR_SET_NAME, (unsigned long)"mm_cam_poll_th", 0, 0, 0);
mm_camera_poll_thread_t *poll_cb = (mm_camera_poll_thread_t *)data;
/* add pipe read fd into poll first */
poll_cb->poll_fds[poll_cb->num_fds++].fd = poll_cb->pfds[0];
mm_camera_poll_sig_done(poll_cb);
mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_POLL);
return mm_camera_poll_fn(poll_cb);
}
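mm_camera_poll_sig_done(…) is not shown in this walkthrough. From the launch code above, which holds poll_cb->mutex and waits on cond_v until status becomes non-zero, it presumably just sets status and signals the condition variable, roughly as in the sketch below (an assumption based on the handshake, not the verified source):
static void mm_camera_poll_sig_done(mm_camera_poll_thread_t *poll_cb)
{
    pthread_mutex_lock(&poll_cb->mutex);
    /* tell the waiting thread (the launcher, or a pipe-command sender) we are done */
    poll_cb->status = 1;
    pthread_cond_signal(&poll_cb->cond_v);
    pthread_mutex_unlock(&poll_cb->mutex);
}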
The poll thread routine. Since the previous step set poll_cb's state to MM_CAMERA_POLL_TASK_STATE_POLL, the do-while loop keeps polling; its core is the poll(…) call.
POLLIN: there is data to read
POLLRDNORM: there is normal data to read
POLLPRI: there is urgent data to read
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
static void *mm_camera_poll_fn(mm_camera_poll_thread_t *poll_cb)
{
int rc = 0, i;
if (NULL == poll_cb) {
CDBG_ERROR("%s: poll_cb is NULL!\n", __func__);
return NULL;
}
CDBG("%s: poll type = %d, num_fd = %d poll_cb = %p\n",
__func__, poll_cb->poll_type, poll_cb->num_fds,poll_cb);
do {
for(i = 0; i < poll_cb->num_fds; i++) {
poll_cb->poll_fds[i].events = POLLIN|POLLRDNORM|POLLPRI;
}
rc = poll(poll_cb->poll_fds, poll_cb->num_fds, poll_cb->timeoutms);
if(rc > 0) {
if ((poll_cb->poll_fds[0].revents & POLLIN) &&
(poll_cb->poll_fds[0].revents & POLLRDNORM)) {
/* If we have data on the pipe, process only the pipe in this iteration */
CDBG("%s: cmd received on pipe\n", __func__);
mm_camera_poll_proc_pipe(poll_cb);
} else {
for(i=1; i<poll_cb->num_fds; i++) {
/* Check for ctrl events */
if ((poll_cb->poll_type == MM_CAMERA_POLL_TYPE_EVT) &&
(poll_cb->poll_fds[i].revents & POLLPRI)) {
CDBG("%s: mm_camera_evt_notify\n", __func__);
if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
}
}
/* Check for stream data events */
if ((MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) &&
(poll_cb->poll_fds[i].revents & POLLIN) &&
(poll_cb->poll_fds[i].revents & POLLRDNORM)) {
CDBG("%s: mm_stream_data_notify\n", __func__);
if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
}
}
}
}
} else {
/* in error case sleep 10 us and then continue. hard coded here */
usleep(10);
continue;
}
} while ((poll_cb != NULL) && (poll_cb->state == MM_CAMERA_POLL_TASK_STATE_POLL));
return NULL;
}
mm_camera_poll_proc_pipe(…) handles the command data arriving on the pipe.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
static void mm_camera_poll_proc_pipe(mm_camera_poll_thread_t *poll_cb)
{
ssize_t read_len;
int i;
mm_camera_sig_evt_t cmd_evt;
// Read the mm_camera_sig_evt_t command from the pipe
read_len = read(poll_cb->pfds[0], &cmd_evt, sizeof(cmd_evt));
CDBG("%s: read_fd = %d, read_len = %d, expect_len = %d cmd = %d",
__func__, poll_cb->pfds[0], (int)read_len, (int)sizeof(cmd_evt), cmd_evt.cmd);
switch (cmd_evt.cmd) {
case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED:
case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC:
/* Index 0 is always reserved for the pipe read fd */
poll_cb->num_fds = 0;
poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->pfds[0];
poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
poll_cb->num_fds++;
if (MM_CAMERA_POLL_TYPE_EVT == poll_cb->poll_type &&
poll_cb->num_fds < MAX_STREAM_NUM_IN_BUNDLE) {
if (poll_cb->poll_entries[0].fd > 0) {
/* fd is valid, update poll_fds */
poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[0].fd;
poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
poll_cb->num_fds++;
}
} else if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type &&
poll_cb->num_fds <= MAX_STREAM_NUM_IN_BUNDLE) {
for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
if(poll_cb->poll_entries[i].fd > 0) {
/* fd is valid, update poll_fds with this fd */
poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[i].fd;
poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
poll_cb->num_fds++;
} else {
/* fd is invalid, set the entry to -1 so it is not polled */
poll_cb->poll_fds[poll_cb->num_fds].fd = -1;
poll_cb->poll_fds[poll_cb->num_fds].events = 0;
poll_cb->num_fds++;
}
}
}
if (cmd_evt.cmd != MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC)
mm_camera_poll_sig_done(poll_cb);
break;
case MM_CAMERA_PIPE_CMD_COMMIT:
mm_camera_poll_sig_done(poll_cb);
break;
case MM_CAMERA_PIPE_CMD_EXIT:
default:
mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_STOPPED);
mm_camera_poll_sig_done(poll_cb);
break;
}
}
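The commands handled above are written into the other end of the pipe (pfds[1]) by whichever thread wants the poll loop to pick up new stream fds or to stop. That sender side is not shown here; conceptually it looks something like the following sketch (the helper name is illustrative, and a synchronous caller would additionally wait on cond_v until the poll thread calls mm_camera_poll_sig_done):
/* Hypothetical helper: wake the poll loop by writing one of the
 * MM_CAMERA_PIPE_CMD_* values seen above into the pipe. */
static int32_t mm_camera_poll_signal(mm_camera_poll_thread_t *poll_cb,
                                     uint32_t cmd)
{
    mm_camera_sig_evt_t cmd_evt;
    memset(&cmd_evt, 0, sizeof(cmd_evt));
    cmd_evt.cmd = cmd;
    /* poll() in mm_camera_poll_fn() wakes up with POLLIN on pfds[0];
     * mm_camera_poll_proc_pipe() then reads this struct back out. */
    ssize_t len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
    return (len == (ssize_t)sizeof(cmd_evt)) ? 0 : -1;
}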
Now back to QCamera3Channel::addStream(…), which adds the stream.
minStreamBufNum: number of stream buffers required
- Create a QCamera3Stream object
- Initialize the QCamera3Stream
device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp
int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
cam_format_t streamFormat,
cam_dimension_t streamDim,
uint8_t minStreamBufNum,
uint32_t postprocessMask,
cam_is_type_t isType)
{
int32_t rc = NO_ERROR;
if (m_numStreams >= 1) {
ALOGE("%s: Only one stream per channel supported in v3 Hal", __func__);
return BAD_VALUE;
}
if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
ALOGE("%s: stream number (%d) exceeds max limit (%d)",
__func__, m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
return BAD_VALUE;
}
QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
m_handle,
m_camOps,
mPaddingInfo,
this);
if (pStream == NULL) {
ALOGE("%s: No mem for Stream", __func__);
return NO_MEMORY;
}
rc = pStream->init(streamType, streamFormat, streamDim, NULL, minStreamBufNum,
postprocessMask, isType, streamCbRoutine, this);
if (rc == 0) {
mStreams[m_numStreams] = pStream;
m_numStreams++;
} else {
delete pStream;
}
return rc;
}
camHandle : camera handle
chId : channel handle
camOps : pointer to the camera ops table
paddingInfo: pointer to the padding info
The constructor initializes mMemVtbl, the virtual table used for allocating and releasing stream memory.
device/moto/shamu/camera/QCamera2/HAL3/QCamera3Stream.cpp
QCamera3Stream::QCamera3Stream(uint32_t camHandle,
uint32_t chId,
mm_camera_ops_t *camOps,
cam_padding_info_t *paddingInfo,
QCamera3Channel *channel) :
mCamHandle(camHandle),
mChannelHandle(chId),
mHandle(0),
mCamOps(camOps),
mStreamInfo(NULL),
mMemOps(NULL),
mNumBufs(0),
mDataCB(NULL),
mUserData(NULL),
mDataQ(releaseFrameData, this),
mStreamInfoBuf(NULL),
mStreamBufs(NULL),
mBufDefs(NULL),
mChannel(channel)
{
mMemVtbl.user_data = this;
mMemVtbl.get_bufs = get_bufs;
mMemVtbl.put_bufs = put_bufs;
mMemVtbl.invalidate_buf = invalidate_buf;
mMemVtbl.clean_invalidate_buf = clean_invalidate_buf;
memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
memcpy(&mPaddingInfo, paddingInfo, sizeof(cam_padding_info_t));
}
QCamera3Stream::init(…) initializes the stream object.
stream_cb : stream data notify callback; may be NULL if not needed
- Call add_stream through the camera ops table function pointer
- Call map_stream_buf through the camera ops table function pointer
- Call config_stream through the camera ops table function pointer
device/moto/shamu/camera/QCamera2/HAL3/QCamera3Stream.cpp
int32_t QCamera3Stream::init(cam_stream_type_t streamType,
cam_format_t streamFormat,
cam_dimension_t streamDim,
cam_stream_reproc_config_t* reprocess_config,
uint8_t minNumBuffers,
uint32_t postprocess_mask,
cam_is_type_t is_type,
hal3_stream_cb_routine stream_cb,
void *userdata)
{
int32_t rc = OK;
mm_camera_stream_config_t stream_config;
mHandle = mCamOps->add_stream(mCamHandle, mChannelHandle);
if (!mHandle) {
ALOGE("add_stream failed");
rc = UNKNOWN_ERROR;
goto done;
}
// Allocate and map the stream info memory
mStreamInfoBuf = new QCamera3HeapMemory();
if (mStreamInfoBuf == NULL) {
ALOGE("%s: no memory for stream info buf obj", __func__);
rc = -ENOMEM;
goto err1;
}
rc = mStreamInfoBuf->allocate(1, sizeof(cam_stream_info_t), false);
if (rc < 0) {
ALOGE("%s: no memory for stream info", __func__);
rc = -ENOMEM;
goto err2;
}
mStreamInfo =
reinterpret_cast<cam_stream_info_t *>(mStreamInfoBuf->getPtr(0));
memset(mStreamInfo, 0, sizeof(cam_stream_info_t));
mStreamInfo->stream_type = streamType;
mStreamInfo->fmt = streamFormat;
mStreamInfo->dim = streamDim;
mStreamInfo->num_bufs = minNumBuffers;
mStreamInfo->pp_config.feature_mask = postprocess_mask;
ALOGV("%s: stream_type is %d, feature_mask is %d",
__func__, mStreamInfo->stream_type, mStreamInfo->pp_config.feature_mask);
mStreamInfo->is_type = is_type;
rc = mCamOps->map_stream_buf(mCamHandle,
mChannelHandle, mHandle, CAM_MAPPING_BUF_TYPE_STREAM_INFO,
0, -1, mStreamInfoBuf->getFd(0), mStreamInfoBuf->getSize(0));
if (rc < 0) {
ALOGE("Failed to map stream info buffer");
goto err3;
}
mNumBufs = minNumBuffers;
if (reprocess_config != NULL) {
mStreamInfo->reprocess_config = *reprocess_config;
mStreamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
//mStreamInfo->num_of_burst = reprocess_config->offline.num_of_bufs;
mStreamInfo->num_of_burst = 1;
ALOGI("%s: num_of_burst is %d", __func__, mStreamInfo->num_of_burst);
} else {
mStreamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
}
// Configure the stream
stream_config.stream_info = mStreamInfo;
stream_config.mem_vtbl = mMemVtbl;
stream_config.padding_info = mPaddingInfo;
stream_config.userdata = this;
stream_config.stream_cb = dataNotifyCB;
rc = mCamOps->config_stream(mCamHandle,
mChannelHandle, mHandle, &stream_config);
if (rc < 0) {
ALOGE("Failed to config stream, rc = %d", rc);
goto err4;
}
mDataCB = stream_cb;
mUserData = userdata;
return 0;
err4:
mCamOps->unmap_stream_buf(mCamHandle,
mChannelHandle, mHandle, CAM_MAPPING_BUF_TYPE_STREAM_INFO, 0, -1);
err3:
mStreamInfoBuf->deallocate();
err2:
delete mStreamInfoBuf;
mStreamInfoBuf = NULL;
mStreamInfo = NULL;
err1:
mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
mHandle = 0;
mNumBufs = 0;
done:
return rc;
}
Below is the camera ops virtual table.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
static mm_camera_ops_t mm_camera_ops = {
......
.add_stream = mm_camera_intf_add_stream,
......
.config_stream = mm_camera_intf_config_stream,
......
.map_stream_buf = mm_camera_intf_map_stream_buf,
......
};
mm_camera_intf_add_stream(…) adds a stream to the channel: it first obtains the mm_camera_obj_t camera object, then calls mm_camera_add_stream(…) to add the stream.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
static uint32_t mm_camera_intf_add_stream(uint32_t camera_handle,
uint32_t ch_id)
{
uint32_t stream_id = 0;
mm_camera_obj_t * my_obj = NULL;
CDBG("%s : E handle = %d ch_id = %d",
__func__, camera_handle, ch_id);
pthread_mutex_lock(&g_intf_lock);
my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
if(my_obj) {
pthread_mutex_lock(&my_obj->cam_lock);
pthread_mutex_unlock(&g_intf_lock);
stream_id = mm_camera_add_stream(my_obj, ch_id);
} else {
pthread_mutex_unlock(&g_intf_lock);
}
CDBG("%s :X stream_id = %d", __func__, stream_id);
return stream_id;
}
It first obtains the mm_channel_t structure, then calls mm_channel_fsm_fn(…) for further processing.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
uint32_t ch_id)
{
uint32_t s_hdl = 0;
mm_channel_t * ch_obj =
mm_camera_util_get_channel_by_handler(my_obj, ch_id);
if (NULL != ch_obj) {
pthread_mutex_lock(&ch_obj->ch_lock);
pthread_mutex_unlock(&my_obj->cam_lock);
mm_channel_fsm_fn(ch_obj,
MM_CHANNEL_EVT_ADD_STREAM,
NULL,
(void*)&s_hdl);
} else {
pthread_mutex_unlock(&my_obj->cam_lock);
}
return s_hdl;
}
The incoming event is handled differently depending on the channel state. When the channel was initialized earlier, its state was set to MM_CHANNEL_STATE_STOPPED.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
mm_channel_evt_type_t evt,
void * in_val,
void * out_val)
{
int32_t rc = -1;
CDBG("%s : E state = %d", __func__, my_obj->state);
switch (my_obj->state) {
case MM_CHANNEL_STATE_NOTUSED:
rc = mm_channel_fsm_fn_notused(my_obj, evt, in_val, out_val);
break;
case MM_CHANNEL_STATE_STOPPED:
rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
break;
case MM_CHANNEL_STATE_ACTIVE:
rc = mm_channel_fsm_fn_active(my_obj, evt, in_val, out_val);
break;
case MM_CHANNEL_STATE_PAUSED:
rc = mm_channel_fsm_fn_paused(my_obj, evt, in_val, out_val);
break;
default:
CDBG("%s: Not a valid state (%d)", __func__, my_obj->state);
break;
}
/* unlock ch_lock */
pthread_mutex_unlock(&my_obj->ch_lock);
CDBG("%s : X rc = %d", __func__, rc);
return rc;
}
This is the channel finite-state-machine function that handles events in the STOPPED state. The event type passed in here is MM_CHANNEL_EVT_ADD_STREAM, so mm_channel_add_stream(…) is called to add a stream to the channel.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
mm_channel_evt_type_t evt,
void * in_val,
void * out_val)
{
int32_t rc = 0;
CDBG("%s : E evt = %d", __func__, evt);
switch (evt) {
case MM_CHANNEL_EVT_ADD_STREAM:
{
uint32_t s_hdl = 0;
s_hdl = mm_channel_add_stream(my_obj);
*((uint32_t*)out_val) = s_hdl;
rc = 0;
}
break;
case MM_CHANNEL_EVT_DEL_STREAM:
{
uint32_t s_id = (uint32_t)in_val;
rc = mm_channel_del_stream(my_obj, s_id);
}
break;
case MM_CHANNEL_EVT_START:
{
rc = mm_channel_start(my_obj);
/* first stream started in stopped state
* move to active state */
if (0 == rc) {
my_obj->state = MM_CHANNEL_STATE_ACTIVE;
}
}
break;
case MM_CHANNEL_EVT_CONFIG_STREAM:
{
mm_evt_paylod_config_stream_t *payload =
(mm_evt_paylod_config_stream_t *)in_val;
rc = mm_channel_config_stream(my_obj,
payload->stream_id,
payload->config);
}
break;
case MM_CHANNEL_EVT_GET_BUNDLE_INFO:
{
cam_bundle_config_t *payload =
(cam_bundle_config_t *)in_val;
rc = mm_channel_get_bundle_info(my_obj, payload);
}
break;
case MM_CHANNEL_EVT_DELETE:
{
mm_channel_release(my_obj);
rc = 0;
}
break;
case MM_CHANNEL_EVT_SET_STREAM_PARM:
{
mm_evt_paylod_set_get_stream_parms_t *payload =
(mm_evt_paylod_set_get_stream_parms_t *)in_val;
rc = mm_channel_set_stream_parm(my_obj, payload);
}
break;
case MM_CHANNEL_EVT_GET_STREAM_PARM:
{
mm_evt_paylod_set_get_stream_parms_t *payload =
(mm_evt_paylod_set_get_stream_parms_t *)in_val;
rc = mm_channel_get_stream_parm(my_obj, payload);
}
break;
case MM_CHANNEL_EVT_DO_STREAM_ACTION:
{
mm_evt_paylod_do_stream_action_t *payload =
(mm_evt_paylod_do_stream_action_t *)in_val;
rc = mm_channel_do_stream_action(my_obj, payload);
}
break;
case MM_CHANNEL_EVT_MAP_STREAM_BUF:
{
mm_evt_paylod_map_stream_buf_t *payload =
(mm_evt_paylod_map_stream_buf_t *)in_val;
rc = mm_channel_map_stream_buf(my_obj, payload);
}
break;
case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
{
mm_evt_paylod_unmap_stream_buf_t *payload =
(mm_evt_paylod_unmap_stream_buf_t *)in_val;
rc = mm_channel_unmap_stream_buf(my_obj, payload);
}
break;
default:
CDBG_ERROR("%s: invalid state (%d) for evt (%d)",
__func__, my_obj->state, evt);
break;
}
CDBG("%s : E rc = %d", __func__, rc);
return rc;
}
mm_channel_add_stream(…) does the following:
- Find an unused mm_stream_t slot
- Initialize the stream object
- Acquire the stream
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
uint32_t mm_channel_add_stream(mm_channel_t *my_obj)
{
int32_t rc = 0;
uint8_t idx = 0;
uint32_t s_hdl = 0;
mm_stream_t *stream_obj = NULL;
CDBG("%s : E", __func__);
/* Find an unused stream slot */
for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
stream_obj = &my_obj->streams[idx];
break;
}
}
if (NULL == stream_obj) {
CDBG_ERROR("%s: streams reach max, no more stream allowed to add", __func__);
return s_hdl;
}
/* Initialize the stream object */
memset(stream_obj, 0, sizeof(mm_stream_t));
stream_obj->fd = -1;
stream_obj->my_hdl = mm_camera_util_generate_handler(idx);
stream_obj->ch_obj = my_obj;
pthread_mutex_init(&stream_obj->buf_lock, NULL);
pthread_mutex_init(&stream_obj->cb_lock, NULL);
stream_obj->state = MM_STREAM_STATE_INITED;
/* Acquire the stream */
rc = mm_stream_fsm_fn(stream_obj, MM_STREAM_EVT_ACQUIRE, NULL, NULL);
if (0 == rc) {
s_hdl = stream_obj->my_hdl;
} else {
/* error during acquire, de-init */
pthread_mutex_destroy(&stream_obj->buf_lock);
pthread_mutex_destroy(&stream_obj->cb_lock);
memset(stream_obj, 0, sizeof(mm_stream_t));
}
CDBG("%s : stream handle = %d", __func__, s_hdl);
return s_hdl;
}
The stream finite-state-machine entry function. The incoming event is handled differently depending on the stream state; at this point the stream is in MM_STREAM_STATE_INITED.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
mm_stream_evt_type_t evt,
void * in_val,
void * out_val)
{
int32_t rc = -1;
CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
__func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
switch (my_obj->state) {
case MM_STREAM_STATE_NOTUSED:
CDBG("%s: Not handling evt in unused state", __func__);
break;
case MM_STREAM_STATE_INITED:
rc = mm_stream_fsm_inited(my_obj, evt, in_val, out_val);
break;
case MM_STREAM_STATE_ACQUIRED:
rc = mm_stream_fsm_acquired(my_obj, evt, in_val, out_val);
break;
case MM_STREAM_STATE_CFG:
rc = mm_stream_fsm_cfg(my_obj, evt, in_val, out_val);
break;
case MM_STREAM_STATE_BUFFED:
rc = mm_stream_fsm_buffed(my_obj, evt, in_val, out_val);
break;
case MM_STREAM_STATE_REG:
rc = mm_stream_fsm_reg(my_obj, evt, in_val, out_val);
break;
case MM_STREAM_STATE_ACTIVE:
rc = mm_stream_fsm_active(my_obj, evt, in_val, out_val);
break;
default:
CDBG("%s: Not a valid state (%d)", __func__, my_obj->state);
break;
}
CDBG("%s : X rc =%d",__func__,rc);
return rc;
}
This stream FSM function handles events in the INITED state; the event type here is MM_STREAM_EVT_ACQUIRE.
- Call open() to open the device node
- Call mm_stream_set_ext_mode() to set the stream extended mode on the server side
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
int32_t mm_stream_fsm_inited(mm_stream_t *my_obj,
mm_stream_evt_type_t evt,
void * in_val,
void * out_val)
{
int32_t rc = 0;
char dev_name[MM_CAMERA_DEV_NAME_LEN];
CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
__func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
switch(evt) {
case MM_STREAM_EVT_ACQUIRE:
if ((NULL == my_obj->ch_obj) || (NULL == my_obj->ch_obj->cam_obj)) {
CDBG_ERROR("%s: NULL channel or camera obj\n", __func__);
rc = -1;
break;
}
if (NULL == my_obj) {
CDBG_ERROR("%s: NULL camera object\n", __func__);
rc = -1;
break;
}
snprintf(dev_name, sizeof(dev_name), "/dev/%s",
mm_camera_util_get_dev_name(my_obj->ch_obj->cam_obj->my_hdl));
my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK);
if (my_obj->fd < 0) {
CDBG_ERROR("%s: open dev returned %d\n", __func__, my_obj->fd);
rc = -1;
break;
}
CDBG("%s: open dev fd = %d\n", __func__, my_obj->fd);
rc = mm_stream_set_ext_mode(my_obj);
if (0 == rc) {
my_obj->state = MM_STREAM_STATE_ACQUIRED;
} else {
/* failed setting ext_mode
* close fd */
close(my_obj->fd);
my_obj->fd = -1;
break;
}
break;
default:
CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
__func__, my_obj->state, evt, in_val, out_val);
break;
}
return rc;
}
The stream extended mode is set on the server side via a V4L2 ioctl (VIDIOC_S_PARM); the server-side stream id is returned in parm.capture.extendedmode.
device/moto/shamu/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj)
{
int32_t rc = 0;
struct v4l2_streamparm s_parm;
CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
__func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
memset(&s_parm, 0, sizeof(s_parm));
s_parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
rc = ioctl(my_obj->fd, VIDIOC_S_PARM, &s_parm);
CDBG("%s:stream fd=%d, rc=%d, extended_mode=%d\n",
__func__, my_obj->fd, rc, s_parm.parm.capture.extendedmode);
if (rc == 0) {
/* Get the server-side stream id */
my_obj->server_stream_id = s_parm.parm.capture.extendedmode;
}
return rc;
}
Finally, the QCamera3RegularChannel constructor. It simply delegates to the QCamera3Channel base constructor.
cam_handle : camera handle
cam_ops : pointer to the camera ops table
cb_routine : callback routine to the frame aggregator
stream : camera3_stream_t structure
stream_type: channel stream type
device/moto/shamu/camera/QCamera2/HAL3/QCamera3Channel.cpp
QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
mm_camera_ops_t *cam_ops,
channel_cb_routine cb_routine,
cam_padding_info_t *paddingInfo,
void *userData,
camera3_stream_t *stream,
cam_stream_type_t stream_type,
uint32_t postprocess_mask) :
QCamera3Channel(cam_handle, cam_ops, cb_routine,
paddingInfo, postprocess_mask, userData),
mCamera3Stream(stream),
mNumBufs(0),
mStreamType(stream_type)
{
}