Android Camera System Architecture Source Analysis (2): Camera startPreview and setPreviewCallback

The Camera startPreview flow
Continuing from Part 1, the main task of this flow is to understand how the data is read, what format it arrives in, and how it is previewed.
The upper-layer app calls startPreview() in Camera.java; the call chain of startPreview is shown below.
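For context, the app-side code that triggers this chain typically looks like the minimal sketch below (legacy android.hardware.Camera API; the SurfaceHolder is assumed to come from the app's SurfaceView, and error handling is omitted):

// Minimal app-side sketch using the legacy android.hardware.Camera API.
// 'holder' is assumed to be a valid SurfaceHolder from the app's SurfaceView.
private android.hardware.Camera mCamera;

private void startCameraPreview(android.view.SurfaceHolder holder) throws java.io.IOException {
    mCamera = android.hardware.Camera.open();                    // binds to CameraService (Camera::connect)
    android.hardware.Camera.Parameters params = mCamera.getParameters();
    params.setPreviewSize(800, 480);                             // stored in the HAL-side parameters manager
    params.setPreviewFormat(android.graphics.ImageFormat.NV21);  // yuv420sp, the format referenced below
    mCamera.setParameters(params);
    mCamera.setPreviewDisplay(holder);                           // becomes the preview window in CameraClient
    mCamera.startPreview();                                      // the call traced below
}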
      
      
      
      
//Camera.java
public native final void startPreview();
 
//android_hardware_Camera.cpp
static void android_hardware_Camera_startPreview(JNIEnv *env, jobject thiz)
{
// 'camera' is the native Camera object (Camera.cpp) associated with this Java Camera instance
camera->startPreview();
}
 
//Camera.cpp
status_t Camera::startPreview()
{
// mCamera is the ICamera obtained from CameraService::connect() (CameraService.cpp);
// on the service side it is implemented by CameraClient
sp <ICamera> c = mCamera;
return c->startPreview();
}

The startPreview function in CameraClient.cpp:
      
      
      
      
status_t CameraClient::startPreview() {
return startCameraMode(CAMERA_PREVIEW_MODE);
}
 
status_t CameraClient::startCameraMode(camera_mode mode) {
 
switch(mode) {
case CAMERA_PREVIEW_MODE:
if (mSurface == 0 && mPreviewWindow == 0) {
LOG1("mSurface is not set yet.");
// still able to start preview in this case.
}
return startPreviewMode();
case CAMERA_RECORDING_MODE:
//...
}
}
 
//CameraClient.cpp
status_t CameraClient::startPreviewMode() {
 
mHardware->previewEnabled();
mHardware->setPreviewWindow(mPreviewWindow);
result = mHardware->startPreview();
}
 
//CameraHardwareInterface.h
status_t startPreview(){
 
//mDevice is the camera device created by createCam1Device() in Cam1DeviceFactory.cpp;
// here it is a DefaultCam1Device
return mDevice->ops->start_preview(mDevice);
}

The mDevice->ops->start_preview() above is an operation function of the Cam1Device class, the parent of DefaultCam1Device; it is implemented in Cam1DeviceBase::startPreview():
      
      
      
      
Cam1DeviceBase::startPreview()
{
// (1) onStartPreview() is implemented by DefaultCam1Device;
// it creates and initializes the CameraAdapter
onStartPreview();
 
// (2) create and enable the DisplayClient, which renders preview frames to the preview window
enableDisplayClient();
// (3) start the CamClient (the preview/callback path)
mpCamClient->startPreview();
// (4) start the CameraAdapter; (3) and (4) start the data flow set up by (1) and (2)
mpCamAdapter->startPreview();
 
enableMsgType(CAMERA_MSG_PREVIEW_METADATA);
// preview is now enabled
mIsPreviewEnabled = true;
}

mpCamClient->startPreview():
      
      
      
      
CamClient::startPreview()
{
mpPreviewClient->startPreview();
}
 
//PreviewClient.cpp
PreviewClient::startPreview()
{
// query the preview format and size from the parameters manager (e.g. 800*480, yuv420sp)
ms8PrvTgtFmt = mpParamsMgr->getPreviewFormat();
mpParamsMgr->getPreviewSize(&mi4PrvWidth, &mi4PrvHeight);
// allocate the preview callback buffers
initBuffers();
// wake up the preview client thread
return onStateChanged();
}
 
PreviewClient::initBuffers()
{
// create the buffer manager for the preview callback buffers
// (up to eMAX_PREVIEW_BUFFER_NUM buffers allocated via the mRequestMemory callback)
mpImgBufMgr = ImgBufManager::alloc(ms8PrvTgtFmt, mi4PrvWidth,
mi4PrvHeight, eMAX_PREVIEW_BUFFER_NUM,
"PreviewClientCb", mpCamMsgCbInfo->mRequestMemory,
0, 0);
 
// ExtImgProc provides optional extra image processing
// applied to preview callback frames before they are sent up
mpExtImgProc = ExtImgProc::createInstance();
mpExtImgProc->init();
}
 
PreviewClient::onStateChanged()
{ // post eID_WAKEUP to the preview client thread
postCommand(Command(Command::eID_WAKEUP));
}
 
// the thread loop wakes up on eID_WAKEUP and enters onClientThreadLoop()
PreviewClient::threadLoop()
{
Command cmd;
if ( getCommand(cmd) )
{
switch (cmd.eId)
{
case Command::eID_WAKEUP:
case Command::eID_PREVIEW_FRAME:
case Command::eID_POSTVIEW_FRAME:
onClientThreadLoop(cmd);
break;
//
case Command::eID_EXIT:
//...
}
}
 
// main buffer loop: prepare TODO buffers, wait for DONE buffers, perform the callbacks
PreviewClient::onClientThreadLoop(Command const& rCmd)
{
// (1) Get references to pool/queue before starting, so that nothing will be free during operations.
sp<ImgBufManager> pBufMgr = NULL;
sp<IImgBufQueue> pBufQueue = NULL;
{
Mutex::Autolock _l(mModuleMtx);
//
pBufMgr = mpImgBufMgr;
pBufQueue = mpImgBufQueue;
if ( pBufMgr == 0 || pBufQueue == 0 || ! isEnabledState() )
}
 
// (2) stop & clear all buffers so that we won't deque any undefined buffer.
pBufQueue->stopProcessor();
 
// (3) Prepare all TODO buffers (enqueue empty buffers for the processor to fill).
if ( ! prepareAllTodoBuffers(pBufQueue, pBufMgr) )
 
// (4) Start the processor.
if ( ! pBufQueue->startProcessor() )
 
 
// (5) Do until all wanted messages are disabled.
while (1)
{
// (.1) wait for buffers returned from the processor and handle them
waitAndHandleReturnBuffers(pBufQueue);
 
// (.2) break if disabled.
// add isProcessorRunning to make sure the former pauseProcessor
// is successfully processed.
if ( ! isEnabledState() || ! pBufQueue->isProcessorRunning() )
{
MY_LOGI("Preview client disabled");
break;
}
 
// (.3) re-prepare all TODO buffers, if possible,
// since some DONE/CANCEL buffers have returned.
prepareAllTodoBuffers(pBufQueue, pBufMgr);
}
 
// (6) stop.
pBufQueue->pauseProcessor();
pBufQueue->flushProcessor(); // clear "TODO"
pBufQueue->stopProcessor(); // clear "DONE"
//
// (7) Cancel all un-returned buffers.
cancelAllUnreturnBuffers();
}
 
PreviewClient::waitAndHandleReturnBuffers(sp<IImgBufQueue>const& rpBufQueue)
{
Vector<ImgBufQueNode> vQueNode;
 
// (1) deque buffers from processor.
rpBufQueue->dequeProcessor(vQueNode);
 
// (2) handle buffers dequed from processor.
ret = handleReturnBuffers(vQueNode);
}

We have now found where the data starts to be handled. That raises the questions: how is the data processed, how is it displayed, and where does it come from?
Let's first look at how the data is processed.
      
      
      
      
PreviewClient::handleReturnBuffers(Vector<ImgBufQueNode>const& rvQueNode)
{
// (1) determine the index of the latest DONE buffer for callback.
int32_t idxToCallback = 0;
for ( idxToCallback = rvQueNode.size()-1; idxToCallback >= 0; idxToCallback-- )
{
if ( rvQueNode[idxToCallback].isDONE() )
break;
}
 
// Show Time duration.
if ( 0 <= idxToCallback )
{
nsecs_t const _timestamp1 = rvQueNode[idxToCallback].getImgBuf()->getTimestamp();
mProfile_buffer_timestamp.pulse(_timestamp1);
nsecs_t const _msDuration_buffer_timestamp = ::ns2ms(mProfile_buffer_timestamp.getDuration());
mProfile_buffer_timestamp.reset(_timestamp1);
//
mProfile_dequeProcessor.pulse();
nsecs_t const _msDuration_dequeProcessor = ::ns2ms(mProfile_dequeProcessor.getDuration());
mProfile_dequeProcessor.reset();
}
//
// (2) Remove from list and perform callback, one by one.
int32_t const queSize = rvQueNode.size();
for (int32_t i = 0; i < queSize; i++)
{
ImgBufQueNode const& rQueNode = rvQueNode[i];
sp<IImgBuf>const& rpQueImgBuf = rQueNode.getImgBuf(); // ImgBuf in Queue.
sp<ICameraImgBuf> pListImgBuf = NULL;
 
ImgBufNode const ListNode = *mImgBufList.begin(); // Node in List.
pListImgBuf = ListNode.getImgBuf(); // ImgBuf in List.
 
// (.4) Perform callback.
if ( i == idxToCallback ) {
//
if(mpExtImgProc != NULL)
{
if(mpExtImgProc->getImgMask() & ExtImgProc::BufType_PreviewCB)
{
IExtImgProc::ImgInfo img;
//
img.bufType = ExtImgProc::BufType_PreviewCB;
img.format = rpQueImgBuf->getImgFormat();
img.width = rpQueImgBuf->getImgWidth();
img.height = rpQueImgBuf->getImgHeight();
img.stride[0] = rpQueImgBuf->getImgWidthStride(0);
img.stride[1] = rpQueImgBuf->getImgWidthStride(1);
img.stride[2] = rpQueImgBuf->getImgWidthStride(2);
img.virtAddr = (MUINT32)(rpQueImgBuf->getVirAddr());
img.bufSize = rpQueImgBuf->getBufSize();
// run the extra image processing on this frame
mpExtImgProc->doImgProc(img);
}
}
// send the frame up through the data callback
performPreviewCallback(pListImgBuf, rQueNode.getCookieDE());
}
}
}
 
PreviewClient::performPreviewCallback(sp<ICameraImgBuf>const& pCameraImgBuf, int32_t const msgType)
{
if ( pCameraImgBuf != 0 )
{
// [2] Callback
sp<CamMsgCbInfo> pCamMsgCbInfo;
{
pCamMsgCbInfo = mpCamMsgCbInfo;
}
 
// mDataCb was installed from CameraClient::initialize() via
// mHardware->setCallbacks(notifyCallback, dataCallback, dataCallbackTimestamp, (void *)mCameraId);
// so mDataCb points to CameraClient::dataCallback
pCamMsgCbInfo->mDataCb(
0 != msgType ? msgType : (int32_t)CAMERA_MSG_PREVIEW_FRAME,
pCameraImgBuf->get_camera_memory(),
pCameraImgBuf->getBufIndex(),
NULL,
pCamMsgCbInfo->mCbCookie
);
}
}

Finally, the mDataCb() callback is invoked. If the app layer has registered a callback with setPreviewCallback(), this is how the frame data is delivered to the app. Note that the msgType here is CAMERA_MSG_PREVIEW_FRAME.
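On the app side, these frames are received through a preview callback. A minimal sketch using the legacy android.hardware.Camera API is shown below, reusing the mCamera field from the earlier sketch; it assumes the NV21 (yuv420sp) preview format mentioned earlier, so one frame occupies width*height*3/2 bytes. The setPreviewCallbackWithBuffer()/addCallbackBuffer() pair corresponds to the manual-buffer branch (mManualBufferMode) of JNICameraContext::copyAndPost() shown later, while plain setPreviewCallback() lets the JNI layer allocate a fresh byte[] for every frame.

// App-side sketch: registering a preview callback (legacy android.hardware.Camera API).
// Assumes NV21 (yuv420sp) preview frames: one frame = width * height * 3 / 2 bytes.
private void installPreviewCallback() {
    android.hardware.Camera.Parameters p = mCamera.getParameters();
    android.hardware.Camera.Size s = p.getPreviewSize();
    int frameBytes = s.width * s.height * 3 / 2;        // Y plane + interleaved VU plane

    // Manual-buffer mode: the app owns the byte[]; this maps to the mManualBufferMode
    // branch in JNICameraContext::copyAndPost() later in this article.
    mCamera.addCallbackBuffer(new byte[frameBytes]);
    mCamera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, android.hardware.Camera camera) {
            // 'data' holds one NV21 frame delivered as CAMERA_MSG_PREVIEW_FRAME
            // ... process the frame ...
            camera.addCallbackBuffer(data);              // return the buffer for reuse
        }
    });
}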
Cam1DeviceBase::setCallbacks() installs quite a few callbacks that look rather useful and are worth a closer look when time permits. Like startPreview(), it is called from the frameworks layer, in CameraClient::initialize():
      
      
      
      
// Cam1DeviceBase.cpp
Cam1DeviceBase::setCallbacks(
camera_notify_callback notify_cb,
camera_data_callback data_cb,
camera_data_timestamp_callback data_cb_timestamp,
camera_request_memory get_memory,
void*user
)
{
mpCamMsgCbInfo->mCbCookie = user;
mpCamMsgCbInfo->mNotifyCb = notify_cb;
mpCamMsgCbInfo->mDataCb = data_cb;
mpCamMsgCbInfo->mDataCbTimestamp= data_cb_timestamp;
mpCamMsgCbInfo->mRequestMemory = get_memory;
// forward the callback info to the CamClient
if ( mpCamClient != 0 )
{
mpCamClient->setCallbacks(mpCamMsgCbInfo);
}
// ... and to the CameraAdapter
if ( mpCamAdapter != 0 )
{
mpCamAdapter->setCallbacks(mpCamMsgCbInfo);
}
}

Frameworks:
      
      
      
      
status_t CameraClient::initialize(camera_module_t *module) {
mHardware = new CameraHardwareInterface(camera_device_name);
res = mHardware->initialize(&module->common);
 
// this ends up in Cam1DeviceBase::setCallbacks() shown above
mHardware->setCallbacks(notifyCallback,
dataCallback,
dataCallbackTimestamp,
(void *)mCameraId);
}

dataCallback() is the callback function, and it also lives in CameraClient. It receives several msgTypes, which shows that a data callback can carry different kinds of data: RAW, COMPRESSED, and so on.
      
      
      
      
void CameraClient::dataCallback(int32_t msgType,
const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {
 
switch (msgType & ~CAMERA_MSG_PREVIEW_METADATA) {
//!++
#if 1 // defined(MTK_CAMERA_BSP_SUPPORT)
case MTK_CAMERA_MSG_EXT_DATA:
client->handleMtkExtData(dataPtr, metadata);
break;
#endif
//!--
case CAMERA_MSG_PREVIEW_FRAME:
client->handlePreviewData(msgType, dataPtr, metadata);
break;
case CAMERA_MSG_POSTVIEW_FRAME:
client->handlePostview(dataPtr);
break;
case CAMERA_MSG_RAW_IMAGE:
client->handleRawPicture(dataPtr);
break;
case CAMERA_MSG_COMPRESSED_IMAGE:
client->handleCompressedPicture(dataPtr);
break;
default:
client->handleGenericData(msgType, dataPtr, metadata);
break;
}
}

Our msgType is CAMERA_MSG_PREVIEW_FRAME, so handlePreviewData() is called:
      
      
      
      
void CameraClient::handlePreviewData(int32_t msgType,
const sp<IMemory>& mem,
camera_frame_metadata_t *metadata) {
// get the IMemoryHeap (plus offset and size) backing 'mem'
sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
 
// is callback enabled?
if (!(flags & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK)) {
// If the enable bit is off, the copy-out and one-shot bits are ignored
LOG2("frame callback is disabled");
return;
}
 
// hold a strong pointer to the client
sp<ICameraClient> c = mRemoteCallback;
 
// clear callback flags if no client or one-shot mode
if (c == 0 || (mPreviewCallbackFlag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK)) {
LOG2("Disable preview callback");
}
// forward the frame to the remote client, copying it out first if the copy-out flag is set
if (c != 0) {
// Is the received frame copied out or not?
if (flags & CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK) {
LOG2("frame is copied");
copyFrameAndPostCopiedFrame(msgType, c, heap, offset, size, metadata);
} else {
LOG2("frame is forwarded");
c->dataCallback(msgType, mem, metadata);
}
}
}

Finally, through sp<ICameraClient> c = mRemoteCallback, the frame is sent out via mRemoteCallback->dataCallback().
mRemoteCallback is of type ICameraClient, and both ICameraClient and CameraClient have a dataCallback. What is the relationship between the two? If CameraClient were calling its own dataCallback here, it would be an infinite loop.
Searching for mRemoteCallback: it is assigned in CameraClient's constructor, but it took a while to see where its value actually comes from. After being stuck for quite a while, it turns out it is passed in from Camera.cpp's connect(), which is rather interesting. So let's follow Camera.cpp's connect() carefully.
      
      
      
      
sp<Camera> Camera::connect(int cameraId, const String16& clientPackageName,
int clientUid)
{
return CameraBaseT::connect(cameraId, clientPackageName, clientUid);
}
 
//CameraBaseT is a typedef inside CameraBase: typedef CameraBase<TCam> CameraBaseT;
//Camera derives from CameraBase, so this calls CameraBase::connect()
 
template <typename TCam, typename TCamTraits>
sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
const String16& clientPackageName, int clientUid)
{
// TCam is Camera here, so this creates the Camera object used by Camera.cpp
sp<TCam> c = new TCam(cameraId);
//TCamCallbacks is defined in Camera.h as ICameraClient; Camera implements ICameraClient,
//so the Camera object itself can be passed as the callback interface
sp<TCamCallbacks> cl = c;
const sp<ICameraService>& cs = getCameraService();
 
//fnConnectService points to ICameraService::connect() for Camera
TCamConnectService fnConnectService = TCamTraits::fnConnectService;
// this invokes CameraService::connect(), passing 'cl' (the ICameraClient) and receiving the ICamera in c->mCamera
status = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid, /*out*/ c->mCamera);
}
 
//CameraService::connect() creates a CameraClient and returns it as the ICamera device;
//the ICameraClient passed in (the Camera object) becomes its remote callback.
status_t CameraService::connect(const sp<ICameraClient>& cameraClient, int cameraId,
const String16& clientPackageName, int clientUid, /*out*/sp<ICamera>& device) {
 
//the CameraClient created here is returned through 'device' and ends up as Camera's mCamera;
//in CameraClient's (Client's) constructor the ICameraClient 'cameraClient' is saved as mRemoteCallback.
client = new CameraClient(this, cameraClient, clientPackageName, cameraId,
facing, callingPid, clientUid, getpid());
device = client;
return OK;
}

So mRemoteCallback->dataCallback() ends up calling Camera::dataCallback():
      
      
      
      
// callback from camera service when frame or image is ready
void Camera::dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
camera_frame_metadata_t *metadata)
{
sp<CameraListener> listener;
 
listener = mListener;
listener->postData(msgType, dataPtr, metadata);
}

Here mListener is set in android_hardware_Camera_native_setup(), and its type is MtkJNICameraContext, so listener->postData() calls MtkJNICameraContext::postData():
      
      
      
      
void MtkJNICameraContext::postData(int32_t msgType, const sp<IMemory>& dataPtr,
camera_frame_metadata_t *metadata)
{
JNICameraContext::postData(msgType, dataPtr, metadata);
}
 
void JNICameraContext::postData(int32_t msgType, const sp<IMemory>& dataPtr,
camera_frame_metadata_t *metadata)
{
switch (dataMsgType) {
case CAMERA_MSG_VIDEO_FRAME:
//...
case CAMERA_MSG_RAW_IMAGE:
// There is no data.
case 0:
break;
 
default:
ALOGV("dataCallback(%d, %p)", dataMsgType, dataPtr.get());
copyAndPost(env, dataPtr, dataMsgType);
break;
}
// post frame metadata to Java
if (metadata && (msgType & CAMERA_MSG_PREVIEW_METADATA)) {
postMetadata(env, CAMERA_MSG_PREVIEW_METADATA, metadata);
}
}
 
void JNICameraContext::copyAndPost(JNIEnv* env, const sp<IMemory>& dataPtr, int msgType)
{
jbyteArray obj = NULL;
 
// allocate Java byte array and copy data
if (dataPtr != NULL) {
sp<IMemoryHeap> heap = dataPtr->getMemory(&offset, &size);
uint8_t *heapBase = (uint8_t*)heap->base();
 
const jbyte* data = reinterpret_cast<const jbyte*>(heapBase + offset);
 
if (msgType == CAMERA_MSG_RAW_IMAGE) {
obj = getCallbackBuffer(env, &mRawImageCallbackBuffers, size);
} else if (msgType == CAMERA_MSG_PREVIEW_FRAME && mManualBufferMode) {
// manual buffer mode: reuse a byte[] supplied by the app via addCallbackBuffer()
obj = getCallbackBuffer(env, &mCallbackBuffers, size);
} else {
ALOGD("Allocating callback buffer");
obj = env->NewByteArray(size);
}
 
if (obj == NULL) {
ALOGE("Couldn't allocate byte array for JPEG data");
env->ExceptionClear();
} else {
env->SetByteArrayRegion(obj, 0, size, data);
}
} else {
ALOGE("image heap is NULL");
}
 
// post image data to Java:
// fields.post_event refers to Camera.java's postEventFromNative(), invoked here
env->CallStaticVoidMethod(mCameraJClass, fields.post_event,
mCameraJObjectWeak, msgType, 0, 0, obj);
if (obj) {
env->DeleteLocalRef(obj);
}
}
 
 
// Camera.java
private static void postEventFromNative(Object camera_ref,
int what, int arg1, int arg2, Object obj)
{
// 'c' is the Camera instance recovered from the weak reference camera_ref;
// build a Message whose 'what' is the msgType (CAMERA_MSG_PREVIEW_FRAME here)
Message m = c.mEventHandler.obtainMessage(what, arg1, arg2, obj);
c.mEventHandler.sendMessage(m);
}
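The message posted above is dispatched on the app's Looper by Camera.java's EventHandler. A simplified sketch of the branch relevant here is below (the real handleMessage() also handles shutter, focus, zoom and error messages, and the one-shot/with-buffer bookkeeping is more involved):

// Simplified sketch of the CAMERA_MSG_PREVIEW_FRAME branch in Camera.java's EventHandler.
@Override
public void handleMessage(Message msg) {
    switch (msg.what) {
    case CAMERA_MSG_PREVIEW_FRAME:
        PreviewCallback cb = mPreviewCallback;
        if (cb != null) {
            if (mOneShot) {
                mPreviewCallback = null;              // setOneShotPreviewCallback: deliver only once
            }
            // deliver the NV21 byte[] to the callback registered via setPreviewCallback*()
            cb.onPreviewFrame((byte[]) msg.obj, mCamera);
        }
        return;
    default:
        // shutter, focus, zoom, error, ... (omitted)
        return;
    }
}

At this point the frame has reached the app's onPreviewFrame().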

Looking at this, though, we still have not seen everything: the final callback merely invokes the upper layer's callback function and hands the data upward; nothing here displays or processes it. That leaves two questions: where does the data come from, and how is it displayed? See the next part.