Android Camera getSupportedPreviewSizes 실패 문제

27442 단어 Android
문제 현상: 일부 카메라는 지원하는 해상도를 얻을 수 없습니다. 기본 480p만 얻을 수 있습니다.문제 포지셔닝: 응용 프로그램에서는 getParameters () 다음에 해당하는 Parameters를 통해 Parameters 클래스의 방법을 사용해서 얻을 수 있기 때문에 먼저 따라가야 하는 getParameters () 인터페이스는 다음과 같다.
/**
     * Returns the current settings for this Camera service.
     * If modifications are made to the returned Parameters, they must be passed
     * to {@link #setParameters(Camera.Parameters)} to take effect.
     *
     * @see #setParameters(Camera.Parameters)
     */
    public Parameters getParameters() {
        Parameters p = new Parameters();
        // Fetch the flattened "key=value;key=value;..." string from the
        // native layer and parse it into the Parameters map.
        String s = native_getParameters();
        p.unflatten(s);
        return p;
    }

여기서는 실제로 네이티브 getParameters() 메서드를 직접 호출하고 있으므로, 다음 단계를 계속 진행합니다.
// JNI bridge method; resolved to android_hardware_Camera_getParameters()
// in the native layer (shown below).
private native final String native_getParameters();

JNI에 대해 다음을 수행합니다.
// JNI implementation of Camera.native_getParameters().
// Looks up the native Camera object bound to the Java instance and returns
// its flattened parameter string as a jstring; throws a RuntimeException
// when the service hands back an empty string.
// NOTE(review): the pasted source had the template argument stripped
// ("sp camera"); "<Camera>" is restored to match AOSP
// core/jni/android_hardware_Camera.cpp.
static jstring android_hardware_Camera_getParameters(JNIEnv *env, jobject thiz)
{
    ALOGV("getParameters");
    sp<Camera> camera = get_native_camera(env, thiz, NULL);
    if (camera == 0) return 0;

    String8 params8 = camera->getParameters();
    if (params8.isEmpty()) {
        jniThrowRuntimeException(env, "getParameters failed (empty parameters)");
        return 0;
    }
    return env->NewStringUTF(params8.string());
}

JNI에서 다음 층인 frameworks/av/camera/Camera.cpp로 넘어갑니다:
// Client-side wrapper: forwards getParameters() over binder to the camera
// service and returns the flattened parameter string (empty if the remote
// interface is gone).
// NOTE(review): the pasted source dropped the template argument
// ("sp  c = mCamera"); "<ICamera>" is restored. The call now goes through
// the local strong reference `c` — snapshotting mCamera into `c` is only
// race-safe if `c` is also the object that gets called.
String8 Camera::getParameters() const
{
    ALOGV("getParameters");
    String8 params;
    sp<ICamera> c = mCamera;
    if (c != 0) params = c->getParameters();
    return params;
}

frameworks/av/camera/ICamera.cpp로 이동합니다:
 // get preview/capture parameters - key/value pairs
    // Proxy-side (BpCamera) implementation: marshals the interface token,
    // issues a GET_PARAMETERS binder transaction to the service, and reads
    // the flattened parameter string back from the reply parcel.
    String8 getParameters() const
    {
        ALOGV("getParameters");
        Parcel data, reply;
        data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
        remote()->transact(GET_PARAMETERS, data, &reply);
        return reply.readString8();
    }

그리고 여기서 주목해야 할 것은 다음과 같다.
// Service-side binder dispatcher for ICamera. Each case unmarshals its
// arguments from `data`, invokes the matching virtual on the concrete
// implementation (CameraClient), and writes any result into `reply`.
// NOTE(review): the pasted source had every angle-bracket template argument
// stripped (e.g. "sp st = interface_cast(data.readStrongBinder())");
// the template arguments below are restored to match AOSP
// frameworks/av/camera/ICamera.cpp.
status_t BnCamera::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch(code) {
        case DISCONNECT: {
            ALOGV("DISCONNECT");
            CHECK_INTERFACE(ICamera, data, reply);
            disconnect();
            reply->writeNoException();
            return NO_ERROR;
        } break;
        case SET_PREVIEW_TARGET: {
            ALOGV("SET_PREVIEW_TARGET");
            CHECK_INTERFACE(ICamera, data, reply);
            sp<IGraphicBufferProducer> st =
                interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
            reply->writeInt32(setPreviewTarget(st));
            return NO_ERROR;
        } break;
        case SET_PREVIEW_CALLBACK_FLAG: {
            ALOGV("SET_PREVIEW_CALLBACK_TYPE");
            CHECK_INTERFACE(ICamera, data, reply);
            int callback_flag = data.readInt32();
            setPreviewCallbackFlag(callback_flag);
            return NO_ERROR;
        } break;
        case SET_PREVIEW_CALLBACK_TARGET: {
            ALOGV("SET_PREVIEW_CALLBACK_TARGET");
            CHECK_INTERFACE(ICamera, data, reply);
            sp<IGraphicBufferProducer> cp =
                interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
            reply->writeInt32(setPreviewCallbackTarget(cp));
            return NO_ERROR;
        }
        case START_PREVIEW: {
            ALOGV("START_PREVIEW");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(startPreview());
            return NO_ERROR;
        } break;
        case START_RECORDING: {
            ALOGV("START_RECORDING");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(startRecording());
            return NO_ERROR;
        } break;
        case STOP_PREVIEW: {
            ALOGV("STOP_PREVIEW");
            CHECK_INTERFACE(ICamera, data, reply);
            stopPreview();
            return NO_ERROR;
        } break;
        case STOP_RECORDING: {
            ALOGV("STOP_RECORDING");
            CHECK_INTERFACE(ICamera, data, reply);
            stopRecording();
            return NO_ERROR;
        } break;
        case RELEASE_RECORDING_FRAME: {
            ALOGV("RELEASE_RECORDING_FRAME");
            CHECK_INTERFACE(ICamera, data, reply);
            sp<IMemory> mem = interface_cast<IMemory>(data.readStrongBinder());
            releaseRecordingFrame(mem);
            return NO_ERROR;
        } break;
        case STORE_META_DATA_IN_BUFFERS: {
            ALOGV("STORE_META_DATA_IN_BUFFERS");
            CHECK_INTERFACE(ICamera, data, reply);
            bool enabled = data.readInt32();
            reply->writeInt32(storeMetaDataInBuffers(enabled));
            return NO_ERROR;
        } break;
        case PREVIEW_ENABLED: {
            ALOGV("PREVIEW_ENABLED");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(previewEnabled());
            return NO_ERROR;
        } break;
        case RECORDING_ENABLED: {
            ALOGV("RECORDING_ENABLED");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(recordingEnabled());
            return NO_ERROR;
        } break;
        case AUTO_FOCUS: {
            ALOGV("AUTO_FOCUS");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(autoFocus());
            return NO_ERROR;
        } break;
        case CANCEL_AUTO_FOCUS: {
            ALOGV("CANCEL_AUTO_FOCUS");
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(cancelAutoFocus());
            return NO_ERROR;
        } break;
        case TAKE_PICTURE: {
            ALOGV("TAKE_PICTURE");
            CHECK_INTERFACE(ICamera, data, reply);
            int msgType = data.readInt32();
            reply->writeInt32(takePicture(msgType));
            return NO_ERROR;
        } break;
        case SET_PARAMETERS: {
            ALOGV("SET_PARAMETERS");
            CHECK_INTERFACE(ICamera, data, reply);
            String8 params(data.readString8());
            reply->writeInt32(setParameters(params));
            return NO_ERROR;
         } break;
        case GET_PARAMETERS: {
            ALOGV("GET_PARAMETERS");
            CHECK_INTERFACE(ICamera, data, reply);
            // This is the server-side counterpart of BpCamera::getParameters():
            // the flattened string from CameraClient goes into the reply parcel.
            reply->writeString8(getParameters());
            return NO_ERROR;
         } break;
        case SEND_COMMAND: {
            ALOGV("SEND_COMMAND");
            CHECK_INTERFACE(ICamera, data, reply);
            int command = data.readInt32();
            int arg1 = data.readInt32();
            int arg2 = data.readInt32();
            reply->writeInt32(sendCommand(command, arg1, arg2));
            return NO_ERROR;
         } break;
        case CONNECT: {
            CHECK_INTERFACE(ICamera, data, reply);
            sp<ICameraClient> cameraClient =
                interface_cast<ICameraClient>(data.readStrongBinder());
            reply->writeInt32(connect(cameraClient));
            return NO_ERROR;
        } break;
        case LOCK: {
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(lock());
            return NO_ERROR;
        } break;
        case UNLOCK: {
            CHECK_INTERFACE(ICamera, data, reply);
            reply->writeInt32(unlock());
            return NO_ERROR;
        } break;
        default:
            // Unknown code: let the base class handle standard transactions.
            return BBinder::onTransact(code, data, reply, flags);
    }
}

여기까지 오면 서비스 측 구현, 즉 frameworks/av/services/camera/libcameraservice/api1/CameraClient.cpp로 가야 합니다:
// get preview/capture parameters - key/value pairs
// Service-side implementation: flattens the HAL's CameraParameters into the
// "key=value;key=value;..." string that travels back over binder.
String8 CameraClient::getParameters() const {
    Mutex::Autolock lock(mLock);
    // Bail out with an empty string when the pid/hardware check fails.
    if (checkPidAndHardware() != NO_ERROR) return String8();

    String8 params(mHardware->getParameters().flatten());
    LOG1("getParameters (pid %d) (%s)", getCallingPid(), params.string());
    return params;
}

mHardware가 어디로 향하는지 다시 확인해야 합니다. mHardware 초기화 코드는 다음과 같습니다.
// Opens and initializes the vendor camera HAL for this client.
// mHardware (a CameraHardwareInterface) is the bridge through which
// getParameters() and all other calls reach camera_device_ops_t.
status_t CameraClient::initialize(camera_module_t *module) {
    int callingPid = getCallingPid();
    status_t res;

    LOG1("CameraClient::initialize E (pid %d, id %d)", callingPid, mCameraId);

    // Verify ops permissions
    res = startCameraOps();
    if (res != OK) {
        return res;
    }

    // The HAL identifies cameras by their decimal id rendered as a string.
    char camera_device_name[10];
    snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);

    mHardware = new CameraHardwareInterface(camera_device_name);
    res = mHardware->initialize(&module->common);
    if (res != OK) {
        ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
                __FUNCTION__, mCameraId, strerror(-res), res);
        // Drop the half-initialized interface so later calls fail cleanly.
        mHardware.clear();
        return NO_INIT;
    }

    mHardware->setCallbacks(notifyCallback,
            dataCallback,
            dataCallbackTimestamp,
            (void *)mCameraId);

    // Enable zoom, error, focus, and metadata messages by default
    enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS |
                  CAMERA_MSG_PREVIEW_METADATA | CAMERA_MSG_FOCUS_MOVE);

    LOG1("CameraClient::initialize X (pid %d, id %d)", callingPid, mCameraId);
    return OK;
}

mHardware = new CameraHardwareInterface(camera_device_name)를 통해 해당 파일 frameworks/av/services/camera/libcameraservice/device1/CameraHardwareInterface.h로 이동합니다:
/** Return the camera parameters.
 *  Pulls the flattened key/value string from the HAL via
 *  ops->get_parameters(), releases the HAL-owned buffer through
 *  ops->put_parameters() when the HAL provides one (otherwise free()s it),
 *  and unflattens the string into a CameraParameters object. */
    CameraParameters getParameters() const
    {
        ALOGV("%s(%s)", __FUNCTION__, mName.string());
        CameraParameters parms;
        if (mDevice->ops->get_parameters) {
            char *temp = mDevice->ops->get_parameters(mDevice);
            String8 str_parms(temp);
            if (mDevice->ops->put_parameters)
                mDevice->ops->put_parameters(mDevice, temp);
            else
                free(temp);
            parms.unflatten(str_parms);
        }
        return parms;
    }

여기는 mDevice->ops->get_parameters()를 가리키고 있습니다. Camera 시작 절차를 알고 있다면 이 인터페이스는 벤더 CameraModule의 camera_device_open()에서 구체적인 함수로 지정됩니다. 예를 들어 hisilicon의 경로는 다음과 같습니다: device/hisilicon/bigfish/hardware/camera/camerahal/CameraModule.cpp, 해당 코드는 다음과 같습니다.
static int camera_device_open(const hw_module_t* module, const char* name, hw_device_t** device)
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    int ret         = 0;
    int camera_id   = 0;

    camera_device_t* camera_device              = NULL;
    camera_device_ops_t* camera_ops             = NULL;
    android::CameraHal* camera_hal   = NULL;

    if(!name || !device)
    {
        ALOGE("invalid parameter[name=%p, device=%p]", name, device);
        return -EINVAL;
    }

    camera_id  = atoi(name);
    camera_hal = new android::CameraHal(camera_id);
    if(!camera_hal || !camera_hal->mInitOK)
    {
        ALOGE("fail to allocate memory for CameraHal or fail to init CameraHal");
        ret = -ENOMEM;
        goto EXIT;
        //return -EINVAL;
    }

    camera_device   = new camera_device_t;
    camera_ops      = new camera_device_ops_t;
    if(!camera_device || !camera_ops)
    {
        ALOGE("fail to allocate memory for camera_device_t or camera_device_ops_t");
        ret = -ENOMEM;
        goto EXIT;
    }

    memset(camera_device, 0x00, sizeof(*camera_device));
    memset(camera_ops, 0x00, sizeof(*camera_ops));

    camera_device->common.tag                 = HARDWARE_DEVICE_TAG;
    camera_device->common.version             = 0;
    camera_device->common.module              = const_cast(module);
    camera_device->common.close               = camera_device_close;
    camera_device->ops                        = camera_ops;
    camera_device->priv                       = camera_hal;

    camera_ops->set_preview_window            = set_preview_window;
    camera_ops->set_callbacks                 = set_callbacks;
    camera_ops->auto_focus                    = auto_focus;
    camera_ops->enable_msg_type               = enable_msg_type;
    camera_ops->disable_msg_type              = disable_msg_type;
    camera_ops->msg_type_enabled              = msg_type_enabled;
    camera_ops->start_preview                 = start_preview;
    camera_ops->stop_preview                  = stop_preview;
    camera_ops->preview_enabled               = preview_enabled;
    camera_ops->store_meta_data_in_buffers    = store_meta_data_in_buffers;
    camera_ops->start_recording               = start_recording;
    camera_ops->stop_recording                = stop_recording;
    camera_ops->recording_enabled             = recording_enabled;
    camera_ops->release_recording_frame       = release_recording_frame;
    camera_ops->cancel_auto_focus             = cancel_auto_focus;
    camera_ops->take_picture                  = take_picture;
    camera_ops->cancel_picture                = cancel_picture;
    camera_ops->set_parameters                = set_parameters;
    camera_ops->get_parameters                = get_parameters;
    camera_ops->put_parameters                = put_parameters;
    camera_ops->send_command                  = send_command;
    camera_ops->release                       = release;
    camera_ops->dump                          = dump;

    *device                                   = &camera_device->common;

    return 0;

EXIT:
    if(camera_hal)
    {
        delete camera_hal;
        camera_hal = NULL;
    }

    if(camera_device)
    {
        delete camera_device;
        camera_device = NULL;
    }

    if(camera_ops)
    {
        delete camera_ops;
        camera_ops = NULL;
    }

    return -1;
}

여기서 camera_ops->get_parameters = get_parameters가 같은 파일의 get_parameters 함수를 가리킵니다:
/*
 **************************************************************************
 * FunctionName: get_parameters;
 * Description : Returns a heap-allocated, flattened copy of the HAL's
 *               current CameraParameters. The framework releases the
 *               buffer through put_parameters() (or free() — see
 *               CameraHardwareInterface::getParameters());
 * Input       : dev - camera device handle;
 * Output      : NA;
 * ReturnValue : malloc'ed NUL-terminated parameter string, or NULL when
 *               the parameter set is empty or allocation fails;
 * Other       : BUGFIX - the original called sprintf(param, ...)
 *               unconditionally, dereferencing NULL whenever the
 *               flattened string was empty (param was only allocated when
 *               length() > 0);
 **************************************************************************
 */
static char* get_parameters(struct camera_device * dev)
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    android::CameraHal* hal = TO_CAMERA_HAL_INTERFACE(dev);
    android::String8 str_params = hal->getParameters().flatten();

    if(0 == str_params.length())
    {
        /* Nothing to report; callers treat NULL as "no parameters". */
        return NULL;
    }

    char* param = (char*)malloc(str_params.length() + 1);
    if(NULL == param)
    {
        ALOGE("fail to allocate memory for parameter string");
        return NULL;
    }

    /* length() + 1 copies the terminating NUL of String8::string(). */
    memcpy(param, str_params.string(), str_params.length() + 1);
    return param;
}

여기서 CameraHal은 device/hisilicon/bigfish/hardware/camera/camerahal/CameraHal.cpp를 가리킵니다:
/*
 **************************************************************************
 * FunctionName: CameraHal::getParameters;
 * Description : Returns a copy of the cached parameter set under mLock.
 *               The cache is populated by initDefaultParameters() (see
 *               below), so whatever queryCap() discovered at init time is
 *               exactly what the framework sees here;
 * Input       : NA;
 * Output      : NA;
 * ReturnValue : CameraParameters (by value);
 * Other       : NA;
 **************************************************************************
 */
CameraParameters CameraHal::getParameters() const
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    Mutex::Autolock lock(mLock);

    return mExtendedEnv.mParameters;
}

여기에 도착하면 해당 mExtendedEnv.mParameters를 바로 반환합니다. 따라서 값이 할당되는 위치를 찾아야 합니다. 같은 파일 아래에 파라미터 초기화 함수가 다음과 같이 있습니다:
/*
 **************************************************************************
 * FunctionName: CameraHal::initDefaultParameters;
 * Description : Queries the capability manager for everything the sensor
 *               supports, applies the result via setParam()/commitParam(),
 *               and caches it in mExtendedEnv.mParameters — the set later
 *               returned by CameraHal::getParameters();
 * Input       : NA;
 * Output      : NA;
 * ReturnValue : NA;
 * Other       : NA;
 **************************************************************************
 */
void CameraHal::initDefaultParameters()
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    int ret = 0;
    CameraParameters p;

    // Each registered parameter object publishes its capabilities
    // (supported preview sizes, formats, ...) into p.
    mParameterManager->queryCap(p);

    ret = mParameterManager->setParam(p);
    if(ret < 0)
    {
        CAMERA_HAL_LOGE("fail to set parameters");
        return;
    }

    ret = mParameterManager->commitParam();
    if(ret < 0)
    {
        CAMERA_HAL_LOGE("fail to commit parameters");
    }

    // Cache the populated set for getParameters().
    mExtendedEnv.mParameters = p;
}

파라미터 초기화에서 주로 mParameterManager->queryCap(p)를 보아야 한다. 그러면 mParameterManager->queryCap()를 따라가야 한다. CameraHal의 초기화에는 mParameterManager = new CapabilityManager(&mExtendedEnv)가 있기 때문에 CapabilityManager와 관련된 곳을 찾아야 한다. device/hisilicon/bigfish/hardware/camera/capabilitymanager/CapabilityManager.cpp:
/*
 **************************************************************************
 * FunctionName: CapabilityManager::queryCap;
 * Description : Asks every registered parameter object (preview, picture,
 *               focus, ...) to publish its capabilities into p;
 * Input       : p - parameter set to populate;
 * Output      : NA;
 * ReturnValue : 0;
 * Other       : NOTE(review) - the pasted source was garbled here
 *               ("for(i=0; iqueryCapability(p);"); the loop is
 *               reconstructed as the conventional iteration over
 *               mParametersObjs — TODO confirm against the vendor tree;
 **************************************************************************
 */
int CapabilityManager::queryCap ( CameraParameters& p )
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    uint32_t i = 0;
    for(i = 0; i < mParametersObjs.size(); i++)
    {
        mParametersObjs[i]->queryCapability(p);
    }
    return 0;
}

여기에서 mParametersObjs[i]->queryCapability(p)를 호출하고 있음을 발견하기 어렵지 않다. 구체적으로 읽을 때 mParametersObjs가 매우 많은 것을 발견했다. 여기서 우리는 자신의 수요에 따라 분석하면 된다. 여기서 우리가 분석해야 할 CameraParameterPreview 중의 queryCapability는 코드가 다음과 같다.
// Original (pre-patch) capability query, kept verbatim as the "before" code.
// It enumerates discrete frame sizes ONLY for the single hard-coded pixel
// format DEFAULT_CAMERA_PREVIEW_V4L2_FORMAT — when a camera exposes its
// sizes under a different format (e.g. MJPEG vs YUYV), the while loop below
// matches nothing and only the 640x480 default survives. This is exactly
// the failure the article is diagnosing.
int CameraParameterPreview::queryCapability(CameraParameters& p)
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    struct v4l2_frmsizeenum fsize;
    String8 strPreview("");
    char strTmp[64];
    memset(&fsize, 0x00, sizeof(fsize));
    fsize.index         = 0;
    // Only one pixel format is ever queried — the root cause of the bug.
    fsize.pixel_format  = DEFAULT_CAMERA_PREVIEW_V4L2_FORMAT;
    fsize.type          = V4L2_FRMSIZE_TYPE_DISCRETE;

    int order[PREVIEW_SIZE_COUNT];
    int supportnum =0;
    bool haveone=false;

    // Enumerate the driver's discrete frame sizes for that single format and
    // keep the ones that appear in the previewSizes whitelist.
    while(0 == ioctl(mCameraFd, VIDIOC_ENUM_FRAMESIZES, &fsize))
    {
        fsize.index++;
        for(unsigned int i=0; i < PREVIEW_SIZE_COUNT; i++)
        {
            if( (fsize.discrete.width == previewSizes[i].width) &&
                (fsize.discrete.height == previewSizes[i].height) )
            {
                order[supportnum++] = i;
                haveone=true;
            }
        }
    }

    SortingArray(order, supportnum);

    for(int i=0; i < supportnum; i++)
    {
        snprintf(strTmp, sizeof(strTmp), "%dx%d", previewSizes[order[i]].width, previewSizes[order[i]].height);
        strPreview += strTmp;
        if(i < supportnum-1){
            strPreview += ",";
        }
    }

    CAMERA_PARAM_LOGI("support preview size = %s", strPreview.string());
    int maxWidth = 0, maxHeight = 0;
    // NOTE(review): when supportnum == 0 this passes an empty string to
    // getMaxSize(); presumably that leaves maxWidth/maxHeight at 0 — verify.
    getMaxSize(strPreview, &maxWidth, &maxHeight);

    if(haveone)
    {
        CAMERA_PARAM_LOGI("real support preview size = %s", strPreview.string());
        p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, strPreview);

        CAMERA_PARAM_LOGI("support video size = %s", strPreview.string());
        p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, strPreview);

        snprintf(strTmp, sizeof(strTmp), "%dx%d", maxWidth, maxHeight);
        String8 realMaxPreviewSize(strTmp);
        CAMERA_PARAM_LOGI("set preferred-preview-size-for-video = %s", realMaxPreviewSize.string());
        p.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, realMaxPreviewSize);
    }

    struct v4l2_fmtdesc fmt;

    // Separately enumerate the capture formats, but only to flip
    // mPreviewFormat to MJPEG when the driver advertises it — the format is
    // not fed back into the frame-size enumeration above.
    memset(&fmt, 0, sizeof(fmt));
    fmt.index = 0;
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    while (0 == ioctl(mCameraFd, VIDIOC_ENUM_FMT, &fmt))
    {
        fmt.index++;
        CAMERA_PARAM_LOGI("Camera support capture format: { pixelformat = '%c%c%c%c', description = '%s' }",
                fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,
                (fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,
                fmt.description);
        if(strcmp((char *)fmt.description, "MJPEG")== 0 || strcmp((char *)fmt.description, "Motion-JPEG")== 0)
        {
            mPreviewFormat = V4L2_PIX_FMT_MJPEG;
        }
    }

    //here we must init the preview size
    p.setPreviewSize(maxWidth, maxHeight);

    String8 strPreviewFmt("");

    strPreviewFmt += CameraParameters::PIXEL_FORMAT_YUV420SP;
    strPreviewFmt += ",";
    strPreviewFmt += CameraParameters::PIXEL_FORMAT_YUV420P;

    p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, strPreviewFmt);
    p.set(CameraParameters::KEY_PREVIEW_FORMAT, DEFAULT_CAMERA_PREVIEW_FORMAT);
    p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, DEFAULT_CAMERA_VIDEO_FORMAT);

    return 0;
}

만약에 우리가 인쇄를 보면 여기서 인쇄된 해상도가 기본 640x480밖에 없다는 것을 알 수 있다. 코드를 자세히 보면 관건은 0 == ioctl(mCameraFd, VIDIOC_ENUM_FRAMESIZES, &fsize)이다. 여기는 직접 Camera와 통신하여 지원하는 해상도를 얻는데, 우리가 받은 결과는 기본값뿐이었다. 이 문제는 여기서 더 분석할 수 없어서 칩 제조업체인 hisilicon에 도움을 청할 수밖에 없었다. 받은 답장은 Camera의 픽셀 포맷이 다르다는 것이었고, 관련 패치를 제공해 주었다. 수정된 코드는 다음과 같다.
/*
 **************************************************************************
 * FunctionName: CameraParameterPreview::queryCapability (patched);
 * Description : Enumerates the V4L2 pixel formats the driver reports,
 *               enumerates the discrete frame sizes per format (YUYV vs.
 *               MJPEG), and publishes whichever format offers the larger
 *               maximum resolution as the supported preview/video sizes;
 * Input       : p - parameter set to populate;
 * Output      : NA;
 * ReturnValue : 0;
 * Other       : BUGFIX - the separator condition was "i < supportYUVnum"
 *               (always true inside the loop), leaving a trailing "," on
 *               the size list; it now matches the original code's
 *               "i < supportnum-1". A bounds guard also prevents
 *               orderYUV/orderMJPEG overflow when the driver enumerates
 *               more matches than PREVIEW_SIZE_COUNT;
 **************************************************************************
 */
int CameraParameterPreview::queryCapability(CameraParameters& p)
{
    CAMERA_HAL_LOGV("enter %s()", __FUNCTION__);

    struct v4l2_fmtdesc fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.index = 0;
    fmt.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    struct v4l2_frmsizeenum fsize;
    String8 strPreviewYUV("");
    String8 strPreviewMJPEG("");

    char strTmp[64];

    bool haveone = false;
    int orderYUV[PREVIEW_SIZE_COUNT];
    int supportYUVnum = 0;
    int orderMJPEG[PREVIEW_SIZE_COUNT];
    int supportMJPEGnum = 0;
    int maxYUVWidth = 0, maxYUVHeight = 0;
    int maxMJPEGWidth = 0, maxMJPEGHeight = 0;

    // Walk every capture format the driver advertises; for each one,
    // enumerate its discrete frame sizes against the previewSizes whitelist.
    while(0 == ioctl(mCameraFd, VIDIOC_ENUM_FMT, &fmt))
    {
        fmt.index++;
        CAMERA_PARAM_LOGI("CameraParameterPreview support capture format: { pixelformat = '%c%c%c%c', description = '%s' }",
            fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,
            (fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,
            fmt.description);

        // Map the driver's human-readable description to a V4L2 fourcc;
        // anything unrecognized is treated as MJPEG.
        if(strcmp((char *)fmt.description, "MJPEG") == 0 || strcmp((char *)fmt.description, "Motion-JPEG") == 0)
        {
            mPreviewFormat = V4L2_PIX_FMT_MJPEG;
        }
        else if(strcmp((char *)fmt.description, "YUV 4:2:2 (YUYV)") == 0 || strcmp((char *)fmt.description, "YUV 4:2:0 (M420)") == 0)
        {
            mPreviewFormat = V4L2_PIX_FMT_YUYV;
        }
        else
        {
            mPreviewFormat = V4L2_PIX_FMT_MJPEG;
        }

        memset(strTmp, 0x00, sizeof(strTmp));
        memset(&fsize, 0x00, sizeof(fsize));
        fsize.index        = 0;
        fsize.pixel_format = mPreviewFormat;
        fsize.type         = V4L2_FRMSIZE_TYPE_DISCRETE;

        if(mPreviewFormat == V4L2_PIX_FMT_YUYV)
        {
            while(0 == ioctl(mCameraFd, VIDIOC_ENUM_FRAMESIZES, &fsize))
            {
                fsize.index++;
                for(unsigned int i = 0; i < PREVIEW_SIZE_COUNT; i++)
                {
                    if( (fsize.discrete.width == previewSizes[i].width) &&
                        (fsize.discrete.height == previewSizes[i].height) &&
                        (supportYUVnum < PREVIEW_SIZE_COUNT) )  // guard against overflow
                    {
                        orderYUV[supportYUVnum++] = i;
                        haveone = true;
                    }
                }
            }
            SortingArray(orderYUV, supportYUVnum);
            for(int i = 0; i < supportYUVnum; i++)
            {
                snprintf(strTmp, sizeof(strTmp), "%dx%d", previewSizes[orderYUV[i]].width, previewSizes[orderYUV[i]].height);
                strPreviewYUV += strTmp;
                // BUGFIX: was "i < supportYUVnum" — always true, producing a
                // trailing comma after the last size.
                if(i < supportYUVnum - 1){
                    strPreviewYUV += ",";
                }
            }
            CAMERA_PARAM_LOGI("support preview yuv size = %s", strPreviewYUV.string());
            getMaxSize(strPreviewYUV, &maxYUVWidth, &maxYUVHeight);
        }
        else
        {
            while(0 == ioctl(mCameraFd, VIDIOC_ENUM_FRAMESIZES, &fsize))
            {
                fsize.index++;
                for(unsigned int i = 0; i < PREVIEW_SIZE_COUNT; i++)
                {
                    if( (fsize.discrete.width == previewSizes[i].width) &&
                        (fsize.discrete.height == previewSizes[i].height) &&
                        (supportMJPEGnum < PREVIEW_SIZE_COUNT) )  // guard against overflow
                    {
                        orderMJPEG[supportMJPEGnum++] = i;
                        haveone = true;
                    }
                }
            }
            SortingArray(orderMJPEG, supportMJPEGnum);
            for(int i = 0; i < supportMJPEGnum; i++)
            {
                snprintf(strTmp, sizeof(strTmp), "%dx%d", previewSizes[orderMJPEG[i]].width, previewSizes[orderMJPEG[i]].height);
                strPreviewMJPEG += strTmp;
                // BUGFIX: was "i < supportMJPEGnum" — always true (trailing comma).
                if(i < supportMJPEGnum - 1){
                    strPreviewMJPEG += ",";
                }
            }
            CAMERA_PARAM_LOGI("support preview mjepg size = %s", strPreviewMJPEG.string());
            getMaxSize(strPreviewMJPEG, &maxMJPEGWidth, &maxMJPEGHeight);
        }
    }

    if(haveone)
    {
        // Prefer whichever format exposes the larger maximum resolution and
        // publish that format's size list.
        if(maxYUVWidth * maxYUVHeight > maxMJPEGWidth * maxMJPEGHeight)
        {
            mPreviewFormat = V4L2_PIX_FMT_YUYV;
            CAMERA_PARAM_LOGI("real support preview yuv size = %s", strPreviewYUV.string());
            p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, strPreviewYUV);

            CAMERA_PARAM_LOGI("support video yuv size = %s", strPreviewYUV.string());
            p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, strPreviewYUV);

            snprintf(strTmp, sizeof(strTmp), "%dx%d", maxYUVWidth, maxYUVHeight);
            p.setPreviewSize(maxYUVWidth, maxYUVHeight);
        }
        else
        {
            mPreviewFormat = V4L2_PIX_FMT_MJPEG;
            CAMERA_PARAM_LOGI("real support preview mjepg size = %s", strPreviewMJPEG.string());
            p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, strPreviewMJPEG);
            CAMERA_PARAM_LOGI("support video mjepg size = %s", strPreviewMJPEG.string());
            p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, strPreviewMJPEG);
            snprintf(strTmp, sizeof(strTmp), "%dx%d", maxMJPEGWidth, maxMJPEGHeight);
            p.setPreviewSize(maxMJPEGWidth, maxMJPEGHeight);
        }
        // strTmp still holds the winning "WxH" — reuse it for the preferred
        // video preview size.
        String8 realMaxPreviewSize(strTmp);
        CAMERA_PARAM_LOGI("set preferred-preview-size-for-video = %s", realMaxPreviewSize.string());
        p.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, realMaxPreviewSize);
    }

    String8 strPreviewFmt("");

    strPreviewFmt += CameraParameters::PIXEL_FORMAT_YUV420SP;
    strPreviewFmt += ",";
    strPreviewFmt += CameraParameters::PIXEL_FORMAT_YUV420P;

    p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, strPreviewFmt);
    p.set(CameraParameters::KEY_PREVIEW_FORMAT, DEFAULT_CAMERA_PREVIEW_FORMAT);
    p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, DEFAULT_CAMERA_VIDEO_FORMAT);

    return 0;
}

지원되는 포맷을 먼저 얻은 다음에 포맷에 대한 해상도를 얻으면 지원 해상도 문제를 해결할 수 있습니다.

좋은 웹페이지 즐겨찾기