First, Android has a camera server: gingerbread/frameworks/base/services/camera/libcameraservice.
If you do not want its default FakeCamera, you must implement your own libcamera.so, the so-called camera HAL: gingerbread/hardware/mx5x/libcamera.
The HAL is essentially a class that inherits from and implements class CameraHardwareInterface, declared in gingerbread/frameworks/base/include/camera/CameraHardwareInterface.h.
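For reference, the camera service finds the HAL through the C entry points declared at the bottom of CameraHardwareInterface.h. A minimal sketch of what libcamera.so has to export (the single-sensor assumption and the wiring inside are mine, not FSL's code):

extern "C" int HAL_getNumberOfCameras()
{
    return 1; // assumption: one sensor on this board
}

extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo)
{
    cameraInfo->facing = CAMERA_FACING_BACK;
    cameraInfo->orientation = 0;
}

extern "C" sp<CameraHardwareInterface> HAL_openCameraHardware(int cameraId)
{
    return new CameraHal(); // real code would also call Init() and hook up the capture device
}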
CameraHal.h:
class CameraHal : public CameraHardwareInterface {
public:
virtual sp<IMemoryHeap> getPreviewHeap() const;
virtual sp<IMemoryHeap> getRawHeap() const;
virtual void setCallbacks(notify_callback notify_cb,
data_callback data_cb,
data_callback_timestamp data_cb_timestamp,
void* user);
virtual void enableMsgType(int32_t msgType);
virtual void disableMsgType(int32_t msgType);
virtual bool msgTypeEnabled(int32_t msgType);
virtual bool useOverlay() { return true; }
virtual status_t setOverlay(const sp<Overlay> &overlay);
virtual status_t startPreview();
virtual void stopPreview();
virtual bool previewEnabled();
virtual status_t startRecording();
virtual void stopRecording();
virtual bool recordingEnabled();
virtual void releaseRecordingFrame(const sp<IMemory>& mem);
virtual status_t autoFocus();
virtual status_t cancelAutoFocus();
virtual status_t takePicture();
virtual status_t cancelPicture();
virtual status_t dump(int fd, const Vector<String16>& args) const;
virtual status_t setParameters(const CameraParameters& params);
virtual CameraParameters getParameters() const;
virtual status_t sendCommand(int32_t command, int32_t arg1,
int32_t arg2);
virtual void release();
CAMERA_HAL_ERR_RET setCaptureDevice(sp<CaptureDeviceInterface> capturedevice);
CAMERA_HAL_ERR_RET setPostProcessDevice(sp<PostProcessDeviceInterface> postprocessdevice);
CAMERA_HAL_ERR_RET setJpegEncoder(sp<JpegEncoderInterface> jpegencoder);
CAMERA_HAL_ERR_RET Init();
void setPreviewRotate(CAMERA_PREVIEW_ROTATE previewRotate);
CameraHal();
virtual ~CameraHal();
private:
// too many private members to list them all here
};
CameraHal.cpp:
getPreviewHeap: returns the memory where the preview data lives.
The MemoryHeapBase class used here is Android's Binder-based class for memory operations; it is used to create shared memory.
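A minimal sketch of the pattern (the sizes are made up): one MemoryHeapBase is carved into per-frame MemoryBase chunks, and a client that receives them over Binder sees the same pages:

#include <binder/MemoryHeapBase.h>
#include <binder/MemoryBase.h>
#include <string.h>

using namespace android;

void memoryHeapSketch()
{
    const size_t frameSize = 640 * 480 * 3 / 2; // one NV12 frame
    sp<MemoryHeapBase> heap = new MemoryHeapBase(frameSize * 2);
    sp<MemoryBase> frame0 = new MemoryBase(heap, 0, frameSize);
    sp<MemoryBase> frame1 = new MemoryBase(heap, frameSize, frameSize);
    // pointer() resolves to the mapped address of the chunk inside the heap
    memset(frame0->pointer(), 0, frameSize);
}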
sp<IMemoryHeap> CameraHal::getPreviewHeap() const
{
CAMERA_HAL_LOG_FUNC;
return mPreviewHeap;
}
mPreviewHeap is initialized in:
status_t CameraHal::PreparePreviwBuf()
{
CAMERA_HAL_LOG_FUNC;
status_t ret = NO_ERROR;
unsigned int i =0;
//temporarily hard-coded here
if (mTakePicFlag == 0){
if (mCaptureDeviceCfg.fmt == V4L2_PIX_FMT_NV12)
mPreviewFrameSize = mCaptureDeviceCfg.width*mCaptureDeviceCfg.height*3/2; // NV12: 12 bits per pixel
else
mPreviewFrameSize = mCaptureDeviceCfg.width*mCaptureDeviceCfg.height *2; // other formats: assume 16 bits per pixel
//the preview format is supposed to be YUV420SP for now, so the sizes are hard-coded here
mPreviewHeap.clear();
for (i = 0; i< mPreviewHeapBufNum; i++)
mPreviewBuffers[i].clear();
mPreviewHeap = new MemoryHeapBase(mPreviewFrameSize * mPreviewHeapBufNum);
if (mPreviewHeap == NULL)
return NO_MEMORY;
for (i = 0; i < mPreviewHeapBufNum; i++)
mPreviewBuffers[i] = new MemoryBase(mPreviewHeap, mPreviewFrameSize* i, mPreviewFrameSize);
}
/*allocate the buffers for IPU processing*/
if (mPPDeviceNeed || mPPDeviceNeedForPic){
mPmemAllocator = new PmemAllocator(mPPbufNum, mCaptureFrameSize);
if(mPmemAllocator == NULL || mPmemAllocator->err_ret < 0){
return NO_MEMORY;
}
for (i = 0; i < mPPbufNum; i++){
if(mPmemAllocator->allocate(&(mPPbuf[i]),mCaptureFrameSize) < 0){
return NO_MEMORY;
}
}
}
return ret;
}
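PmemAllocator is FSL's own helper and is not shown in this post; the IPU needs physically contiguous buffers, which on Android of this era usually came from the pmem driver. A rough sketch of the underlying idea, assuming a /dev/pmem_adsp style device (the device name is board-specific, and real code would also query the physical address through a PMEM ioctl):

#include <fcntl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <stddef.h>

// Sketch only: map one physically contiguous buffer from a pmem device.
void* pmemAllocSketch(size_t size, int* outFd)
{
    int fd = open("/dev/pmem_adsp", O_RDWR); // device name is board-specific
    if (fd < 0)
        return NULL;
    void* virt = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
    if (virt == MAP_FAILED) {
        close(fd);
        return NULL;
    }
    *outFd = fd;
    return virt;
}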
sp<IMemoryHeap> CameraHal::getRawHeap() const
{
return NULL;
}
void CameraHal::setCallbacks(notify_callback notify_cb,
data_callback data_cb,
data_callback_timestamp data_cb_timestamp,
void* user)
{
Mutex::Autolock lock(mLock);
mNotifyCb = notify_cb;
mDataCb = data_cb;
mDataCbTimestamp = data_cb_timestamp;
mCallbackCookie = user;
}
/*
setCallbacks registers the callback functions. Callbacks let the HAL hand data back to the upper layers while certain operations run. For example, in takePicture,
calling mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, JpegMemBase, mCallbackCookie) tells the application that the compressed JPEG data is ready (the data lives in JpegMemBase).
On the MX53, though, preview is displayed through the overlay, so data_cb is not needed to show the preview (the code below still fires CAMERA_MSG_PREVIEW_FRAME when it is enabled).
*/
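To make the takePicture example above concrete, the encode path ends with a hand-off like the sketch below (deliverJpeg is a hypothetical helper name of mine; CAMERA_MSG_COMPRESSED_IMAGE, mMsgEnabled, mDataCb, and mCallbackCookie are the names this HAL actually uses):

// Sketch: deliver a finished JPEG to the upper layer, gated on the message
// mask maintained by enableMsgType()/disableMsgType().
void CameraHal::deliverJpeg(const sp<MemoryBase>& JpegMemBase)
{
    if ((mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) && mDataCb != NULL)
        mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, JpegMemBase, mCallbackCookie);
}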
virtual void enableMsgType(int32_t msgType);
virtual void disableMsgType(int32_t msgType);
virtual bool msgTypeEnabled(int32_t msgType);
These three functions control whether events such as shutter press, focus completion, or picture completion are reported to the upper layer (that is, whether the corresponding callback fires).
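A typical implementation just keeps a bitmask under the lock; a sketch (mMsgEnabled is the field the preview thread below checks):

void CameraHal::enableMsgType(int32_t msgType)
{
    Mutex::Autolock lock(mLock);
    mMsgEnabled |= msgType;
}

void CameraHal::disableMsgType(int32_t msgType)
{
    Mutex::Autolock lock(mLock);
    mMsgEnabled &= ~msgType;
}

bool CameraHal::msgTypeEnabled(int32_t msgType)
{
    Mutex::Autolock lock(mLock);
    return (mMsgEnabled & msgType) != 0;
}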
virtual bool useOverlay() { return true; }
virtual status_t setOverlay(const sp<Overlay> &overlay);
These enable the overlay. What is an overlay? That is too big a topic for this post; briefly, the camera data is written straight to the framebuffer without going through the Android layers, which is very useful for preview.
This post is a good reference: http://zhougaofeng.ixiezi.com/2009/12/02/android-camera-preview-and-take-picture-with-v4l2/
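setOverlay itself usually does little more than store the handle under the same mutex the preview thread takes before queueBuffer; a sketch (mOverlay and mOverlayMutex are the names used in previewshowFrameThread below, the body is my assumption):

status_t CameraHal::setOverlay(const sp<Overlay> &overlay)
{
    pthread_mutex_lock(&mOverlayMutex);
    mOverlay = overlay; // the framework passes NULL to tell the HAL to drop the overlay
    pthread_mutex_unlock(&mOverlayMutex);
    return NO_ERROR;
}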
virtual status_t startPreview();
As the name says, this starts the preview. The main implementation:
status_t CameraHal::CameraHALStartPreview()
{
CAMERA_HAL_LOG_FUNC;
status_t ret = NO_ERROR;
int max_fps, min_fps;
//basic parameters: size, rotation, and the sensor name (there may be more than one camera)
mParameters.getPreviewSize((int *)&(mCaptureDeviceCfg.width),(int *)&(mCaptureDeviceCfg.height));
mCaptureDeviceCfg.fmt = mPreviewCapturedFormat;
mCaptureDeviceCfg.rotate = (SENSOR_PREVIEW_ROTATE)mPreviewRotate;
mCaptureDeviceCfg.tv.numerator = 1;
mCaptureDevice->GetDevName(mCameraSensorName);
//the camera is either UVC or CSI; I honestly do not know exactly how they differ
if (strstr(mCameraSensorName, "uvc") == NULL){
//according to Google's docs, both getPreviewFrameRate and getPreviewFpsRange should be supported,
//so this is just a workaround: if the app sets a frame rate, follow that frame rate.
if (mParameters.getPreviewFrameRate() >= 15)
mCaptureDeviceCfg.tv.denominator = mParameters.getPreviewFrameRate();
else{
mParameters.getPreviewFpsRange(&min_fps, &max_fps);
CAMERA_HAL_LOG_INFO("###start the capture the fps is %d###", max_fps);
mCaptureDeviceCfg.tv.denominator = max_fps/1000; // fps range values are scaled by 1000
}
}else{
mCaptureDeviceCfg.tv.denominator = 15;
}
mCaptureBufNum = PREVIEW_CAPTURE_BUFFER_NUM;
mPPbufNum = POST_PROCESS_BUFFER_NUM;
mTakePicFlag = false;
// the previous post (on the driver side) covered how the camera uses V4L2
if ((ret = PrepareCaptureDevices()) < 0){
CAMERA_HAL_ERR("PrepareCaptureDevices error ");
return ret;
}
if (mPPDeviceNeed){
// pre-processing? Not clear to me.
if ((ret = PreparePostProssDevice()) < 0){
CAMERA_HAL_ERR("PreparePostProssDevice error");
return ret;
}
}
if ((ret = PreparePreviwBuf()) < 0){
CAMERA_HAL_ERR("PreparePreviwBuf error");
return ret;
}
//register the lock variables, mainly so that overlay and capture do not conflict
if ((ret = PreparePreviwMisc()) < 0){
CAMERA_HAL_ERR("PreparePreviwMisc error");
return ret;
}
if ((ret = CameraHALPreviewStart()) < 0){
CAMERA_HAL_ERR("CameraHALPreviewStart error");
return ret;
}
return ret;
}
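For context, the sizes and frame rates read back above are whatever the client pushed in through setParameters; from the caller's side that looks roughly like this (stock CameraParameters API, values made up):

CameraParameters params = hal->getParameters();
params.setPreviewSize(640, 480);   // later read back by getPreviewSize()
params.setPreviewFrameRate(30);    // later read back by getPreviewFrameRate()
hal->setParameters(params);
hal->startPreview();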
status_t CameraHal ::CameraHALPreviewStart()
{
CAMERA_HAL_LOG_FUNC;
status_t ret = NO_ERROR;
if (mCaptureDevice->DevStart()<0) //this is just ioctl(mCameraDevice, VIDIOC_STREAMON, &type); the FSL MX53 supports having overlay and capture streaming at the same time
return INVALID_OPERATION;
mCaptureFrameThread = new CaptureFrameThread(this);
mPreviewShowFrameThread = new PreviewShowFrameThread(this);
mEncodeFrameThread = new EncodeFrameThread(this);
if(mPPDeviceNeed){
mPostProcessThread = new PostProcessThread(this);
if (mPostProcessThread == NULL)
return UNKNOWN_ERROR;
}
if (mCaptureFrameThread == NULL ||
mPreviewShowFrameThread == NULL ||
mEncodeFrameThread == NULL){
return UNKNOWN_ERROR;
}
mPreviewRunning = true;
return ret;
}
//The function above spawns four threads. I could not work out what postprocessthread does (autofocus?); below is a brief walk through the capture and show threads.
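These thread objects are, as far as I can tell, thin wrappers around android::Thread whose threadLoop() keeps calling the matching member function; a sketch of the likely shape (only the names come from the code above, the class body is my assumption):

class CaptureFrameThread : public Thread {
public:
    CaptureFrameThread(CameraHal* hal) : Thread(false), mHal(hal) {}
private:
    // returning true means "call threadLoop again"
    virtual bool threadLoop() {
        return mHal->captureframeThread() == NO_ERROR;
    }
    CameraHal* mHal;
};
// after construction the HAL would call run() to actually start the loop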
int CameraHal ::captureframeThread()
{
CAMERA_HAL_LOG_FUNC;
unsigned int DeqBufIdx = 0;
struct timespec ts;
do {
clock_gettime(CLOCK_REALTIME, &ts);
ts.tv_nsec +=100000; // 100 us
} while (mPreviewRunning && !error_status &&(sem_timedwait(&avab_dequeue_frame, &ts) != 0) );
// several threads work together, so of course synchronization is needed; avab_dequeue_frame only becomes available after DevQueue has queued a buffer back
if(!mPreviewRunning || error_status)
return UNKNOWN_ERROR;
mCaptureDevice->DevDequeue(&DeqBufIdx); // this captures one frame of data into a buffer for later use
nCameraBuffersQueued--;
buffer_index_maps[dequeue_head]=DeqBufIdx; //PrepareCaptureDevices requests several buffers; remember this buffer's index here
dequeue_head ++;
dequeue_head %= mCaptureBufNum;
// the rest is just synchronization, nothing special
if(!mPPDeviceNeed){
sem_post(&avab_show_frame);
sem_post(&avab_enc_frame);
}else{
sem_post(&avab_pp_in_frame);
}
return NO_ERROR;
}
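DevDequeue presumably wraps the standard V4L2 dequeue ioctl mentioned in the previous post; a minimal sketch under that assumption (mmap streaming I/O):

#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <string.h>

// Sketch: dequeue one filled capture buffer and report its index.
int devDequeueSketch(int fd, unsigned int* outIndex)
{
    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0)
        return -1;
    *outIndex = buf.index;
    return 0;
}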
int CameraHal ::previewshowFrameThread()
{
CAMERA_HAL_LOG_FUNC;
struct timespec ts;
int display_index = 0;
DMA_BUFFER InBuf;
int queue_back_index = 0;
do {
clock_gettime(CLOCK_REALTIME, &ts);
ts.tv_nsec +=100000; // 100 us
} while (!error_status && mPreviewRunning &&(sem_timedwait(&avab_show_frame, &ts) != 0) );
if ((mPreviewRunning == 0) || error_status)
return UNKNOWN_ERROR;
if (!mPPDeviceNeed){
display_index = buffer_index_maps[display_head];
InBuf = mCaptureBuffers[display_index];
display_head ++;
display_head %= mCaptureBufNum;
}else{
display_index = display_head;
InBuf = mPPbuf[display_index];
display_head ++;
display_head %= mPPbufNum;
}
if (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
convertNV12toYUV420SP((uint8_t*)(InBuf.virt_start),
(uint8_t*)(mPreviewBuffers[preview_heap_buf_head]->pointer()),mCaptureDeviceCfg.width, mCaptureDeviceCfg.height);
mDataCb(CAMERA_MSG_PREVIEW_FRAME, mPreviewBuffers[preview_heap_buf_head], mCallbackCookie);
preview_heap_buf_head ++;
preview_heap_buf_head %= mPreviewHeapBufNum;
}
pthread_mutex_lock(&mOverlayMutex);
if (mOverlay != 0) {
// InBuf came from the capture side via QueueBuffer. What happens to it after it goes into the overlay I honestly did not figure out; FSL's code here is quite involved.
// What I do know: at init time the overlay allocates shared memory (ctx->data_shared), and the buffer's address is pushed in via data_shared->queued_bufs[data_shared->queued_tail] = phy_addr.
// One puzzling thing is that only queueBuffer is ever called, never dequeueBuffer; the overlay has a dedicated thread that consumes the data after queueBuffer.
if (mOverlay->queueBuffer((overlay_buffer_t)InBuf.phy_offset) < 0){
CAMERA_HAL_ERR("queueBuffer failed. May be bcos stream was not turned on yet.");
}
if (is_first_buffer) {
is_first_buffer = 0;
last_display_index = display_index;
pthread_mutex_unlock(&mOverlayMutex);
goto show_out;
}
}
if (!mPPDeviceNeed){
if (mOverlay != 0){
queue_back_index = last_display_index;
}else{
queue_back_index = display_index;
}
}
pthread_mutex_unlock(&mOverlayMutex);
do {
clock_gettime(CLOCK_REALTIME, &ts);
ts.tv_nsec +=200000; // 200 us
} while ((sem_timedwait(&avab_enc_frame_finish, &ts) != 0)&&!error_status && mPreviewRunning );
if (!mPPDeviceNeed){
//queue the v4l2 buf back
if(mCaptureDevice->DevQueue(queue_back_index) <0){
CAMERA_HAL_ERR("The Capture device queue buf error !!!!");
return INVALID_OPERATION;
}
last_display_index = display_index;
nCameraBuffersQueued++;
sem_post(&avab_dequeue_frame);
}else{
sem_post(&avab_pp_out_frame);
}
show_out:
return NO_ERROR;
}
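convertNV12toYUV420SP itself is not shown in the post. Since Android's YUV420SP preview format is NV21, the conversion presumably copies the Y plane and swaps each U/V byte pair in the interleaved chroma plane; a sketch under that assumption:

#include <stdint.h>
#include <string.h>

// Sketch: NV12 (Y plane + interleaved UV) -> NV21 (Y plane + interleaved VU).
static void convertNV12toNV21Sketch(const uint8_t* src, uint8_t* dst,
                                    int width, int height)
{
    const int ySize = width * height;
    memcpy(dst, src, ySize);                 // the Y plane is identical
    const uint8_t* srcUV = src + ySize;
    uint8_t* dstVU = dst + ySize;
    for (int i = 0; i < ySize / 2; i += 2) { // chroma plane is half the Y size
        dstVU[i]     = srcUV[i + 1];         // V first
        dstVU[i + 1] = srcUV[i];             // then U
    }
}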