tenfourfox/dom/camera/update.patch

diff --git a/GonkCameraListener.h b/GonkCameraListener.h
index 67eeef3..243264c 100644
--- a/GonkCameraListener.h
+++ b/GonkCameraListener.h
@@ -14,49 +14,16 @@
* limitations under the License.
*/
-#ifndef ANDROID_HARDWARE_CAMERA_H
-#define ANDROID_HARDWARE_CAMERA_H
+#ifndef GONK_CAMERA_LISTENER_H
+#define GONK_CAMERA_LISTENER_H
#include <utils/Timers.h>
-#include <gui/ISurfaceTexture.h>
-#include <system/camera.h>
-#include <camera/ICameraClient.h>
-#include <camera/ICameraRecordingProxy.h>
-#include <camera/ICameraRecordingProxyListener.h>
+#include "libcameraservice/CameraHardwareInterface.h"
namespace android {
-struct CameraInfo {
- /**
- * The direction that the camera faces to. It should be CAMERA_FACING_BACK
- * or CAMERA_FACING_FRONT.
- */
- int facing;
-
- /**
- * The orientation of the camera image. The value is the angle that the
- * camera image needs to be rotated clockwise so it shows correctly on the
- * display in its natural orientation. It should be 0, 90, 180, or 270.
- *
- * For example, suppose a device has a naturally tall screen. The
- * back-facing camera sensor is mounted in landscape. You are looking at
- * the screen. If the top side of the camera sensor is aligned with the
- * right edge of the screen in natural orientation, the value should be
- * 90. If the top side of a front-facing camera sensor is aligned with the
- * right of the screen, the value should be 270.
- */
- int orientation;
- int mode;
-};
-
-class ICameraService;
-class ICamera;
-class Surface;
-class Mutex;
-class String8;
-
// ref-counted object for callbacks
-class CameraListener: virtual public RefBase
+class GonkCameraListener: virtual public RefBase
{
public:
virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2) = 0;
@@ -65,133 +32,6 @@ public:
virtual void postDataTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) = 0;
};
-class Camera : public BnCameraClient, public IBinder::DeathRecipient
-{
-public:
- // construct a camera client from an existing remote
- static sp<Camera> create(const sp<ICamera>& camera);
- static int32_t getNumberOfCameras();
- static status_t getCameraInfo(int cameraId,
- struct CameraInfo* cameraInfo);
- static sp<Camera> connect(int cameraId);
- virtual ~Camera();
- void init();
-
- status_t reconnect();
- void disconnect();
- status_t lock();
- status_t unlock();
-
- status_t getStatus() { return mStatus; }
-
- // pass the buffered Surface to the camera service
- status_t setPreviewDisplay(const sp<Surface>& surface);
-
- // pass the buffered ISurfaceTexture to the camera service
- status_t setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture);
-
- // start preview mode, must call setPreviewDisplay first
- status_t startPreview();
-
- // stop preview mode
- void stopPreview();
-
- // get preview state
- bool previewEnabled();
-
- // start recording mode, must call setPreviewDisplay first
- status_t startRecording();
-
- // stop recording mode
- void stopRecording();
-
- // get recording state
- bool recordingEnabled();
-
- // release a recording frame
- void releaseRecordingFrame(const sp<IMemory>& mem);
-
- // autoFocus - status returned from callback
- status_t autoFocus();
-
- // cancel auto focus
- status_t cancelAutoFocus();
-
- // take a picture - picture returned from callback
- status_t takePicture(int msgType);
-
- // set preview/capture parameters - key/value pairs
- status_t setParameters(const String8& params);
-
- // get preview/capture parameters - key/value pairs
- String8 getParameters() const;
-
- // send command to camera driver
- status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
-
- // tell camera hal to store meta data or real YUV in video buffers.
- status_t storeMetaDataInBuffers(bool enabled);
-
- void setListener(const sp<CameraListener>& listener);
- void setRecordingProxyListener(const sp<ICameraRecordingProxyListener>& listener);
- void setPreviewCallbackFlags(int preview_callback_flag);
-
- sp<ICameraRecordingProxy> getRecordingProxy();
-
- // ICameraClient interface
- virtual void notifyCallback(int32_t msgType, int32_t ext, int32_t ext2);
- virtual void dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
- camera_frame_metadata_t *metadata);
- virtual void dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
-
- sp<ICamera> remote();
-
- class RecordingProxy : public BnCameraRecordingProxy
- {
- public:
- RecordingProxy(const sp<Camera>& camera);
-
- // ICameraRecordingProxy interface
- virtual status_t startRecording(const sp<ICameraRecordingProxyListener>& listener);
- virtual void stopRecording();
- virtual void releaseRecordingFrame(const sp<IMemory>& mem);
-
- private:
- sp<Camera> mCamera;
- };
-
-private:
- Camera();
- Camera(const Camera&);
- Camera& operator=(const Camera);
- virtual void binderDied(const wp<IBinder>& who);
-
- class DeathNotifier: public IBinder::DeathRecipient
- {
- public:
- DeathNotifier() {
- }
-
- virtual void binderDied(const wp<IBinder>& who);
- };
-
- static sp<DeathNotifier> mDeathNotifier;
-
- // helper function to obtain camera service handle
- static const sp<ICameraService>& getCameraService();
-
- sp<ICamera> mCamera;
- status_t mStatus;
-
- sp<CameraListener> mListener;
- sp<ICameraRecordingProxyListener> mRecordingProxyListener;
-
- friend class DeathNotifier;
-
- static Mutex mLock;
- static sp<ICameraService> mCameraService;
-};
-
}; // namespace android
#endif
diff --git a/GonkCameraSource.cpp b/GonkCameraSource.cpp
index af6b340..9dba596 100644
--- a/GonkCameraSource.cpp
+++ b/GonkCameraSource.cpp
@@ -14,29 +14,34 @@
* limitations under the License.
*/
-//#define LOG_NDEBUG 0
-#define LOG_TAG "CameraSource"
-#include <utils/Log.h>
+#include <base/basictypes.h>
+#include "nsDebug.h"
+#define DOM_CAMERA_LOG_LEVEL 3
+#include "CameraCommon.h"
+#define LOGD DOM_CAMERA_LOGA
+#define LOGV DOM_CAMERA_LOGI
+#define LOGI DOM_CAMERA_LOGI
+#define LOGW DOM_CAMERA_LOGW
+#define LOGE DOM_CAMERA_LOGE
#include <OMX_Component.h>
-#include <binder/IPCThreadState.h>
-#include <media/stagefright/CameraSource.h>
+#include "GonkCameraSource.h"
+#include "GonkCameraListener.h"
+#include "GonkCameraHwMgr.h"
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
-#include <camera/Camera.h>
-#include <camera/CameraParameters.h>
-#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>
+using namespace mozilla;
namespace android {
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
-struct CameraSourceListener : public CameraListener {
- CameraSourceListener(const sp<CameraSource> &source);
+struct GonkCameraSourceListener : public GonkCameraListener {
+ GonkCameraSourceListener(const sp<GonkCameraSource> &source);
virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
virtual void postData(int32_t msgType, const sp<IMemory> &dataPtr,
@@ -46,41 +51,41 @@ struct CameraSourceListener : public CameraListener {
nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr);
protected:
- virtual ~CameraSourceListener();
+ virtual ~GonkCameraSourceListener();
private:
- wp<CameraSource> mSource;
+ wp<GonkCameraSource> mSource;
- CameraSourceListener(const CameraSourceListener &);
- CameraSourceListener &operator=(const CameraSourceListener &);
+ GonkCameraSourceListener(const GonkCameraSourceListener &);
+ GonkCameraSourceListener &operator=(const GonkCameraSourceListener &);
};
-CameraSourceListener::CameraSourceListener(const sp<CameraSource> &source)
+GonkCameraSourceListener::GonkCameraSourceListener(const sp<GonkCameraSource> &source)
: mSource(source) {
}
-CameraSourceListener::~CameraSourceListener() {
+GonkCameraSourceListener::~GonkCameraSourceListener() {
}
-void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
+void GonkCameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
LOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
-void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
+void GonkCameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
camera_frame_metadata_t *metadata) {
LOGV("postData(%d, ptr:%p, size:%d)",
msgType, dataPtr->pointer(), dataPtr->size());
- sp<CameraSource> source = mSource.promote();
+ sp<GonkCameraSource> source = mSource.promote();
if (source.get() != NULL) {
source->dataCallback(msgType, dataPtr);
}
}
-void CameraSourceListener::postDataTimestamp(
+void GonkCameraSourceListener::postDataTimestamp(
nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
- sp<CameraSource> source = mSource.promote();
+ sp<GonkCameraSource> source = mSource.promote();
if (source.get() != NULL) {
source->dataCallbackTimestamp(timestamp/1000, msgType, dataPtr);
}
@@ -114,48 +119,30 @@ static int32_t getColorFormat(const char* colorFormat) {
}
LOGE("Uknown color format (%s), please add it to "
- "CameraSource::getColorFormat", colorFormat);
+ "GonkCameraSource::getColorFormat", colorFormat);
CHECK_EQ(0, "Unknown color format");
}
-CameraSource *CameraSource::Create() {
- Size size;
- size.width = -1;
- size.height = -1;
-
- sp<ICamera> camera;
- return new CameraSource(camera, NULL, 0, size, -1, NULL, false);
-}
-
-// static
-CameraSource *CameraSource::CreateFromCamera(
- const sp<ICamera>& camera,
- const sp<ICameraRecordingProxy>& proxy,
- int32_t cameraId,
+GonkCameraSource *GonkCameraSource::Create(
+ int32_t cameraHandle,
Size videoSize,
int32_t frameRate,
- const sp<Surface>& surface,
bool storeMetaDataInVideoBuffers) {
- CameraSource *source = new CameraSource(camera, proxy, cameraId,
- videoSize, frameRate, surface,
+ GonkCameraSource *source = new GonkCameraSource(cameraHandle,
+ videoSize, frameRate,
storeMetaDataInVideoBuffers);
return source;
}
-CameraSource::CameraSource(
- const sp<ICamera>& camera,
- const sp<ICameraRecordingProxy>& proxy,
- int32_t cameraId,
+GonkCameraSource::GonkCameraSource(
+ int32_t cameraHandle,
Size videoSize,
int32_t frameRate,
- const sp<Surface>& surface,
bool storeMetaDataInVideoBuffers)
: mCameraFlags(0),
mVideoFrameRate(-1),
- mCamera(0),
- mSurface(surface),
mNumFramesReceived(0),
mLastFrameTimestampUs(0),
mStarted(false),
@@ -169,43 +156,19 @@ CameraSource::CameraSource(
mVideoSize.width = -1;
mVideoSize.height = -1;
- mInitCheck = init(camera, proxy, cameraId,
+ mCameraHandle = cameraHandle;
+
+ mInitCheck = init(
videoSize, frameRate,
storeMetaDataInVideoBuffers);
if (mInitCheck != OK) releaseCamera();
}
-status_t CameraSource::initCheck() const {
+status_t GonkCameraSource::initCheck() const {
return mInitCheck;
}
-status_t CameraSource::isCameraAvailable(
- const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
- int32_t cameraId) {
-
- if (camera == 0) {
- mCamera = Camera::connect(cameraId);
- if (mCamera == 0) return -EBUSY;
- mCameraFlags &= ~FLAGS_HOT_CAMERA;
- } else {
- // We get the proxy from Camera, not ICamera. We need to get the proxy
- // to the remote Camera owned by the application. Here mCamera is a
- // local Camera object created by us. We cannot use the proxy from
- // mCamera here.
- mCamera = Camera::create(camera);
- if (mCamera == 0) return -EBUSY;
- mCameraRecordingProxy = proxy;
- mCameraFlags |= FLAGS_HOT_CAMERA;
- mDeathNotifier = new DeathNotifier();
- // isBinderAlive needs linkToDeath to work.
- mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
- }
-
- mCamera->lock();
-
- return OK;
-}
-
+//TODO: Do we need to reimplement isCameraAvailable?
/*
* Check to see whether the requested video width and height is one
@@ -267,7 +230,7 @@ static void getSupportedVideoSizes(
* @param params CameraParameters to retrieve the information
* @return OK if no error.
*/
-status_t CameraSource::isCameraColorFormatSupported(
+status_t GonkCameraSource::isCameraColorFormatSupported(
const CameraParameters& params) {
mColorFormat = getColorFormat(params.get(
CameraParameters::KEY_VIDEO_FRAME_FORMAT));
@@ -292,7 +255,7 @@ status_t CameraSource::isCameraColorFormatSupported(
* @param frameRate the target frame rate in frames per second.
* @return OK if no error.
*/
-status_t CameraSource::configureCamera(
+status_t GonkCameraSource::configureCamera(
CameraParameters* params,
int32_t width, int32_t height,
int32_t frameRate) {
@@ -347,10 +310,9 @@ status_t CameraSource::configureCamera(
if (isCameraParamChanged) {
// Either frame rate or frame size needs to be changed.
- String8 s = params->flatten();
- if (OK != mCamera->setParameters(s)) {
+ if (OK != GonkCameraHardware::PushParameters(mCameraHandle,*params)) {
LOGE("Could not change settings."
- " Someone else is using camera %p?", mCamera.get());
+ " Someone else is using camera ?");
return -EBUSY;
}
}
@@ -368,7 +330,7 @@ status_t CameraSource::configureCamera(
* @param the target video frame height in pixels to check against
* @return OK if no error
*/
-status_t CameraSource::checkVideoSize(
+status_t GonkCameraSource::checkVideoSize(
const CameraParameters& params,
int32_t width, int32_t height) {
@@ -420,7 +382,7 @@ status_t CameraSource::checkVideoSize(
* @param the target video frame rate to check against
* @return OK if no error.
*/
-status_t CameraSource::checkFrameRate(
+status_t GonkCameraSource::checkFrameRate(
const CameraParameters& params,
int32_t frameRate) {
@@ -462,39 +424,17 @@ status_t CameraSource::checkFrameRate(
*
* @return OK if no error.
*/
-status_t CameraSource::init(
- const sp<ICamera>& camera,
- const sp<ICameraRecordingProxy>& proxy,
- int32_t cameraId,
+status_t GonkCameraSource::init(
Size videoSize,
int32_t frameRate,
bool storeMetaDataInVideoBuffers) {
LOGV("init");
status_t err = OK;
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- err = initWithCameraAccess(camera, proxy, cameraId,
- videoSize, frameRate,
- storeMetaDataInVideoBuffers);
- IPCThreadState::self()->restoreCallingIdentity(token);
- return err;
-}
-
-status_t CameraSource::initWithCameraAccess(
- const sp<ICamera>& camera,
- const sp<ICameraRecordingProxy>& proxy,
- int32_t cameraId,
- Size videoSize,
- int32_t frameRate,
- bool storeMetaDataInVideoBuffers) {
- LOGV("initWithCameraAccess");
- status_t err = OK;
+ //TODO: need to do something here to check the sanity of camera
- if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) {
- LOGE("Camera connection could not be established.");
- return err;
- }
- CameraParameters params(mCamera->getParameters());
+ CameraParameters params;
+ GonkCameraHardware::PullParameters(mCameraHandle, params);
if ((err = isCameraColorFormatSupported(params)) != OK) {
return err;
}
@@ -508,7 +448,8 @@ status_t CameraSource::initWithCameraAccess(
}
// Check on video frame size and frame rate.
- CameraParameters newCameraParams(mCamera->getParameters());
+ CameraParameters newCameraParams;
+ GonkCameraHardware::PullParameters(mCameraHandle, newCameraParams);
if ((err = checkVideoSize(newCameraParams,
videoSize.width, videoSize.height)) != OK) {
return err;
@@ -517,15 +458,11 @@ status_t CameraSource::initWithCameraAccess(
return err;
}
- // This CHECK is good, since we just passed the lock/unlock
- // check earlier by calling mCamera->setParameters().
- CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));
-
// By default, do not store metadata in video buffers
mIsMetaDataStoredInVideoBuffers = false;
- mCamera->storeMetaDataInBuffers(false);
+ GonkCameraHardware::StoreMetaDataInBuffers(mCameraHandle, false);
if (storeMetaDataInVideoBuffers) {
- if (OK == mCamera->storeMetaDataInBuffers(true)) {
+ if (OK == GonkCameraHardware::StoreMetaDataInBuffers(mCameraHandle, true)) {
mIsMetaDataStoredInVideoBuffers = true;
}
}
@@ -568,40 +505,28 @@ status_t CameraSource::initWithCameraAccess(
return OK;
}
-CameraSource::~CameraSource() {
+GonkCameraSource::~GonkCameraSource() {
if (mStarted) {
stop();
} else if (mInitCheck == OK) {
// Camera is initialized but because start() is never called,
// the lock on Camera is never released(). This makes sure
// Camera's lock is released in this case.
+ // TODO: Don't think I need to do this
releaseCamera();
}
}
-void CameraSource::startCameraRecording() {
+void GonkCameraSource::startCameraRecording() {
LOGV("startCameraRecording");
- // Reset the identity to the current thread because media server owns the
- // camera and recording is started by the applications. The applications
- // will connect to the camera in ICameraRecordingProxy::startRecording.
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if (mCameraFlags & FLAGS_HOT_CAMERA) {
- mCamera->unlock();
- mCamera.clear();
- CHECK_EQ(OK, mCameraRecordingProxy->startRecording(new ProxyListener(this)));
- } else {
- mCamera->setListener(new CameraSourceListener(this));
- mCamera->startRecording();
- CHECK(mCamera->recordingEnabled());
- }
- IPCThreadState::self()->restoreCallingIdentity(token);
+ CHECK_EQ(OK, GonkCameraHardware::StartRecording(mCameraHandle));
}
-status_t CameraSource::start(MetaData *meta) {
+status_t GonkCameraSource::start(MetaData *meta) {
LOGV("start");
CHECK(!mStarted);
if (mInitCheck != OK) {
- LOGE("CameraSource is not initialized yet");
+ LOGE("GonkCameraSource is not initialized yet");
return mInitCheck;
}
@@ -614,58 +539,34 @@ status_t CameraSource::start(MetaData *meta) {
mStartTimeUs = 0;
int64_t startTimeUs;
if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
+ LOGV("Metadata enabled, startime: %lld us", startTimeUs);
mStartTimeUs = startTimeUs;
}
+ // Register a listener with GonkCameraHardware so that we can get callbacks
+ GonkCameraHardware::SetListener(mCameraHandle, new GonkCameraSourceListener(this));
+
startCameraRecording();
mStarted = true;
return OK;
}
-void CameraSource::stopCameraRecording() {
+void GonkCameraSource::stopCameraRecording() {
LOGV("stopCameraRecording");
- if (mCameraFlags & FLAGS_HOT_CAMERA) {
- mCameraRecordingProxy->stopRecording();
- } else {
- mCamera->setListener(NULL);
- mCamera->stopRecording();
- }
+ GonkCameraHardware::StopRecording(mCameraHandle);
}
-void CameraSource::releaseCamera() {
+void GonkCameraSource::releaseCamera() {
LOGV("releaseCamera");
- if (mCamera != 0) {
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
- LOGV("Camera was cold when we started, stopping preview");
- mCamera->stopPreview();
- mCamera->disconnect();
- }
- mCamera->unlock();
- mCamera.clear();
- mCamera = 0;
- IPCThreadState::self()->restoreCallingIdentity(token);
- }
- if (mCameraRecordingProxy != 0) {
- mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
- mCameraRecordingProxy.clear();
- }
- mCameraFlags = 0;
}
-status_t CameraSource::stop() {
- LOGD("stop: E");
+status_t GonkCameraSource::stop() {
+ LOGV("stop: E");
Mutex::Autolock autoLock(mLock);
mStarted = false;
mFrameAvailableCondition.signal();
- int64_t token;
- bool isTokenValid = false;
- if (mCamera != 0) {
- token = IPCThreadState::self()->clearCallingIdentity();
- isTokenValid = true;
- }
releaseQueuedFrames();
while (!mFramesBeingEncoded.empty()) {
if (NO_ERROR !=
@@ -675,11 +576,9 @@ status_t CameraSource::stop() {
mFramesBeingEncoded.size());
}
}
+ LOGV("Calling stopCameraRecording");
stopCameraRecording();
releaseCamera();
- if (isTokenValid) {
- IPCThreadState::self()->restoreCallingIdentity(token);
- }
if (mCollectStats) {
LOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
@@ -692,22 +591,16 @@ status_t CameraSource::stop() {
}
CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
- LOGD("stop: X");
+ LOGV("stop: X");
return OK;
}
-void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+void GonkCameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
LOGV("releaseRecordingFrame");
- if (mCameraRecordingProxy != NULL) {
- mCameraRecordingProxy->releaseRecordingFrame(frame);
- } else if (mCamera != NULL) {
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrame(frame);
- IPCThreadState::self()->restoreCallingIdentity(token);
- }
+ GonkCameraHardware::ReleaseRecordingFrame(mCameraHandle, frame);
}
-void CameraSource::releaseQueuedFrames() {
+void GonkCameraSource::releaseQueuedFrames() {
List<sp<IMemory> >::iterator it;
while (!mFramesReceived.empty()) {
it = mFramesReceived.begin();
@@ -717,15 +610,15 @@ void CameraSource::releaseQueuedFrames() {
}
}
-sp<MetaData> CameraSource::getFormat() {
+sp<MetaData> GonkCameraSource::getFormat() {
return mMeta;
}
-void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
+void GonkCameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
releaseRecordingFrame(frame);
}
-void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
+void GonkCameraSource::signalBufferReturned(MediaBuffer *buffer) {
LOGV("signalBufferReturned: %p", buffer->data());
Mutex::Autolock autoLock(mLock);
for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
@@ -743,7 +636,7 @@ void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}
-status_t CameraSource::read(
+status_t GonkCameraSource::read(
MediaBuffer **buffer, const ReadOptions *options) {
LOGV("read");
@@ -764,11 +657,7 @@ status_t CameraSource::read(
if (NO_ERROR !=
mFrameAvailableCondition.waitRelative(mLock,
mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
- if (mCameraRecordingProxy != 0 &&
- !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
- LOGW("camera recording proxy is gone");
- return ERROR_END_OF_STREAM;
- }
+ //TODO: check sanity of camera?
LOGW("Timed out waiting for incoming camera video frames: %lld us",
mLastFrameTimestampUs);
}
@@ -790,9 +679,10 @@ status_t CameraSource::read(
return OK;
}
-void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
+void GonkCameraSource::dataCallbackTimestamp(int64_t timestampUs,
int32_t msgType, const sp<IMemory> &data) {
LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
+ //LOGV("dataCallbackTimestamp: data %x size %d", data->pointer(), data->size());
Mutex::Autolock autoLock(mLock);
if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
LOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
@@ -808,7 +698,7 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
}
// May need to skip frame or modify timestamp. Currently implemented
- // by the subclass CameraSourceTimeLapse.
+ // by the subclass GonkCameraSourceTimeLapse.
if (skipCurrentFrame(timestampUs)) {
releaseOneRecordingFrame(data);
return;
@@ -839,22 +729,9 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
mFrameAvailableCondition.signal();
}
-bool CameraSource::isMetaDataStoredInVideoBuffers() const {
+bool GonkCameraSource::isMetaDataStoredInVideoBuffers() const {
LOGV("isMetaDataStoredInVideoBuffers");
return mIsMetaDataStoredInVideoBuffers;
}
-CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
- mSource = source;
-}
-
-void CameraSource::ProxyListener::dataCallbackTimestamp(
- nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr) {
- mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);
-}
-
-void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) {
- LOGI("Camera recording proxy died");
-}
-
-} // namespace android
+} // namespace android
diff --git a/GonkCameraSource.h b/GonkCameraSource.h
index 446720b..fe58f96 100644
--- a/GonkCameraSource.h
+++ b/GonkCameraSource.h
@@ -14,69 +14,31 @@
* limitations under the License.
*/
-#ifndef CAMERA_SOURCE_H_
+#ifndef GONK_CAMERA_SOURCE_H_
-#define CAMERA_SOURCE_H_
+#define GONK_CAMERA_SOURCE_H_
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
-#include <camera/ICamera.h>
-#include <camera/ICameraRecordingProxyListener.h>
#include <camera/CameraParameters.h>
#include <utils/List.h>
#include <utils/RefBase.h>
+#include <utils/threads.h>
namespace android {
class IMemory;
-class Camera;
-class Surface;
+class GonkCameraSourceListener;
-class CameraSource : public MediaSource, public MediaBufferObserver {
+class GonkCameraSource : public MediaSource, public MediaBufferObserver {
public:
- /**
- * Factory method to create a new CameraSource using the current
- * settings (such as video size, frame rate, color format, etc)
- * from the default camera.
- *
- * @return NULL on error.
- */
- static CameraSource *Create();
- /**
- * Factory method to create a new CameraSource.
- *
- * @param camera the video input frame data source. If it is NULL,
- * we will try to connect to the camera with the given
- * cameraId.
- *
- * @param cameraId the id of the camera that the source will connect
- * to if camera is NULL; otherwise ignored.
- *
- * @param videoSize the dimension (in pixels) of the video frame
- * @param frameRate the target frames per second
- * @param surface the preview surface for display where preview
- * frames are sent to
- * @param storeMetaDataInVideoBuffers true to request the camera
- * source to store meta data in video buffers; false to
- * request the camera source to store real YUV frame data
- * in the video buffers. The camera source may not support
- * storing meta data in video buffers, if so, a request
- * to do that will NOT be honored. To find out whether
- * meta data is actually being stored in video buffers
- * during recording, call isMetaDataStoredInVideoBuffers().
- *
- * @return NULL on error.
- */
- static CameraSource *CreateFromCamera(const sp<ICamera> &camera,
- const sp<ICameraRecordingProxy> &proxy,
- int32_t cameraId,
- Size videoSize,
- int32_t frameRate,
- const sp<Surface>& surface,
- bool storeMetaDataInVideoBuffers = false);
+ static GonkCameraSource *Create(int32_t cameraHandle,
+ Size videoSize,
+ int32_t frameRate,
+ bool storeMetaDataInVideoBuffers = false);
- virtual ~CameraSource();
+ virtual ~GonkCameraSource();
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
@@ -84,14 +46,14 @@ public:
MediaBuffer **buffer, const ReadOptions *options = NULL);
/**
- * Check whether a CameraSource object is properly initialized.
+ * Check whether a GonkCameraSource object is properly initialized.
* Must call this method before stop().
* @return OK if initialization has successfully completed.
*/
virtual status_t initCheck() const;
/**
- * Returns the MetaData associated with the CameraSource,
+ * Returns the MetaData associated with the GonkCameraSource,
* including:
* kKeyColorFormat: YUV color format of the video frames
* kKeyWidth, kKeyHeight: dimension (in pixels) of the video frames
@@ -113,22 +75,6 @@ public:
virtual void signalBufferReturned(MediaBuffer* buffer);
protected:
- class ProxyListener: public BnCameraRecordingProxyListener {
- public:
- ProxyListener(const sp<CameraSource>& source);
- virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
- const sp<IMemory> &data);
-
- private:
- sp<CameraSource> mSource;
- };
-
- // isBinderAlive needs linkToDeath to work.
- class DeathNotifier: public IBinder::DeathRecipient {
- public:
- DeathNotifier() {}
- virtual void binderDied(const wp<IBinder>& who);
- };
enum CameraFlags {
FLAGS_SET_CAMERA = 1L << 0,
@@ -141,10 +87,6 @@ protected:
int32_t mColorFormat;
status_t mInitCheck;
- sp<Camera> mCamera;
- sp<ICameraRecordingProxy> mCameraRecordingProxy;
- sp<DeathNotifier> mDeathNotifier;
- sp<Surface> mSurface;
sp<MetaData> mMeta;
int64_t mStartTimeUs;
@@ -156,11 +98,9 @@ protected:
// Time between capture of two frames.
int64_t mTimeBetweenFrameCaptureUs;
- CameraSource(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
- int32_t cameraId,
+ GonkCameraSource(int32_t cameraHandle,
Size videoSize, int32_t frameRate,
- const sp<Surface>& surface,
- bool storeMetaDataInVideoBuffers);
+ bool storeMetaDataInVideoBuffers = false);
virtual void startCameraRecording();
virtual void stopCameraRecording();
@@ -170,6 +110,7 @@ protected:
// Called from dataCallbackTimestamp.
virtual bool skipCurrentFrame(int64_t timestampUs) {return false;}
+ friend class GonkCameraSourceListener;
// Callback called when still camera raw data is available.
virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {}
@@ -177,7 +118,6 @@ protected:
const sp<IMemory> &data);
private:
- friend class CameraSourceListener;
Mutex mLock;
Condition mFrameAvailableCondition;
@@ -192,23 +132,13 @@ private:
int64_t mGlitchDurationThresholdUs;
bool mCollectStats;
bool mIsMetaDataStoredInVideoBuffers;
+ int32_t mCameraHandle;
void releaseQueuedFrames();
void releaseOneRecordingFrame(const sp<IMemory>& frame);
-
- status_t init(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
- int32_t cameraId, Size videoSize, int32_t frameRate,
- bool storeMetaDataInVideoBuffers);
-
- status_t initWithCameraAccess(
- const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
- int32_t cameraId, Size videoSize, int32_t frameRate,
+ status_t init(Size videoSize, int32_t frameRate,
bool storeMetaDataInVideoBuffers);
-
- status_t isCameraAvailable(const sp<ICamera>& camera,
- const sp<ICameraRecordingProxy>& proxy,
- int32_t cameraId);
status_t isCameraColorFormatSupported(const CameraParameters& params);
status_t configureCamera(CameraParameters* params,
int32_t width, int32_t height,
@@ -222,10 +152,10 @@ private:
void releaseCamera();
- CameraSource(const CameraSource &);
- CameraSource &operator=(const CameraSource &);
+ GonkCameraSource(const GonkCameraSource &);
+ GonkCameraSource &operator=(const GonkCameraSource &);
};
} // namespace android
-#endif // CAMERA_SOURCE_H_
+#endif // GONK_CAMERA_SOURCE_H_
diff --git a/GonkRecorder.cpp b/GonkRecorder.cpp
index b20ca9d..2dc625c 100644
--- a/GonkRecorder.cpp
+++ b/GonkRecorder.cpp
@@ -16,35 +16,23 @@
*/
//#define LOG_NDEBUG 0
-#define LOG_TAG "StagefrightRecorder"
+#define LOG_TAG "GonkRecorder"
+
#include <utils/Log.h>
#include <media/AudioParameter.h>
-#include "StagefrightRecorder.h"
-
-#include <binder/IPCThreadState.h>
-#include <binder/IServiceManager.h>
+#include "GonkRecorder.h"
-#include <media/IMediaPlayerService.h>
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/AMRWriter.h>
-#include <media/stagefright/AACWriter.h>
-#include <media/stagefright/ExtendedWriter.h>
-#include <media/stagefright/FMA2DPWriter.h>
-#include <media/stagefright/CameraSource.h>
-#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/ExtendedWriter.h>
#include <media/stagefright/MPEG2TSWriter.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
-#include <media/stagefright/OMXClient.h>
+#include <OMX.h>
#include <media/stagefright/OMXCodec.h>
-#include <media/stagefright/SurfaceMediaSource.h>
#include <media/MediaProfiles.h>
-#include <camera/ICamera.h>
-#include <camera/CameraParameters.h>
-#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <utils/Errors.h>
@@ -57,51 +45,41 @@
#include "ARTPWriter.h"
#include <cutils/properties.h>
+#include "GonkCameraSource.h"
namespace android {
-// To collect the encoder usage for the battery app
-static void addBatteryData(uint32_t params) {
- sp<IBinder> binder =
- defaultServiceManager()->getService(String16("media.player"));
- sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
- CHECK(service.get() != NULL);
-
- service->addBatteryData(params);
+static sp<IOMX> sOMX = NULL;
+static sp<IOMX> GetOMX() {
+ if(sOMX.get() == NULL) {
+ sOMX = new OMX;
+ }
+ return sOMX;
}
-
-StagefrightRecorder::StagefrightRecorder()
+GonkRecorder::GonkRecorder()
: mWriter(NULL),
mOutputFd(-1),
mAudioSource(AUDIO_SOURCE_CNT),
mVideoSource(VIDEO_SOURCE_LIST_END),
- mStarted(false), mSurfaceMediaSource(NULL),
+ mStarted(false),
mDisableAudio(false) {
LOGV("Constructor");
reset();
}
-StagefrightRecorder::~StagefrightRecorder() {
+GonkRecorder::~GonkRecorder() {
LOGV("Destructor");
stop();
}
-status_t StagefrightRecorder::init() {
+status_t GonkRecorder::init() {
LOGV("init");
return OK;
}
-// The client side of mediaserver asks it to creat a SurfaceMediaSource
-// and return a interface reference. The client side will use that
-// while encoding GL Frames
-sp<ISurfaceTexture> StagefrightRecorder::querySurfaceMediaSource() const {
- LOGV("Get SurfaceMediaSource");
- return mSurfaceMediaSource;
-}
-
-status_t StagefrightRecorder::setAudioSource(audio_source_t as) {
+status_t GonkRecorder::setAudioSource(audio_source_t as) {
LOGV("setAudioSource: %d", as);
if (as < AUDIO_SOURCE_DEFAULT ||
as >= AUDIO_SOURCE_CNT) {
@@ -122,7 +100,7 @@
return OK;
}
-status_t StagefrightRecorder::setVideoSource(video_source vs) {
+status_t GonkRecorder::setVideoSource(video_source vs) {
LOGV("setVideoSource: %d", vs);
if (vs < VIDEO_SOURCE_DEFAULT ||
vs >= VIDEO_SOURCE_LIST_END) {
@@ -139,7 +117,7 @@
return OK;
}
-status_t StagefrightRecorder::setOutputFormat(output_format of) {
+status_t GonkRecorder::setOutputFormat(output_format of) {
LOGV("setOutputFormat: %d", of);
if (of < OUTPUT_FORMAT_DEFAULT ||
of >= OUTPUT_FORMAT_LIST_END) {
@@ -156,7 +134,7 @@
return OK;
}
-status_t StagefrightRecorder::setAudioEncoder(audio_encoder ae) {
+status_t GonkRecorder::setAudioEncoder(audio_encoder ae) {
LOGV("setAudioEncoder: %d", ae);
if (ae < AUDIO_ENCODER_DEFAULT ||
ae >= AUDIO_ENCODER_LIST_END) {
@@ -174,21 +152,10 @@
mAudioEncoder = ae;
}
- // Use default values if appropriate setparam's weren't called.
- if(mAudioEncoder == AUDIO_ENCODER_AAC) {
- mSampleRate = mSampleRate ? mSampleRate : 48000;
- mAudioChannels = mAudioChannels ? mAudioChannels : 2;
- mAudioBitRate = mAudioBitRate ? mAudioBitRate : 156000;
- }
- else{
- mSampleRate = mSampleRate ? mSampleRate : 8000;
- mAudioChannels = mAudioChannels ? mAudioChannels : 1;
- mAudioBitRate = mAudioBitRate ? mAudioBitRate : 12200;
- }
return OK;
}
-status_t StagefrightRecorder::setVideoEncoder(video_encoder ve) {
+status_t GonkRecorder::setVideoEncoder(video_encoder ve) {
LOGV("setVideoEncoder: %d", ve);
if (ve < VIDEO_ENCODER_DEFAULT ||
ve >= VIDEO_ENCODER_LIST_END) {
@@ -205,7 +172,7 @@
return OK;
}
-status_t StagefrightRecorder::setVideoSize(int width, int height) {
+status_t GonkRecorder::setVideoSize(int width, int height) {
LOGV("setVideoSize: %dx%d", width, height);
if (width <= 0 || height <= 0) {
LOGE("Invalid video size: %dx%d", width, height);
@@ -219,7 +186,7 @@
return OK;
}
-status_t StagefrightRecorder::setVideoFrameRate(int frames_per_second) {
+status_t GonkRecorder::setVideoFrameRate(int frames_per_second) {
LOGV("setVideoFrameRate: %d", frames_per_second);
if ((frames_per_second <= 0 && frames_per_second != -1) ||
frames_per_second > 120) {
@@ -233,31 +200,7 @@
return OK;
}
-status_t StagefrightRecorder::setCamera(const sp<ICamera> &camera,
- const sp<ICameraRecordingProxy> &proxy) {
- LOGV("setCamera");
- if (camera == 0) {
- LOGE("camera is NULL");
- return BAD_VALUE;
- }
- if (proxy == 0) {
- LOGE("camera proxy is NULL");
- return BAD_VALUE;
- }
-
- mCamera = camera;
- mCameraProxy = proxy;
- return OK;
-}
-
-status_t StagefrightRecorder::setPreviewSurface(const sp<Surface> &surface) {
- LOGV("setPreviewSurface: %p", surface.get());
- mPreviewSurface = surface;
-
- return OK;
-}
-
-status_t StagefrightRecorder::setOutputFile(const char *path) {
+status_t GonkRecorder::setOutputFile(const char *path) {
LOGE("setOutputFile(const char*) must not be called");
// We don't actually support this at all, as the media_server process
// no longer has permissions to create files.
@@ -265,7 +208,7 @@
return -EPERM;
}
-status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t length) {
+status_t GonkRecorder::setOutputFile(int fd, int64_t offset, int64_t length) {
LOGV("setOutputFile: %d, %lld, %lld", fd, offset, length);
// These don't make any sense, do they?
CHECK_EQ(offset, 0);
@@ -339,7 +282,7 @@
s->setTo(String8(&data[leading_space], i - leading_space));
}
-status_t StagefrightRecorder::setParamAudioSamplingRate(int32_t sampleRate) {
+status_t GonkRecorder::setParamAudioSamplingRate(int32_t sampleRate) {
LOGV("setParamAudioSamplingRate: %d", sampleRate);
if (sampleRate <= 0) {
LOGE("Invalid audio sampling rate: %d", sampleRate);
@@ -351,7 +294,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamAudioNumberOfChannels(int32_t channels) {
+status_t GonkRecorder::setParamAudioNumberOfChannels(int32_t channels) {
LOGV("setParamAudioNumberOfChannels: %d", channels);
if (channels <= 0 || channels >= 3) {
LOGE("Invalid number of audio channels: %d", channels);
@@ -363,7 +306,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamAudioEncodingBitRate(int32_t bitRate) {
+status_t GonkRecorder::setParamAudioEncodingBitRate(int32_t bitRate) {
LOGV("setParamAudioEncodingBitRate: %d", bitRate);
if (bitRate <= 0) {
LOGE("Invalid audio encoding bit rate: %d", bitRate);
@@ -378,7 +321,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamVideoEncodingBitRate(int32_t bitRate) {
+status_t GonkRecorder::setParamVideoEncodingBitRate(int32_t bitRate) {
LOGV("setParamVideoEncodingBitRate: %d", bitRate);
if (bitRate <= 0) {
LOGE("Invalid video encoding bit rate: %d", bitRate);
@@ -394,7 +337,7 @@
}
// Always rotate clockwise, and only support 0, 90, 180 and 270 for now.
-status_t StagefrightRecorder::setParamVideoRotation(int32_t degrees) {
+status_t GonkRecorder::setParamVideoRotation(int32_t degrees) {
LOGV("setParamVideoRotation: %d", degrees);
if (degrees < 0 || degrees % 90 != 0) {
LOGE("Unsupported video rotation angle: %d", degrees);
@@ -404,7 +347,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamMaxFileDurationUs(int64_t timeUs) {
+status_t GonkRecorder::setParamMaxFileDurationUs(int64_t timeUs) {
LOGV("setParamMaxFileDurationUs: %lld us", timeUs);
// This is meant for backward compatibility for MediaRecorder.java
@@ -423,7 +366,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamMaxFileSizeBytes(int64_t bytes) {
+status_t GonkRecorder::setParamMaxFileSizeBytes(int64_t bytes) {
LOGV("setParamMaxFileSizeBytes: %lld bytes", bytes);
// This is meant for backward compatibility for MediaRecorder.java
@@ -449,7 +392,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamInterleaveDuration(int32_t durationUs) {
+status_t GonkRecorder::setParamInterleaveDuration(int32_t durationUs) {
LOGV("setParamInterleaveDuration: %d", durationUs);
if (durationUs <= 500000) { // 500 ms
// If interleave duration is too small, it is very inefficient to do
@@ -471,20 +414,20 @@
// If seconds < 0, only the first frame is I frame, and rest are all P frames
// If seconds == 0, all frames are encoded as I frames. No P frames
// If seconds > 0, it is the time spacing (seconds) between 2 neighboring I frames
-status_t StagefrightRecorder::setParamVideoIFramesInterval(int32_t seconds) {
+status_t GonkRecorder::setParamVideoIFramesInterval(int32_t seconds) {
LOGV("setParamVideoIFramesInterval: %d seconds", seconds);
mIFramesIntervalSec = seconds;
return OK;
}
-status_t StagefrightRecorder::setParam64BitFileOffset(bool use64Bit) {
+status_t GonkRecorder::setParam64BitFileOffset(bool use64Bit) {
LOGV("setParam64BitFileOffset: %s",
use64Bit? "use 64 bit file offset": "use 32 bit file offset");
mUse64BitFileOffset = use64Bit;
return OK;
}
-status_t StagefrightRecorder::setParamVideoCameraId(int32_t cameraId) {
+status_t GonkRecorder::setParamVideoCameraId(int32_t cameraId) {
LOGV("setParamVideoCameraId: %d", cameraId);
if (cameraId < 0) {
return BAD_VALUE;
@@ -493,7 +436,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamTrackTimeStatus(int64_t timeDurationUs) {
+status_t GonkRecorder::setParamTrackTimeStatus(int64_t timeDurationUs) {
LOGV("setParamTrackTimeStatus: %lld", timeDurationUs);
if (timeDurationUs < 20000) { // Infeasible if shorter than 20 ms?
LOGE("Tracking time duration too short: %lld us", timeDurationUs);
@@ -503,7 +446,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamVideoEncoderProfile(int32_t profile) {
+status_t GonkRecorder::setParamVideoEncoderProfile(int32_t profile) {
LOGV("setParamVideoEncoderProfile: %d", profile);
// Additional check will be done later when we load the encoder.
@@ -512,7 +455,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamVideoEncoderLevel(int32_t level) {
+status_t GonkRecorder::setParamVideoEncoderLevel(int32_t level) {
LOGV("setParamVideoEncoderLevel: %d", level);
// Additional check will be done later when we load the encoder.
@@ -521,7 +464,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamMovieTimeScale(int32_t timeScale) {
+status_t GonkRecorder::setParamMovieTimeScale(int32_t timeScale) {
LOGV("setParamMovieTimeScale: %d", timeScale);
// The range is set to be the same as the audio's time scale range
@@ -534,7 +477,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamVideoTimeScale(int32_t timeScale) {
+status_t GonkRecorder::setParamVideoTimeScale(int32_t timeScale) {
LOGV("setParamVideoTimeScale: %d", timeScale);
// 60000 is chosen to make sure that each video frame from a 60-fps
@@ -547,7 +490,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamAudioTimeScale(int32_t timeScale) {
+status_t GonkRecorder::setParamAudioTimeScale(int32_t timeScale) {
LOGV("setParamAudioTimeScale: %d", timeScale);
// 96000 Hz is the highest sampling rate support in AAC.
@@ -559,33 +502,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamTimeLapseEnable(int32_t timeLapseEnable) {
- LOGV("setParamTimeLapseEnable: %d", timeLapseEnable);
-
- if(timeLapseEnable == 0) {
- mCaptureTimeLapse = false;
- } else if (timeLapseEnable == 1) {
- mCaptureTimeLapse = true;
- } else {
- return BAD_VALUE;
- }
- return OK;
-}
-
-status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs) {
- LOGV("setParamTimeBetweenTimeLapseFrameCapture: %lld us", timeUs);
-
- // Not allowing time more than a day
- if (timeUs <= 0 || timeUs > 86400*1E6) {
- LOGE("Time between time lapse frame capture (%lld) is out of range [0, 1 Day]", timeUs);
- return BAD_VALUE;
- }
-
- mTimeBetweenTimeLapseFrameCaptureUs = timeUs;
- return OK;
-}
-
-status_t StagefrightRecorder::setParamGeoDataLongitude(
+status_t GonkRecorder::setParamGeoDataLongitude(
int64_t longitudex10000) {
if (longitudex10000 > 1800000 || longitudex10000 < -1800000) {
@@ -595,7 +512,7 @@
return OK;
}
-status_t StagefrightRecorder::setParamGeoDataLatitude(
+status_t GonkRecorder::setParamGeoDataLatitude(
int64_t latitudex10000) {
if (latitudex10000 > 900000 || latitudex10000 < -900000) {
@@ -605,7 +522,7 @@
return OK;
}
-status_t StagefrightRecorder::setParameter(
+status_t GonkRecorder::setParameter(
const String8 &key, const String8 &value) {
LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
if (key == "max-duration") {
@@ -703,24 +620,13 @@
if (safe_strtoi32(value.string(), &timeScale)) {
return setParamVideoTimeScale(timeScale);
}
- } else if (key == "time-lapse-enable") {
- int32_t timeLapseEnable;
- if (safe_strtoi32(value.string(), &timeLapseEnable)) {
- return setParamTimeLapseEnable(timeLapseEnable);
- }
- } else if (key == "time-between-time-lapse-frame-capture") {
- int64_t timeBetweenTimeLapseFrameCaptureMs;
- if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureMs)) {
- return setParamTimeBetweenTimeLapseFrameCapture(
- 1000LL * timeBetweenTimeLapseFrameCaptureMs);
- }
} else {
LOGE("setParameter: failed to find key %s", key.string());
}
return BAD_VALUE;
}
-status_t StagefrightRecorder::setParameters(const String8 &params) {
+status_t GonkRecorder::setParameters(const String8 &params) {
LOGV("setParameters: %s", params.string());
const char *cparams = params.string();
const char *key_start = cparams;
@@ -755,13 +661,13 @@
return OK;
}
-status_t StagefrightRecorder::setListener(const sp<IMediaRecorderClient> &listener) {
+status_t GonkRecorder::setListener(const sp<IMediaRecorderClient> &listener) {
mListener = listener;
return OK;
}
-status_t StagefrightRecorder::prepare() {
+status_t GonkRecorder::prepare() {
LOGV(" %s E", __func__ );
if(mVideoSource != VIDEO_SOURCE_LIST_END && mVideoEncoder != VIDEO_ENCODER_LIST_END && mVideoHeight && mVideoWidth && /*Video recording*/
@@ -776,17 +682,15 @@
return OK;
}
-status_t StagefrightRecorder::start() {
+status_t GonkRecorder::start() {
CHECK(mOutputFd >= 0);
if (mWriter != NULL) {
- LOGE("File writer is not avaialble");
+ LOGE("File writer is not available");
return UNKNOWN_ERROR;
}
status_t status = OK;
- if(AUDIO_SOURCE_FM_RX_A2DP == mAudioSource)
- return startFMA2DPWriter();
switch (mOutputFormat) {
case OUTPUT_FORMAT_DEFAULT:
@@ -800,22 +704,9 @@
status = startAMRRecording();
break;
- case OUTPUT_FORMAT_AAC_ADIF:
- case OUTPUT_FORMAT_AAC_ADTS:
- status = startAACRecording();
- break;
-
- case OUTPUT_FORMAT_RTP_AVP:
- status = startRTPRecording();
- break;
-
case OUTPUT_FORMAT_MPEG2TS:
status = startMPEG2TSRecording();
break;
-
- case OUTPUT_FORMAT_QCP:
- status = startExtendedRecording( );
- break;
default:
LOGE("Unsupported output file format: %d", mOutputFormat);
status = UNKNOWN_ERROR;
@@ -824,22 +715,12 @@
if ((status == OK) && (!mStarted)) {
mStarted = true;
-
- uint32_t params = IMediaPlayerService::kBatteryDataCodecStarted;
- if (mAudioSource != AUDIO_SOURCE_CNT) {
- params |= IMediaPlayerService::kBatteryDataTrackAudio;
- }
- if (mVideoSource != VIDEO_SOURCE_LIST_END) {
- params |= IMediaPlayerService::kBatteryDataTrackVideo;
- }
-
- addBatteryData(params);
}
return status;
}
-sp<MediaSource> StagefrightRecorder::createAudioSource() {
+sp<MediaSource> GonkRecorder::createAudioSource() {
bool tunneledSource = false;
const char *tunnelMime;
@@ -907,12 +788,6 @@
case AUDIO_ENCODER_AAC:
mime = MEDIA_MIMETYPE_AUDIO_AAC;
break;
- case AUDIO_ENCODER_EVRC:
- mime = MEDIA_MIMETYPE_AUDIO_EVRC;
- break;
- case AUDIO_ENCODER_QCELP:
- mime = MEDIA_MIMETYPE_AUDIO_QCELP;
- break;
default:
LOGE("Unknown audio encoder: %d", mAudioEncoder);
return NULL;
@@ -931,36 +806,17 @@
encMeta->setInt32(kKeyTimeScale, mAudioTimeScale);
}
- OMXClient client;
- CHECK_EQ(client.connect(), OK);
-
+ // use direct OMX interface instead of connecting to
+ // mediaserver over binder calls
sp<MediaSource> audioEncoder =
- OMXCodec::Create(client.interface(), encMeta,
+ OMXCodec::Create(GetOMX(), encMeta,
true /* createEncoder */, audioSource);
mAudioSourceNode = audioSource;
return audioEncoder;
}
-status_t StagefrightRecorder::startAACRecording() {
- // FIXME:
- // Add support for OUTPUT_FORMAT_AAC_ADIF
- CHECK(mOutputFormat == OUTPUT_FORMAT_AAC_ADTS);
-
- CHECK(mAudioEncoder == AUDIO_ENCODER_AAC);
- CHECK(mAudioSource != AUDIO_SOURCE_CNT);
-
- mWriter = new AACWriter(mOutputFd);
- status_t status = startRawAudioRecording();
- if (status != OK) {
- mWriter.clear();
- mWriter = NULL;
- }
-
- return status;
-}
-
-status_t StagefrightRecorder::startAMRRecording() {
+status_t GonkRecorder::startAMRRecording() {
CHECK(mOutputFormat == OUTPUT_FORMAT_AMR_NB ||
mOutputFormat == OUTPUT_FORMAT_AMR_WB);
@@ -971,28 +827,12 @@
mAudioEncoder);
return BAD_VALUE;
}
- if (mSampleRate != 8000) {
- LOGE("Invalid sampling rate %d used for AMRNB recording",
- mSampleRate);
- return BAD_VALUE;
- }
} else { // mOutputFormat must be OUTPUT_FORMAT_AMR_WB
if (mAudioEncoder != AUDIO_ENCODER_AMR_WB) {
LOGE("Invlaid encoder %d used for AMRWB recording",
mAudioEncoder);
return BAD_VALUE;
}
- if (mSampleRate != 16000) {
- LOGE("Invalid sample rate %d used for AMRWB recording",
- mSampleRate);
- return BAD_VALUE;
- }
- }
-
- if (mAudioChannels != 1) {
- LOGE("Invalid number of audio channels %d used for amr recording",
- mAudioChannels);
- return BAD_VALUE;
}
mWriter = new AMRWriter(mOutputFd);
@@ -1004,7 +844,7 @@
return status;
}
-status_t StagefrightRecorder::startRawAudioRecording() {
+status_t GonkRecorder::startRawAudioRecording() {
if (mAudioSource >= AUDIO_SOURCE_CNT) {
LOGE("Invalid audio source: %d", mAudioSource);
return BAD_VALUE;
@@ -1035,62 +875,7 @@
return OK;
}
-status_t StagefrightRecorder::startFMA2DPWriter() {
- /* FM soc outputs at 48k */
- mSampleRate = 48000;
- mAudioChannels = 2;
-
- sp<MetaData> meta = new MetaData;
- meta->setInt32(kKeyChannelCount, mAudioChannels);
- meta->setInt32(kKeySampleRate, mSampleRate);
-
- mWriter = new FMA2DPWriter();
- mWriter->setListener(mListener);
- mWriter->start(meta.get());
- return OK;
-}
-
-status_t StagefrightRecorder::startRTPRecording() {
- CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_RTP_AVP);
-
- if ((mAudioSource != AUDIO_SOURCE_CNT
- && mVideoSource != VIDEO_SOURCE_LIST_END)
- || (mAudioSource == AUDIO_SOURCE_CNT
- && mVideoSource == VIDEO_SOURCE_LIST_END)) {
- // Must have exactly one source.
- return BAD_VALUE;
- }
-
- if (mOutputFd < 0) {
- return BAD_VALUE;
- }
-
- sp<MediaSource> source;
-
- if (mAudioSource != AUDIO_SOURCE_CNT) {
- source = createAudioSource();
- } else {
-
- sp<MediaSource> mediaSource;
- status_t err = setupMediaSource(&mediaSource);
- if (err != OK) {
- return err;
- }
-
- err = setupVideoEncoder(mediaSource, mVideoBitRate, &source);
- if (err != OK) {
- return err;
- }
- }
-
- mWriter = new ARTPWriter(mOutputFd);
- mWriter->addSource(source);
- mWriter->setListener(mListener);
-
- return mWriter->start();
-}
-
-status_t StagefrightRecorder::startMPEG2TSRecording() {
+status_t GonkRecorder::startMPEG2TSRecording() {
CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
@@ -1141,7 +926,7 @@
return mWriter->start();
}
-void StagefrightRecorder::clipVideoFrameRate() {
+void GonkRecorder::clipVideoFrameRate() {
LOGV("clipVideoFrameRate: encoder %d", mVideoEncoder);
int minFrameRate = mEncoderProfiles->getVideoEncoderParamByName(
"enc.vid.fps.min", mVideoEncoder);
@@ -1158,7 +943,7 @@
}
}
-void StagefrightRecorder::clipVideoBitRate() {
+void GonkRecorder::clipVideoBitRate() {
LOGV("clipVideoBitRate: encoder %d", mVideoEncoder);
int minBitRate = mEncoderProfiles->getVideoEncoderParamByName(
"enc.vid.bps.min", mVideoEncoder);
@@ -1175,7 +960,7 @@
}
}
-void StagefrightRecorder::clipVideoFrameWidth() {
+void GonkRecorder::clipVideoFrameWidth() {
LOGV("clipVideoFrameWidth: encoder %d", mVideoEncoder);
int minFrameWidth = mEncoderProfiles->getVideoEncoderParamByName(
"enc.vid.width.min", mVideoEncoder);
@@ -1192,8 +977,7 @@
}
}
-status_t StagefrightRecorder::checkVideoEncoderCapabilities() {
- if (!mCaptureTimeLapse) {
+status_t GonkRecorder::checkVideoEncoderCapabilities() {
// Dont clip for time lapse capture as encoder will have enough
// time to encode because of slow capture rate of time lapse.
clipVideoBitRate();
@@ -1201,13 +985,12 @@
clipVideoFrameWidth();
clipVideoFrameHeight();
setDefaultProfileIfNecessary();
- }
return OK;
}
// Set to use AVC baseline profile if the encoding parameters matches
// CAMCORDER_QUALITY_LOW profile; this is for the sake of MMS service.
-void StagefrightRecorder::setDefaultProfileIfNecessary() {
+void GonkRecorder::setDefaultProfileIfNecessary() {
LOGV("setDefaultProfileIfNecessary");
camcorder_quality quality = CAMCORDER_QUALITY_LOW;
@@ -1263,14 +1046,14 @@
}
}
-status_t StagefrightRecorder::checkAudioEncoderCapabilities() {
+status_t GonkRecorder::checkAudioEncoderCapabilities() {
clipAudioBitRate();
clipAudioSampleRate();
clipNumberOfAudioChannels();
return OK;
}
-void StagefrightRecorder::clipAudioBitRate() {
+void GonkRecorder::clipAudioBitRate() {
LOGV("clipAudioBitRate: encoder %d", mAudioEncoder);
int minAudioBitRate =
@@ -1292,7 +1075,7 @@
}
}
-void StagefrightRecorder::clipAudioSampleRate() {
+void GonkRecorder::clipAudioSampleRate() {
LOGV("clipAudioSampleRate: encoder %d", mAudioEncoder);
int minSampleRate =
@@ -1314,7 +1097,7 @@
}
}
-void StagefrightRecorder::clipNumberOfAudioChannels() {
+void GonkRecorder::clipNumberOfAudioChannels() {
LOGV("clipNumberOfAudioChannels: encoder %d", mAudioEncoder);
int minChannels =
@@ -1336,7 +1119,7 @@
}
}
-void StagefrightRecorder::clipVideoFrameHeight() {
+void GonkRecorder::clipVideoFrameHeight() {
LOGV("clipVideoFrameHeight: encoder %d", mVideoEncoder);
int minFrameHeight = mEncoderProfiles->getVideoEncoderParamByName(
"enc.vid.height.min", mVideoEncoder);
@@ -1354,61 +1137,26 @@
}
// Set up the appropriate MediaSource depending on the chosen option
-status_t StagefrightRecorder::setupMediaSource(
+status_t GonkRecorder::setupMediaSource(
sp<MediaSource> *mediaSource) {
if (mVideoSource == VIDEO_SOURCE_DEFAULT
|| mVideoSource == VIDEO_SOURCE_CAMERA) {
- sp<CameraSource> cameraSource;
+ sp<GonkCameraSource> cameraSource;
status_t err = setupCameraSource(&cameraSource);
if (err != OK) {
return err;
}
*mediaSource = cameraSource;
} else if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
- // If using GRAlloc buffers, setup surfacemediasource.
- // Later a handle to that will be passed
- // to the client side when queried
- status_t err = setupSurfaceMediaSource();
- if (err != OK) {
- return err;
- }
- *mediaSource = mSurfaceMediaSource;
+ return BAD_VALUE;
} else {
return INVALID_OPERATION;
}
return OK;
}
-// setupSurfaceMediaSource creates a source with the given
-// width and height and framerate.
-// TODO: This could go in a static function inside SurfaceMediaSource
-// similar to that in CameraSource
-status_t StagefrightRecorder::setupSurfaceMediaSource() {
- status_t err = OK;
- mSurfaceMediaSource = new SurfaceMediaSource(mVideoWidth, mVideoHeight);
- if (mSurfaceMediaSource == NULL) {
- return NO_INIT;
- }
-
- if (mFrameRate == -1) {
- int32_t frameRate = 0;
- CHECK (mSurfaceMediaSource->getFormat()->findInt32(
- kKeyFrameRate, &frameRate));
- LOGI("Frame rate is not explicitly set. Use the current frame "
- "rate (%d fps)", frameRate);
- mFrameRate = frameRate;
- } else {
- err = mSurfaceMediaSource->setFrameRate(mFrameRate);
- }
- CHECK(mFrameRate != -1);
-
- mIsMetaDataStoredInVideoBuffers =
- mSurfaceMediaSource->isMetaDataStoredInVideoBuffers();
- return err;
-}
-
-status_t StagefrightRecorder::setupCameraSource(
- sp<CameraSource> *cameraSource) {
+status_t GonkRecorder::setupCameraSource(
+ sp<GonkCameraSource> *cameraSource) {
status_t err = OK;
if ((err = checkVideoEncoderCapabilities()) != OK) {
return err;
@@ -1416,26 +1164,15 @@
Size videoSize;
videoSize.width = mVideoWidth;
videoSize.height = mVideoHeight;
- if (mCaptureTimeLapse) {
- mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
- mCamera, mCameraProxy, mCameraId,
- videoSize, mFrameRate, mPreviewSurface,
- mTimeBetweenTimeLapseFrameCaptureUs);
- *cameraSource = mCameraSourceTimeLapse;
- } else {
-
- bool useMeta = true;
- char value[PROPERTY_VALUE_MAX];
- if (property_get("debug.camcorder.disablemeta", value, NULL) &&
+ bool useMeta = true;
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("debug.camcorder.disablemeta", value, NULL) &&
atoi(value)) {
- useMeta = false;
- }
- *cameraSource = CameraSource::CreateFromCamera(
- mCamera, mCameraProxy, mCameraId, videoSize, mFrameRate,
- mPreviewSurface, useMeta);
+ useMeta = false;
}
- mCamera.clear();
- mCameraProxy.clear();
+
+ *cameraSource = GonkCameraSource::Create(
+ mCameraHandle, videoSize, mFrameRate, useMeta);
if (*cameraSource == NULL) {
return UNKNOWN_ERROR;
}
@@ -1465,7 +1202,7 @@
return OK;
}
-status_t StagefrightRecorder::setupVideoEncoder(
+status_t GonkRecorder::setupVideoEncoder(
sp<MediaSource> cameraSource,
int32_t videoBitRate,
sp<MediaSource> *source) {
@@ -1501,10 +1238,7 @@
CHECK(meta->findInt32(kKeyStride, &stride));
CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
- hfr = 0;
- if (!meta->findInt32(kKeyHFR, &hfr)) {
- LOGW("hfr not found, default to 0");
- }
+ CHECK(meta->findInt32(kKeyHFR, &hfr));
if(hfr) {
mMaxFileDurationUs = mMaxFileDurationUs * (hfr/mFrameRate);
@@ -1598,30 +1332,17 @@
enc_meta->setInt32(kKey3D, is3D);
}
- OMXClient client;
- CHECK_EQ(client.connect(), OK);
-
uint32_t encoder_flags = 0;
if (mIsMetaDataStoredInVideoBuffers) {
LOGW("Camera source supports metadata mode, create OMXCodec for metadata");
encoder_flags |= OMXCodec::kHardwareCodecsOnly;
encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
- if (property_get("ro.board.platform", value, "0")
- && (!strncmp(value, "msm7627", sizeof("msm7627") - 1))) {
- LOGW("msm7627 family of chipsets supports, only one buffer at a time");
- encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
- }
- }
-
- // Do not wait for all the input buffers to become available.
- // This give timelapse video recording faster response in
- // receiving output from video encoder component.
- if (mCaptureTimeLapse) {
encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
}
sp<MediaSource> encoder = OMXCodec::Create(
- client.interface(), enc_meta,
+ GetOMX(),
+ enc_meta,
true /* createEncoder */, cameraSource,
NULL, encoder_flags);
if (encoder == NULL) {
@@ -1638,7 +1359,7 @@
return OK;
}
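
GetOMX() above replaces the per-call OMXClient that the stock recorder created; its real definition lives elsewhere in this patch and is not shown here. Purely as a hypothetical stand-in, assuming it only needs to hand back an IOMX interface from a shared OMXClient (and that it sits inside namespace android, as GonkRecorder.cpp does):

#include <media/IOMX.h>
#include <media/stagefright/OMXClient.h>

// Hypothetical sketch only: a process-wide IOMX accessor. The actual GetOMX()
// used by GonkRecorder is defined elsewhere in this patch.
static sp<IOMX> GetOMXSketch()
{
    static OMXClient sClient;
    static bool sConnected = false;
    if (!sConnected) {
        // connect() binds to the media server's OMX component interface.
        sConnected = (sClient.connect() == OK);
    }
    if (!sConnected) {
        return NULL;  // converts to an empty sp<IOMX>
    }
    return sClient.interface();
}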
-status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
+status_t GonkRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
status_t status = BAD_VALUE;
if (OK != (status = checkAudioEncoderCapabilities())) {
return status;
@@ -1664,7 +1385,7 @@
return OK;
}
-status_t StagefrightRecorder::setupMPEG4Recording(
+status_t GonkRecorder::setupMPEG4Recording(
int outputFd,
int32_t videoWidth, int32_t videoHeight,
int32_t videoBitRate,
@@ -1696,7 +1417,7 @@
// Audio source is added at the end if it exists.
// This helps make sure that the "recording" sound is suppressed for
// camcorder applications in the recorded files.
- if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_CNT)) {
+ if (mAudioSource != AUDIO_SOURCE_CNT) {
err = setupAudioEncoder(writer);
if (err != OK) return err;
*totalBitRate += mAudioBitRate;
@@ -1728,7 +1449,7 @@
return OK;
}
-void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+void GonkRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
sp<MetaData> *meta) {
(*meta)->setInt64(kKeyTime, startTimeUs);
(*meta)->setInt32(kKeyFileType, mOutputFormat);
@@ -1752,7 +1473,7 @@
}
}
-status_t StagefrightRecorder::startMPEG4Recording() {
+status_t GonkRecorder::startMPEG4Recording() {
int32_t totalBitRate;
status_t err = setupMPEG4Recording(
mOutputFd, mVideoWidth, mVideoHeight,
@@ -1761,7 +1482,14 @@
return err;
}
- int64_t startTimeUs = systemTime() / 1000;
+ //systemTime() doesn't give the correct time here because
+ //HAVE_POSIX_CLOCKS is not defined for utils/Timers.cpp,
+ //so use clock_gettime() directly instead.
+#include <time.h>
+ struct timespec t;
+ clock_gettime(CLOCK_MONOTONIC, &t);
+ int64_t startTimeUs = int64_t(t.tv_sec)*1000000000LL + t.tv_nsec;
+ startTimeUs = startTimeUs / 1000;
sp<MetaData> meta = new MetaData;
setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
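
The clock_gettime() workaround above can be factored into a small monotonic-timestamp helper; a minimal sketch of the same conversion (the helper name is illustrative):

#include <stdint.h>
#include <time.h>

// Sketch: monotonic "now" in microseconds, matching the conversion done in
// startMPEG4Recording() above (nanoseconds -> microseconds).
static int64_t GetMonotonicTimeUs()
{
    struct timespec t;
    clock_gettime(CLOCK_MONOTONIC, &t);
    return (int64_t(t.tv_sec) * 1000000000LL + t.tv_nsec) / 1000;
}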
@@ -1773,7 +1501,7 @@
return OK;
}
-status_t StagefrightRecorder::pause() {
+status_t GonkRecorder::pause() {
LOGV("pause");
if (mWriter == NULL) {
return UNKNOWN_ERROR;
@@ -1782,31 +1510,16 @@
if (mStarted) {
mStarted = false;
-
- uint32_t params = 0;
- if (mAudioSource != AUDIO_SOURCE_CNT) {
- params |= IMediaPlayerService::kBatteryDataTrackAudio;
- }
- if (mVideoSource != VIDEO_SOURCE_LIST_END) {
- params |= IMediaPlayerService::kBatteryDataTrackVideo;
- }
-
- addBatteryData(params);
}
return OK;
}
-status_t StagefrightRecorder::stop() {
+status_t GonkRecorder::stop() {
LOGV("stop");
status_t err = OK;
- if (mCaptureTimeLapse && mCameraSourceTimeLapse != NULL) {
- mCameraSourceTimeLapse->startQuickReadReturns();
- mCameraSourceTimeLapse = NULL;
- }
-
if (mWriter != NULL) {
err = mWriter->stop();
mWriter.clear();
@@ -1819,30 +1532,20 @@
if (mStarted) {
mStarted = false;
-
- uint32_t params = 0;
- if (mAudioSource != AUDIO_SOURCE_CNT) {
- params |= IMediaPlayerService::kBatteryDataTrackAudio;
- }
- if (mVideoSource != VIDEO_SOURCE_LIST_END) {
- params |= IMediaPlayerService::kBatteryDataTrackVideo;
- }
-
- addBatteryData(params);
}
return err;
}
-status_t StagefrightRecorder::close() {
+status_t GonkRecorder::close() {
LOGV("close");
stop();
return OK;
}
-status_t StagefrightRecorder::reset() {
+status_t GonkRecorder::reset() {
LOGV("reset");
stop();
@@ -1858,9 +1561,9 @@
mVideoHeight = 144;
mFrameRate = -1;
mVideoBitRate = 192000;
- mSampleRate = 0;
- mAudioChannels = 0;
- mAudioBitRate = 0;
+ mSampleRate = 8000;
+ mAudioChannels = 1;
+ mAudioBitRate = 12200;
mInterleaveDurationUs = 0;
mIFramesIntervalSec = 2;
mAudioSourceNode = 0;
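
The new audio defaults above correspond to AMR-NB capture; spelled out as a sketch (the constant names are illustrative, not part of this patch):

// 8 kHz mono at 12200 bps is the AMR-NB 12.2 kbit/s mode, which fits the
// recorder's AMR/3GPP output paths.
static const int32_t kDefaultAudioSampleRateHz = 8000;   // AMR-NB sampling rate
static const int32_t kDefaultAudioChannels     = 1;      // mono
static const int32_t kDefaultAudioBitRateBps   = 12200;  // AMR-NB 12.2 kbit/s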
@@ -1875,9 +1578,6 @@
mMaxFileDurationUs = 0;
mMaxFileSizeBytes = 0;
mTrackEveryTimeDurationUs = 0;
- mCaptureTimeLapse = false;
- mTimeBetweenTimeLapseFrameCaptureUs = -1;
- mCameraSourceTimeLapse = NULL;
mIsMetaDataStoredInVideoBuffers = false;
mEncoderProfiles = MediaProfiles::getInstance();
mRotationDegrees = 0;
@@ -1885,6 +1585,11 @@
mLongitudex10000 = -3600000;
mOutputFd = -1;
+ mCameraHandle = -1;
+ //TODO: We may need to register a listener eventually if someone
+ //becomes interested in recorder events; for now, default to no
+ //listener registered.
+ mListener = NULL;
// Disable Audio Encoding
char value[PROPERTY_VALUE_MAX];
@@ -1894,7 +1599,7 @@
return OK;
}
-status_t StagefrightRecorder::getMaxAmplitude(int *max) {
+status_t GonkRecorder::getMaxAmplitude(int *max) {
LOGV("getMaxAmplitude");
if (max == NULL) {
@@ -1911,7 +1616,7 @@
return OK;
}
-status_t StagefrightRecorder::dump(
+status_t GonkRecorder::dump(
int fd, const Vector<String16>& args) const {
LOGV("dump");
const size_t SIZE = 256;
@@ -1958,6 +1663,8 @@
result.append(buffer);
snprintf(buffer, SIZE, " Camera Id: %d\n", mCameraId);
result.append(buffer);
+ snprintf(buffer, SIZE, " Camera Handle: %d\n", mCameraHandle);
+ result.append(buffer);
snprintf(buffer, SIZE, " Start time offset (ms): %d\n", mStartTimeOffsetMs);
result.append(buffer);
snprintf(buffer, SIZE, " Encoder: %d\n", mVideoEncoder);
@@ -1978,45 +1685,12 @@
return OK;
}
-status_t StagefrightRecorder::startExtendedRecording() {
- CHECK(mOutputFormat == OUTPUT_FORMAT_QCP);
-
- if (mSampleRate != 8000) {
- LOGE("Invalid sampling rate %d used for recording",
- mSampleRate);
- return BAD_VALUE;
- }
- if (mAudioChannels != 1) {
- LOGE("Invalid number of audio channels %d used for recording",
- mAudioChannels);
- return BAD_VALUE;
- }
-
- if (mAudioSource >= AUDIO_SOURCE_CNT) {
- LOGE("Invalid audio source: %d", mAudioSource);
- return BAD_VALUE;
- }
-
- sp<MediaSource> audioEncoder = createAudioSource();
-
- if (audioEncoder == NULL) {
- LOGE("AudioEncoder NULL");
- return UNKNOWN_ERROR;
- }
-
- mWriter = new ExtendedWriter(dup(mOutputFd));
- mWriter->addSource(audioEncoder);
-
- if (mMaxFileDurationUs != 0) {
- mWriter->setMaxFileDuration(mMaxFileDurationUs);
- }
- if (mMaxFileSizeBytes != 0) {
- mWriter->setMaxFileSize(mMaxFileSizeBytes);
- }
- mWriter->setListener(mListener);
- mWriter->start();
-
- return OK;
+status_t GonkRecorder::setCameraHandle(int32_t handle) {
+ if (handle < 0) {
+ return BAD_VALUE;
+ }
+ mCameraHandle = handle;
+ return OK;
}
} // namespace android
diff --git a/GonkRecorder.h b/GonkRecorder.h
index dba6110..fa948af 100644
--- a/GonkRecorder.h
+++ b/GonkRecorder.h
@@ -14,11 +14,11 @@
* limitations under the License.
*/
-#ifndef STAGEFRIGHT_RECORDER_H_
+#ifndef GONK_RECORDER_H_
-#define STAGEFRIGHT_RECORDER_H_
+#define GONK_RECORDER_H_
-#include <media/MediaRecorderBase.h>
+#include <media/mediarecorder.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
@@ -26,21 +26,16 @@
namespace android {
-class Camera;
-class ICameraRecordingProxy;
-class CameraSource;
-class CameraSourceTimeLapse;
+class GonkCameraSource;
struct MediaSource;
struct MediaWriter;
class MetaData;
struct AudioSource;
class MediaProfiles;
-class ISurfaceTexture;
-class SurfaceMediaSource;
-struct StagefrightRecorder : public MediaRecorderBase {
- StagefrightRecorder();
- virtual ~StagefrightRecorder();
+struct GonkRecorder {
+ GonkRecorder();
+ virtual ~GonkRecorder();
virtual status_t init();
virtual status_t setAudioSource(audio_source_t as);
@@ -50,11 +45,10 @@
virtual status_t setVideoEncoder(video_encoder ve);
virtual status_t setVideoSize(int width, int height);
virtual status_t setVideoFrameRate(int frames_per_second);
- virtual status_t setCamera(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy);
- virtual status_t setPreviewSurface(const sp<Surface>& surface);
virtual status_t setOutputFile(const char *path);
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
virtual status_t setParameters(const String8& params);
+ virtual status_t setCameraHandle(int32_t handle);
virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
virtual status_t prepare();
virtual status_t start();
@@ -65,12 +59,8 @@
virtual status_t getMaxAmplitude(int *max);
virtual status_t dump(int fd, const Vector<String16>& args) const;
// Querying a SurfaceMediaSource
- virtual sp<ISurfaceTexture> querySurfaceMediaSource() const;
private:
- sp<ICamera> mCamera;
- sp<ICameraRecordingProxy> mCameraProxy;
- sp<Surface> mPreviewSurface;
sp<IMediaRecorderClient> mListener;
sp<MediaWriter> mWriter;
int mOutputFd;
@@ -104,11 +94,6 @@
int32_t mLongitudex10000;
int32_t mStartTimeOffsetMs;
- bool mCaptureTimeLapse;
- int64_t mTimeBetweenTimeLapseFrameCaptureUs;
- sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
-
-
String8 mParams;
bool mIsMetaDataStoredInVideoBuffers;
@@ -119,8 +104,8 @@
// An <ISurfaceTexture> pointer
// will be sent to the client side using which the
// frame buffers will be queued and dequeued
- sp<SurfaceMediaSource> mSurfaceMediaSource;
bool mDisableAudio;
+ int32_t mCameraHandle;
status_t setupMPEG4Recording(
int outputFd,
@@ -132,10 +117,7 @@
sp<MetaData> *meta);
status_t startMPEG4Recording();
status_t startAMRRecording();
- status_t startFMA2DPWriter();
- status_t startAACRecording();
status_t startRawAudioRecording();
- status_t startRTPRecording();
status_t startMPEG2TSRecording();
sp<MediaSource> createAudioSource();
status_t checkVideoEncoderCapabilities();
@@ -144,9 +126,8 @@
// source (CameraSource or SurfaceMediaSource)
// depending on the videosource type
status_t setupMediaSource(sp<MediaSource> *mediaSource);
- status_t setupCameraSource(sp<CameraSource> *cameraSource);
+ status_t setupCameraSource(sp<GonkCameraSource> *cameraSource);
// setup the surfacemediasource for the encoder
- status_t setupSurfaceMediaSource();
status_t setupAudioEncoder(const sp<MediaWriter>& writer);
status_t setupVideoEncoder(
@@ -160,8 +141,6 @@
status_t setParamAudioNumberOfChannels(int32_t channles);
status_t setParamAudioSamplingRate(int32_t sampleRate);
status_t setParamAudioTimeScale(int32_t timeScale);
- status_t setParamTimeLapseEnable(int32_t timeLapseEnable);
- status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs);
status_t setParamVideoEncodingBitRate(int32_t bitRate);
status_t setParamVideoIFramesInterval(int32_t seconds);
status_t setParamVideoEncoderProfile(int32_t profile);
@@ -186,14 +165,10 @@
void clipNumberOfAudioChannels();
void setDefaultProfileIfNecessary();
-
- StagefrightRecorder(const StagefrightRecorder &);
- StagefrightRecorder &operator=(const StagefrightRecorder &);
-
- /* extension */
- status_t startExtendedRecording();
+ GonkRecorder(const GonkRecorder &);
+ GonkRecorder &operator=(const GonkRecorder &);
};
} // namespace android
-#endif // STAGEFRIGHT_RECORDER_H_
+#endif // GONK_RECORDER_H_
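
Taken together, the reworked header above exposes a handle-based recorder API. A hypothetical caller-side sketch, using only the methods visible in this patch (the output-format and video-source setters that live outside the shown hunks are omitted, and the camera handle is assumed to come from the Gonk camera layer):

#include <media/mediarecorder.h>   // video_encoder and audio_source_t values
#include "GonkRecorder.h"

// Hypothetical usage sketch only; error handling trimmed for brevity.
static android::status_t StartRecordingSketch(int32_t cameraHandle, int outputFd)
{
    using namespace android;

    GonkRecorder recorder;
    status_t err = recorder.init();
    if (err != OK) {
        return err;
    }

    // Wire the camera by handle instead of ICamera/ICameraRecordingProxy.
    err = recorder.setCameraHandle(cameraHandle);
    if (err != OK) {
        return err;
    }

    recorder.setAudioSource(AUDIO_SOURCE_CAMCORDER);
    recorder.setVideoEncoder(VIDEO_ENCODER_H264);
    recorder.setVideoSize(640, 480);
    recorder.setVideoFrameRate(30);
    recorder.setOutputFile(outputFd, 0 /* offset */, 0 /* length */);

    err = recorder.prepare();
    if (err != OK) {
        return err;
    }
    return recorder.start();   // later: recorder.stop()
}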