summary refs log tree commit diff stats
path: root/src/plugins/android/src
diff options
context:
space:
mode:
Diffstat (limited to 'src/plugins/android/src')
-rw-r--r--src/plugins/android/src/common/qandroidmultimediautils.cpp25
-rw-r--r--src/plugins/android/src/common/qandroidmultimediautils.h2
-rw-r--r--src/plugins/android/src/common/qandroidvideooutput.h18
-rw-r--r--src/plugins/android/src/common/qandroidvideorendercontrol.cpp300
-rw-r--r--src/plugins/android/src/common/qandroidvideorendercontrol.h36
-rw-r--r--src/plugins/android/src/mediacapture/qandroidcamerasession.cpp63
-rw-r--r--src/plugins/android/src/mediacapture/qandroidcamerasession.h9
-rw-r--r--src/plugins/android/src/mediacapture/qandroidcaptureservice.h2
-rw-r--r--src/plugins/android/src/mediacapture/qandroidcapturesession.cpp39
-rw-r--r--src/plugins/android/src/mediacapture/qandroidcapturesession.h3
-rw-r--r--src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp18
-rw-r--r--src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h4
-rw-r--r--src/plugins/android/src/mediaplayer/qandroidmediaservice.h3
-rw-r--r--src/plugins/android/src/wrappers/jcamera.cpp37
-rw-r--r--src/plugins/android/src/wrappers/jcamera.h17
-rw-r--r--src/plugins/android/src/wrappers/jsurfacetexture.h1
16 files changed, 252 insertions, 325 deletions
diff --git a/src/plugins/android/src/common/qandroidmultimediautils.cpp b/src/plugins/android/src/common/qandroidmultimediautils.cpp
index 7ae40358f..9bf38a869 100644
--- a/src/plugins/android/src/common/qandroidmultimediautils.cpp
+++ b/src/plugins/android/src/common/qandroidmultimediautils.cpp
@@ -76,4 +76,29 @@ bool qt_sizeLessThan(const QSize &s1, const QSize &s2)
return s1.width() * s1.height() < s2.width() * s2.height();
}
+void qt_convert_NV21_to_ARGB32(const uchar *yuv, quint32 *rgb, int width, int height)
+{
+ const int frameSize = width * height;
+
+ int a = 0;
+ for (int i = 0, ci = 0; i < height; ++i, ci += 1) {
+ for (int j = 0, cj = 0; j < width; ++j, cj += 1) {
+ int y = (0xff & ((int) yuv[ci * width + cj]));
+ int v = (0xff & ((int) yuv[frameSize + (ci >> 1) * width + (cj & ~1) + 0]));
+ int u = (0xff & ((int) yuv[frameSize + (ci >> 1) * width + (cj & ~1) + 1]));
+ y = y < 16 ? 16 : y;
+
+ int r = (int) (1.164f * (y - 16) + 1.596f * (v - 128));
+ int g = (int) (1.164f * (y - 16) - 0.813f * (v - 128) - 0.391f * (u - 128));
+ int b = (int) (1.164f * (y - 16) + 2.018f * (u - 128));
+
+ r = qBound(0, r, 255);
+ g = qBound(0, g, 255);
+ b = qBound(0, b, 255);
+
+ rgb[a++] = 0xff000000 | (r << 16) | (g << 8) | b;
+ }
+ }
+}
+
QT_END_NAMESPACE
diff --git a/src/plugins/android/src/common/qandroidmultimediautils.h b/src/plugins/android/src/common/qandroidmultimediautils.h
index 1996209b0..792ab06db 100644
--- a/src/plugins/android/src/common/qandroidmultimediautils.h
+++ b/src/plugins/android/src/common/qandroidmultimediautils.h
@@ -53,6 +53,8 @@ int qt_findClosestValue(const QList<int> &list, int value);
bool qt_sizeLessThan(const QSize &s1, const QSize &s2);
+void qt_convert_NV21_to_ARGB32(const uchar *yuv, quint32 *rgb, int width, int height);
+
QT_END_NAMESPACE
#endif // QANDROIDMULTIMEDIAUTILS_H
diff --git a/src/plugins/android/src/common/qandroidvideooutput.h b/src/plugins/android/src/common/qandroidvideooutput.h
index 8110b67b0..6e4a32e3f 100644
--- a/src/plugins/android/src/common/qandroidvideooutput.h
+++ b/src/plugins/android/src/common/qandroidvideooutput.h
@@ -48,26 +48,26 @@
QT_BEGIN_NAMESPACE
-typedef void (*TextureReadyCallback)(void*);
-
class QAndroidVideoOutput
{
public:
- QAndroidVideoOutput() { }
virtual ~QAndroidVideoOutput() { }
virtual jobject surfaceHolder() = 0;
+ virtual jobject surfaceTexture() { return 0; }
- virtual bool isTextureReady() = 0;
- virtual void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) = 0;
- virtual jobject surfaceTexture() = 0;
+ virtual bool isReady() { return true; }
- virtual void setVideoSize(const QSize &size) = 0;
- virtual void stop() = 0;
+ virtual void setVideoSize(const QSize &) { }
+ virtual void stop() { }
- virtual QImage toImage() = 0;
+ // signals:
+ // void readyChanged(bool);
};
+#define QAndroidVideoOutput_iid "org.qt-project.qt.qandroidvideooutput/5.0"
+Q_DECLARE_INTERFACE(QAndroidVideoOutput, QAndroidVideoOutput_iid)
+
QT_END_NAMESPACE
#endif // QANDROIDVIDEOOUTPUT_H
diff --git a/src/plugins/android/src/common/qandroidvideorendercontrol.cpp b/src/plugins/android/src/common/qandroidvideorendercontrol.cpp
index 0eb8d172f..5306fe918 100644
--- a/src/plugins/android/src/common/qandroidvideorendercontrol.cpp
+++ b/src/plugins/android/src/common/qandroidvideorendercontrol.cpp
@@ -44,39 +44,31 @@
#include <QtCore/private/qjni_p.h>
#include "jsurfacetextureholder.h"
#include <QAbstractVideoSurface>
-#include <QOpenGLContext>
-#include <QOffscreenSurface>
-#include <QOpenGLFramebufferObject>
#include <QVideoSurfaceFormat>
-#include <QOpenGLFunctions>
-#include <QOpenGLShaderProgram>
#include <qevent.h>
+#include <qcoreapplication.h>
+#include <qopenglcontext.h>
+#include <qopenglfunctions.h>
QT_BEGIN_NAMESPACE
-static const GLfloat g_vertex_data[] = {
- -1.f, 1.f,
- 1.f, 1.f,
- 1.f, -1.f,
- -1.f, -1.f
-};
+#define ExternalGLTextureHandle QAbstractVideoBuffer::HandleType(QAbstractVideoBuffer::UserHandle + 1)
-static const GLfloat g_texture_data[] = {
- 0.f, 0.f,
- 1.f, 0.f,
- 1.f, 1.f,
- 0.f, 1.f
-};
+TextureDeleter::~TextureDeleter()
+{
+ glDeleteTextures(1, &m_id);
+}
-class TextureVideoBuffer : public QAbstractVideoBuffer
+class AndroidTextureVideoBuffer : public QAbstractVideoBuffer
{
public:
- TextureVideoBuffer(GLuint textureId)
- : QAbstractVideoBuffer(GLTextureHandle)
- , m_textureId(textureId)
- {}
+ AndroidTextureVideoBuffer(JSurfaceTexture *surface)
+ : QAbstractVideoBuffer(ExternalGLTextureHandle)
+ , m_surfaceTexture(surface)
+ {
+ }
- virtual ~TextureVideoBuffer() {}
+ virtual ~AndroidTextureVideoBuffer() {}
MapMode mapMode() const { return NotMapped; }
uchar *map(MapMode, int*, int*) { return 0; }
@@ -84,67 +76,33 @@ public:
QVariant handle() const
{
- return QVariant::fromValue<unsigned int>(m_textureId);
- }
-
-private:
- GLuint m_textureId;
-};
-
-class ImageVideoBuffer : public QAbstractVideoBuffer
-{
-public:
- ImageVideoBuffer(const QImage &image)
- : QAbstractVideoBuffer(NoHandle)
- , m_image(image)
- , m_mode(NotMapped)
- {
-
- }
-
- MapMode mapMode() const { return m_mode; }
- uchar *map(MapMode mode, int *, int *)
- {
- if (mode != NotMapped && m_mode == NotMapped) {
- m_mode = mode;
- return m_image.bits();
+ if (m_data.isEmpty()) {
+ // update the video texture (called from the render thread)
+ m_surfaceTexture->updateTexImage();
+ m_data << (uint)m_surfaceTexture->textureID() << m_surfaceTexture->getTransformMatrix();
}
- return 0;
- }
-
- void unmap()
- {
- m_mode = NotMapped;
+ return m_data;
}
private:
- QImage m_image;
- MapMode m_mode;
+ mutable JSurfaceTexture *m_surfaceTexture;
+ mutable QVariantList m_data;
};
QAndroidVideoRendererControl::QAndroidVideoRendererControl(QObject *parent)
: QVideoRendererControl(parent)
, m_surface(0)
- , m_offscreenSurface(0)
- , m_glContext(0)
- , m_fbo(0)
- , m_program(0)
- , m_useImage(false)
, m_androidSurface(0)
, m_surfaceTexture(0)
, m_surfaceHolder(0)
, m_externalTex(0)
- , m_textureReadyCallback(0)
- , m_textureReadyContext(0)
+ , m_textureDeleter(0)
{
}
QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
{
- if (m_glContext)
- m_glContext->makeCurrent(m_offscreenSurface);
-
if (m_surfaceTexture) {
m_surfaceTexture->callMethod<void>("release");
delete m_surfaceTexture;
@@ -159,13 +117,8 @@ QAndroidVideoRendererControl::~QAndroidVideoRendererControl()
delete m_surfaceHolder;
m_surfaceHolder = 0;
}
- if (m_externalTex)
- glDeleteTextures(1, &m_externalTex);
-
- delete m_fbo;
- delete m_program;
- delete m_glContext;
- delete m_offscreenSurface;
+ if (m_textureDeleter)
+ m_textureDeleter->deleteLater();
}
QAbstractVideoSurface *QAndroidVideoRendererControl::surface() const
@@ -178,28 +131,23 @@ void QAndroidVideoRendererControl::setSurface(QAbstractVideoSurface *surface)
if (surface == m_surface)
return;
- if (m_surface && m_surface->isActive()) {
- m_surface->stop();
- m_surface->removeEventFilter(this);
+ if (m_surface) {
+ if (m_surface->isActive())
+ m_surface->stop();
+ m_surface->setProperty("_q_GLThreadCallback", QVariant());
}
m_surface = surface;
if (m_surface) {
- m_useImage = !m_surface->supportedPixelFormats(QAbstractVideoBuffer::GLTextureHandle).contains(QVideoFrame::Format_BGR32);
- m_surface->installEventFilter(this);
+ m_surface->setProperty("_q_GLThreadCallback",
+ QVariant::fromValue<QObject*>(this));
}
}
-bool QAndroidVideoRendererControl::isTextureReady()
-{
- return QOpenGLContext::currentContext() || (m_surface && m_surface->property("GLContext").isValid());
-}
-
-void QAndroidVideoRendererControl::setTextureReadyCallback(TextureReadyCallback cb, void *context)
+bool QAndroidVideoRendererControl::isReady()
{
- m_textureReadyCallback = cb;
- m_textureReadyContext = context;
+ return QOpenGLContext::currentContext() || m_externalTex;
}
bool QAndroidVideoRendererControl::initSurfaceTexture()
@@ -210,45 +158,15 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
if (!m_surface)
return false;
- QOpenGLContext *currContext = QOpenGLContext::currentContext();
-
- // If we don't have a GL context in the current thread, create one and share it
- // with the render thread GL context
- if (!currContext && !m_glContext) {
- QOpenGLContext *shareContext = qobject_cast<QOpenGLContext*>(m_surface->property("GLContext").value<QObject*>());
- if (!shareContext)
- return false;
-
- m_offscreenSurface = new QOffscreenSurface;
- QSurfaceFormat format;
- format.setSwapBehavior(QSurfaceFormat::SingleBuffer);
- m_offscreenSurface->setFormat(format);
- m_offscreenSurface->create();
-
- m_glContext = new QOpenGLContext;
- m_glContext->setFormat(m_offscreenSurface->requestedFormat());
-
- if (shareContext)
- m_glContext->setShareContext(shareContext);
-
- if (!m_glContext->create()) {
- delete m_glContext;
- m_glContext = 0;
- delete m_offscreenSurface;
- m_offscreenSurface = 0;
- return false;
- }
-
- // if sharing contexts is not supported, fallback to image rendering and send the bits
- // to the video surface
- if (!m_glContext->shareContext())
- m_useImage = true;
+ // if we have an OpenGL context in the current thread, create a texture. Otherwise, wait
+ // for the GL render thread to call us back to do it.
+ if (QOpenGLContext::currentContext()) {
+ glGenTextures(1, &m_externalTex);
+ m_textureDeleter = new TextureDeleter(m_externalTex);
+ } else if (!m_externalTex) {
+ return false;
}
- if (m_glContext)
- m_glContext->makeCurrent(m_offscreenSurface);
-
- glGenTextures(1, &m_externalTex);
m_surfaceTexture = new JSurfaceTexture(m_externalTex);
if (m_surfaceTexture->isValid()) {
@@ -256,7 +174,9 @@ bool QAndroidVideoRendererControl::initSurfaceTexture()
} else {
delete m_surfaceTexture;
m_surfaceTexture = 0;
- glDeleteTextures(1, &m_externalTex);
+ m_textureDeleter->deleteLater();
+ m_externalTex = 0;
+ m_textureDeleter = 0;
}
return m_surfaceTexture != 0;
@@ -294,9 +214,6 @@ void QAndroidVideoRendererControl::setVideoSize(const QSize &size)
stop();
m_nativeSize = size;
-
- delete m_fbo;
- m_fbo = 0;
}
void QAndroidVideoRendererControl::stop()
@@ -306,133 +223,40 @@ void QAndroidVideoRendererControl::stop()
m_nativeSize = QSize();
}
-QImage QAndroidVideoRendererControl::toImage()
-{
- if (!m_fbo)
- return QImage();
-
- return m_fbo->toImage().mirrored();
-}
-
void QAndroidVideoRendererControl::onFrameAvailable()
{
- if (m_glContext)
- m_glContext->makeCurrent(m_offscreenSurface);
-
- m_surfaceTexture->updateTexImage();
-
- if (!m_nativeSize.isValid())
+ if (!m_nativeSize.isValid() || !m_surface)
return;
- renderFrameToFbo();
+ QAbstractVideoBuffer *buffer = new AndroidTextureVideoBuffer(m_surfaceTexture);
+ QVideoFrame frame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
- QAbstractVideoBuffer *buffer = 0;
- QVideoFrame frame;
-
- if (m_useImage) {
- buffer = new ImageVideoBuffer(m_fbo->toImage().mirrored());
- frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_RGB32);
- } else {
- buffer = new TextureVideoBuffer(m_fbo->texture());
- frame = QVideoFrame(buffer, m_nativeSize, QVideoFrame::Format_BGR32);
+ if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
+ || m_surface->nativeResolution() != frame.size())) {
+ m_surface->stop();
}
- if (m_surface && frame.isValid()) {
- if (m_surface->isActive() && (m_surface->surfaceFormat().pixelFormat() != frame.pixelFormat()
- || m_surface->nativeResolution() != frame.size())) {
- m_surface->stop();
- }
-
- if (!m_surface->isActive()) {
- QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(),
- m_useImage ? QAbstractVideoBuffer::NoHandle
- : QAbstractVideoBuffer::GLTextureHandle);
-
- m_surface->start(format);
- }
+ if (!m_surface->isActive()) {
+ QVideoSurfaceFormat format(frame.size(), frame.pixelFormat(), ExternalGLTextureHandle);
+ format.setScanLineDirection(QVideoSurfaceFormat::BottomToTop);
- if (m_surface->isActive())
- m_surface->present(frame);
+ m_surface->start(format);
}
-}
-
-void QAndroidVideoRendererControl::renderFrameToFbo()
-{
- createGLResources();
-
- m_fbo->bind();
-
- glViewport(0, 0, m_nativeSize.width(), m_nativeSize.height());
-
- m_program->bind();
- m_program->enableAttributeArray(0);
- m_program->enableAttributeArray(1);
- m_program->setUniformValue("frameTexture", GLuint(0));
- m_program->setUniformValue("texMatrix", m_surfaceTexture->getTransformMatrix());
-
- glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, g_vertex_data);
- glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, g_texture_data);
- glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
-
- m_program->disableAttributeArray(0);
- m_program->disableAttributeArray(1);
- m_program->release();
-
- glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
- m_fbo->release();
-
- glFinish();
-}
-
-void QAndroidVideoRendererControl::createGLResources()
-{
- if (!m_fbo)
- m_fbo = new QOpenGLFramebufferObject(m_nativeSize);
-
- if (!m_program) {
- m_program = new QOpenGLShaderProgram;
-
- QOpenGLShader *vertexShader = new QOpenGLShader(QOpenGLShader::Vertex, m_program);
- vertexShader->compileSourceCode("attribute highp vec4 vertexCoordsArray; \n" \
- "attribute highp vec2 textureCoordArray; \n" \
- "uniform highp mat4 texMatrix; \n" \
- "varying highp vec2 textureCoords; \n" \
- "void main(void) \n" \
- "{ \n" \
- " gl_Position = vertexCoordsArray; \n" \
- " textureCoords = (texMatrix * vec4(textureCoordArray, 0.0, 1.0)).xy; \n" \
- "}\n");
- m_program->addShader(vertexShader);
-
- QOpenGLShader *fragmentShader = new QOpenGLShader(QOpenGLShader::Fragment, m_program);
- fragmentShader->compileSourceCode("#extension GL_OES_EGL_image_external : require \n" \
- "varying highp vec2 textureCoords; \n" \
- "uniform samplerExternalOES frameTexture; \n" \
- "void main() \n" \
- "{ \n" \
- " gl_FragColor = texture2D(frameTexture, textureCoords); \n" \
- "}\n");
- m_program->addShader(fragmentShader);
-
- m_program->bindAttributeLocation("vertexCoordsArray", 0);
- m_program->bindAttributeLocation("textureCoordArray", 1);
- m_program->link();
- }
+ if (m_surface->isActive())
+ m_surface->present(frame);
}
-bool QAndroidVideoRendererControl::eventFilter(QObject *, QEvent *e)
+void QAndroidVideoRendererControl::customEvent(QEvent *e)
{
- if (e->type() == QEvent::DynamicPropertyChange) {
- QDynamicPropertyChangeEvent *event = static_cast<QDynamicPropertyChangeEvent*>(e);
- if (event->propertyName() == "GLContext" && m_textureReadyCallback) {
- m_textureReadyCallback(m_textureReadyContext);
- m_textureReadyCallback = 0;
- m_textureReadyContext = 0;
+ if (e->type() == QEvent::User) {
+ // This is running in the render thread (OpenGL enabled)
+ if (!m_externalTex) {
+ glGenTextures(1, &m_externalTex);
+ m_textureDeleter = new TextureDeleter(m_externalTex); // will be deleted in the correct thread
+ emit readyChanged(true);
}
}
-
- return false;
}
QT_END_NAMESPACE
diff --git a/src/plugins/android/src/common/qandroidvideorendercontrol.h b/src/plugins/android/src/common/qandroidvideorendercontrol.h
index 25220310e..5d9130c07 100644
--- a/src/plugins/android/src/common/qandroidvideorendercontrol.h
+++ b/src/plugins/android/src/common/qandroidvideorendercontrol.h
@@ -48,15 +48,23 @@
QT_BEGIN_NAMESPACE
-class QOpenGLContext;
-class QOffscreenSurface;
-class QOpenGLFramebufferObject;
-class QOpenGLShaderProgram;
class JSurfaceTextureHolder;
+class TextureDeleter : public QObject
+{
+ Q_OBJECT
+public:
+ TextureDeleter(uint id) : m_id(id) { }
+ ~TextureDeleter();
+
+private:
+ uint m_id;
+};
+
class QAndroidVideoRendererControl : public QVideoRendererControl, public QAndroidVideoOutput
{
Q_OBJECT
+ Q_INTERFACES(QAndroidVideoOutput)
public:
explicit QAndroidVideoRendererControl(QObject *parent = 0);
~QAndroidVideoRendererControl() Q_DECL_OVERRIDE;
@@ -65,38 +73,30 @@ public:
void setSurface(QAbstractVideoSurface *surface) Q_DECL_OVERRIDE;
jobject surfaceHolder() Q_DECL_OVERRIDE;
- bool isTextureReady() Q_DECL_OVERRIDE;
- void setTextureReadyCallback(TextureReadyCallback cb, void *context = 0) Q_DECL_OVERRIDE;
jobject surfaceTexture() Q_DECL_OVERRIDE;
+ bool isReady() Q_DECL_OVERRIDE;
void setVideoSize(const QSize &size) Q_DECL_OVERRIDE;
void stop() Q_DECL_OVERRIDE;
- QImage toImage() Q_DECL_OVERRIDE;
- bool eventFilter(QObject *obj, QEvent *event) Q_DECL_OVERRIDE;
+ void customEvent(QEvent *) Q_DECL_OVERRIDE;
+
+Q_SIGNALS:
+ void readyChanged(bool);
private Q_SLOTS:
void onFrameAvailable();
private:
bool initSurfaceTexture();
- void renderFrameToFbo();
- void createGLResources();
QAbstractVideoSurface *m_surface;
- QOffscreenSurface *m_offscreenSurface;
- QOpenGLContext *m_glContext;
- QOpenGLFramebufferObject *m_fbo;
- QOpenGLShaderProgram *m_program;
- bool m_useImage;
QSize m_nativeSize;
QJNIObjectPrivate *m_androidSurface;
JSurfaceTexture *m_surfaceTexture;
JSurfaceTextureHolder *m_surfaceHolder;
uint m_externalTex;
-
- TextureReadyCallback m_textureReadyCallback;
- void *m_textureReadyContext;
+ TextureDeleter *m_textureDeleter;
};
QT_END_NAMESPACE
diff --git a/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp b/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp
index 761b716d1..3ee700bf4 100644
--- a/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcamerasession.cpp
@@ -52,12 +52,6 @@
QT_BEGIN_NAMESPACE
-static void textureReadyCallback(void *context)
-{
- if (context)
- reinterpret_cast<QAndroidCameraSession *>(context)->onSurfaceTextureReady();
-}
-
QAndroidCameraSession::QAndroidCameraSession(QObject *parent)
: QObject(parent)
, m_selectedCamera(0)
@@ -153,8 +147,11 @@ bool QAndroidCameraSession::open()
if (m_camera) {
connect(m_camera, SIGNAL(pictureExposed()), this, SLOT(onCameraPictureExposed()));
connect(m_camera, SIGNAL(pictureCaptured(QByteArray)), this, SLOT(onCameraPictureCaptured(QByteArray)));
+ connect(m_camera, SIGNAL(previewFrameAvailable(QByteArray)), this, SLOT(onCameraPreviewFrameAvailable(QByteArray)));
m_nativeOrientation = m_camera->getNativeOrientation();
m_status = QCamera::LoadedStatus;
+ if (m_camera->getPreviewFormat() != JCamera::NV21)
+ m_camera->setPreviewFormat(JCamera::NV21);
emit opened();
} else {
m_status = QCamera::UnavailableStatus;
@@ -188,12 +185,17 @@ void QAndroidCameraSession::close()
emit statusChanged(m_status);
}
-void QAndroidCameraSession::setVideoPreview(QAndroidVideoOutput *videoOutput)
+void QAndroidCameraSession::setVideoPreview(QObject *videoOutput)
{
if (m_videoOutput)
m_videoOutput->stop();
- m_videoOutput = videoOutput;
+ if (videoOutput) {
+ connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
+ m_videoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);
+ } else {
+ m_videoOutput = 0;
+ }
}
void QAndroidCameraSession::adjustViewfinderSize(const QSize &captureSize, bool restartPreview)
@@ -243,12 +245,8 @@ void QAndroidCameraSession::startPreview()
applyImageSettings();
adjustViewfinderSize(m_imageSettings.resolution());
- if (m_videoOutput) {
- if (m_videoOutput->isTextureReady())
- m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
- else
- m_videoOutput->setTextureReadyCallback(textureReadyCallback, this);
- }
+ if (m_videoOutput && m_videoOutput->isReady())
+ onVideoOutputReady(true);
JMultimediaUtils::enableOrientationListener(true);
@@ -427,6 +425,7 @@ int QAndroidCameraSession::capture(const QString &fileName)
// adjust picture rotation depending on the device orientation
m_camera->setRotation(currentCameraRotation());
+ m_camera->requestPreviewFrame();
m_camera->takePicture();
} else {
emit imageCaptureError(m_lastImageCaptureId, QCameraImageCapture::NotSupportedFeatureError,
@@ -455,10 +454,6 @@ void QAndroidCameraSession::onCameraPictureExposed()
void QAndroidCameraSession::onCameraPictureCaptured(const QByteArray &data)
{
if (!m_captureCanceled) {
- // generate a preview from the viewport
- if (m_videoOutput)
- emit imageCaptured(m_currentImageCaptureId, m_videoOutput->toImage());
-
// Loading and saving the captured image can be slow, do it in a separate thread
QtConcurrent::run(this, &QAndroidCameraSession::processCapturedImage,
m_currentImageCaptureId,
@@ -522,9 +517,37 @@ void QAndroidCameraSession::processCapturedImage(int id,
}
}
-void QAndroidCameraSession::onSurfaceTextureReady()
+void QAndroidCameraSession::onCameraPreviewFrameAvailable(const QByteArray &data)
+{
+ if (m_captureCanceled || m_readyForCapture)
+ return;
+
+ QtConcurrent::run(this, &QAndroidCameraSession::processPreviewImage,
+ m_currentImageCaptureId,
+ data);
+}
+
+void QAndroidCameraSession::processPreviewImage(int id, const QByteArray &data)
+{
+ QSize frameSize = m_camera->previewSize();
+ QImage preview(frameSize, QImage::Format_ARGB32);
+ qt_convert_NV21_to_ARGB32((const uchar *)data.constData(),
+ (quint32 *)preview.bits(),
+ frameSize.width(),
+ frameSize.height());
+
+ // Preview display of front-facing cameras is flipped horizontally, but the frame data
+ // we get here is not. Flip it ourselves if the camera is front-facing to match what the user
+ // sees on the viewfinder.
+ if (m_camera->getFacing() == JCamera::CameraFacingFront)
+ preview = preview.transformed(QTransform().scale(-1, 1));
+
+ emit imageCaptured(id, preview);
+}
+
+void QAndroidCameraSession::onVideoOutputReady(bool ready)
{
- if (m_camera && m_videoOutput)
+ if (m_camera && m_videoOutput && ready)
m_camera->setPreviewTexture(m_videoOutput->surfaceTexture());
}
diff --git a/src/plugins/android/src/mediacapture/qandroidcamerasession.h b/src/plugins/android/src/mediacapture/qandroidcamerasession.h
index f1cf44eec..de891522b 100644
--- a/src/plugins/android/src/mediacapture/qandroidcamerasession.h
+++ b/src/plugins/android/src/mediacapture/qandroidcamerasession.h
@@ -71,7 +71,7 @@ public:
void setCaptureMode(QCamera::CaptureModes mode);
bool isCaptureModeSupported(QCamera::CaptureModes mode) const;
- void setVideoPreview(QAndroidVideoOutput *videoOutput);
+ void setVideoPreview(QObject *videoOutput);
void adjustViewfinderSize(const QSize &captureSize, bool restartPreview = true);
QImageEncoderSettings imageSettings() const { return m_imageSettings; }
@@ -88,8 +88,6 @@ public:
int capture(const QString &fileName);
void cancelCapture();
- void onSurfaceTextureReady();
-
int currentCameraRotation() const;
Q_SIGNALS:
@@ -110,10 +108,13 @@ Q_SIGNALS:
void imageCaptureError(int id, int error, const QString &errorString);
private Q_SLOTS:
+ void onVideoOutputReady(bool ready);
+
void onApplicationStateChanged(Qt::ApplicationState state);
void onCameraPictureExposed();
void onCameraPictureCaptured(const QByteArray &data);
+ void onCameraPreviewFrameAvailable(const QByteArray &data);
private:
bool open();
@@ -123,7 +124,7 @@ private:
void stopPreview();
void applyImageSettings();
- void processPreviewImage(int id);
+ void processPreviewImage(int id, const QByteArray &data);
void processCapturedImage(int id,
const QByteArray &data,
QCameraImageCapture::CaptureDestinations dest,
diff --git a/src/plugins/android/src/mediacapture/qandroidcaptureservice.h b/src/plugins/android/src/mediacapture/qandroidcaptureservice.h
index 71aaf2d64..4050622f2 100644
--- a/src/plugins/android/src/mediacapture/qandroidcaptureservice.h
+++ b/src/plugins/android/src/mediacapture/qandroidcaptureservice.h
@@ -88,7 +88,7 @@ private:
QAndroidVideoDeviceSelectorControl *m_videoInputControl;
QAndroidAudioInputSelectorControl *m_audioInputControl;
QAndroidCameraSession *m_cameraSession;
- QAndroidVideoRendererControl *m_videoRendererControl;
+ QMediaControl *m_videoRendererControl;
QAndroidCameraZoomControl *m_cameraZoomControl;
QAndroidCameraExposureControl *m_cameraExposureControl;
QAndroidCameraFlashControl *m_cameraFlashControl;
diff --git a/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp b/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp
index ec458eddb..3962baba8 100644
--- a/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp
+++ b/src/plugins/android/src/mediacapture/qandroidcapturesession.cpp
@@ -110,25 +110,27 @@ void QAndroidCaptureSession::setAudioInput(const QString &input)
QUrl QAndroidCaptureSession::outputLocation() const
{
- return m_outputLocation;
+ return m_actualOutputLocation;
}
bool QAndroidCaptureSession::setOutputLocation(const QUrl &location)
{
- if (m_outputLocation == location)
+ if (m_requestedOutputLocation == location)
return false;
- m_outputLocation = location;
+ m_actualOutputLocation = QUrl();
+ m_requestedOutputLocation = location;
- if (m_outputLocation.isEmpty())
+ if (m_requestedOutputLocation.isEmpty())
return true;
- if (m_outputLocation.isValid() && (m_outputLocation.isLocalFile() || m_outputLocation.isRelative())) {
- emit actualLocationChanged(m_outputLocation);
+ if (m_requestedOutputLocation.isValid()
+ && (m_requestedOutputLocation.isLocalFile() || m_requestedOutputLocation.isRelative())) {
+ emit actualLocationChanged(m_requestedOutputLocation);
return true;
}
- m_outputLocation = QUrl();
+ m_requestedOutputLocation = QUrl();
return false;
}
@@ -213,15 +215,18 @@ bool QAndroidCaptureSession::start()
// Set output file
- QString filePath = m_mediaStorageLocation.generateFileName(m_outputLocation.isLocalFile() ? m_outputLocation.toLocalFile()
- : m_outputLocation.toString(),
- m_cameraSession ? QAndroidMediaStorageLocation::Camera
- : QAndroidMediaStorageLocation::Audio,
- m_cameraSession ? QLatin1String("VID_")
- : QLatin1String("REC_"),
- m_containerFormat);
- m_outputLocation = QUrl::fromLocalFile(filePath);
- emit actualLocationChanged(m_outputLocation);
+ QString filePath = m_mediaStorageLocation.generateFileName(
+ m_requestedOutputLocation.isLocalFile() ? m_requestedOutputLocation.toLocalFile()
+ : m_requestedOutputLocation.toString(),
+ m_cameraSession ? QAndroidMediaStorageLocation::Camera
+ : QAndroidMediaStorageLocation::Audio,
+ m_cameraSession ? QLatin1String("VID_")
+ : QLatin1String("REC_"),
+ m_containerFormat);
+
+ m_actualOutputLocation = QUrl::fromLocalFile(filePath);
+ if (m_actualOutputLocation != m_requestedOutputLocation)
+ emit actualLocationChanged(m_actualOutputLocation);
m_mediaRecorder->setOutputFile(filePath);
@@ -280,7 +285,7 @@ void QAndroidCaptureSession::stop(bool error)
// if the media is saved into the standard media location, register it
// with the Android media scanner so it appears immediately in apps
// such as the gallery.
- QString mediaPath = m_outputLocation.toLocalFile();
+ QString mediaPath = m_actualOutputLocation.toLocalFile();
QString standardLoc = m_cameraSession ? JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::DCIM)
: JMultimediaUtils::getDefaultMediaDirectory(JMultimediaUtils::Sounds);
if (mediaPath.startsWith(standardLoc))
diff --git a/src/plugins/android/src/mediacapture/qandroidcapturesession.h b/src/plugins/android/src/mediacapture/qandroidcapturesession.h
index 6d3645c13..fcd87cd02 100644
--- a/src/plugins/android/src/mediacapture/qandroidcapturesession.h
+++ b/src/plugins/android/src/mediacapture/qandroidcapturesession.h
@@ -160,7 +160,8 @@ private:
QMediaRecorder::State m_state;
QMediaRecorder::Status m_status;
- QUrl m_outputLocation;
+ QUrl m_requestedOutputLocation;
+ QUrl m_actualOutputLocation;
CaptureProfile m_defaultSettings;
diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp
index 753c60662..3f3b599ac 100644
--- a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp
+++ b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.cpp
@@ -45,12 +45,6 @@
QT_BEGIN_NAMESPACE
-static void textureReadyCallback(void *context)
-{
- if (context)
- reinterpret_cast<QAndroidMediaPlayerControl *>(context)->onSurfaceTextureReady();
-}
-
QAndroidMediaPlayerControl::QAndroidMediaPlayerControl(QObject *parent)
: QMediaPlayerControl(parent),
mMediaPlayer(new JMediaPlayer),
@@ -241,18 +235,18 @@ void QAndroidMediaPlayerControl::setMedia(const QMediaContent &mediaContent,
setSeekable(true);
}
-void QAndroidMediaPlayerControl::setVideoOutput(QAndroidVideoOutput *videoOutput)
+void QAndroidMediaPlayerControl::setVideoOutput(QObject *videoOutput)
{
if (mVideoOutput)
mVideoOutput->stop();
- mVideoOutput = videoOutput;
+ mVideoOutput = qobject_cast<QAndroidVideoOutput *>(videoOutput);
if (mVideoOutput && !mMediaPlayer->display()) {
- if (mVideoOutput->isTextureReady())
+ if (mVideoOutput->isReady())
mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
else
- mVideoOutput->setTextureReadyCallback(textureReadyCallback, this);
+ connect(videoOutput, SIGNAL(readyChanged(bool)), this, SLOT(onVideoOutputReady(bool)));
}
}
@@ -426,9 +420,9 @@ void QAndroidMediaPlayerControl::onVideoSizeChanged(qint32 width, qint32 height)
mVideoOutput->setVideoSize(mVideoSize);
}
-void QAndroidMediaPlayerControl::onSurfaceTextureReady()
+void QAndroidMediaPlayerControl::onVideoOutputReady(bool ready)
{
- if (!mMediaPlayer->display() && mVideoOutput) {
+ if (!mMediaPlayer->display() && mVideoOutput && ready) {
mMediaPlayer->setDisplay(mVideoOutput->surfaceHolder());
flushPendingStates();
}
diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h
index 93eced853..ef1d325e5 100644
--- a/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h
+++ b/src/plugins/android/src/mediaplayer/qandroidmediaplayercontrol.h
@@ -75,8 +75,7 @@ public:
const QIODevice *mediaStream() const Q_DECL_OVERRIDE;
void setMedia(const QMediaContent &mediaContent, QIODevice *stream) Q_DECL_OVERRIDE;
- void setVideoOutput(QAndroidVideoOutput *videoOutput);
- void onSurfaceTextureReady();
+ void setVideoOutput(QObject *videoOutput);
Q_SIGNALS:
void metaDataUpdated();
@@ -90,6 +89,7 @@ public Q_SLOTS:
void setMuted(bool muted) Q_DECL_OVERRIDE;
private Q_SLOTS:
+ void onVideoOutputReady(bool ready);
void onError(qint32 what, qint32 extra);
void onInfo(qint32 what, qint32 extra);
void onMediaPlayerInfo(qint32 what, qint32 extra);
diff --git a/src/plugins/android/src/mediaplayer/qandroidmediaservice.h b/src/plugins/android/src/mediaplayer/qandroidmediaservice.h
index 4d310e8e0..ba4b4ccd2 100644
--- a/src/plugins/android/src/mediaplayer/qandroidmediaservice.h
+++ b/src/plugins/android/src/mediaplayer/qandroidmediaservice.h
@@ -48,7 +48,6 @@ QT_BEGIN_NAMESPACE
class QAndroidMediaPlayerControl;
class QAndroidMetaDataReaderControl;
-class QAndroidVideoRendererControl;
class QAndroidMediaService : public QMediaService
{
@@ -63,7 +62,7 @@ public:
private:
QAndroidMediaPlayerControl *mMediaControl;
QAndroidMetaDataReaderControl *mMetadataControl;
- QAndroidVideoRendererControl *mVideoRendererControl;
+ QMediaControl *mVideoRendererControl;
};
QT_END_NAMESPACE
diff --git a/src/plugins/android/src/wrappers/jcamera.cpp b/src/plugins/android/src/wrappers/jcamera.cpp
index f53fa4936..f858f4702 100644
--- a/src/plugins/android/src/wrappers/jcamera.cpp
+++ b/src/plugins/android/src/wrappers/jcamera.cpp
@@ -102,6 +102,18 @@ static void notifyPictureCaptured(JNIEnv *env, jobject, int id, jbyteArray data)
}
}
+static void notifyPreviewFrame(JNIEnv *env, jobject, int id, jbyteArray data)
+{
+ JCamera *obj = g_objectMap.value(id, 0);
+ if (obj) {
+ QByteArray bytes;
+ int arrayLength = env->GetArrayLength(data);
+ bytes.resize(arrayLength);
+ env->GetByteArrayRegion(data, 0, arrayLength, (jbyte*)bytes.data());
+ Q_EMIT obj->previewFrameAvailable(bytes);
+ }
+}
+
JCamera::JCamera(int cameraId, jobject cam)
: QObject()
, QJNIObjectPrivate(cam)
@@ -225,6 +237,23 @@ QList<QSize> JCamera::getSupportedPreviewSizes()
return list;
}
+JCamera::ImageFormat JCamera::getPreviewFormat()
+{
+ if (!m_parameters.isValid())
+ return Unknown;
+
+ return JCamera::ImageFormat(m_parameters.callMethod<jint>("getPreviewFormat"));
+}
+
+void JCamera::setPreviewFormat(ImageFormat fmt)
+{
+ if (!m_parameters.isValid())
+ return;
+
+ m_parameters.callMethod<void>("setPreviewFormat", "(I)V", jint(fmt));
+ applyParameters();
+}
+
void JCamera::setPreviewSize(const QSize &size)
{
if (!m_parameters.isValid())
@@ -624,6 +653,11 @@ void JCamera::setJpegQuality(int quality)
applyParameters();
}
+void JCamera::requestPreviewFrame()
+{
+ callMethod<void>("requestPreviewFrame");
+}
+
void JCamera::takePicture()
{
callMethod<void>("takePicture");
@@ -672,7 +706,8 @@ QStringList JCamera::callStringListMethod(const char *methodName)
static JNINativeMethod methods[] = {
{"notifyAutoFocusComplete", "(IZ)V", (void *)notifyAutoFocusComplete},
{"notifyPictureExposed", "(I)V", (void *)notifyPictureExposed},
- {"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured}
+ {"notifyPictureCaptured", "(I[B)V", (void *)notifyPictureCaptured},
+ {"notifyPreviewFrame", "(I[B)V", (void *)notifyPreviewFrame}
};
bool JCamera::initJNI(JNIEnv *env)
diff --git a/src/plugins/android/src/wrappers/jcamera.h b/src/plugins/android/src/wrappers/jcamera.h
index 0aea81f38..464ca3cb2 100644
--- a/src/plugins/android/src/wrappers/jcamera.h
+++ b/src/plugins/android/src/wrappers/jcamera.h
@@ -58,6 +58,16 @@ public:
CameraFacingFront = 1
};
+ enum ImageFormat { // same values as in android.graphics.ImageFormat Java class
+ Unknown = 0,
+ RGB565 = 4,
+ NV16 = 16,
+ NV21 = 17,
+ YUY2 = 20,
+ JPEG = 256,
+ YV12 = 842094169
+ };
+
~JCamera();
static JCamera *open(int cameraId);
@@ -75,6 +85,9 @@ public:
QSize getPreferredPreviewSizeForVideo();
QList<QSize> getSupportedPreviewSizes();
+ ImageFormat getPreviewFormat();
+ void setPreviewFormat(ImageFormat fmt);
+
QSize previewSize() const { return m_previewSize; }
void setPreviewSize(const QSize &size);
void setPreviewTexture(jobject surfaceTexture);
@@ -131,6 +144,8 @@ public:
void startPreview();
void stopPreview();
+ void requestPreviewFrame();
+
void takePicture();
static bool initJNI(JNIEnv *env);
@@ -143,6 +158,8 @@ Q_SIGNALS:
void whiteBalanceChanged();
+ void previewFrameAvailable(const QByteArray &data);
+
void pictureExposed();
void pictureCaptured(const QByteArray &data);
diff --git a/src/plugins/android/src/wrappers/jsurfacetexture.h b/src/plugins/android/src/wrappers/jsurfacetexture.h
index d65fc01e9..ea53b68ba 100644
--- a/src/plugins/android/src/wrappers/jsurfacetexture.h
+++ b/src/plugins/android/src/wrappers/jsurfacetexture.h
@@ -56,6 +56,7 @@ public:
explicit JSurfaceTexture(unsigned int texName);
~JSurfaceTexture();
+ int textureID() const { return m_texID; }
QMatrix4x4 getTransformMatrix();
void updateTexImage();