Import goldfish HAL sources from devices/generic/goldfish
We import all code here so we can customize and keep in sync with what we do on the host side more easily.
This commit is contained in:
parent
ddea17253c
commit
23392b9732
196 changed files with 62159 additions and 0 deletions
11
Android.mk
11
Android.mk
|
|
@ -85,3 +85,14 @@ include $(BUILD_SHARED_LIBRARY)
|
|||
# Remember this makefile's directory: the included sub-makefiles each
# reset LOCAL_PATH, so resolve them through a saved copy instead.
TMP_PATH := $(LOCAL_PATH)

include $(TMP_PATH)/android/appmgr/Android.mk
include $(TMP_PATH)/android/fingerprint/Android.mk
include $(TMP_PATH)/android/power/Android.mk
include $(TMP_PATH)/android/qemu-props/Android.mk
include $(TMP_PATH)/android/qemud/Android.mk
include $(TMP_PATH)/android/audio/Android.mk
include $(TMP_PATH)/android/sensors/Android.mk
include $(TMP_PATH)/android/opengl/Android.mk
include $(TMP_PATH)/android/gps/Android.mk
include $(TMP_PATH)/android/lights/Android.mk
include $(TMP_PATH)/android/camera/Android.mk
include $(TMP_PATH)/android/vibrator/Android.mk
|
||||
|
|
|
|||
31
android/audio/Android.mk
Normal file
31
android/audio/Android.mk
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
#
# Copyright (C) 2011 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

# Goldfish (emulator) primary audio HAL, installed under hw/ as
# audio.primary.goldfish.so.
LOCAL_MODULE := audio.primary.goldfish
LOCAL_MODULE_RELATIVE_PATH := hw
LOCAL_MODULE_TAGS := optional

LOCAL_SRC_FILES := audio_hw.c

# Same library set the original built in two steps (:= then += libdl).
LOCAL_SHARED_LIBRARIES := libcutils liblog libdl

# The HAL callbacks take many parameters they deliberately ignore.
LOCAL_CFLAGS := -Wno-unused-parameter

include $(BUILD_SHARED_LIBRARY)
|
||||
0
android/audio/MODULE_LICENSE_APACHE2
Normal file
0
android/audio/MODULE_LICENSE_APACHE2
Normal file
190
android/audio/NOTICE
Normal file
190
android/audio/NOTICE
Normal file
|
|
@ -0,0 +1,190 @@
|
|||
|
||||
Copyright (c) 2008-2009, The Android Open Source Project
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
694
android/audio/audio_hw.c
Normal file
694
android/audio/audio_hw.c
Normal file
|
|
@ -0,0 +1,694 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#define LOG_TAG "audio_hw_generic"
|
||||
/*#define LOG_NDEBUG 0*/
|
||||
|
||||
#include <errno.h>
|
||||
#include <pthread.h>
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
#include <sys/time.h>
|
||||
#include <fcntl.h>
|
||||
|
||||
#include <cutils/log.h>
|
||||
#include <cutils/str_parms.h>
|
||||
|
||||
#include <hardware/hardware.h>
|
||||
#include <system/audio.h>
|
||||
#include <hardware/audio.h>
|
||||
|
||||
|
||||
#define AUDIO_DEVICE_NAME "/dev/eac"
|
||||
#define OUT_SAMPLING_RATE 44100
|
||||
#define OUT_BUFFER_SIZE 4096
|
||||
#define OUT_LATENCY_MS 20
|
||||
#define IN_SAMPLING_RATE 8000
|
||||
#define IN_BUFFER_SIZE 320
|
||||
|
||||
|
||||
struct generic_audio_device {
|
||||
struct audio_hw_device device;
|
||||
pthread_mutex_t lock;
|
||||
struct audio_stream_out *output;
|
||||
struct audio_stream_in *input;
|
||||
int fd;
|
||||
bool mic_mute;
|
||||
};
|
||||
|
||||
|
||||
struct generic_stream_out {
|
||||
struct audio_stream_out stream;
|
||||
struct generic_audio_device *dev;
|
||||
audio_devices_t device;
|
||||
};
|
||||
|
||||
struct generic_stream_in {
|
||||
struct audio_stream_in stream;
|
||||
struct generic_audio_device *dev;
|
||||
audio_devices_t device;
|
||||
};
|
||||
|
||||
|
||||
static uint32_t out_get_sample_rate(const struct audio_stream *stream)
|
||||
{
|
||||
return OUT_SAMPLING_RATE;
|
||||
}
|
||||
|
||||
static int out_set_sample_rate(struct audio_stream *stream, uint32_t rate)
|
||||
{
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
static size_t out_get_buffer_size(const struct audio_stream *stream)
|
||||
{
|
||||
return OUT_BUFFER_SIZE;
|
||||
}
|
||||
|
||||
static audio_channel_mask_t out_get_channels(const struct audio_stream *stream)
|
||||
{
|
||||
return AUDIO_CHANNEL_OUT_STEREO;
|
||||
}
|
||||
|
||||
static audio_format_t out_get_format(const struct audio_stream *stream)
|
||||
{
|
||||
return AUDIO_FORMAT_PCM_16_BIT;
|
||||
}
|
||||
|
||||
static int out_set_format(struct audio_stream *stream, audio_format_t format)
|
||||
{
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
static int out_standby(struct audio_stream *stream)
|
||||
{
|
||||
// out_standby is a no op
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int out_dump(const struct audio_stream *stream, int fd)
|
||||
{
|
||||
struct generic_stream_out *out = (struct generic_stream_out *)stream;
|
||||
|
||||
dprintf(fd, "\tout_dump:\n"
|
||||
"\t\tsample rate: %u\n"
|
||||
"\t\tbuffer size: %u\n"
|
||||
"\t\tchannel mask: %08x\n"
|
||||
"\t\tformat: %d\n"
|
||||
"\t\tdevice: %08x\n"
|
||||
"\t\taudio dev: %p\n\n",
|
||||
out_get_sample_rate(stream),
|
||||
out_get_buffer_size(stream),
|
||||
out_get_channels(stream),
|
||||
out_get_format(stream),
|
||||
out->device,
|
||||
out->dev);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int out_set_parameters(struct audio_stream *stream, const char *kvpairs)
|
||||
{
|
||||
struct generic_stream_out *out = (struct generic_stream_out *)stream;
|
||||
struct str_parms *parms;
|
||||
char value[32];
|
||||
int ret;
|
||||
long val;
|
||||
char *end;
|
||||
|
||||
parms = str_parms_create_str(kvpairs);
|
||||
|
||||
ret = str_parms_get_str(parms, AUDIO_PARAMETER_STREAM_ROUTING,
|
||||
value, sizeof(value));
|
||||
if (ret >= 0) {
|
||||
errno = 0;
|
||||
val = strtol(value, &end, 10);
|
||||
if (errno == 0 && (end != NULL) && (*end == '\0') && ((int)val == val)) {
|
||||
out->device = (int)val;
|
||||
} else {
|
||||
ret = -EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
str_parms_destroy(parms);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static char * out_get_parameters(const struct audio_stream *stream, const char *keys)
|
||||
{
|
||||
struct generic_stream_out *out = (struct generic_stream_out *)stream;
|
||||
struct str_parms *query = str_parms_create_str(keys);
|
||||
char *str;
|
||||
char value[256];
|
||||
struct str_parms *reply = str_parms_create();
|
||||
int ret;
|
||||
|
||||
ret = str_parms_get_str(query, AUDIO_PARAMETER_STREAM_ROUTING, value, sizeof(value));
|
||||
if (ret >= 0) {
|
||||
str_parms_add_int(reply, AUDIO_PARAMETER_STREAM_ROUTING, out->device);
|
||||
str = strdup(str_parms_to_str(reply));
|
||||
} else {
|
||||
str = strdup(keys);
|
||||
}
|
||||
|
||||
str_parms_destroy(query);
|
||||
str_parms_destroy(reply);
|
||||
return str;
|
||||
}
|
||||
|
||||
static uint32_t out_get_latency(const struct audio_stream_out *stream)
|
||||
{
|
||||
return OUT_LATENCY_MS;
|
||||
}
|
||||
|
||||
static int out_set_volume(struct audio_stream_out *stream, float left,
|
||||
float right)
|
||||
{
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
static ssize_t out_write(struct audio_stream_out *stream, const void* buffer,
|
||||
size_t bytes)
|
||||
{
|
||||
struct generic_stream_out *out = (struct generic_stream_out *)stream;
|
||||
struct generic_audio_device *adev = out->dev;
|
||||
|
||||
pthread_mutex_lock(&adev->lock);
|
||||
if (adev->fd >= 0)
|
||||
bytes = write(adev->fd, buffer, bytes);
|
||||
pthread_mutex_unlock(&adev->lock);
|
||||
|
||||
return bytes;
|
||||
}
|
||||
|
||||
static int out_get_render_position(const struct audio_stream_out *stream,
|
||||
uint32_t *dsp_frames)
|
||||
{
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
static int out_add_audio_effect(const struct audio_stream *stream, effect_handle_t effect)
|
||||
{
|
||||
// out_add_audio_effect is a no op
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int out_remove_audio_effect(const struct audio_stream *stream, effect_handle_t effect)
|
||||
{
|
||||
// out_remove_audio_effect is a no op
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int out_get_next_write_timestamp(const struct audio_stream_out *stream,
|
||||
int64_t *timestamp)
|
||||
{
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
/** audio_stream_in implementation **/
|
||||
static uint32_t in_get_sample_rate(const struct audio_stream *stream)
|
||||
{
|
||||
return IN_SAMPLING_RATE;
|
||||
}
|
||||
|
||||
static int in_set_sample_rate(struct audio_stream *stream, uint32_t rate)
|
||||
{
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
static size_t in_get_buffer_size(const struct audio_stream *stream)
|
||||
{
|
||||
return IN_BUFFER_SIZE;
|
||||
}
|
||||
|
||||
static audio_channel_mask_t in_get_channels(const struct audio_stream *stream)
|
||||
{
|
||||
return AUDIO_CHANNEL_IN_MONO;
|
||||
}
|
||||
|
||||
static audio_format_t in_get_format(const struct audio_stream *stream)
|
||||
{
|
||||
return AUDIO_FORMAT_PCM_16_BIT;
|
||||
}
|
||||
|
||||
static int in_set_format(struct audio_stream *stream, audio_format_t format)
|
||||
{
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
static int in_standby(struct audio_stream *stream)
|
||||
{
|
||||
// in_standby is a no op
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int in_dump(const struct audio_stream *stream, int fd)
|
||||
{
|
||||
struct generic_stream_in *in = (struct generic_stream_in *)stream;
|
||||
|
||||
dprintf(fd, "\tin_dump:\n"
|
||||
"\t\tsample rate: %u\n"
|
||||
"\t\tbuffer size: %u\n"
|
||||
"\t\tchannel mask: %08x\n"
|
||||
"\t\tformat: %d\n"
|
||||
"\t\tdevice: %08x\n"
|
||||
"\t\taudio dev: %p\n\n",
|
||||
in_get_sample_rate(stream),
|
||||
in_get_buffer_size(stream),
|
||||
in_get_channels(stream),
|
||||
in_get_format(stream),
|
||||
in->device,
|
||||
in->dev);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int in_set_parameters(struct audio_stream *stream, const char *kvpairs)
|
||||
{
|
||||
struct generic_stream_in *in = (struct generic_stream_in *)stream;
|
||||
struct str_parms *parms;
|
||||
char value[32];
|
||||
int ret;
|
||||
long val;
|
||||
char *end;
|
||||
|
||||
parms = str_parms_create_str(kvpairs);
|
||||
|
||||
ret = str_parms_get_str(parms, AUDIO_PARAMETER_STREAM_ROUTING,
|
||||
value, sizeof(value));
|
||||
if (ret >= 0) {
|
||||
errno = 0;
|
||||
val = strtol(value, &end, 10);
|
||||
if ((errno == 0) && (end != NULL) && (*end == '\0') && ((int)val == val)) {
|
||||
in->device = (int)val;
|
||||
} else {
|
||||
ret = -EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
str_parms_destroy(parms);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static char * in_get_parameters(const struct audio_stream *stream,
|
||||
const char *keys)
|
||||
{
|
||||
struct generic_stream_in *in = (struct generic_stream_in *)stream;
|
||||
struct str_parms *query = str_parms_create_str(keys);
|
||||
char *str;
|
||||
char value[256];
|
||||
struct str_parms *reply = str_parms_create();
|
||||
int ret;
|
||||
|
||||
ret = str_parms_get_str(query, AUDIO_PARAMETER_STREAM_ROUTING, value, sizeof(value));
|
||||
if (ret >= 0) {
|
||||
str_parms_add_int(reply, AUDIO_PARAMETER_STREAM_ROUTING, in->device);
|
||||
str = strdup(str_parms_to_str(reply));
|
||||
} else {
|
||||
str = strdup(keys);
|
||||
}
|
||||
|
||||
str_parms_destroy(query);
|
||||
str_parms_destroy(reply);
|
||||
return str;
|
||||
}
|
||||
|
||||
static int in_set_gain(struct audio_stream_in *stream, float gain)
|
||||
{
|
||||
// in_set_gain is a no op
|
||||
return 0;
|
||||
}
|
||||
|
||||
static ssize_t in_read(struct audio_stream_in *stream, void* buffer,
|
||||
size_t bytes)
|
||||
{
|
||||
struct generic_stream_in *in = (struct generic_stream_in *)stream;
|
||||
struct generic_audio_device *adev = in->dev;
|
||||
|
||||
pthread_mutex_lock(&adev->lock);
|
||||
if (adev->fd >= 0)
|
||||
bytes = read(adev->fd, buffer, bytes);
|
||||
if (adev->mic_mute && (bytes > 0)) {
|
||||
memset(buffer, 0, bytes);
|
||||
}
|
||||
pthread_mutex_unlock(&adev->lock);
|
||||
|
||||
return bytes;
|
||||
}
|
||||
|
||||
static uint32_t in_get_input_frames_lost(struct audio_stream_in *stream)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int in_add_audio_effect(const struct audio_stream *stream, effect_handle_t effect)
|
||||
{
|
||||
// in_add_audio_effect is a no op
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int in_remove_audio_effect(const struct audio_stream *stream, effect_handle_t effect)
|
||||
{
|
||||
// in_add_audio_effect is a no op
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int adev_open_output_stream(struct audio_hw_device *dev,
|
||||
audio_io_handle_t handle,
|
||||
audio_devices_t devices,
|
||||
audio_output_flags_t flags,
|
||||
struct audio_config *config,
|
||||
struct audio_stream_out **stream_out,
|
||||
const char *address __unused)
|
||||
{
|
||||
struct generic_audio_device *adev = (struct generic_audio_device *)dev;
|
||||
struct generic_stream_out *out;
|
||||
int ret = 0;
|
||||
|
||||
pthread_mutex_lock(&adev->lock);
|
||||
if (adev->output != NULL) {
|
||||
ret = -ENOSYS;
|
||||
goto error;
|
||||
}
|
||||
|
||||
if ((config->format != AUDIO_FORMAT_PCM_16_BIT) ||
|
||||
(config->channel_mask != AUDIO_CHANNEL_OUT_STEREO) ||
|
||||
(config->sample_rate != OUT_SAMPLING_RATE)) {
|
||||
ALOGE("Error opening output stream format %d, channel_mask %04x, sample_rate %u",
|
||||
config->format, config->channel_mask, config->sample_rate);
|
||||
config->format = AUDIO_FORMAT_PCM_16_BIT;
|
||||
config->channel_mask = AUDIO_CHANNEL_OUT_STEREO;
|
||||
config->sample_rate = OUT_SAMPLING_RATE;
|
||||
ret = -EINVAL;
|
||||
goto error;
|
||||
}
|
||||
|
||||
out = (struct generic_stream_out *)calloc(1, sizeof(struct generic_stream_out));
|
||||
|
||||
out->stream.common.get_sample_rate = out_get_sample_rate;
|
||||
out->stream.common.set_sample_rate = out_set_sample_rate;
|
||||
out->stream.common.get_buffer_size = out_get_buffer_size;
|
||||
out->stream.common.get_channels = out_get_channels;
|
||||
out->stream.common.get_format = out_get_format;
|
||||
out->stream.common.set_format = out_set_format;
|
||||
out->stream.common.standby = out_standby;
|
||||
out->stream.common.dump = out_dump;
|
||||
out->stream.common.set_parameters = out_set_parameters;
|
||||
out->stream.common.get_parameters = out_get_parameters;
|
||||
out->stream.common.add_audio_effect = out_add_audio_effect;
|
||||
out->stream.common.remove_audio_effect = out_remove_audio_effect;
|
||||
out->stream.get_latency = out_get_latency;
|
||||
out->stream.set_volume = out_set_volume;
|
||||
out->stream.write = out_write;
|
||||
out->stream.get_render_position = out_get_render_position;
|
||||
out->stream.get_next_write_timestamp = out_get_next_write_timestamp;
|
||||
|
||||
out->dev = adev;
|
||||
out->device = devices;
|
||||
adev->output = (struct audio_stream_out *)out;
|
||||
*stream_out = &out->stream;
|
||||
|
||||
error:
|
||||
pthread_mutex_unlock(&adev->lock);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
static void adev_close_output_stream(struct audio_hw_device *dev,
|
||||
struct audio_stream_out *stream)
|
||||
{
|
||||
struct generic_audio_device *adev = (struct generic_audio_device *)dev;
|
||||
|
||||
pthread_mutex_lock(&adev->lock);
|
||||
if (stream == adev->output) {
|
||||
free(stream);
|
||||
adev->output = NULL;
|
||||
}
|
||||
pthread_mutex_unlock(&adev->lock);
|
||||
}
|
||||
|
||||
static int adev_set_parameters(struct audio_hw_device *dev, const char *kvpairs)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
static char * adev_get_parameters(const struct audio_hw_device *dev,
|
||||
const char *keys)
|
||||
{
|
||||
return strdup("");
|
||||
}
|
||||
|
||||
static int adev_init_check(const struct audio_hw_device *dev)
|
||||
{
|
||||
struct generic_audio_device *adev = (struct generic_audio_device *)dev;
|
||||
|
||||
if (adev->fd >= 0)
|
||||
return 0;
|
||||
|
||||
return -ENODEV;
|
||||
}
|
||||
|
||||
static int adev_set_voice_volume(struct audio_hw_device *dev, float volume)
|
||||
{
|
||||
// adev_set_voice_volume is a no op (simulates phones)
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int adev_set_master_volume(struct audio_hw_device *dev, float volume)
|
||||
{
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
static int adev_get_master_volume(struct audio_hw_device *dev, float *volume)
|
||||
{
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
static int adev_set_master_mute(struct audio_hw_device *dev, bool muted)
|
||||
{
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
static int adev_get_master_mute(struct audio_hw_device *dev, bool *muted)
|
||||
{
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
static int adev_set_mode(struct audio_hw_device *dev, audio_mode_t mode)
|
||||
{
|
||||
// adev_set_mode is a no op (simulates phones)
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int adev_set_mic_mute(struct audio_hw_device *dev, bool state)
|
||||
{
|
||||
struct generic_audio_device *adev = (struct generic_audio_device *)dev;
|
||||
|
||||
pthread_mutex_lock(&adev->lock);
|
||||
adev->mic_mute = state;
|
||||
pthread_mutex_unlock(&adev->lock);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int adev_get_mic_mute(const struct audio_hw_device *dev, bool *state)
|
||||
{
|
||||
struct generic_audio_device *adev = (struct generic_audio_device *)dev;
|
||||
|
||||
pthread_mutex_lock(&adev->lock);
|
||||
*state = adev->mic_mute;
|
||||
pthread_mutex_unlock(&adev->lock);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static size_t adev_get_input_buffer_size(const struct audio_hw_device *dev,
|
||||
const struct audio_config *config)
|
||||
{
|
||||
return IN_BUFFER_SIZE;
|
||||
}
|
||||
|
||||
static int adev_open_input_stream(struct audio_hw_device *dev,
|
||||
audio_io_handle_t handle,
|
||||
audio_devices_t devices,
|
||||
struct audio_config *config,
|
||||
struct audio_stream_in **stream_in,
|
||||
audio_input_flags_t flags __unused,
|
||||
const char *address __unused,
|
||||
audio_source_t source __unused)
|
||||
{
|
||||
struct generic_audio_device *adev = (struct generic_audio_device *)dev;
|
||||
struct generic_stream_in *in;
|
||||
int ret = 0;
|
||||
|
||||
pthread_mutex_lock(&adev->lock);
|
||||
if (adev->input != NULL) {
|
||||
ret = -ENOSYS;
|
||||
goto error;
|
||||
}
|
||||
|
||||
if ((config->format != AUDIO_FORMAT_PCM_16_BIT) ||
|
||||
(config->channel_mask != AUDIO_CHANNEL_IN_MONO) ||
|
||||
(config->sample_rate != IN_SAMPLING_RATE)) {
|
||||
ALOGE("Error opening input stream format %d, channel_mask %04x, sample_rate %u",
|
||||
config->format, config->channel_mask, config->sample_rate);
|
||||
config->format = AUDIO_FORMAT_PCM_16_BIT;
|
||||
config->channel_mask = AUDIO_CHANNEL_IN_MONO;
|
||||
config->sample_rate = IN_SAMPLING_RATE;
|
||||
ret = -EINVAL;
|
||||
goto error;
|
||||
}
|
||||
|
||||
in = (struct generic_stream_in *)calloc(1, sizeof(struct generic_stream_in));
|
||||
|
||||
in->stream.common.get_sample_rate = in_get_sample_rate;
|
||||
in->stream.common.set_sample_rate = in_set_sample_rate;
|
||||
in->stream.common.get_buffer_size = in_get_buffer_size;
|
||||
in->stream.common.get_channels = in_get_channels;
|
||||
in->stream.common.get_format = in_get_format;
|
||||
in->stream.common.set_format = in_set_format;
|
||||
in->stream.common.standby = in_standby;
|
||||
in->stream.common.dump = in_dump;
|
||||
in->stream.common.set_parameters = in_set_parameters;
|
||||
in->stream.common.get_parameters = in_get_parameters;
|
||||
in->stream.common.add_audio_effect = in_add_audio_effect;
|
||||
in->stream.common.remove_audio_effect = in_remove_audio_effect;
|
||||
in->stream.set_gain = in_set_gain;
|
||||
in->stream.read = in_read;
|
||||
in->stream.get_input_frames_lost = in_get_input_frames_lost;
|
||||
|
||||
in->dev = adev;
|
||||
in->device = devices;
|
||||
adev->input = (struct audio_stream_in *)in;
|
||||
*stream_in = &in->stream;
|
||||
|
||||
error:
|
||||
pthread_mutex_unlock(&adev->lock);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
/* HAL hook: releases an input stream previously returned by
 * adev_open_input_stream. Frees the stream only if it is the one the device
 * currently tracks (adev->input); any other pointer is ignored, matching the
 * single-input-stream policy enforced at open time. */
static void adev_close_input_stream(struct audio_hw_device *dev,
                                   struct audio_stream_in *stream)
{
    struct generic_audio_device *adev = (struct generic_audio_device *)dev;

    pthread_mutex_lock(&adev->lock);
    /* Compare against the registered stream so a stale or double-close call
     * cannot free an arbitrary pointer; clear the slot for the next open. */
    if (stream == adev->input) {
        free(stream);
        adev->input = NULL;
    }
    pthread_mutex_unlock(&adev->lock);
}
|
||||
|
||||
static int adev_dump(const audio_hw_device_t *dev, int fd)
|
||||
{
|
||||
struct generic_audio_device *adev = (struct generic_audio_device *)dev;
|
||||
|
||||
const size_t SIZE = 256;
|
||||
char buffer[SIZE];
|
||||
|
||||
dprintf(fd, "\nadev_dump:\n"
|
||||
"\tfd: %d\n"
|
||||
"\tmic_mute: %s\n"
|
||||
"\toutput: %p\n"
|
||||
"\tinput: %p\n\n",
|
||||
adev->fd,
|
||||
adev->mic_mute ? "true": "false",
|
||||
adev->output,
|
||||
adev->input);
|
||||
|
||||
if (adev->output != NULL)
|
||||
out_dump((const struct audio_stream *)adev->output, fd);
|
||||
if (adev->input != NULL)
|
||||
in_dump((const struct audio_stream *)adev->input, fd);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* HAL hook: destroys the audio device. Closes both streams first (the close
 * helpers tolerate NULL / unregistered streams), then the QEMU device fd,
 * and finally frees the device struct itself. Order matters: the stream
 * close helpers dereference adev, so free(dev) must come last. */
static int adev_close(hw_device_t *dev)
{
    struct generic_audio_device *adev = (struct generic_audio_device *)dev;

    adev_close_output_stream((struct audio_hw_device *)dev, adev->output);
    adev_close_input_stream((struct audio_hw_device *)dev, adev->input);

    /* fd may legitimately be -1 if open() semantics change; guard the close. */
    if (adev->fd >= 0)
        close(adev->fd);

    free(dev);
    return 0;
}
|
||||
|
||||
static int adev_open(const hw_module_t* module, const char* name,
|
||||
hw_device_t** device)
|
||||
{
|
||||
struct generic_audio_device *adev;
|
||||
int fd;
|
||||
|
||||
if (strcmp(name, AUDIO_HARDWARE_INTERFACE) != 0)
|
||||
return -EINVAL;
|
||||
|
||||
fd = open(AUDIO_DEVICE_NAME, O_RDWR);
|
||||
if (fd < 0)
|
||||
return -ENOSYS;
|
||||
|
||||
adev = calloc(1, sizeof(struct generic_audio_device));
|
||||
|
||||
adev->fd = fd;
|
||||
|
||||
adev->device.common.tag = HARDWARE_DEVICE_TAG;
|
||||
adev->device.common.version = AUDIO_DEVICE_API_VERSION_2_0;
|
||||
adev->device.common.module = (struct hw_module_t *) module;
|
||||
adev->device.common.close = adev_close;
|
||||
|
||||
adev->device.init_check = adev_init_check;
|
||||
adev->device.set_voice_volume = adev_set_voice_volume;
|
||||
adev->device.set_master_volume = adev_set_master_volume;
|
||||
adev->device.get_master_volume = adev_get_master_volume;
|
||||
adev->device.set_master_mute = adev_set_master_mute;
|
||||
adev->device.get_master_mute = adev_get_master_mute;
|
||||
adev->device.set_mode = adev_set_mode;
|
||||
adev->device.set_mic_mute = adev_set_mic_mute;
|
||||
adev->device.get_mic_mute = adev_get_mic_mute;
|
||||
adev->device.set_parameters = adev_set_parameters;
|
||||
adev->device.get_parameters = adev_get_parameters;
|
||||
adev->device.get_input_buffer_size = adev_get_input_buffer_size;
|
||||
adev->device.open_output_stream = adev_open_output_stream;
|
||||
adev->device.close_output_stream = adev_close_output_stream;
|
||||
adev->device.open_input_stream = adev_open_input_stream;
|
||||
adev->device.close_input_stream = adev_close_input_stream;
|
||||
adev->device.dump = adev_dump;
|
||||
|
||||
*device = &adev->device.common;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Dispatch table the Android HAL loader uses to instantiate devices from
 * this module; adev_open is the single factory entry point. */
static struct hw_module_methods_t hal_module_methods = {
    .open = adev_open,
};
|
||||
|
||||
/* Module descriptor exported under the well-known HAL_MODULE_INFO_SYM symbol;
 * this is how the framework discovers the goldfish audio HAL at runtime. */
struct audio_module HAL_MODULE_INFO_SYM = {
    .common = {
        .tag = HARDWARE_MODULE_TAG,
        .module_api_version = AUDIO_MODULE_API_VERSION_0_1,
        .hal_api_version = HARDWARE_HAL_API_VERSION,
        .id = AUDIO_HARDWARE_MODULE_ID,
        .name = "Generic audio HW HAL",
        .author = "The Android Open Source Project",
        .methods = &hal_module_methods,
    },
};
|
||||
152
android/camera/Android.mk
Normal file
152
android/camera/Android.mk
Normal file
|
|
@ -0,0 +1,152 @@
|
|||
# Copyright (C) 2011 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

# Emulator camera module########################################################

emulator_camera_module_relative_path := hw
emulator_camera_cflags := -fno-short-enums -DQEMU_HARDWARE
emulator_camera_cflags += -Wno-unused-parameter -Wno-missing-field-initializers
emulator_camera_clang_flags := -Wno-c++11-narrowing
emulator_camera_shared_libraries := \
    libbinder \
    liblog \
    libutils \
    libcutils \
    libcamera_client \
    libui \
    libdl \
    libjpeg \
    libcamera_metadata

emulator_camera_c_includes := external/jpeg \
    frameworks/native/include/media/hardware \
    $(LOCAL_PATH)/../opengl/system/OpenglSystemCommon \
    $(call include-path-for, camera)

emulator_camera_src := \
    EmulatedCameraHal.cpp \
    EmulatedCameraFactory.cpp \
    EmulatedCameraHotplugThread.cpp \
    EmulatedBaseCamera.cpp \
    EmulatedCamera.cpp \
    EmulatedCameraDevice.cpp \
    EmulatedQemuCamera.cpp \
    EmulatedQemuCameraDevice.cpp \
    EmulatedFakeCamera.cpp \
    EmulatedFakeCameraDevice.cpp \
    Converters.cpp \
    PreviewWindow.cpp \
    CallbackNotifier.cpp \
    QemuClient.cpp \
    JpegCompressor.cpp \
    EmulatedCamera2.cpp \
    EmulatedFakeCamera2.cpp \
    EmulatedQemuCamera2.cpp \
    fake-pipeline2/Scene.cpp \
    fake-pipeline2/Sensor.cpp \
    fake-pipeline2/JpegCompressor.cpp \
    EmulatedCamera3.cpp \
    EmulatedFakeCamera3.cpp

# Emulated camera - goldfish / vbox_x86 build###################################

LOCAL_MODULE_RELATIVE_PATH := ${emulator_camera_module_relative_path}
LOCAL_CFLAGS := ${emulator_camera_cflags}
LOCAL_CLANG_CFLAGS += ${emulator_camera_clang_flags}

LOCAL_SHARED_LIBRARIES := ${emulator_camera_shared_libraries}
LOCAL_C_INCLUDES += ${emulator_camera_c_includes}
LOCAL_SRC_FILES := ${emulator_camera_src}

ifeq ($(TARGET_PRODUCT),vbox_x86)
LOCAL_MODULE := camera.vbox_x86
else
LOCAL_MODULE := camera.goldfish
endif

include $(BUILD_SHARED_LIBRARY)

# Emulator camera - ranchu build################################################

include ${CLEAR_VARS}

LOCAL_MODULE_RELATIVE_PATH := ${emulator_camera_module_relative_path}
LOCAL_CFLAGS := ${emulator_camera_cflags}
LOCAL_CLANG_CFLAGS += ${emulator_camera_clang_flags}

LOCAL_SHARED_LIBRARIES := ${emulator_camera_shared_libraries}
LOCAL_C_INCLUDES += ${emulator_camera_c_includes}
LOCAL_SRC_FILES := ${emulator_camera_src}

LOCAL_MODULE := camera.ranchu

include $(BUILD_SHARED_LIBRARY)

# JPEG stub#####################################################################

ifneq ($(TARGET_BUILD_PDK),true)

include $(CLEAR_VARS)

jpeg_module_relative_path := hw
jpeg_cflags := -fno-short-enums -DQEMU_HARDWARE
jpeg_cflags += -Wno-unused-parameter
jpeg_clang_flags += -Wno-c++11-narrowing
jpeg_shared_libraries := \
    libcutils \
    liblog \
    libskia \
    libandroid_runtime
jpeg_c_includes := external/libjpeg-turbo \
    external/skia/include/core/ \
    frameworks/base/core/jni/android/graphics \
    frameworks/native/include
jpeg_src := JpegStub.cpp

# JPEG stub - goldfish build####################################################

LOCAL_MODULE_RELATIVE_PATH := ${jpeg_module_relative_path}
LOCAL_CFLAGS += ${jpeg_cflags}
# FIX: the variable is declared as jpeg_clang_flags above; the original
# referenced ${jpeg_clangflags}, which is undefined and expanded to nothing,
# silently dropping -Wno-c++11-narrowing from the clang build.
LOCAL_CLANG_CFLAGS += ${jpeg_clang_flags}

LOCAL_SHARED_LIBRARIES := ${jpeg_shared_libraries}
LOCAL_C_INCLUDES += ${jpeg_c_includes}
LOCAL_SRC_FILES := ${jpeg_src}

LOCAL_MODULE := camera.goldfish.jpeg

include $(BUILD_SHARED_LIBRARY)

# JPEG stub - ranchu build######################################################

include ${CLEAR_VARS}

LOCAL_MODULE := camera.ranchu.jpeg

LOCAL_MODULE_RELATIVE_PATH := ${jpeg_module_relative_path}
LOCAL_CFLAGS += ${jpeg_cflags}
# Same fix as the goldfish JPEG stub build above.
LOCAL_CLANG_CFLAGS += ${jpeg_clang_flags}

LOCAL_SHARED_LIBRARIES := ${jpeg_shared_libraries}
LOCAL_C_INCLUDES += ${jpeg_c_includes}
LOCAL_SRC_FILES := ${jpeg_src}

include $(BUILD_SHARED_LIBRARY)

endif # !PDK
|
||||
300
android/camera/CallbackNotifier.cpp
Executable file
300
android/camera/CallbackNotifier.cpp
Executable file
|
|
@ -0,0 +1,300 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of a class CallbackNotifier that manages callbacks set
|
||||
* via set_callbacks, enable_msg_type, and disable_msg_type camera HAL API.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_CallbackNotifier"
|
||||
#include <cutils/log.h>
|
||||
#include <MetadataBufferType.h>
|
||||
#include "EmulatedCameraDevice.h"
|
||||
#include "CallbackNotifier.h"
|
||||
#include "JpegCompressor.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* String representation of camera messages, indexed by the bit position of
 * the corresponding CAMERA_MSG_* flag (bit 0 = CAMERA_MSG_ERROR, bit 1 =
 * CAMERA_MSG_SHUTTER, ...). Used only for verbose logging. */
static const char* lCameraMessages[] =
{
    "CAMERA_MSG_ERROR",
    "CAMERA_MSG_SHUTTER",
    "CAMERA_MSG_FOCUS",
    "CAMERA_MSG_ZOOM",
    "CAMERA_MSG_PREVIEW_FRAME",
    "CAMERA_MSG_VIDEO_FRAME",
    "CAMERA_MSG_POSTVIEW_FRAME",
    "CAMERA_MSG_RAW_IMAGE",
    "CAMERA_MSG_COMPRESSED_IMAGE",
    "CAMERA_MSG_RAW_IMAGE_NOTIFY",
    "CAMERA_MSG_PREVIEW_METADATA"
};
/* Number of entries in lCameraMessages. */
static const int lCameraMessagesNum = sizeof(lCameraMessages) / sizeof(char*);
|
||||
|
||||
/* Builds an array of strings for the given set of messages.
|
||||
* Param:
|
||||
* msg - Messages to get strings for,
|
||||
* strings - Array where to save strings
|
||||
* max - Maximum number of entries in the array.
|
||||
* Return:
|
||||
* Number of strings saved into the 'strings' array.
|
||||
*/
|
||||
static int GetMessageStrings(uint32_t msg, const char** strings, int max)
|
||||
{
|
||||
int index = 0;
|
||||
int out = 0;
|
||||
while (msg != 0 && out < max && index < lCameraMessagesNum) {
|
||||
while ((msg & 0x1) == 0 && index < lCameraMessagesNum) {
|
||||
msg >>= 1;
|
||||
index++;
|
||||
}
|
||||
if ((msg & 0x1) != 0 && index < lCameraMessagesNum) {
|
||||
strings[out] = lCameraMessages[index];
|
||||
out++;
|
||||
msg >>= 1;
|
||||
index++;
|
||||
}
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
/* Logs messages, enabled by the mask. */
|
||||
static void PrintMessages(uint32_t msg)
|
||||
{
|
||||
const char* strs[lCameraMessagesNum];
|
||||
const int translated = GetMessageStrings(msg, strs, lCameraMessagesNum);
|
||||
for (int n = 0; n < translated; n++) {
|
||||
ALOGV(" %s", strs[n]);
|
||||
}
|
||||
}
|
||||
|
||||
/* Constructs the notifier with every framework callback cleared and default
 * settings: JPEG quality 90, video recording and picture taking disabled. */
CallbackNotifier::CallbackNotifier()
    : mNotifyCB(NULL),
      mDataCB(NULL),
      mDataCBTimestamp(NULL),
      mGetMemoryCB(NULL),
      mCBOpaque(NULL),
      mLastFrameTimestamp(0),
      mFrameRefreshFreq(0),
      mMessageEnabler(0),
      mJpegQuality(90),
      mVideoRecEnabled(false),
      mTakingPicture(false)
{
}

/* Trivial destructor; owned camera_memory_t entries are released through
 * releaseRecordingFrame / cleanupCBNotifier, not here. */
CallbackNotifier::~CallbackNotifier()
{
}
|
||||
|
||||
/****************************************************************************
|
||||
* Camera API
|
||||
***************************************************************************/
|
||||
|
||||
/* Stores the framework-supplied callback set (handler for
 * camera_device_ops_t::set_callbacks). All five pointers are recorded
 * atomically under the object lock. */
void CallbackNotifier::setCallbacks(camera_notify_callback notify_cb,
                                    camera_data_callback data_cb,
                                    camera_data_timestamp_callback data_cb_timestamp,
                                    camera_request_memory get_memory,
                                    void* user)
{
    ALOGV("%s: %p, %p, %p, %p (%p)",
          __FUNCTION__, notify_cb, data_cb, data_cb_timestamp, get_memory, user);

    Mutex::Autolock locker(&mObjectLock);
    mCBOpaque = user;
    mGetMemoryCB = get_memory;
    mDataCBTimestamp = data_cb_timestamp;
    mDataCB = data_cb;
    mNotifyCB = notify_cb;
}
|
||||
|
||||
/* Adds the given CAMERA_MSG_* bits to the enabled-message mask (handler for
 * camera_device_ops_t::enable_msg_type). Logging of the individual message
 * names is verbose-only. */
void CallbackNotifier::enableMessage(uint msg_type)
{
    ALOGV("%s: msg_type = 0x%x", __FUNCTION__, msg_type);
    PrintMessages(msg_type);

    Mutex::Autolock locker(&mObjectLock);
    mMessageEnabler |= msg_type;
    ALOGV("**** Currently enabled messages:");
    PrintMessages(mMessageEnabler);
}
|
||||
|
||||
/* Removes the given CAMERA_MSG_* bits from the enabled-message mask (handler
 * for camera_device_ops_t::disable_msg_type). */
void CallbackNotifier::disableMessage(uint msg_type)
{
    ALOGV("%s: msg_type = 0x%x", __FUNCTION__, msg_type);
    PrintMessages(msg_type);

    Mutex::Autolock locker(&mObjectLock);
    mMessageEnabler &= ~msg_type;
    ALOGV("**** Currently enabled messages:");
    PrintMessages(mMessageEnabler);
}
|
||||
|
||||
/* Enables delivery of video frames through the data-timestamp callback
 * (handler for camera_device_ops_t::start_recording).
 * Param:
 *  fps - Desired video frame rate; used as a divisor to derive the minimum
 *        inter-frame interval, so it must be positive.
 * Return:
 *  NO_ERROR on success, or BAD_VALUE if fps is not positive.
 * Fix: the original divided by fps unchecked — fps == 0 was a division by
 * zero (undefined behavior), and a negative fps yielded a negative interval
 * that would pass every frame. */
status_t CallbackNotifier::enableVideoRecording(int fps)
{
    ALOGV("%s: FPS = %d", __FUNCTION__, fps);

    if (fps <= 0) {
        ALOGE("%s: Invalid FPS %d", __FUNCTION__, fps);
        return BAD_VALUE;
    }

    Mutex::Autolock locker(&mObjectLock);
    mVideoRecEnabled = true;
    mLastFrameTimestamp = 0;
    mFrameRefreshFreq = 1000000000LL / fps;

    return NO_ERROR;
}
|
||||
|
||||
/* Disables video frame delivery and resets the frame-pacing state (handler
 * for camera_device_ops_t::stop_recording). */
void CallbackNotifier::disableVideoRecording()
{
    ALOGV("%s:", __FUNCTION__);

    Mutex::Autolock locker(&mObjectLock);
    mVideoRecEnabled = false;
    mLastFrameTimestamp = 0;
    mFrameRefreshFreq = 0;
}
|
||||
|
||||
/* Releases the camera_memory_t backing a video frame previously handed to
 * the framework (handler for camera_device_ops_t::release_recording_frame).
 * 'opaque' is the data pointer of the buffer, as delivered by the
 * data-timestamp callback in onNextFrameAvailable.
 * NOTE(review): mCameraMemoryTs is mutated here and push_back'd from
 * onNextFrameAvailable without mObjectLock held in either place — looks like
 * a data race between the API thread and the capture thread; confirm. */
void CallbackNotifier::releaseRecordingFrame(const void* opaque)
{
    List<camera_memory_t*>::iterator it = mCameraMemoryTs.begin();
    for( ; it != mCameraMemoryTs.end(); ++it ) {
        if ( (*it)->data == opaque ) {
            (*it)->release( *it );
            mCameraMemoryTs.erase(it);
            break;
        }
    }
}
|
||||
|
||||
/* Handler for camera_device_ops_t::store_meta_data_in_buffers.
 * This emulated camera does not support metadata-in-buffers mode. */
status_t CallbackNotifier::storeMetaDataInBuffers(bool enable)
{
    // Return an error if metadata mode is requested, otherwise silently agree.
    return enable ? INVALID_OPERATION : NO_ERROR;
}
|
||||
|
||||
/****************************************************************************
|
||||
* Public API
|
||||
***************************************************************************/
|
||||
|
||||
void CallbackNotifier::cleanupCBNotifier()
|
||||
{
|
||||
Mutex::Autolock locker(&mObjectLock);
|
||||
mMessageEnabler = 0;
|
||||
mNotifyCB = NULL;
|
||||
mDataCB = NULL;
|
||||
mDataCBTimestamp = NULL;
|
||||
mGetMemoryCB = NULL;
|
||||
mCBOpaque = NULL;
|
||||
mLastFrameTimestamp = 0;
|
||||
mFrameRefreshFreq = 0;
|
||||
mJpegQuality = 90;
|
||||
mVideoRecEnabled = false;
|
||||
mTakingPicture = false;
|
||||
}
|
||||
|
||||
void CallbackNotifier::onNextFrameAvailable(const void* frame,
|
||||
nsecs_t timestamp,
|
||||
EmulatedCameraDevice* camera_dev)
|
||||
{
|
||||
if (isMessageEnabled(CAMERA_MSG_VIDEO_FRAME) && isVideoRecordingEnabled() &&
|
||||
isNewVideoFrameTime(timestamp)) {
|
||||
camera_memory_t* cam_buff =
|
||||
mGetMemoryCB(-1, camera_dev->getFrameBufferSize(), 1, NULL);
|
||||
if (NULL != cam_buff && NULL != cam_buff->data) {
|
||||
memcpy(cam_buff->data, frame, camera_dev->getFrameBufferSize());
|
||||
mDataCBTimestamp(timestamp, CAMERA_MSG_VIDEO_FRAME,
|
||||
cam_buff, 0, mCBOpaque);
|
||||
|
||||
mCameraMemoryTs.push_back( cam_buff );
|
||||
} else {
|
||||
ALOGE("%s: Memory failure in CAMERA_MSG_VIDEO_FRAME", __FUNCTION__);
|
||||
}
|
||||
}
|
||||
|
||||
if (isMessageEnabled(CAMERA_MSG_PREVIEW_FRAME)) {
|
||||
camera_memory_t* cam_buff =
|
||||
mGetMemoryCB(-1, camera_dev->getFrameBufferSize(), 1, NULL);
|
||||
if (NULL != cam_buff && NULL != cam_buff->data) {
|
||||
memcpy(cam_buff->data, frame, camera_dev->getFrameBufferSize());
|
||||
mDataCB(CAMERA_MSG_PREVIEW_FRAME, cam_buff, 0, NULL, mCBOpaque);
|
||||
cam_buff->release(cam_buff);
|
||||
} else {
|
||||
ALOGE("%s: Memory failure in CAMERA_MSG_PREVIEW_FRAME", __FUNCTION__);
|
||||
}
|
||||
}
|
||||
|
||||
if (mTakingPicture) {
|
||||
/* This happens just once. */
|
||||
mTakingPicture = false;
|
||||
/* The sequence of callbacks during picture taking is:
|
||||
* - CAMERA_MSG_SHUTTER
|
||||
* - CAMERA_MSG_RAW_IMAGE_NOTIFY
|
||||
* - CAMERA_MSG_COMPRESSED_IMAGE
|
||||
*/
|
||||
if (isMessageEnabled(CAMERA_MSG_SHUTTER)) {
|
||||
mNotifyCB(CAMERA_MSG_SHUTTER, 0, 0, mCBOpaque);
|
||||
}
|
||||
if (isMessageEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY)) {
|
||||
mNotifyCB(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCBOpaque);
|
||||
}
|
||||
if (isMessageEnabled(CAMERA_MSG_COMPRESSED_IMAGE)) {
|
||||
/* Compress the frame to JPEG. Note that when taking pictures, we
|
||||
* have requested camera device to provide us with NV21 frames. */
|
||||
NV21JpegCompressor compressor;
|
||||
status_t res =
|
||||
compressor.compressRawImage(frame, camera_dev->getFrameWidth(),
|
||||
camera_dev->getFrameHeight(),
|
||||
mJpegQuality);
|
||||
if (res == NO_ERROR) {
|
||||
camera_memory_t* jpeg_buff =
|
||||
mGetMemoryCB(-1, compressor.getCompressedSize(), 1, NULL);
|
||||
if (NULL != jpeg_buff && NULL != jpeg_buff->data) {
|
||||
compressor.getCompressedImage(jpeg_buff->data);
|
||||
mDataCB(CAMERA_MSG_COMPRESSED_IMAGE, jpeg_buff, 0, NULL, mCBOpaque);
|
||||
jpeg_buff->release(jpeg_buff);
|
||||
} else {
|
||||
ALOGE("%s: Memory failure in CAMERA_MSG_VIDEO_FRAME", __FUNCTION__);
|
||||
}
|
||||
} else {
|
||||
ALOGE("%s: Compression failure in CAMERA_MSG_VIDEO_FRAME", __FUNCTION__);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Forwards a CAMERA_ERROR_* code from the camera device to the framework,
 * but only when error messages are enabled and a notify callback is set.
 * (Note: unlike onNextFrameAvailable, this path does NULL-check the
 * callback before invoking it.) */
void CallbackNotifier::onCameraDeviceError(int err)
{
    if (isMessageEnabled(CAMERA_MSG_ERROR) && mNotifyCB != NULL) {
        mNotifyCB(CAMERA_MSG_ERROR, err, 0, mCBOpaque);
    }
}
|
||||
|
||||
/****************************************************************************
|
||||
* Private API
|
||||
***************************************************************************/
|
||||
|
||||
/* Decides whether enough time has elapsed since the last delivered video
 * frame to push a new one, and records the timestamp when it has.
 * Must not be called while mObjectLock is already held (it locks itself). */
bool CallbackNotifier::isNewVideoFrameTime(nsecs_t timestamp)
{
    Mutex::Autolock locker(&mObjectLock);
    if ((timestamp - mLastFrameTimestamp) < mFrameRefreshFreq) {
        /* Too soon: frame is dropped for video pacing purposes. */
        return false;
    }
    mLastFrameTimestamp = timestamp;
    return true;
}
|
||||
|
||||
}; /* namespace android */
|
||||
238
android/camera/CallbackNotifier.h
Executable file
238
android/camera/CallbackNotifier.h
Executable file
|
|
@ -0,0 +1,238 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_CALLBACK_NOTIFIER_H
|
||||
#define HW_EMULATOR_CAMERA_CALLBACK_NOTIFIER_H
|
||||
|
||||
/*
|
||||
* Contains declaration of a class CallbackNotifier that manages callbacks set
|
||||
* via set_callbacks, enable_msg_type, and disable_msg_type camera HAL API.
|
||||
*/
|
||||
|
||||
#include <utils/List.h>
|
||||
|
||||
namespace android {
|
||||
|
||||
class EmulatedCameraDevice;
|
||||
|
||||
/* Manages callbacks set via set_callbacks, enable_msg_type, and disable_msg_type
|
||||
* camera HAL API.
|
||||
*
|
||||
* Objects of this class are contained in EmulatedCamera objects, and handle
|
||||
* relevant camera API callbacks.
|
||||
* Locking considerations. Apparently, it's not allowed to call callbacks
|
||||
* registered in this class, while holding a lock: recursion is quite possible,
|
||||
* which will cause a deadlock.
|
||||
*/
|
||||
class CallbackNotifier {
public:
    /* Constructs CallbackNotifier instance. */
    CallbackNotifier();

    /* Destructs CallbackNotifier instance. */
    ~CallbackNotifier();

    /****************************************************************************
     * Camera API
     ***************************************************************************/

public:
    /* Actual handler for camera_device_ops_t::set_callbacks callback.
     * This method is called by the containing emulated camera object when it is
     * handling the camera_device_ops_t::set_callbacks callback.
     */
    void setCallbacks(camera_notify_callback notify_cb,
                      camera_data_callback data_cb,
                      camera_data_timestamp_callback data_cb_timestamp,
                      camera_request_memory get_memory,
                      void* user);

    /* Actual handler for camera_device_ops_t::enable_msg_type callback.
     * This method is called by the containing emulated camera object when it is
     * handling the camera_device_ops_t::enable_msg_type callback.
     */
    void enableMessage(uint msg_type);

    /* Actual handler for camera_device_ops_t::disable_msg_type callback.
     * This method is called by the containing emulated camera object when it is
     * handling the camera_device_ops_t::disable_msg_type callback.
     */
    void disableMessage(uint msg_type);

    /* Actual handler for camera_device_ops_t::store_meta_data_in_buffers
     * callback. This method is called by the containing emulated camera object
     * when it is handling the camera_device_ops_t::store_meta_data_in_buffers
     * callback.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    status_t storeMetaDataInBuffers(bool enable);

    /* Enables video recording.
     * This method is called by the containing emulated camera object when it is
     * handling the camera_device_ops_t::start_recording callback.
     * Param:
     *  fps - Video frame frequency. This parameter determines when a frame
     *      received via onNextFrameAvailable call will be pushed through the
     *      callback.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    status_t enableVideoRecording(int fps);

    /* Disables video recording.
     * This method is called by the containing emulated camera object when it is
     * handling the camera_device_ops_t::stop_recording callback.
     */
    void disableVideoRecording();

    /* Releases video frame, sent to the framework.
     * This method is called by the containing emulated camera object when it is
     * handling the camera_device_ops_t::release_recording_frame callback.
     */
    void releaseRecordingFrame(const void* opaque);

    /* Actual handler for camera_device_ops_t::msg_type_enabled callback.
     * This method is called by the containing emulated camera object when it is
     * handling the camera_device_ops_t::msg_type_enabled callback.
     * Note: this method doesn't grab a lock while checking message status, since
     * upon exit the status would be undefined anyway. So, grab a lock before
     * calling this method if you care about persisting a defined message status.
     * Return:
     *  0 if message is disabled, or non-zero value, if message is enabled.
     */
    inline int isMessageEnabled(uint msg_type)
    {
        return mMessageEnabler & msg_type;
    }

    /* Checks if video recording is enabled.
     * This method is called by the containing emulated camera object when it is
     * handling the camera_device_ops_t::recording_enabled callback.
     * Note: this method doesn't grab a lock while checking video recording
     * status, since upon exit the status would be undefined anyway. So, grab a
     * lock before calling this method if you care about persisting of a defined
     * video recording status.
     * Return:
     *  true if video recording is enabled, or false if it is disabled.
     */
    inline bool isVideoRecordingEnabled()
    {
        return mVideoRecEnabled;
    }

    /****************************************************************************
     * Public API
     ***************************************************************************/

public:
    /* Resets the callback notifier. */
    void cleanupCBNotifier();

    /* Next frame is available in the camera device.
     * This is a notification callback that is invoked by the camera device when
     * a new frame is available.
     * Note that most likely this method is called in context of a worker thread
     * that camera device has created for frame capturing.
     * Param:
     *  frame - Captured frame, or NULL if camera device didn't pull the frame
     *      yet. If NULL is passed in this parameter use GetCurrentFrame method
     *      of the camera device class to obtain the next frame. Also note that
     *      the size of the frame that is passed here (as well as the frame
     *      returned from the GetCurrentFrame method) is defined by the current
     *      frame settings (width + height + pixel format) for the camera device.
     *  timestamp - Frame's timestamp.
     *  camera_dev - Camera device instance that delivered the frame.
     */
    void onNextFrameAvailable(const void* frame,
                              nsecs_t timestamp,
                              EmulatedCameraDevice* camera_dev);

    /* Entry point for notifications that occur in camera device.
     * Param:
     *  err - CAMERA_ERROR_XXX error code.
     */
    void onCameraDeviceError(int err);

    /* Sets, or resets taking picture state.
     * This state controls whether or not to notify the framework about
     * compressed image, shutter, and other picture related events.
     */
    void setTakingPicture(bool taking)
    {
        mTakingPicture = taking;
    }

    /* Sets JPEG quality used to compress frame during picture taking. */
    void setJpegQuality(int jpeg_quality)
    {
        mJpegQuality = jpeg_quality;
    }

    /****************************************************************************
     * Private API
     ***************************************************************************/

protected:
    /* Checks if it's time to push new video frame.
     * Note that this method must be called while object is locked.
     * Param:
     *  timestamp - Timestamp for the new frame. */
    bool isNewVideoFrameTime(nsecs_t timestamp);

    /****************************************************************************
     * Data members
     ***************************************************************************/

protected:
    /* Locks this instance for data change. */
    Mutex                           mObjectLock;

    /*
     * Callbacks, registered in set_callbacks.
     */

    camera_notify_callback          mNotifyCB;
    camera_data_callback            mDataCB;
    camera_data_timestamp_callback  mDataCBTimestamp;
    camera_request_memory           mGetMemoryCB;
    void*                           mCBOpaque;

    /* video frame queue for the CameraHeapMemory destruction */
    List<camera_memory_t*>          mCameraMemoryTs;

    /* Timestamp when last frame has been delivered to the framework. */
    nsecs_t                         mLastFrameTimestamp;

    /* Video frequency in nanosec. */
    nsecs_t                         mFrameRefreshFreq;

    /* Message enabler. */
    uint32_t                        mMessageEnabler;

    /* JPEG quality used to compress frame during picture taking. */
    int                             mJpegQuality;

    /* Video recording status. */
    bool                            mVideoRecEnabled;

    /* Picture taking status. */
    bool                            mTakingPicture;
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_CALLBACK_NOTIFIER_H */
|
||||
173
android/camera/Converters.cpp
Executable file
173
android/camera/Converters.cpp
Executable file
|
|
@ -0,0 +1,173 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implemenation of framebuffer conversion routines.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_Converter"
|
||||
#include <cutils/log.h>
|
||||
#include "Converters.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Converts one YUV 4:2:0 image to RGB565.
 * Y points to the luma plane; U and V point to the chroma samples, advanced
 * by dUV positions per two pixels (dUV = 1 for fully planar layouts, 2 for
 * interleaved NV12/NV21 layouts).
 * Chroma rows are shared by vertical pixel pairs: after an even row the U/V
 * pointers are rewound to the saved row start so the following odd row reuses
 * the same chroma samples; after an odd row the advanced positions are saved.
 * Assumes width and height are even — TODO confirm with callers. */
static void _YUV420SToRGB565(const uint8_t* Y,
                             const uint8_t* U,
                             const uint8_t* V,
                             int dUV,
                             uint16_t* rgb,
                             int width,
                             int height)
{
    const uint8_t* U_pos = U;
    const uint8_t* V_pos = V;

    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x += 2, U += dUV, V += dUV) {
            /* Two horizontal pixels share one U/V sample pair. */
            const uint8_t nU = *U;
            const uint8_t nV = *V;
            *rgb = YUVToRGB565(*Y, nU, nV);
            Y++; rgb++;
            *rgb = YUVToRGB565(*Y, nU, nV);
            Y++; rgb++;
        }
        if (y & 0x1) {
            /* Odd row done: commit the advanced chroma positions. */
            U_pos = U;
            V_pos = V;
        } else {
            /* Even row done: rewind so the next row reuses this chroma row. */
            U = U_pos;
            V = V_pos;
        }
    }
}
|
||||
|
||||
/* Converts one YUV 4:2:0 image to RGB32. Identical traversal to
 * _YUV420SToRGB565 above (two pixels per U/V sample, chroma rows reused by
 * vertical pixel pairs via the save/rewind of U_pos/V_pos); only the output
 * pixel format differs. */
static void _YUV420SToRGB32(const uint8_t* Y,
                            const uint8_t* U,
                            const uint8_t* V,
                            int dUV,
                            uint32_t* rgb,
                            int width,
                            int height)
{
    const uint8_t* U_pos = U;
    const uint8_t* V_pos = V;

    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x += 2, U += dUV, V += dUV) {
            const uint8_t nU = *U;
            const uint8_t nV = *V;
            *rgb = YUVToRGB32(*Y, nU, nV);
            Y++; rgb++;
            *rgb = YUVToRGB32(*Y, nU, nV);
            Y++; rgb++;
        }
        if (y & 0x1) {
            /* Odd row done: commit the advanced chroma positions. */
            U_pos = U;
            V_pos = V;
        } else {
            /* Even row done: rewind so the next row reuses this chroma row. */
            U = U_pos;
            V = V_pos;
        }
    }
}
|
||||
|
||||
void YV12ToRGB565(const void* yv12, void* rgb, int width, int height)
|
||||
{
|
||||
const int pix_total = width * height;
|
||||
const uint8_t* Y = reinterpret_cast<const uint8_t*>(yv12);
|
||||
const uint8_t* U = Y + pix_total;
|
||||
const uint8_t* V = U + pix_total / 4;
|
||||
_YUV420SToRGB565(Y, U, V, 1, reinterpret_cast<uint16_t*>(rgb), width, height);
|
||||
}
|
||||
|
||||
void YV12ToRGB32(const void* yv12, void* rgb, int width, int height)
|
||||
{
|
||||
const int pix_total = width * height;
|
||||
const uint8_t* Y = reinterpret_cast<const uint8_t*>(yv12);
|
||||
const uint8_t* V = Y + pix_total;
|
||||
const uint8_t* U = V + pix_total / 4;
|
||||
_YUV420SToRGB32(Y, U, V, 1, reinterpret_cast<uint32_t*>(rgb), width, height);
|
||||
}
|
||||
|
||||
void YU12ToRGB32(const void* yu12, void* rgb, int width, int height)
|
||||
{
|
||||
const int pix_total = width * height;
|
||||
const uint8_t* Y = reinterpret_cast<const uint8_t*>(yu12);
|
||||
const uint8_t* U = Y + pix_total;
|
||||
const uint8_t* V = U + pix_total / 4;
|
||||
_YUV420SToRGB32(Y, U, V, 1, reinterpret_cast<uint32_t*>(rgb), width, height);
|
||||
}
|
||||
|
||||
/* Common converter for YUV 4:2:0 interleaved to RGB565.
|
||||
* y, u, and v point to Y,U, and V panes, where U and V values are interleaved.
|
||||
*/
|
||||
static void _NVXXToRGB565(const uint8_t* Y,
|
||||
const uint8_t* U,
|
||||
const uint8_t* V,
|
||||
uint16_t* rgb,
|
||||
int width,
|
||||
int height)
|
||||
{
|
||||
_YUV420SToRGB565(Y, U, V, 2, rgb, width, height);
|
||||
}
|
||||
|
||||
/* Common converter for YUV 4:2:0 interleaved to RGB32.
|
||||
* y, u, and v point to Y,U, and V panes, where U and V values are interleaved.
|
||||
*/
|
||||
static void _NVXXToRGB32(const uint8_t* Y,
|
||||
const uint8_t* U,
|
||||
const uint8_t* V,
|
||||
uint32_t* rgb,
|
||||
int width,
|
||||
int height)
|
||||
{
|
||||
_YUV420SToRGB32(Y, U, V, 2, rgb, width, height);
|
||||
}
|
||||
|
||||
void NV12ToRGB565(const void* nv12, void* rgb, int width, int height)
|
||||
{
|
||||
const int pix_total = width * height;
|
||||
const uint8_t* y = reinterpret_cast<const uint8_t*>(nv12);
|
||||
_NVXXToRGB565(y, y + pix_total, y + pix_total + 1,
|
||||
reinterpret_cast<uint16_t*>(rgb), width, height);
|
||||
}
|
||||
|
||||
void NV12ToRGB32(const void* nv12, void* rgb, int width, int height)
|
||||
{
|
||||
const int pix_total = width * height;
|
||||
const uint8_t* y = reinterpret_cast<const uint8_t*>(nv12);
|
||||
_NVXXToRGB32(y, y + pix_total, y + pix_total + 1,
|
||||
reinterpret_cast<uint32_t*>(rgb), width, height);
|
||||
}
|
||||
|
||||
void NV21ToRGB565(const void* nv21, void* rgb, int width, int height)
|
||||
{
|
||||
const int pix_total = width * height;
|
||||
const uint8_t* y = reinterpret_cast<const uint8_t*>(nv21);
|
||||
_NVXXToRGB565(y, y + pix_total + 1, y + pix_total,
|
||||
reinterpret_cast<uint16_t*>(rgb), width, height);
|
||||
}
|
||||
|
||||
void NV21ToRGB32(const void* nv21, void* rgb, int width, int height)
|
||||
{
|
||||
const int pix_total = width * height;
|
||||
const uint8_t* y = reinterpret_cast<const uint8_t*>(nv21);
|
||||
_NVXXToRGB32(y, y + pix_total + 1, y + pix_total,
|
||||
reinterpret_cast<uint32_t*>(rgb), width, height);
|
||||
}
|
||||
|
||||
}; /* namespace android */
|
||||
314
android/camera/Converters.h
Executable file
314
android/camera/Converters.h
Executable file
|
|
@ -0,0 +1,314 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_CONVERTERS_H
|
||||
#define HW_EMULATOR_CAMERA_CONVERTERS_H
|
||||
|
||||
#include <endian.h>
|
||||
|
||||
#ifndef __BYTE_ORDER
|
||||
#error "could not determine byte order"
|
||||
#endif
|
||||
|
||||
/*
|
||||
* Contains declaration of framebuffer conversion routines.
|
||||
*
|
||||
* NOTE: RGB and big/little endian considerations. Wherewer in this code RGB
|
||||
* pixels are represented as WORD, or DWORD, the color order inside the
|
||||
* WORD / DWORD matches the one that would occur if that WORD / DWORD would have
|
||||
* been read from the typecasted framebuffer:
|
||||
*
|
||||
* const uint32_t rgb = *reinterpret_cast<const uint32_t*>(framebuffer);
|
||||
*
|
||||
* So, if this code runs on the little endian CPU, red color in 'rgb' would be
|
||||
* masked as 0x000000ff, and blue color would be masked as 0x00ff0000, while if
|
||||
* the code runs on a big endian CPU, the red color in 'rgb' would be masked as
|
||||
* 0xff000000, and blue color would be masked as 0x0000ff00,
|
||||
*/
|
||||
|
||||
namespace android {
|
||||
|
||||
/*
|
||||
* RGB565 color masks
|
||||
*/
|
||||
|
||||
#if __BYTE_ORDER == __LITTLE_ENDIAN
|
||||
static const uint16_t kRed5 = 0x001f;
|
||||
static const uint16_t kGreen6 = 0x07e0;
|
||||
static const uint16_t kBlue5 = 0xf800;
|
||||
#else // __BYTE_ORDER
|
||||
static const uint16_t kRed5 = 0xf800;
|
||||
static const uint16_t kGreen6 = 0x07e0;
|
||||
static const uint16_t kBlue5 = 0x001f;
|
||||
#endif // __BYTE_ORDER
|
||||
static const uint32_t kBlack16 = 0x0000;
|
||||
static const uint32_t kWhite16 = kRed5 | kGreen6 | kBlue5;
|
||||
|
||||
/*
|
||||
* RGB32 color masks
|
||||
*/
|
||||
|
||||
#if __BYTE_ORDER == __LITTLE_ENDIAN
|
||||
static const uint32_t kRed8 = 0x000000ff;
|
||||
static const uint32_t kGreen8 = 0x0000ff00;
|
||||
static const uint32_t kBlue8 = 0x00ff0000;
|
||||
#else // __BYTE_ORDER
|
||||
static const uint32_t kRed8 = 0x00ff0000;
|
||||
static const uint32_t kGreen8 = 0x0000ff00;
|
||||
static const uint32_t kBlue8 = 0x000000ff;
|
||||
#endif // __BYTE_ORDER
|
||||
static const uint32_t kBlack32 = 0x00000000;
|
||||
static const uint32_t kWhite32 = kRed8 | kGreen8 | kBlue8;
|
||||
|
||||
/*
|
||||
* Extracting, and saving color bytes from / to WORD / DWORD RGB.
|
||||
*/
|
||||
|
||||
#if __BYTE_ORDER == __LITTLE_ENDIAN
|
||||
/* Extract red, green, and blue bytes from RGB565 word. */
|
||||
#define R16(rgb) static_cast<uint8_t>(rgb & kRed5)
|
||||
#define G16(rgb) static_cast<uint8_t>((rgb & kGreen6) >> 5)
|
||||
#define B16(rgb) static_cast<uint8_t>((rgb & kBlue5) >> 11)
|
||||
/* Make 8 bits red, green, and blue, extracted from RGB565 word. */
|
||||
#define R16_32(rgb) static_cast<uint8_t>(((rgb & kRed5) << 3) | ((rgb & kRed5) >> 2))
|
||||
#define G16_32(rgb) static_cast<uint8_t>(((rgb & kGreen6) >> 3) | ((rgb & kGreen6) >> 9))
|
||||
#define B16_32(rgb) static_cast<uint8_t>(((rgb & kBlue5) >> 8) | ((rgb & kBlue5) >> 14))
|
||||
/* Extract red, green, and blue bytes from RGB32 dword. */
|
||||
#define R32(rgb) static_cast<uint8_t>(rgb & kRed8)
|
||||
#define G32(rgb) static_cast<uint8_t>(((rgb & kGreen8) >> 8) & 0xff)
|
||||
#define B32(rgb) static_cast<uint8_t>(((rgb & kBlue8) >> 16) & 0xff)
|
||||
/* Build RGB565 word from red, green, and blue bytes. */
|
||||
#define RGB565(r, g, b) static_cast<uint16_t>((((static_cast<uint16_t>(b) << 6) | g) << 5) | r)
|
||||
/* Build RGB32 dword from red, green, and blue bytes. */
|
||||
#define RGB32(r, g, b) static_cast<uint32_t>((((static_cast<uint32_t>(b) << 8) | g) << 8) | r)
|
||||
#else // __BYTE_ORDER
|
||||
/* Extract red, green, and blue bytes from RGB565 word. */
|
||||
#define R16(rgb) static_cast<uint8_t>((rgb & kRed5) >> 11)
|
||||
#define G16(rgb) static_cast<uint8_t>((rgb & kGreen6) >> 5)
|
||||
#define B16(rgb) static_cast<uint8_t>(rgb & kBlue5)
|
||||
/* Make 8 bits red, green, and blue, extracted from RGB565 word. */
|
||||
#define R16_32(rgb) static_cast<uint8_t>(((rgb & kRed5) >> 8) | ((rgb & kRed5) >> 14))
|
||||
#define G16_32(rgb) static_cast<uint8_t>(((rgb & kGreen6) >> 3) | ((rgb & kGreen6) >> 9))
|
||||
#define B16_32(rgb) static_cast<uint8_t>(((rgb & kBlue5) << 3) | ((rgb & kBlue5) >> 2))
|
||||
/* Extract red, green, and blue bytes from RGB32 dword. */
|
||||
#define R32(rgb) static_cast<uint8_t>((rgb & kRed8) >> 16)
|
||||
#define G32(rgb) static_cast<uint8_t>((rgb & kGreen8) >> 8)
|
||||
#define B32(rgb) static_cast<uint8_t>(rgb & kBlue8)
|
||||
/* Build RGB565 word from red, green, and blue bytes. */
|
||||
#define RGB565(r, g, b) static_cast<uint16_t>((((static_cast<uint16_t>(r) << 6) | g) << 5) | b)
|
||||
/* Build RGB32 dword from red, green, and blue bytes. */
|
||||
#define RGB32(r, g, b) static_cast<uint32_t>((((static_cast<uint32_t>(r) << 8) | g) << 8) | b)
|
||||
#endif // __BYTE_ORDER
|
||||
|
||||
/* An union that simplifies breaking 32 bit RGB into separate R, G, and B colors.
|
||||
*/
|
||||
/* Overlays a 32-bit RGB value with its individual R, G, B, and A bytes.
 * Byte order inside the struct mirrors how the dword reads from a typecast
 * framebuffer on the host endianness (see the file-top note on RGB layout).
 */
typedef union RGB32_t {
    uint32_t color;     /* Whole pixel as one dword. */
    struct {
#if __BYTE_ORDER == __LITTLE_ENDIAN
        uint8_t r;
        uint8_t g;
        uint8_t b;
        uint8_t a;
#else   /* big endian */
        uint8_t a;
        uint8_t b;
        uint8_t g;
        uint8_t r;
#endif  /* __BYTE_ORDER */
    };
} RGB32_t;
|
||||
|
||||
|
||||
/* Clips a value to the unsigned 0-255 range, treating negative values as zero.
|
||||
*/
|
||||
/* Clamps a value to the [0, 255] byte range. */
static __inline__ int
clamp(int x)
{
    return (x > 255) ? 255 : ((x < 0) ? 0 : x);
}
|
||||
|
||||
/********************************************************************************
|
||||
* Basics of RGB -> YUV conversion
|
||||
*******************************************************************************/
|
||||
|
||||
/*
|
||||
* RGB -> YUV conversion macros
|
||||
*/
|
||||
#define RGB2Y(r, g, b) (uint8_t)(((66 * (r) + 129 * (g) + 25 * (b) + 128) >> 8) + 16)
|
||||
#define RGB2U(r, g, b) (uint8_t)(((-38 * (r) - 74 * (g) + 112 * (b) + 128) >> 8) + 128)
|
||||
#define RGB2V(r, g, b) (uint8_t)(((112 * (r) - 94 * (g) - 18 * (b) + 128) >> 8) + 128)
|
||||
|
||||
/* Converts R8 G8 B8 color to YUV. */
|
||||
static __inline__ void
|
||||
R8G8B8ToYUV(uint8_t r, uint8_t g, uint8_t b, uint8_t* y, uint8_t* u, uint8_t* v)
|
||||
{
|
||||
*y = RGB2Y((int)r, (int)g, (int)b);
|
||||
*u = RGB2U((int)r, (int)g, (int)b);
|
||||
*v = RGB2V((int)r, (int)g, (int)b);
|
||||
}
|
||||
|
||||
/* Converts RGB565 color to YUV. */
|
||||
static __inline__ void
|
||||
RGB565ToYUV(uint16_t rgb, uint8_t* y, uint8_t* u, uint8_t* v)
|
||||
{
|
||||
R8G8B8ToYUV(R16_32(rgb), G16_32(rgb), B16_32(rgb), y, u, v);
|
||||
}
|
||||
|
||||
/* Converts RGB32 color to YUV. */
|
||||
static __inline__ void
|
||||
RGB32ToYUV(uint32_t rgb, uint8_t* y, uint8_t* u, uint8_t* v)
|
||||
{
|
||||
RGB32_t rgb_c;
|
||||
rgb_c.color = rgb;
|
||||
R8G8B8ToYUV(rgb_c.r, rgb_c.g, rgb_c.b, y, u, v);
|
||||
}
|
||||
|
||||
/********************************************************************************
|
||||
* Basics of YUV -> RGB conversion.
|
||||
* Note that due to the fact that guest uses RGB only on preview window, and the
|
||||
* RGB format that is used is RGB565, we can limit YUV -> RGB conversions to
|
||||
* RGB565 only.
|
||||
*******************************************************************************/
|
||||
|
||||
/*
|
||||
* YUV -> RGB conversion macros
|
||||
*/
|
||||
|
||||
/* "Optimized" macros that take specialy prepared Y, U, and V values:
|
||||
* C = Y - 16
|
||||
* D = U - 128
|
||||
* E = V - 128
|
||||
*/
|
||||
#define YUV2RO(C, D, E) clamp((298 * (C) + 409 * (E) + 128) >> 8)
|
||||
#define YUV2GO(C, D, E) clamp((298 * (C) - 100 * (D) - 208 * (E) + 128) >> 8)
|
||||
#define YUV2BO(C, D, E) clamp((298 * (C) + 516 * (D) + 128) >> 8)
|
||||
|
||||
/*
|
||||
* Main macros that take the original Y, U, and V values
|
||||
*/
|
||||
#define YUV2R(y, u, v) clamp((298 * ((y)-16) + 409 * ((v)-128) + 128) >> 8)
|
||||
#define YUV2G(y, u, v) clamp((298 * ((y)-16) - 100 * ((u)-128) - 208 * ((v)-128) + 128) >> 8)
|
||||
#define YUV2B(y, u, v) clamp((298 * ((y)-16) + 516 * ((u)-128) + 128) >> 8)
|
||||
|
||||
|
||||
/* Converts YUV color to RGB565. */
|
||||
static __inline__ uint16_t
|
||||
YUVToRGB565(int y, int u, int v)
|
||||
{
|
||||
/* Calculate C, D, and E values for the optimized macro. */
|
||||
y -= 16; u -= 128; v -= 128;
|
||||
const uint16_t r = (YUV2RO(y,u,v) >> 3) & 0x1f;
|
||||
const uint16_t g = (YUV2GO(y,u,v) >> 2) & 0x3f;
|
||||
const uint16_t b = (YUV2BO(y,u,v) >> 3) & 0x1f;
|
||||
return RGB565(r, g, b);
|
||||
}
|
||||
|
||||
/* Converts YUV color to RGB32. */
|
||||
static __inline__ uint32_t
|
||||
YUVToRGB32(int y, int u, int v)
|
||||
{
|
||||
/* Calculate C, D, and E values for the optimized macro. */
|
||||
y -= 16; u -= 128; v -= 128;
|
||||
RGB32_t rgb;
|
||||
rgb.r = YUV2RO(y,u,v) & 0xff;
|
||||
rgb.g = YUV2GO(y,u,v) & 0xff;
|
||||
rgb.b = YUV2BO(y,u,v) & 0xff;
|
||||
return rgb.color;
|
||||
}
|
||||
|
||||
/* YUV pixel descriptor. */
|
||||
struct YUVPixel {
|
||||
uint8_t Y;
|
||||
uint8_t U;
|
||||
uint8_t V;
|
||||
|
||||
inline YUVPixel()
|
||||
: Y(0), U(0), V(0)
|
||||
{
|
||||
}
|
||||
|
||||
inline explicit YUVPixel(uint16_t rgb565)
|
||||
{
|
||||
RGB565ToYUV(rgb565, &Y, &U, &V);
|
||||
}
|
||||
|
||||
inline explicit YUVPixel(uint32_t rgb32)
|
||||
{
|
||||
RGB32ToYUV(rgb32, &Y, &U, &V);
|
||||
}
|
||||
|
||||
inline void get(uint8_t* pY, uint8_t* pU, uint8_t* pV) const
|
||||
{
|
||||
*pY = Y; *pU = U; *pV = V;
|
||||
}
|
||||
};
|
||||
|
||||
/* Converts an YV12 framebuffer to RGB565 framebuffer.
|
||||
* Param:
|
||||
* yv12 - YV12 framebuffer.
|
||||
* rgb - RGB565 framebuffer.
|
||||
* width, height - Dimensions for both framebuffers.
|
||||
*/
|
||||
void YV12ToRGB565(const void* yv12, void* rgb, int width, int height);
|
||||
|
||||
/* Converts an YV12 framebuffer to RGB32 framebuffer.
|
||||
* Param:
|
||||
* yv12 - YV12 framebuffer.
|
||||
* rgb - RGB32 framebuffer.
|
||||
* width, height - Dimensions for both framebuffers.
|
||||
*/
|
||||
void YV12ToRGB32(const void* yv12, void* rgb, int width, int height);
|
||||
|
||||
/* Converts an YU12 framebuffer to RGB32 framebuffer.
|
||||
* Param:
|
||||
* yu12 - YU12 framebuffer.
|
||||
* rgb - RGB32 framebuffer.
|
||||
* width, height - Dimensions for both framebuffers.
|
||||
*/
|
||||
void YU12ToRGB32(const void* yu12, void* rgb, int width, int height);
|
||||
|
||||
/* Converts an NV12 framebuffer to RGB565 framebuffer.
|
||||
* Param:
|
||||
* nv12 - NV12 framebuffer.
|
||||
* rgb - RGB565 framebuffer.
|
||||
* width, height - Dimensions for both framebuffers.
|
||||
*/
|
||||
void NV12ToRGB565(const void* nv12, void* rgb, int width, int height);
|
||||
|
||||
/* Converts an NV12 framebuffer to RGB32 framebuffer.
|
||||
* Param:
|
||||
* nv12 - NV12 framebuffer.
|
||||
* rgb - RGB32 framebuffer.
|
||||
* width, height - Dimensions for both framebuffers.
|
||||
*/
|
||||
void NV12ToRGB32(const void* nv12, void* rgb, int width, int height);
|
||||
|
||||
/* Converts an NV21 framebuffer to RGB565 framebuffer.
|
||||
* Param:
|
||||
* nv21 - NV21 framebuffer.
|
||||
* rgb - RGB565 framebuffer.
|
||||
* width, height - Dimensions for both framebuffers.
|
||||
*/
|
||||
void NV21ToRGB565(const void* nv21, void* rgb, int width, int height);
|
||||
|
||||
/* Converts an NV21 framebuffer to RGB32 framebuffer.
|
||||
* Param:
|
||||
* nv21 - NV21 framebuffer.
|
||||
* rgb - RGB32 framebuffer.
|
||||
* width, height - Dimensions for both framebuffers.
|
||||
*/
|
||||
void NV21ToRGB32(const void* nv21, void* rgb, int width, int height);
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_CONVERTERS_H */
|
||||
89
android/camera/EmulatedBaseCamera.cpp
Normal file
89
android/camera/EmulatedBaseCamera.cpp
Normal file
|
|
@ -0,0 +1,89 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of a class EmulatedBaseCamera that encapsulates
|
||||
* functionality common to all emulated camera device versions ("fake",
|
||||
* "webcam", "video file", "cam2.0" etc.). Instances of this class (for each
|
||||
* emulated camera) are created during the construction of the
|
||||
* EmulatedCameraFactory instance. This class serves as an entry point for all
|
||||
* camera API calls that are common across all versions of the
|
||||
* camera_device_t/camera_module_t structures.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_BaseCamera"
|
||||
#include <cutils/log.h>
|
||||
|
||||
#include "EmulatedBaseCamera.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Constructs the common part of an emulated camera and fills in the shared
 * hw_device_t header of the HAL device descriptor.
 * Param:
 *  cameraId - Zero-based ID assigned to this camera.
 *  cameraVersion - Camera device HAL version implemented by the subclass.
 *  device - HAL device descriptor to initialize.
 *  module - HAL module this device belongs to.
 */
EmulatedBaseCamera::EmulatedBaseCamera(int cameraId,
                                       uint32_t cameraVersion,
                                       struct hw_device_t* device,
                                       struct hw_module_t* module)
    : mCameraInfo(NULL),
      mCameraID(cameraId),
      mCameraDeviceVersion(cameraVersion)
{
    /* Common hw_device_t header shared by every device version. */
    device->tag = HARDWARE_DEVICE_TAG;
    device->version = cameraVersion;
    device->module = module;
    /* close() is version-specific; the child implementation must fill it in. */
    device->close = NULL;
}
|
||||
|
||||
/* No resources owned at this level.
 * NOTE(review): mCameraInfo is not freed here — presumably managed by the
 * subclass; confirm against the derived classes. */
EmulatedBaseCamera::~EmulatedBaseCamera()
{
}
|
||||
|
||||
/* Fills in the framework-facing camera_info for this camera.
 * Static metadata is only meaningful for camera HAL v2+ devices; for older
 * devices a poison pointer is planted so accidental use is caught early.
 * Return: NO_ERROR always.
 */
status_t EmulatedBaseCamera::getCameraInfo(struct camera_info* info)
{
    ALOGV("%s", __FUNCTION__);

    const bool hasStaticMetadata =
            mCameraDeviceVersion >= HARDWARE_DEVICE_API_VERSION(2, 0);

    info->device_version = mCameraDeviceVersion;
    info->static_camera_characteristics =
            hasStaticMetadata ? mCameraInfo
                              : (camera_metadata_t*)0xcafef00d;

    return NO_ERROR;
}
|
||||
|
||||
/* Hotplug "plug" request; not supported by the base implementation.
 * Return: INVALID_OPERATION always.
 */
status_t EmulatedBaseCamera::plugCamera()
{
    ALOGE("%s: not supported", __FUNCTION__);
    return INVALID_OPERATION;
}
|
||||
|
||||
/* Hotplug "unplug" request; not supported by the base implementation.
 * Return: INVALID_OPERATION always.
 */
status_t EmulatedBaseCamera::unplugCamera()
{
    ALOGE("%s: not supported", __FUNCTION__);
    return INVALID_OPERATION;
}
|
||||
|
||||
/* Reports the hotplug state; the base camera is always present. */
camera_device_status_t EmulatedBaseCamera::getHotplugStatus()
{
    return CAMERA_DEVICE_STATUS_PRESENT;
}
|
||||
|
||||
|
||||
|
||||
|
||||
} /* namespace android */
|
||||
117
android/camera/EmulatedBaseCamera.h
Normal file
117
android/camera/EmulatedBaseCamera.h
Normal file
|
|
@ -0,0 +1,117 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_BASE_CAMERA_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_BASE_CAMERA_H
|
||||
|
||||
#include <hardware/camera_common.h>
|
||||
#include <utils/Errors.h>
|
||||
|
||||
namespace android {
|
||||
|
||||
/*
|
||||
* Contains declaration of a class EmulatedBaseCamera that encapsulates
|
||||
* functionality common to all emulated camera device versions ("fake",
|
||||
* "webcam", "video file", etc.). Instances of this class (for each emulated
|
||||
* camera) are created during the construction of the EmulatedCameraFactory
|
||||
* instance. This class serves as an entry point for all camera API calls that
|
||||
* are common across all versions of the camera_device_t/camera_module_t
|
||||
* structures.
|
||||
*/
|
||||
|
||||
class EmulatedBaseCamera {
|
||||
public:
|
||||
EmulatedBaseCamera(int cameraId,
|
||||
uint32_t cameraVersion,
|
||||
struct hw_device_t* device,
|
||||
struct hw_module_t* module);
|
||||
|
||||
virtual ~EmulatedBaseCamera();
|
||||
|
||||
/****************************************************************************
|
||||
* Public API
|
||||
***************************************************************************/
|
||||
|
||||
public:
|
||||
/* Initializes EmulatedCamera instance.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status on failure.
|
||||
*/
|
||||
virtual status_t Initialize() = 0;
|
||||
|
||||
/****************************************************************************
|
||||
* Camera API implementation
|
||||
***************************************************************************/
|
||||
|
||||
public:
|
||||
/* Creates connection to the emulated camera device.
|
||||
* This method is called in response to hw_module_methods_t::open callback.
|
||||
* NOTE: When this method is called the object is locked.
|
||||
* Note that failures in this method are reported as negative EXXX statuses.
|
||||
*/
|
||||
virtual status_t connectCamera(hw_device_t** device) = 0;
|
||||
|
||||
|
||||
/* Plug the connection for the emulated camera. Until it's plugged in
|
||||
* calls to connectCamera should fail with -ENODEV.
|
||||
*/
|
||||
virtual status_t plugCamera();
|
||||
|
||||
/* Unplug the connection from underneath the emulated camera.
|
||||
* This is similar to closing the camera, except that
|
||||
* all function calls into the camera device will return
|
||||
* -EPIPE errors until the camera is reopened.
|
||||
*/
|
||||
virtual status_t unplugCamera();
|
||||
|
||||
virtual camera_device_status_t getHotplugStatus();
|
||||
|
||||
/* Closes connection to the emulated camera.
|
||||
* This method is called in response to camera_device::close callback.
|
||||
* NOTE: When this method is called the object is locked.
|
||||
* Note that failures in this method are reported as negative EXXX statuses.
|
||||
*/
|
||||
virtual status_t closeCamera() = 0;
|
||||
|
||||
/* Gets camera information.
|
||||
* This method is called in response to camera_module_t::get_camera_info
|
||||
* callback.
|
||||
* NOTE: When this method is called the object is locked.
|
||||
* Note that failures in this method are reported as negative EXXX statuses.
|
||||
*/
|
||||
virtual status_t getCameraInfo(struct camera_info* info) = 0;
|
||||
|
||||
/****************************************************************************
|
||||
* Data members
|
||||
***************************************************************************/
|
||||
|
||||
protected:
|
||||
/* Fixed camera information for camera2 devices. Must be valid to access if
|
||||
* mCameraDeviceVersion is >= HARDWARE_DEVICE_API_VERSION(2,0) */
|
||||
camera_metadata_t *mCameraInfo;
|
||||
|
||||
/* Zero-based ID assigned to this camera. */
|
||||
int mCameraID;
|
||||
|
||||
private:
|
||||
|
||||
/* Version of the camera device HAL implemented by this camera */
|
||||
int mCameraDeviceVersion;
|
||||
};
|
||||
|
||||
} /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_BASE_CAMERA_H */
|
||||
1041
android/camera/EmulatedCamera.cpp
Executable file
1041
android/camera/EmulatedCamera.cpp
Executable file
File diff suppressed because it is too large
Load diff
401
android/camera/EmulatedCamera.h
Executable file
401
android/camera/EmulatedCamera.h
Executable file
|
|
@ -0,0 +1,401 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_H
|
||||
|
||||
/*
|
||||
* Contains declaration of a class EmulatedCamera that encapsulates
|
||||
* functionality common to all version 1.0 emulated camera devices ("fake",
|
||||
* "webcam", "video file", etc.). Instances of this class (for each emulated
|
||||
* camera) are created during the construction of the EmulatedCameraFactory
|
||||
* instance. This class serves as an entry point for all camera API calls that
|
||||
* defined by camera_device_ops_t API.
|
||||
*/
|
||||
|
||||
#include <camera/CameraParameters.h>
|
||||
#include "EmulatedBaseCamera.h"
|
||||
#include "EmulatedCameraDevice.h"
|
||||
#include "PreviewWindow.h"
|
||||
#include "CallbackNotifier.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Encapsulates functionality common to all version 1.0 emulated camera devices
 * ("fake", "webcam", "file stream", etc.).
 *
 * Note that EmulatedCameraFactory instantiates object of this class just once,
 * when EmulatedCameraFactory instance gets constructed. Connection to /
 * disconnection from the actual camera device is handled by calls to
 * connectDevice(), and closeCamera() methods of this class that are invoked in
 * response to hw_module_methods_t::open, and camera_device::close callbacks.
 */
class EmulatedCamera : public camera_device, public EmulatedBaseCamera {
public:
    /* Constructs EmulatedCamera instance.
     * Param:
     *  cameraId - Zero based camera identifier, which is an index of the camera
     *      instance in camera factory's array.
     *  module - Emulated camera HAL module descriptor.
     */
    EmulatedCamera(int cameraId,
                   struct hw_module_t* module);

    /* Destructs EmulatedCamera instance. */
    virtual ~EmulatedCamera();

    /****************************************************************************
     * Abstract API
     ***************************************************************************/

public:
    /* Gets emulated camera device used by this instance of the emulated camera.
     */
    virtual EmulatedCameraDevice* getCameraDevice() = 0;

    /****************************************************************************
     * Public API
     ***************************************************************************/

public:
    /** Override of base class method */
    virtual status_t Initialize();

    /* Next frame is available in the camera device.
     * This is a notification callback that is invoked by the camera device when
     * a new frame is available.
     * Note that most likely this method is called in context of a worker thread
     * that camera device has created for frame capturing.
     * Param:
     *  frame - Captured frame, or NULL if camera device didn't pull the frame
     *      yet. If NULL is passed in this parameter use GetCurrentFrame method
     *      of the camera device class to obtain the next frame. Also note that
     *      the size of the frame that is passed here (as well as the frame
     *      returned from the GetCurrentFrame method) is defined by the current
     *      frame settings (width + height + pixel format) for the camera device.
     *  timestamp - Frame's timestamp.
     *  camera_dev - Camera device instance that delivered the frame.
     */
    virtual void onNextFrameAvailable(const void* frame,
                                      nsecs_t timestamp,
                                      EmulatedCameraDevice* camera_dev);

    /* Entry point for notifications that occur in camera device.
     * Param:
     *  err - CAMERA_ERROR_XXX error code.
     */
    virtual void onCameraDeviceError(int err);

    /****************************************************************************
     * Camera API implementation
     ***************************************************************************/

public:
    /** Override of base class method */
    virtual status_t connectCamera(hw_device_t** device);

    /** Override of base class method */
    virtual status_t closeCamera();

    /** Override of base class method */
    virtual status_t getCameraInfo(struct camera_info* info);

    /****************************************************************************
     * Camera API implementation.
     * These methods are called from the camera API callback routines.
     ***************************************************************************/

protected:
    /* Actual handler for camera_device_ops_t::set_preview_window callback.
     * NOTE: When this method is called the object is locked.
     * Note that failures in this method are reported as negative EXXX statuses.
     */
    virtual status_t setPreviewWindow(struct preview_stream_ops *window);

    /* Actual handler for camera_device_ops_t::set_callbacks callback.
     * NOTE: When this method is called the object is locked.
     */
    virtual void setCallbacks(camera_notify_callback notify_cb,
                              camera_data_callback data_cb,
                              camera_data_timestamp_callback data_cb_timestamp,
                              camera_request_memory get_memory,
                              void* user);

    /* Actual handler for camera_device_ops_t::enable_msg_type callback.
     * NOTE: When this method is called the object is locked.
     */
    virtual void enableMsgType(int32_t msg_type);

    /* Actual handler for camera_device_ops_t::disable_msg_type callback.
     * NOTE: When this method is called the object is locked.
     */
    virtual void disableMsgType(int32_t msg_type);

    /* Actual handler for camera_device_ops_t::msg_type_enabled callback.
     * NOTE: When this method is called the object is locked.
     * Return:
     *  0 if message(s) is (are) disabled, != 0 if enabled.
     */
    virtual int isMsgTypeEnabled(int32_t msg_type);

    /* Actual handler for camera_device_ops_t::start_preview callback.
     * NOTE: When this method is called the object is locked.
     * Note that failures in this method are reported as negative EXXX statuses.
     */
    virtual status_t startPreview();

    /* Actual handler for camera_device_ops_t::stop_preview callback.
     * NOTE: When this method is called the object is locked.
     */
    virtual void stopPreview();

    /* Actual handler for camera_device_ops_t::preview_enabled callback.
     * NOTE: When this method is called the object is locked.
     * Return:
     *  0 if preview is disabled, != 0 if enabled.
     */
    virtual int isPreviewEnabled();

    /* Actual handler for camera_device_ops_t::store_meta_data_in_buffers callback.
     * NOTE: When this method is called the object is locked.
     * Note that failures in this method are reported as negative EXXX statuses.
     */
    virtual status_t storeMetaDataInBuffers(int enable);

    /* Actual handler for camera_device_ops_t::start_recording callback.
     * NOTE: When this method is called the object is locked.
     * Note that failures in this method are reported as negative EXXX statuses.
     */
    virtual status_t startRecording();

    /* Actual handler for camera_device_ops_t::stop_recording callback.
     * NOTE: When this method is called the object is locked.
     */
    virtual void stopRecording();

    /* Actual handler for camera_device_ops_t::recording_enabled callback.
     * NOTE: When this method is called the object is locked.
     * Return:
     *  0 if recording is disabled, != 0 if enabled.
     */
    virtual int isRecordingEnabled();

    /* Actual handler for camera_device_ops_t::release_recording_frame callback.
     * NOTE: When this method is called the object is locked.
     */
    virtual void releaseRecordingFrame(const void* opaque);

    /* Actual handler for camera_device_ops_t::auto_focus callback.
     * NOTE: When this method is called the object is locked.
     * Note that failures in this method are reported as negative EXXX statuses.
     */
    virtual status_t setAutoFocus();

    /* Actual handler for camera_device_ops_t::cancel_auto_focus callback.
     * NOTE: When this method is called the object is locked.
     * Note that failures in this method are reported as negative EXXX statuses.
     */
    virtual status_t cancelAutoFocus();

    /* Actual handler for camera_device_ops_t::take_picture callback.
     * NOTE: When this method is called the object is locked.
     * Note that failures in this method are reported as negative EXXX statuses.
     */
    virtual status_t takePicture();

    /* Actual handler for camera_device_ops_t::cancel_picture callback.
     * NOTE: When this method is called the object is locked.
     * Note that failures in this method are reported as negative EXXX statuses.
     */
    virtual status_t cancelPicture();

    /* Actual handler for camera_device_ops_t::set_parameters callback.
     * NOTE: When this method is called the object is locked.
     * Note that failures in this method are reported as negative EXXX statuses.
     */
    virtual status_t setParameters(const char* parms);

    /* Actual handler for camera_device_ops_t::get_parameters callback.
     * NOTE: When this method is called the object is locked.
     * Return:
     *  Flattened parameters string. The caller will free the buffer allocated
     *  for the string by calling camera_device_ops_t::put_parameters callback.
     */
    virtual char* getParameters();

    /* Actual handler for camera_device_ops_t::put_parameters callback.
     * Called to free the string returned from camera_device_ops_t::get_parameters
     * callback. There is nothing more to it: the name of the callback is just
     * misleading.
     * NOTE: When this method is called the object is locked.
     */
    virtual void putParameters(char* params);

    /* Actual handler for camera_device_ops_t::send_command callback.
     * NOTE: When this method is called the object is locked.
     * Note that failures in this method are reported as negative EXXX statuses.
     */
    virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);

    /* Actual handler for camera_device_ops_t::release callback.
     * NOTE: When this method is called the object is locked.
     */
    virtual void releaseCamera();

    /* Actual handler for camera_device_ops_t::dump callback.
     * NOTE: When this method is called the object is locked.
     * Note that failures in this method are reported as negative EXXX statuses.
     */
    virtual status_t dumpCamera(int fd);

    /****************************************************************************
     * Preview management.
     ***************************************************************************/

protected:
    /* Starts preview.
     * Note that when this method is called mPreviewWindow may be NULL,
     * indicating that framework has an intention to start displaying video
     * frames, but didn't create the preview window yet.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure.
     */
    virtual status_t doStartPreview();

    /* Stops preview.
     * This method reverts DoStartPreview.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure.
     */
    virtual status_t doStopPreview();

    /****************************************************************************
     * Private API.
     ***************************************************************************/

protected:
    /* Cleans up camera when released. */
    virtual status_t cleanupCamera();

    /****************************************************************************
     * Camera API callbacks as defined by camera_device_ops structure.
     * See hardware/libhardware/include/hardware/camera.h for information on
     * each of these callbacks. Implemented in this class, these callbacks simply
     * dispatch the call into an instance of EmulatedCamera class defined by the
     * 'camera_device' parameter.
     ***************************************************************************/

private:
    static int set_preview_window(struct camera_device* dev,
                                  struct preview_stream_ops* window);

    static void set_callbacks(struct camera_device* dev,
                              camera_notify_callback notify_cb,
                              camera_data_callback data_cb,
                              camera_data_timestamp_callback data_cb_timestamp,
                              camera_request_memory get_memory,
                              void* user);

    static void enable_msg_type(struct camera_device* dev, int32_t msg_type);

    static void disable_msg_type(struct camera_device* dev, int32_t msg_type);

    static int msg_type_enabled(struct camera_device* dev, int32_t msg_type);

    static int start_preview(struct camera_device* dev);

    static void stop_preview(struct camera_device* dev);

    static int preview_enabled(struct camera_device* dev);

    static int store_meta_data_in_buffers(struct camera_device* dev, int enable);

    static int start_recording(struct camera_device* dev);

    static void stop_recording(struct camera_device* dev);

    static int recording_enabled(struct camera_device* dev);

    static void release_recording_frame(struct camera_device* dev,
                                        const void* opaque);

    static int auto_focus(struct camera_device* dev);

    static int cancel_auto_focus(struct camera_device* dev);

    static int take_picture(struct camera_device* dev);

    static int cancel_picture(struct camera_device* dev);

    static int set_parameters(struct camera_device* dev, const char* parms);

    static char* get_parameters(struct camera_device* dev);

    static void put_parameters(struct camera_device* dev, char* params);

    static int send_command(struct camera_device* dev,
                            int32_t cmd,
                            int32_t arg1,
                            int32_t arg2);

    static void release(struct camera_device* dev);

    static int dump(struct camera_device* dev, int fd);

    static int close(struct hw_device_t* device);

    /****************************************************************************
     * Data members
     ***************************************************************************/

protected:
    /* Locks this instance for parameters, state, etc. change. */
    Mutex mObjectLock;

    /* Camera parameters. */
    CameraParameters mParameters;

    /* Preview window. */
    PreviewWindow mPreviewWindow;

    /* Callback notifier. */
    CallbackNotifier mCallbackNotifier;

private:
    /* Registered callbacks implementing camera API. */
    static camera_device_ops_t mDeviceOps;

    /****************************************************************************
     * Common keys
     ***************************************************************************/

public:
    static const char FACING_KEY[];
    static const char ORIENTATION_KEY[];
    static const char RECORDING_HINT_KEY[];

    /****************************************************************************
     * Common string values
     ***************************************************************************/

    /* Possible values for FACING_KEY */
    static const char FACING_BACK[];
    static const char FACING_FRONT[];
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA_H */
|
||||
410
android/camera/EmulatedCamera2.cpp
Normal file
410
android/camera/EmulatedCamera2.cpp
Normal file
|
|
@ -0,0 +1,410 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of a class EmulatedCamera that encapsulates
|
||||
* functionality common to all version 2.0 emulated camera devices. Instances
|
||||
* of this class (for each emulated camera) are created during the construction
|
||||
* of the EmulatedCameraFactory instance. This class serves as an entry point
|
||||
* for all camera API calls that defined by camera2_device_ops_t API.
|
||||
*/
|
||||
|
||||
//#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera2_Camera"
|
||||
#include <cutils/log.h>
|
||||
|
||||
#include "EmulatedCamera2.h"
|
||||
#include "system/camera_metadata.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Constructs EmulatedCamera2 instance.
 * Param:
 *  cameraId - Zero based camera identifier, which is an index of the camera
 *      instance in camera factory's array.
 *  module - Emulated camera HAL module descriptor.
 */
EmulatedCamera2::EmulatedCamera2(int cameraId,
        struct hw_module_t* module):
        EmulatedBaseCamera(cameraId,
                CAMERA_DEVICE_API_VERSION_2_0,
                &common,
                module)
{
    /* Wire the framework-visible camera2_device header to this object:
     * the static close/ops trampolines recover the instance via 'priv'. */
    common.close = EmulatedCamera2::close;
    ops = &sDeviceOps;
    priv = this;

    /* No notification callback is registered until set_notify_callback(). */
    mNotifyCb = NULL;

    /* Framework queue ops are attached later via the device ops callbacks. */
    mRequestQueueSrc = NULL;
    mFrameQueueDst = NULL;

    /* Hook up vendor tag query trampolines; 'parent' lets the static
     * callbacks map the vendor_tag_query_ops back to this instance. */
    mVendorTagOps.get_camera_vendor_section_name =
        EmulatedCamera2::get_camera_vendor_section_name;
    mVendorTagOps.get_camera_vendor_tag_name =
        EmulatedCamera2::get_camera_vendor_tag_name;
    mVendorTagOps.get_camera_vendor_tag_type =
        EmulatedCamera2::get_camera_vendor_tag_type;
    mVendorTagOps.parent = this;

    mStatusPresent = true;
}

/* Destructs EmulatedCamera2 instance. */
EmulatedCamera2::~EmulatedCamera2() {
}
|
||||
|
||||
/****************************************************************************
 * Abstract API
 ***************************************************************************/

/****************************************************************************
 * Public API
 ***************************************************************************/

/* One-time initialization; the base class has nothing to set up. */
status_t EmulatedCamera2::Initialize() {
    return NO_ERROR;
}

/****************************************************************************
 * Camera API implementation
 ***************************************************************************/

/* Connects to the camera device: hands the framework the hw_device_t header
 * ('common') embedded in this object. */
status_t EmulatedCamera2::connectCamera(hw_device_t** device) {
    *device = &common;
    return NO_ERROR;
}

/* Disconnects from the camera device; nothing to release in the base class. */
status_t EmulatedCamera2::closeCamera() {
    return NO_ERROR;
}

/* Gets static camera information; defers entirely to the base class. */
status_t EmulatedCamera2::getCameraInfo(struct camera_info* info) {
    return EmulatedBaseCamera::getCameraInfo(info);
}
|
||||
|
||||
/****************************************************************************
 * Camera Device API implementation.
 * These methods are called from the camera API callback routines.
 *
 * NOTE: Every method in this section is a default stub: it reports
 * INVALID_OPERATION (or NULL / -1 for the vendor tag queries) and is meant
 * to be overridden by subclasses that implement an actual camera.
 ***************************************************************************/

/** Request input queue */

int EmulatedCamera2::requestQueueNotify() {
    return INVALID_OPERATION;
}

/** Count of requests in flight */
int EmulatedCamera2::getInProgressCount() {
    return INVALID_OPERATION;
}

/** Cancel all captures in flight */
int EmulatedCamera2::flushCapturesInProgress() {
    return INVALID_OPERATION;
}

/** Construct a default request for a given use case */
int EmulatedCamera2::constructDefaultRequest(
        int request_template,
        camera_metadata_t **request) {
    return INVALID_OPERATION;
}

/** Output stream creation and management */

int EmulatedCamera2::allocateStream(
        uint32_t width,
        uint32_t height,
        int format,
        const camera2_stream_ops_t *stream_ops,
        uint32_t *stream_id,
        uint32_t *format_actual,
        uint32_t *usage,
        uint32_t *max_buffers) {
    return INVALID_OPERATION;
}

int EmulatedCamera2::registerStreamBuffers(
        uint32_t stream_id,
        int num_buffers,
        buffer_handle_t *buffers) {
    return INVALID_OPERATION;
}

int EmulatedCamera2::releaseStream(uint32_t stream_id) {
    return INVALID_OPERATION;
}

/** Reprocessing input stream management */

int EmulatedCamera2::allocateReprocessStream(
        uint32_t width,
        uint32_t height,
        uint32_t format,
        const camera2_stream_in_ops_t *reprocess_stream_ops,
        uint32_t *stream_id,
        uint32_t *consumer_usage,
        uint32_t *max_buffers) {
    return INVALID_OPERATION;
}

int EmulatedCamera2::allocateReprocessStreamFromStream(
        uint32_t output_stream_id,
        const camera2_stream_in_ops_t *reprocess_stream_ops,
        uint32_t *stream_id) {
    return INVALID_OPERATION;
}

int EmulatedCamera2::releaseReprocessStream(uint32_t stream_id) {
    return INVALID_OPERATION;
}

/** 3A triggering */

int EmulatedCamera2::triggerAction(uint32_t trigger_id,
        int ext1, int ext2) {
    return INVALID_OPERATION;
}

/** Custom tag query methods */

/* NULL means "no vendor section for this tag" in the base class. */
const char* EmulatedCamera2::getVendorSectionName(uint32_t tag) {
    return NULL;
}

const char* EmulatedCamera2::getVendorTagName(uint32_t tag) {
    return NULL;
}

/* -1 means "unknown tag type" in the base class. */
int EmulatedCamera2::getVendorTagType(uint32_t tag) {
    return -1;
}

/** Debug methods */

int EmulatedCamera2::dump(int fd) {
    return INVALID_OPERATION;
}
|
||||
|
||||
/****************************************************************************
|
||||
* Private API.
|
||||
***************************************************************************/
|
||||
|
||||
/****************************************************************************
|
||||
* Camera API callbacks as defined by camera2_device_ops structure. See
|
||||
* hardware/libhardware/include/hardware/camera2.h for information on each
|
||||
* of these callbacks. Implemented in this class, these callbacks simply
|
||||
* dispatch the call into an instance of EmulatedCamera2 class defined by the
|
||||
* 'camera_device2' parameter, or set a member value in the same.
|
||||
***************************************************************************/
|
||||
|
||||
/* Recovers the EmulatedCamera2 instance from the framework-supplied
 * camera2_device_t pointer (the device struct is the first base of
 * EmulatedCamera2), shedding constness for the non-const dispatch below. */
EmulatedCamera2* getInstance(const camera2_device_t *d) {
    return const_cast<EmulatedCamera2*>(
            static_cast<const EmulatedCamera2*>(d));
}
|
||||
|
||||
/* Trampoline for camera2_device_ops_t::set_request_queue_src_ops: records the
 * framework's request queue source ops on the instance. */
int EmulatedCamera2::set_request_queue_src_ops(const camera2_device_t *d,
        const camera2_request_queue_src_ops *queue_src_ops) {
    EmulatedCamera2* ec = getInstance(d);
    ec->mRequestQueueSrc = queue_src_ops;
    return NO_ERROR;
}

/* Trampoline for camera2_device_ops_t::notify_request_queue_not_empty. */
int EmulatedCamera2::notify_request_queue_not_empty(const camera2_device_t *d) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->requestQueueNotify();
}

/* Trampoline for camera2_device_ops_t::set_frame_queue_dst_ops: records the
 * framework's frame queue destination ops on the instance. */
int EmulatedCamera2::set_frame_queue_dst_ops(const camera2_device_t *d,
        const camera2_frame_queue_dst_ops *queue_dst_ops) {
    EmulatedCamera2* ec = getInstance(d);
    ec->mFrameQueueDst = queue_dst_ops;
    return NO_ERROR;
}

/* Trampoline for camera2_device_ops_t::get_in_progress_count. */
int EmulatedCamera2::get_in_progress_count(const camera2_device_t *d) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->getInProgressCount();
}

/* Trampoline for camera2_device_ops_t::flush_captures_in_progress. */
int EmulatedCamera2::flush_captures_in_progress(const camera2_device_t *d) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->flushCapturesInProgress();
}

/* Trampoline for camera2_device_ops_t::construct_default_request. */
int EmulatedCamera2::construct_default_request(const camera2_device_t *d,
        int request_template,
        camera_metadata_t **request) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->constructDefaultRequest(request_template, request);
}

/* Trampoline for camera2_device_ops_t::allocate_stream. */
int EmulatedCamera2::allocate_stream(const camera2_device_t *d,
        uint32_t width,
        uint32_t height,
        int format,
        const camera2_stream_ops_t *stream_ops,
        uint32_t *stream_id,
        uint32_t *format_actual,
        uint32_t *usage,
        uint32_t *max_buffers) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->allocateStream(width, height, format, stream_ops,
            stream_id, format_actual, usage, max_buffers);
}

/* Trampoline for camera2_device_ops_t::register_stream_buffers. */
int EmulatedCamera2::register_stream_buffers(const camera2_device_t *d,
        uint32_t stream_id,
        int num_buffers,
        buffer_handle_t *buffers) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->registerStreamBuffers(stream_id,
            num_buffers,
            buffers);
}

/* Trampoline for camera2_device_ops_t::release_stream. */
int EmulatedCamera2::release_stream(const camera2_device_t *d,
        uint32_t stream_id) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->releaseStream(stream_id);
}

/* Trampoline for camera2_device_ops_t::allocate_reprocess_stream. */
int EmulatedCamera2::allocate_reprocess_stream(const camera2_device_t *d,
        uint32_t width,
        uint32_t height,
        uint32_t format,
        const camera2_stream_in_ops_t *reprocess_stream_ops,
        uint32_t *stream_id,
        uint32_t *consumer_usage,
        uint32_t *max_buffers) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->allocateReprocessStream(width, height, format,
            reprocess_stream_ops, stream_id, consumer_usage, max_buffers);
}

/* Trampoline for camera2_device_ops_t::allocate_reprocess_stream_from_stream. */
int EmulatedCamera2::allocate_reprocess_stream_from_stream(
        const camera2_device_t *d,
        uint32_t output_stream_id,
        const camera2_stream_in_ops_t *reprocess_stream_ops,
        uint32_t *stream_id) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->allocateReprocessStreamFromStream(output_stream_id,
            reprocess_stream_ops, stream_id);
}

/* Trampoline for camera2_device_ops_t::release_reprocess_stream. */
int EmulatedCamera2::release_reprocess_stream(const camera2_device_t *d,
        uint32_t stream_id) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->releaseReprocessStream(stream_id);
}

/* Trampoline for camera2_device_ops_t::trigger_action. */
int EmulatedCamera2::trigger_action(const camera2_device_t *d,
        uint32_t trigger_id,
        int ext1,
        int ext2) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->triggerAction(trigger_id, ext1, ext2);
}

/* Trampoline for camera2_device_ops_t::set_notify_callback: records the
 * notification callback and its user cookie under the instance mutex. */
int EmulatedCamera2::set_notify_callback(const camera2_device_t *d,
        camera2_notify_callback notify_cb, void* user) {
    EmulatedCamera2* ec = getInstance(d);
    Mutex::Autolock l(ec->mMutex);
    ec->mNotifyCb = notify_cb;
    ec->mNotifyUserPtr = user;
    return NO_ERROR;
}

/* Trampoline for camera2_device_ops_t::get_metadata_vendor_tag_ops: returns
 * the vendor tag ops embedded in the instance (set up in the constructor). */
int EmulatedCamera2::get_metadata_vendor_tag_ops(const camera2_device_t *d,
        vendor_tag_query_ops_t **ops) {
    EmulatedCamera2* ec = getInstance(d);
    *ops = static_cast<vendor_tag_query_ops_t*>(
            &ec->mVendorTagOps);
    return NO_ERROR;
}

/* Vendor tag trampolines: recover the instance via TagOps::parent. */
const char* EmulatedCamera2::get_camera_vendor_section_name(
        const vendor_tag_query_ops_t *v,
        uint32_t tag) {
    EmulatedCamera2* ec = static_cast<const TagOps*>(v)->parent;
    return ec->getVendorSectionName(tag);
}

const char* EmulatedCamera2::get_camera_vendor_tag_name(
        const vendor_tag_query_ops_t *v,
        uint32_t tag) {
    EmulatedCamera2* ec = static_cast<const TagOps*>(v)->parent;
    return ec->getVendorTagName(tag);
}

int EmulatedCamera2::get_camera_vendor_tag_type(
        const vendor_tag_query_ops_t *v,
        uint32_t tag) {
    EmulatedCamera2* ec = static_cast<const TagOps*>(v)->parent;
    return ec->getVendorTagType(tag);
}

/* Trampoline for camera2_device_ops_t::dump. */
int EmulatedCamera2::dump(const camera2_device_t *d, int fd) {
    EmulatedCamera2* ec = getInstance(d);
    return ec->dump(fd);
}

/* hw_device_t::close handler: converts the generic device pointer back to the
 * camera2 device (and thus the instance) and dispatches to closeCamera(). */
int EmulatedCamera2::close(struct hw_device_t* device) {
    EmulatedCamera2* ec =
            static_cast<EmulatedCamera2*>(
                reinterpret_cast<camera2_device_t*>(device));
    if (ec == NULL) {
        ALOGE("%s: Unexpected NULL camera2 device", __FUNCTION__);
        return -EINVAL;
    }
    return ec->closeCamera();
}
|
||||
|
||||
/* Delivers an asynchronous notification to the framework via the callback
 * registered through set_notify_callback(). Silently does nothing when no
 * callback is registered.
 * Param:
 *  msgType - CAMERA2_MSG_XXX message identifier.
 *  ext1, ext2, ext3 - message-specific payload values.
 */
void EmulatedCamera2::sendNotification(int32_t msgType,
        int32_t ext1, int32_t ext2, int32_t ext3) {
    camera2_notify_callback notifyCb;
    void* notifyUserPtr;
    {
        /* Snapshot the callback AND its user cookie under the same lock.
         * Previously mNotifyUserPtr was read outside the lock, so a racing
         * set_notify_callback() could pair the new callback with a stale
         * user pointer (or vice versa). */
        Mutex::Autolock l(mMutex);
        notifyCb = mNotifyCb;
        notifyUserPtr = mNotifyUserPtr;
    }
    /* Invoke outside the lock to avoid holding mMutex across framework code. */
    if (notifyCb != NULL) {
        notifyCb(msgType, ext1, ext2, ext3, notifyUserPtr);
    }
}
|
||||
|
||||
/* Dispatch table handed to the framework via 'ops' in the constructor.
 * NOTE: this is positional aggregate initialization — the entry order must
 * match the member order of camera2_device_ops_t in hardware/camera2.h. */
camera2_device_ops_t EmulatedCamera2::sDeviceOps = {
    EmulatedCamera2::set_request_queue_src_ops,
    EmulatedCamera2::notify_request_queue_not_empty,
    EmulatedCamera2::set_frame_queue_dst_ops,
    EmulatedCamera2::get_in_progress_count,
    EmulatedCamera2::flush_captures_in_progress,
    EmulatedCamera2::construct_default_request,
    EmulatedCamera2::allocate_stream,
    EmulatedCamera2::register_stream_buffers,
    EmulatedCamera2::release_stream,
    EmulatedCamera2::allocate_reprocess_stream,
    EmulatedCamera2::allocate_reprocess_stream_from_stream,
    EmulatedCamera2::release_reprocess_stream,
    EmulatedCamera2::trigger_action,
    EmulatedCamera2::set_notify_callback,
    EmulatedCamera2::get_metadata_vendor_tag_ops,
    EmulatedCamera2::dump
};
|
||||
|
||||
}; /* namespace android */
|
||||
274
android/camera/EmulatedCamera2.h
Normal file
274
android/camera/EmulatedCamera2.h
Normal file
|
|
@ -0,0 +1,274 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA2_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA2_H
|
||||
|
||||
/*
|
||||
* Contains declaration of a class EmulatedCamera that encapsulates
|
||||
* functionality common to all version 2.0 emulated camera devices. Instances
|
||||
* of this class (for each emulated camera) are created during the construction
|
||||
* of the EmulatedCameraFactory instance. This class serves as an entry point
|
||||
* for all camera API calls that defined by camera2_device_ops_t API.
|
||||
*/
|
||||
|
||||
#include "hardware/camera2.h"
|
||||
#include "system/camera_metadata.h"
|
||||
#include "EmulatedBaseCamera.h"
|
||||
#include <utils/Thread.h>
|
||||
#include <utils/Mutex.h>
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Encapsulates functionality common to all version 2.0 emulated camera devices
 *
 * Note that EmulatedCameraFactory instantiates object of this class just once,
 * when EmulatedCameraFactory instance gets constructed. Connection to /
 * disconnection from the actual camera device is handled by calls to
 * connectDevice(), and closeCamera() methods of this class that are invoked in
 * response to hw_module_methods_t::open, and camera_device::close callbacks.
 */
class EmulatedCamera2 : public camera2_device, public EmulatedBaseCamera {
public:
    /* Constructs EmulatedCamera2 instance.
     * Param:
     *  cameraId - Zero based camera identifier, which is an index of the camera
     *      instance in camera factory's array.
     *  module - Emulated camera HAL module descriptor.
     */
    EmulatedCamera2(int cameraId,
            struct hw_module_t* module);

    /* Destructs EmulatedCamera2 instance. */
    virtual ~EmulatedCamera2();

    /****************************************************************************
     * Abstract API
     ***************************************************************************/

public:

    /****************************************************************************
     * Public API
     ***************************************************************************/

public:
    /* One-time start-up initialization, invoked by the camera factory right
     * after construction, before any framework callbacks can arrive. */
    virtual status_t Initialize();

    /****************************************************************************
     * Camera module API and generic hardware device API implementation
     ***************************************************************************/

public:
    /* Opens the device in response to hw_module_methods_t::open. */
    virtual status_t connectCamera(hw_device_t** device);

    /* Closes the device in response to camera_device::close. */
    virtual status_t closeCamera();

    /* Fills in static information about this camera.
     * Pure virtual: each concrete emulated camera must provide its own info. */
    virtual status_t getCameraInfo(struct camera_info* info) = 0;

    /****************************************************************************
     * Camera API implementation.
     * These methods are called from the camera API callback routines.
     ***************************************************************************/

protected:
    /** Request input queue notification */
    virtual int requestQueueNotify();

    /** Count of requests in flight */
    virtual int getInProgressCount();

    /** Cancel all captures in flight */
    virtual int flushCapturesInProgress();

    /* Builds a default capture request of the given template type. */
    virtual int constructDefaultRequest(
        int request_template,
        camera_metadata_t **request);

    /** Output stream creation and management */
    virtual int allocateStream(
            uint32_t width,
            uint32_t height,
            int format,
            const camera2_stream_ops_t *stream_ops,
            uint32_t *stream_id,
            uint32_t *format_actual,
            uint32_t *usage,
            uint32_t *max_buffers);

    virtual int registerStreamBuffers(
            uint32_t stream_id,
            int num_buffers,
            buffer_handle_t *buffers);

    virtual int releaseStream(uint32_t stream_id);

    /** Input stream creation and management */
    virtual int allocateReprocessStream(
            uint32_t width,
            uint32_t height,
            uint32_t format,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            uint32_t *stream_id,
            uint32_t *consumer_usage,
            uint32_t *max_buffers);

    virtual int allocateReprocessStreamFromStream(
            uint32_t output_stream_id,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            uint32_t *stream_id);

    virtual int releaseReprocessStream(uint32_t stream_id);

    /** 3A action triggering */
    virtual int triggerAction(uint32_t trigger_id,
            int32_t ext1, int32_t ext2);

    /** Custom tag definitions */
    virtual const char* getVendorSectionName(uint32_t tag);
    virtual const char* getVendorTagName(uint32_t tag);
    virtual int getVendorTagType(uint32_t tag);

    /** Debug methods */

    virtual int dump(int fd);

    /****************************************************************************
     * Camera API callbacks as defined by camera2_device_ops structure. See
     * hardware/libhardware/include/hardware/camera2.h for information on each
     * of these callbacks. Implemented in this class, these callbacks simply
     * dispatch the call into an instance of EmulatedCamera2 class defined in
     * the 'camera_device2' parameter.
     ***************************************************************************/

private:
    /** Input request queue */
    static int set_request_queue_src_ops(const camera2_device_t *,
            const camera2_request_queue_src_ops *queue_src_ops);
    static int notify_request_queue_not_empty(const camera2_device_t *);

    /** Output frame queue */
    static int set_frame_queue_dst_ops(const camera2_device_t *,
            const camera2_frame_queue_dst_ops *queue_dst_ops);

    /** In-progress request management */
    static int get_in_progress_count(const camera2_device_t *);

    static int flush_captures_in_progress(const camera2_device_t *);

    /** Request template creation */
    static int construct_default_request(const camera2_device_t *,
            int request_template,
            camera_metadata_t **request);

    /** Stream management */
    static int allocate_stream(const camera2_device_t *,
            uint32_t width,
            uint32_t height,
            int format,
            const camera2_stream_ops_t *stream_ops,
            uint32_t *stream_id,
            uint32_t *format_actual,
            uint32_t *usage,
            uint32_t *max_buffers);

    static int register_stream_buffers(const camera2_device_t *,
            uint32_t stream_id,
            int num_buffers,
            buffer_handle_t *buffers);

    static int release_stream(const camera2_device_t *,
            uint32_t stream_id);

    static int allocate_reprocess_stream(const camera2_device_t *,
            uint32_t width,
            uint32_t height,
            uint32_t format,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            uint32_t *stream_id,
            uint32_t *consumer_usage,
            uint32_t *max_buffers);

    static int allocate_reprocess_stream_from_stream(const camera2_device_t *,
            uint32_t output_stream_id,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            uint32_t *stream_id);

    static int release_reprocess_stream(const camera2_device_t *,
            uint32_t stream_id);

    /** 3A triggers */
    static int trigger_action(const camera2_device_t *,
            uint32_t trigger_id,
            int ext1,
            int ext2);

    /** Notifications to application */
    static int set_notify_callback(const camera2_device_t *,
            camera2_notify_callback notify_cb,
            void *user);

    /** Vendor metadata registration */
    static int get_metadata_vendor_tag_ops(const camera2_device_t *,
            vendor_tag_query_ops_t **ops);
    // for get_metadata_vendor_tag_ops
    static const char* get_camera_vendor_section_name(
            const vendor_tag_query_ops_t *,
            uint32_t tag);
    static const char* get_camera_vendor_tag_name(
            const vendor_tag_query_ops_t *,
            uint32_t tag);
    static int get_camera_vendor_tag_type(
            const vendor_tag_query_ops_t *,
            uint32_t tag);

    static int dump(const camera2_device_t *, int fd);

    /** For hw_device_t ops */
    static int close(struct hw_device_t* device);

    /****************************************************************************
     * Data members shared with implementations
     ***************************************************************************/
protected:
    /** Mutex for calls through camera2 device interface */
    Mutex mMutex;

    // NOTE(review): exact semantics of this flag are not visible in this
    // header — confirm against the implementation before relying on it.
    bool mStatusPresent;

    // Framework-provided queue ops, installed via the static callbacks above.
    const camera2_request_queue_src_ops *mRequestQueueSrc;
    const camera2_frame_queue_dst_ops *mFrameQueueDst;

    // vendor_tag_query_ops extended with a back-pointer to the owning camera
    // so the static get_camera_vendor_* callbacks can recover 'this'.
    struct TagOps : public vendor_tag_query_ops {
        EmulatedCamera2 *parent;
    };
    TagOps mVendorTagOps;

    /* Delivers a notification message to the framework via mNotifyCb. */
    void sendNotification(int32_t msgType,
            int32_t ext1, int32_t ext2, int32_t ext3);

    /****************************************************************************
     * Data members
     ***************************************************************************/
private:
    // camera2_device_ops vtable shared by all instances.
    static camera2_device_ops_t sDeviceOps;
    // Notification callback and its opaque user pointer set by the framework.
    camera2_notify_callback mNotifyCb;
    void* mNotifyUserPtr;
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA2_H */
|
||||
271
android/camera/EmulatedCamera3.cpp
Normal file
271
android/camera/EmulatedCamera3.cpp
Normal file
|
|
@ -0,0 +1,271 @@
|
|||
/*
|
||||
* Copyright (C) 2013 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Contains implementation of a class EmulatedCamera that encapsulates
|
||||
* functionality common to all version 3.0 emulated camera devices. Instances
|
||||
* of this class (for each emulated camera) are created during the construction
|
||||
* of the EmulatedCameraFactory instance. This class serves as an entry point
|
||||
* for all camera API calls that defined by camera3_device_ops_t API.
|
||||
*/
|
||||
|
||||
//#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera3_Camera"
|
||||
#include <cutils/log.h>
|
||||
|
||||
#include "EmulatedCamera3.h"
|
||||
#include "system/camera_metadata.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/**
 * Constructs EmulatedCamera3 instance.
 * Param:
 *  cameraId - Zero based camera identifier, which is an index of the camera
 *      instance in camera factory's array.
 *  module - Emulated camera HAL module descriptor.
 */
EmulatedCamera3::EmulatedCamera3(int cameraId,
        struct hw_module_t* module):
        EmulatedBaseCamera(cameraId,
                CAMERA_DEVICE_API_VERSION_3_3,
                &common,
                module),
        mStatus(STATUS_ERROR)
{
    /* Wire the generic hw_device_t close entry point and the camera3 ops
     * vtable; both dispatch back into this object. */
    common.close = EmulatedCamera3::close;
    ops = &sDeviceOps;

    /* No framework callbacks are available until initializeDevice(). */
    mCallbackOps = NULL;

}

/* Destructs EmulatedCamera3 instance. */
EmulatedCamera3::~EmulatedCamera3() {
}
|
||||
|
||||
/****************************************************************************
|
||||
* Abstract API
|
||||
***************************************************************************/
|
||||
|
||||
/****************************************************************************
|
||||
* Public API
|
||||
***************************************************************************/
|
||||
|
||||
/* One-time start-up initialization. Moves the device out of the
 * construction-time STATUS_ERROR state into STATUS_CLOSED, after which
 * connectCamera() may open it. */
status_t EmulatedCamera3::Initialize() {
    ALOGV("%s", __FUNCTION__);

    mStatus = STATUS_CLOSED;
    return NO_ERROR;
}
|
||||
|
||||
/****************************************************************************
|
||||
* Camera API implementation
|
||||
***************************************************************************/
|
||||
|
||||
/* Opens the camera in response to hw_module_methods_t::open.
 * Hands out the embedded hw_device_t and transitions CLOSED -> OPEN.
 * Returns BAD_VALUE for a NULL out-pointer and INVALID_OPERATION when the
 * device is not currently closed. */
status_t EmulatedCamera3::connectCamera(hw_device_t** device) {
    ALOGV("%s", __FUNCTION__);
    if (device == NULL) return BAD_VALUE;

    if (mStatus != STATUS_CLOSED) {
        ALOGE("%s: Trying to open a camera in state %d!",
                __FUNCTION__, mStatus);
        return INVALID_OPERATION;
    }

    /* The handle given to the framework is this object's camera3_device
     * base subobject ('common'). */
    *device = &common;
    mStatus = STATUS_OPEN;
    return NO_ERROR;
}
|
||||
|
||||
/* Closes the camera device. Always succeeds and returns the device to
 * STATUS_CLOSED, regardless of the state it was in. */
status_t EmulatedCamera3::closeCamera() {
    mStatus = STATUS_CLOSED;
    return NO_ERROR;
}

/* Fills in static camera information; delegates to the base class. */
status_t EmulatedCamera3::getCameraInfo(struct camera_info* info) {
    return EmulatedBaseCamera::getCameraInfo(info);
}
|
||||
|
||||
/****************************************************************************
|
||||
* Camera Device API implementation.
|
||||
* These methods are called from the camera API callback routines.
|
||||
***************************************************************************/
|
||||
|
||||
/* Per-open initialization (camera3 'initialize' call from the framework).
 * Stores the framework callback table and transitions OPEN -> READY.
 * Returns BAD_VALUE for a NULL callback table, INVALID_OPERATION when the
 * device has not just been opened. */
status_t EmulatedCamera3::initializeDevice(
        const camera3_callback_ops *callbackOps) {
    if (callbackOps == NULL) {
        ALOGE("%s: NULL callback ops provided to HAL!",
                __FUNCTION__);
        return BAD_VALUE;
    }

    if (mStatus != STATUS_OPEN) {
        ALOGE("%s: Trying to initialize a camera in state %d!",
                __FUNCTION__, mStatus);
        return INVALID_OPERATION;
    }

    mCallbackOps = callbackOps;
    mStatus = STATUS_READY;

    return NO_ERROR;
}
|
||||
|
||||
/* Default stub: stream configuration must be provided by a subclass. */
status_t EmulatedCamera3::configureStreams(
        camera3_stream_configuration *streamList) {
    ALOGE("%s: Not implemented", __FUNCTION__);
    return INVALID_OPERATION;
}

/* Default stub: buffer registration must be provided by a subclass. */
status_t EmulatedCamera3::registerStreamBuffers(
        const camera3_stream_buffer_set *bufferSet) {
    ALOGE("%s: Not implemented", __FUNCTION__);
    return INVALID_OPERATION;
}

/* Default stub: request templates must be provided by a subclass.
 * Returns NULL to signal the absence of a template. */
const camera_metadata_t* EmulatedCamera3::constructDefaultRequestSettings(
        int type) {
    ALOGE("%s: Not implemented", __FUNCTION__);
    return NULL;
}

/* Default stub: capture processing must be provided by a subclass. */
status_t EmulatedCamera3::processCaptureRequest(
        camera3_capture_request *request) {
    ALOGE("%s: Not implemented", __FUNCTION__);
    return INVALID_OPERATION;
}

/* Default stub: flushing in-flight captures must be provided by a subclass. */
status_t EmulatedCamera3::flush() {
    ALOGE("%s: Not implemented", __FUNCTION__);
    return INVALID_OPERATION;
}

/** Debug methods */

/* Default stub: subclasses may dump their state to the given fd. */
void EmulatedCamera3::dump(int fd) {
    ALOGE("%s: Not implemented", __FUNCTION__);
    return;
}
|
||||
|
||||
/****************************************************************************
|
||||
* Protected API. Callbacks to the framework.
|
||||
***************************************************************************/
|
||||
|
||||
/* Forwards a completed capture result to the framework.
 * NOTE(review): assumes initializeDevice() has already stored mCallbackOps;
 * calling this on an uninitialized device would dereference NULL — confirm
 * callers guarantee the READY state. */
void EmulatedCamera3::sendCaptureResult(camera3_capture_result_t *result) {
    mCallbackOps->process_capture_result(mCallbackOps, result);
}

/* Forwards an asynchronous notification (error/shutter) to the framework.
 * Same mCallbackOps precondition as sendCaptureResult(). */
void EmulatedCamera3::sendNotify(camera3_notify_msg_t *msg) {
    mCallbackOps->notify(mCallbackOps, msg);
}
|
||||
|
||||
/****************************************************************************
|
||||
* Private API.
|
||||
***************************************************************************/
|
||||
|
||||
/****************************************************************************
|
||||
* Camera API callbacks as defined by camera3_device_ops structure. See
|
||||
* hardware/libhardware/include/hardware/camera3.h for information on each
|
||||
* of these callbacks. Implemented in this class, these callbacks simply
|
||||
* dispatch the call into an instance of EmulatedCamera3 class defined by the
|
||||
* 'camera_device3' parameter, or set a member value in the same.
|
||||
***************************************************************************/
|
||||
|
||||
/* Recovers the EmulatedCamera3 that owns a HAL camera3_device handle.
 * Valid because EmulatedCamera3 derives from camera3_device, so the handle
 * handed to the framework is a pointer into the wrapper object itself. */
EmulatedCamera3* getInstance(const camera3_device_t *d) {
    return const_cast<EmulatedCamera3*>(static_cast<const EmulatedCamera3*>(d));
}
|
||||
|
||||
int EmulatedCamera3::initialize(const struct camera3_device *d,
|
||||
const camera3_callback_ops_t *callback_ops) {
|
||||
EmulatedCamera3* ec = getInstance(d);
|
||||
return ec->initializeDevice(callback_ops);
|
||||
}
|
||||
|
||||
int EmulatedCamera3::configure_streams(const struct camera3_device *d,
|
||||
camera3_stream_configuration_t *stream_list) {
|
||||
EmulatedCamera3* ec = getInstance(d);
|
||||
return ec->configureStreams(stream_list);
|
||||
}
|
||||
|
||||
int EmulatedCamera3::register_stream_buffers(
|
||||
const struct camera3_device *d,
|
||||
const camera3_stream_buffer_set_t *buffer_set) {
|
||||
EmulatedCamera3* ec = getInstance(d);
|
||||
return ec->registerStreamBuffers(buffer_set);
|
||||
}
|
||||
|
||||
int EmulatedCamera3::process_capture_request(
|
||||
const struct camera3_device *d,
|
||||
camera3_capture_request_t *request) {
|
||||
EmulatedCamera3* ec = getInstance(d);
|
||||
return ec->processCaptureRequest(request);
|
||||
}
|
||||
|
||||
const camera_metadata_t* EmulatedCamera3::construct_default_request_settings(
|
||||
const camera3_device_t *d, int type) {
|
||||
EmulatedCamera3* ec = getInstance(d);
|
||||
return ec->constructDefaultRequestSettings(type);
|
||||
}
|
||||
|
||||
void EmulatedCamera3::dump(const camera3_device_t *d, int fd) {
|
||||
EmulatedCamera3* ec = getInstance(d);
|
||||
ec->dump(fd);
|
||||
}
|
||||
|
||||
int EmulatedCamera3::flush(const camera3_device_t *d) {
|
||||
EmulatedCamera3* ec = getInstance(d);
|
||||
return ec->flush();
|
||||
}
|
||||
|
||||
/* hw_device_t::close entry point. The hw_device_t handed out by
 * connectCamera() is the camera3_device embedded in this object, so the
 * cast chain recovers the owning EmulatedCamera3.
 * NOTE(review): 'ec' can only be NULL here when 'device' itself is NULL —
 * the casts never produce NULL from a non-NULL pointer. */
int EmulatedCamera3::close(struct hw_device_t* device) {
    EmulatedCamera3* ec =
            static_cast<EmulatedCamera3*>(
                reinterpret_cast<camera3_device_t*>(device) );
    if (ec == NULL) {
        ALOGE("%s: Unexpected NULL camera3 device", __FUNCTION__);
        return BAD_VALUE;
    }
    return ec->closeCamera();
}
|
||||
|
||||
/* camera3_device_ops vtable handed to the framework. Positional
 * initialization: entries must stay in the declaration order of
 * camera3_device_ops_t in hardware/camera3.h. */
camera3_device_ops_t EmulatedCamera3::sDeviceOps = {
    EmulatedCamera3::initialize,
    EmulatedCamera3::configure_streams,
    /* DEPRECATED: register_stream_buffers */ nullptr,
    EmulatedCamera3::construct_default_request_settings,
    EmulatedCamera3::process_capture_request,
    /* DEPRECATED: get_metadata_vendor_tag_ops */ nullptr,
    EmulatedCamera3::dump,
    EmulatedCamera3::flush
};
|
||||
|
||||
/* Human-readable names for AvailableCapabilities. The array is sized by
 * NUM_CAPABILITIES, so entries must match the enum in order and count. */
const char* EmulatedCamera3::sAvailableCapabilitiesStrings[NUM_CAPABILITIES] = {
    "BACKWARD_COMPATIBLE",
    "MANUAL_SENSOR",
    "MANUAL_POST_PROCESSING",
    "RAW",
    "PRIVATE_REPROCESSING",
    "READ_SENSOR_SETTINGS",
    "BURST_CAPTURE",
    "YUV_REPROCESSING",
    "DEPTH_OUTPUT",
    "CONSTRAINED_HIGH_SPEED_VIDEO",
    "FULL_LEVEL"
};
|
||||
|
||||
}; /* namespace android */
|
||||
203
android/camera/EmulatedCamera3.h
Normal file
203
android/camera/EmulatedCamera3.h
Normal file
|
|
@ -0,0 +1,203 @@
|
|||
/*
|
||||
* Copyright (C) 2013 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA3_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA3_H
|
||||
|
||||
/**
|
||||
* Contains declaration of a class EmulatedCamera that encapsulates
|
||||
* functionality common to all version 3.0 emulated camera devices. Instances
|
||||
* of this class (for each emulated camera) are created during the construction
|
||||
* of the EmulatedCameraFactory instance. This class serves as an entry point
|
||||
* for all camera API calls that defined by camera3_device_ops_t API.
|
||||
*/
|
||||
|
||||
#include "hardware/camera3.h"
|
||||
#include "system/camera_metadata.h"
|
||||
#include "EmulatedBaseCamera.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/**
 * Encapsulates functionality common to all version 3.0 emulated camera devices
 *
 * Note that EmulatedCameraFactory instantiates an object of this class just
 * once, when EmulatedCameraFactory instance gets constructed. Connection to /
 * disconnection from the actual camera device is handled by calls to
 * connectDevice(), and closeCamera() methods of this class that are invoked in
 * response to hw_module_methods_t::open, and camera_device::close callbacks.
 */
class EmulatedCamera3 : public camera3_device, public EmulatedBaseCamera {
public:
    /* Constructs EmulatedCamera3 instance.
     * Param:
     *  cameraId - Zero based camera identifier, which is an index of the camera
     *      instance in camera factory's array.
     *  module - Emulated camera HAL module descriptor.
     */
    EmulatedCamera3(int cameraId,
            struct hw_module_t* module);

    /* Destructs EmulatedCamera3 instance. */
    virtual ~EmulatedCamera3();

    /* List of all defined capabilities plus useful HW levels */
    enum AvailableCapabilities {
        BACKWARD_COMPATIBLE,
        MANUAL_SENSOR,
        MANUAL_POST_PROCESSING,
        RAW,
        PRIVATE_REPROCESSING,
        READ_SENSOR_SETTINGS,
        BURST_CAPTURE,
        YUV_REPROCESSING,
        DEPTH_OUTPUT,
        CONSTRAINED_HIGH_SPEED_VIDEO,
        // Levels
        FULL_LEVEL,

        NUM_CAPABILITIES
    };

    // Char strings for above enum, with size NUM_CAPABILITIES; must stay in
    // the same order as the enum values.
    static const char *sAvailableCapabilitiesStrings[];

    /****************************************************************************
     * Abstract API
     ***************************************************************************/

public:

    /****************************************************************************
     * Public API
     ***************************************************************************/

public:
    /* One-time start-up initialization; moves mStatus to STATUS_CLOSED. */
    virtual status_t Initialize();

    /****************************************************************************
     * Camera module API and generic hardware device API implementation
     ***************************************************************************/

public:
    /* Opens the device (hw_module_methods_t::open); CLOSED -> OPEN. */
    virtual status_t connectCamera(hw_device_t** device);

    /* Closes the device; returns it to STATUS_CLOSED. */
    virtual status_t closeCamera();

    /* Fills in static camera information. */
    virtual status_t getCameraInfo(struct camera_info* info);

    /****************************************************************************
     * Camera API implementation.
     * These methods are called from the camera API callback routines.
     ***************************************************************************/

protected:

    /* Stores the framework callback table; OPEN -> READY. */
    virtual status_t initializeDevice(
        const camera3_callback_ops *callbackOps);

    virtual status_t configureStreams(
        camera3_stream_configuration *streamList);

    virtual status_t registerStreamBuffers(
        const camera3_stream_buffer_set *bufferSet);

    virtual const camera_metadata_t* constructDefaultRequestSettings(
        int type);

    virtual status_t processCaptureRequest(camera3_capture_request *request);

    virtual status_t flush();

    /** Debug methods */

    virtual void dump(int fd);

    /****************************************************************************
     * Camera API callbacks as defined by camera3_device_ops structure. See
     * hardware/libhardware/include/hardware/camera3.h for information on each
     * of these callbacks. Implemented in this class, these callbacks simply
     * dispatch the call into an instance of EmulatedCamera3 class defined in
     * the 'camera_device3' parameter.
     ***************************************************************************/

private:

    /** Startup */
    static int initialize(const struct camera3_device *,
            const camera3_callback_ops_t *callback_ops);

    /** Stream configuration and buffer registration */

    static int configure_streams(const struct camera3_device *,
            camera3_stream_configuration_t *stream_list);

    static int register_stream_buffers(const struct camera3_device *,
            const camera3_stream_buffer_set_t *buffer_set);

    /** Template request settings provision */

    static const camera_metadata_t* construct_default_request_settings(
            const struct camera3_device *, int type);

    /** Submission of capture requests to HAL */

    static int process_capture_request(const struct camera3_device *,
            camera3_capture_request_t *request);

    static void dump(const camera3_device_t *, int fd);

    static int flush(const camera3_device_t *);

    /** For hw_device_t ops */
    static int close(struct hw_device_t* device);

    /****************************************************************************
     * Data members shared with implementations
     ***************************************************************************/
protected:

    /* Device lifecycle state machine; transitions are driven by the
     * connect/initialize/close entry points above. */
    enum {
        // State at construction time, and after a device operation error
        STATUS_ERROR = 0,
        // State after startup-time init and after device instance close
        STATUS_CLOSED,
        // State after being opened, before device instance init
        STATUS_OPEN,
        // State after device instance initialization
        STATUS_READY,
        // State while actively capturing data
        STATUS_ACTIVE
    } mStatus;

    /**
     * Callbacks back to the framework
     */

    void sendCaptureResult(camera3_capture_result_t *result);
    void sendNotify(camera3_notify_msg_t *msg);

    /****************************************************************************
     * Data members
     ***************************************************************************/
private:
    // camera3_device_ops vtable shared by all instances.
    static camera3_device_ops_t sDeviceOps;
    // Framework callback table; NULL until initializeDevice() succeeds.
    const camera3_callback_ops_t *mCallbackOps;
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA3_H */
|
||||
60
android/camera/EmulatedCameraCommon.h
Executable file
60
android/camera/EmulatedCameraCommon.h
Executable file
|
|
@ -0,0 +1,60 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_COMMON_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_COMMON_H
|
||||
|
||||
/*
|
||||
* Contains common declarations that are used across the camera emulation.
|
||||
*/
|
||||
|
||||
#include <linux/videodev2.h>
|
||||
#include <hardware/camera.h>
|
||||
|
||||
/* A helper class that tracks a routine execution.
 * Basically, it dumps an entry message in its constructor, and an exit message
 * in its destructor. Use the LOGRE() macro (declared below) to create instances
 * of this class at the beginning of the tracked routines / methods.
 */
|
||||
/* RAII tracer: logs on construction and again on destruction, bracketing
 * the lifetime of the enclosing scope. */
class HWERoutineTracker {
public:
    /* Constructor that prints an "entry" trace message. */
    explicit HWERoutineTracker(const char* name)
            : mName(name) {
        ALOGV("Entering %s", mName);
    }

    /* Destructor that prints a "leave" trace message. */
    ~HWERoutineTracker() {
        ALOGV("Leaving %s", mName);
    }

private:
    /* Stores the routine name.
     * NOTE(review): only the pointer is kept — the string must outlive this
     * object (string literals / __FUNCTION__ are fine). */
    const char* mName;
};
|
||||
|
||||
/* Logs an execution of a routine / method.
 *
 * Uses a two-step token paste so that __LINE__ is macro-expanded *before*
 * concatenation. Pasting __LINE__ directly (hwertracker_##__LINE__) produces
 * the literal identifier 'hwertracker___LINE__' for every use, because ##
 * suppresses argument expansion — two LOGRE() calls in one scope would then
 * collide on the same variable name.
 */
#define HWE_CONCAT_(x, y) x##y
#define HWE_CONCAT(x, y) HWE_CONCAT_(x, y)
#define LOGRE() HWERoutineTracker HWE_CONCAT(hwertracker_, __LINE__)(__FUNCTION__)
|
||||
|
||||
/*
 * min / max macros
 *
 * NOTE: function-like macros evaluate each argument more than once — do not
 * pass expressions with side effects (e.g. min(i++, j)).
 */

#define min(a,b) (((a) < (b)) ? (a) : (b))
#define max(a,b) (((a) > (b)) ? (a) : (b))
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA_COMMON_H */
|
||||
413
android/camera/EmulatedCameraDevice.cpp
Executable file
413
android/camera/EmulatedCameraDevice.cpp
Executable file
|
|
@ -0,0 +1,413 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of an abstract class EmulatedCameraDevice that defines
|
||||
* functionality expected from an emulated physical camera device:
|
||||
* - Obtaining and setting camera parameters
|
||||
* - Capturing frames
|
||||
* - Streaming video
|
||||
* - etc.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_Device"
|
||||
#include <cutils/log.h>
|
||||
#include <sys/select.h>
|
||||
#include <cmath>
|
||||
#include "EmulatedCameraDevice.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Divisor applied to the EV value in setExposureCompensation():
 * the gain is computed as 2^(ev / GAMMA_CORRECTION), i.e. EV steps are
 * compressed by the standard 2.2 display gamma. */
const float GAMMA_CORRECTION = 2.2f;

/* Constructs the abstract emulated device in the ECDS_CONSTRUCTED state.
 * Param:
 *  camera_hal - Emulated camera HAL object that owns this device.
 * No frame buffer is allocated yet (mCurrentFrame is NULL) and no white
 * balance scale is selected (mWhiteBalanceScale is NULL). */
EmulatedCameraDevice::EmulatedCameraDevice(EmulatedCamera* camera_hal)
    : mObjectLock(),
      mCurFrameTimestamp(0),
      mCameraHAL(camera_hal),
      mCurrentFrame(NULL),
      mExposureCompensation(1.0f),
      mWhiteBalanceScale(NULL),
      mSupportedWhiteBalanceScale(),
      mState(ECDS_CONSTRUCTED)
{
}
|
||||
|
||||
/* Destroys the device, releasing the current frame buffer and every
 * per-mode white-balance scale array registered through
 * initializeWhiteBalanceModes(). */
EmulatedCameraDevice::~EmulatedCameraDevice()
{
    ALOGV("EmulatedCameraDevice destructor");
    /* delete[] on a NULL pointer is a no-op, so no guards are needed. */
    delete[] mCurrentFrame;
    for (size_t i = 0; i < mSupportedWhiteBalanceScale.size(); ++i) {
        delete[] mSupportedWhiteBalanceScale.valueAt(i);
    }
}
|
||||
|
||||
/****************************************************************************
|
||||
* Emulated camera device public API
|
||||
***************************************************************************/
|
||||
|
||||
/* Commonly-shared device initialization: creates the worker thread object
 * and moves the device into the ECDS_INITIALIZED state. Idempotent — a
 * repeated call only warns and returns NO_ERROR. */
status_t EmulatedCameraDevice::Initialize()
{
    if (isInitialized()) {
        ALOGW("%s: Emulated camera device is already initialized: mState = %d",
              __FUNCTION__, mState);
        return NO_ERROR;
    }

    /* Instantiate worker thread object. */
    mWorkerThread = new WorkerThread(this);
    if (getWorkerThread() == NULL) {
        /* NOTE(review): with a throwing operator new this branch is dead;
         * presumably the build uses non-throwing new — confirm toolchain. */
        ALOGE("%s: Unable to instantiate worker thread object", __FUNCTION__);
        return ENOMEM;
    }

    mState = ECDS_INITIALIZED;

    return NO_ERROR;
}
|
||||
|
||||
/* Begins delivering captured frames via the worker thread routine.
 * Param:
 *  one_burst - presumably limits delivery to a single burst; exact
 *      semantics live in the worker thread — TODO confirm.
 * Fails with EINVAL if the device has not been started. */
status_t EmulatedCameraDevice::startDeliveringFrames(bool one_burst)
{
    ALOGV("%s", __FUNCTION__);

    if (!isStarted()) {
        ALOGE("%s: Device is not started", __FUNCTION__);
        return EINVAL;
    }

    /* Frames will be delivered from the thread routine. */
    const status_t res = startWorkerThread(one_burst);
    ALOGE_IF(res != NO_ERROR, "%s: startWorkerThread failed", __FUNCTION__);
    return res;
}
|
||||
|
||||
/* Stops frame delivery by stopping the worker thread.
 * A stop on a device that is not started is treated as success (warn only).
 * Returns the status of stopWorkerThread(). */
status_t EmulatedCameraDevice::stopDeliveringFrames()
{
    ALOGV("%s", __FUNCTION__);

    if (!isStarted()) {
        ALOGW("%s: Device is not started", __FUNCTION__);
        return NO_ERROR;
    }

    const status_t res = stopWorkerThread();
    /* Fixed copy-paste in the log message: this path stops, not starts. */
    ALOGE_IF(res != NO_ERROR, "%s: stopWorkerThread failed", __FUNCTION__);
    return res;
}
|
||||
|
||||
void EmulatedCameraDevice::setExposureCompensation(const float ev) {
|
||||
ALOGV("%s", __FUNCTION__);
|
||||
|
||||
if (!isStarted()) {
|
||||
ALOGW("%s: Fake camera device is not started.", __FUNCTION__);
|
||||
}
|
||||
|
||||
mExposureCompensation = std::pow(2.0f, ev / GAMMA_CORRECTION);
|
||||
ALOGV("New exposure compensation is %f", mExposureCompensation);
|
||||
}
|
||||
|
||||
void EmulatedCameraDevice::initializeWhiteBalanceModes(const char* mode,
|
||||
const float r_scale,
|
||||
const float b_scale) {
|
||||
ALOGV("%s with %s, %f, %f", __FUNCTION__, mode, r_scale, b_scale);
|
||||
float* value = new float[3];
|
||||
value[0] = r_scale; value[1] = 1.0f; value[2] = b_scale;
|
||||
mSupportedWhiteBalanceScale.add(String8(mode), value);
|
||||
}
|
||||
|
||||
void EmulatedCameraDevice::setWhiteBalanceMode(const char* mode) {
|
||||
ALOGV("%s with white balance %s", __FUNCTION__, mode);
|
||||
mWhiteBalanceScale =
|
||||
mSupportedWhiteBalanceScale.valueFor(String8(mode));
|
||||
}
|
||||
|
||||
/* Computes the pixel value after adjusting the white balance to the current
|
||||
* one. The input the y, u, v channel of the pixel and the adjusted value will
|
||||
* be stored in place. The adjustment is done in RGB space.
|
||||
*/
|
||||
void EmulatedCameraDevice::changeWhiteBalance(uint8_t& y,
|
||||
uint8_t& u,
|
||||
uint8_t& v) const {
|
||||
float r_scale = mWhiteBalanceScale[0];
|
||||
float b_scale = mWhiteBalanceScale[2];
|
||||
int r = static_cast<float>(YUV2R(y, u, v)) / r_scale;
|
||||
int g = YUV2G(y, u, v);
|
||||
int b = static_cast<float>(YUV2B(y, u, v)) / b_scale;
|
||||
|
||||
y = RGB2Y(r, g, b);
|
||||
u = RGB2U(r, g, b);
|
||||
v = RGB2V(r, g, b);
|
||||
}
|
||||
|
||||
status_t EmulatedCameraDevice::getCurrentPreviewFrame(void* buffer)
|
||||
{
|
||||
if (!isStarted()) {
|
||||
ALOGE("%s: Device is not started", __FUNCTION__);
|
||||
return EINVAL;
|
||||
}
|
||||
if (mCurrentFrame == NULL || buffer == NULL) {
|
||||
ALOGE("%s: No framebuffer", __FUNCTION__);
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
/* In emulation the framebuffer is never RGB. */
|
||||
switch (mPixelFormat) {
|
||||
case V4L2_PIX_FMT_YVU420:
|
||||
YV12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
|
||||
return NO_ERROR;
|
||||
case V4L2_PIX_FMT_YUV420:
|
||||
YU12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
|
||||
return NO_ERROR;
|
||||
case V4L2_PIX_FMT_NV21:
|
||||
NV21ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
|
||||
return NO_ERROR;
|
||||
case V4L2_PIX_FMT_NV12:
|
||||
NV12ToRGB32(mCurrentFrame, buffer, mFrameWidth, mFrameHeight);
|
||||
return NO_ERROR;
|
||||
|
||||
default:
|
||||
ALOGE("%s: Unknown pixel format %.4s",
|
||||
__FUNCTION__, reinterpret_cast<const char*>(&mPixelFormat));
|
||||
return EINVAL;
|
||||
}
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Emulated camera device private API
|
||||
***************************************************************************/
|
||||
|
||||
status_t EmulatedCameraDevice::commonStartDevice(int width,
|
||||
int height,
|
||||
uint32_t pix_fmt)
|
||||
{
|
||||
/* Validate pixel format, and calculate framebuffer size at the same time. */
|
||||
switch (pix_fmt) {
|
||||
case V4L2_PIX_FMT_YVU420:
|
||||
case V4L2_PIX_FMT_YUV420:
|
||||
case V4L2_PIX_FMT_NV21:
|
||||
case V4L2_PIX_FMT_NV12:
|
||||
mFrameBufferSize = (width * height * 12) / 8;
|
||||
break;
|
||||
|
||||
default:
|
||||
ALOGE("%s: Unknown pixel format %.4s",
|
||||
__FUNCTION__, reinterpret_cast<const char*>(&pix_fmt));
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
/* Cache framebuffer info. */
|
||||
mFrameWidth = width;
|
||||
mFrameHeight = height;
|
||||
mPixelFormat = pix_fmt;
|
||||
mTotalPixels = width * height;
|
||||
|
||||
/* Allocate framebuffer. */
|
||||
mCurrentFrame = new uint8_t[mFrameBufferSize];
|
||||
if (mCurrentFrame == NULL) {
|
||||
ALOGE("%s: Unable to allocate framebuffer", __FUNCTION__);
|
||||
return ENOMEM;
|
||||
}
|
||||
ALOGV("%s: Allocated %p %zu bytes for %d pixels in %.4s[%dx%d] frame",
|
||||
__FUNCTION__, mCurrentFrame, mFrameBufferSize, mTotalPixels,
|
||||
reinterpret_cast<const char*>(&mPixelFormat), mFrameWidth, mFrameHeight);
|
||||
return NO_ERROR;
|
||||
}
|
||||
|
||||
void EmulatedCameraDevice::commonStopDevice()
|
||||
{
|
||||
mFrameWidth = mFrameHeight = mTotalPixels = 0;
|
||||
mPixelFormat = 0;
|
||||
|
||||
if (mCurrentFrame != NULL) {
|
||||
delete[] mCurrentFrame;
|
||||
mCurrentFrame = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Worker thread management.
|
||||
***************************************************************************/
|
||||
|
||||
status_t EmulatedCameraDevice::startWorkerThread(bool one_burst)
|
||||
{
|
||||
ALOGV("%s", __FUNCTION__);
|
||||
|
||||
if (!isInitialized()) {
|
||||
ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
const status_t res = getWorkerThread()->startThread(one_burst);
|
||||
ALOGE_IF(res != NO_ERROR, "%s: Unable to start worker thread", __FUNCTION__);
|
||||
return res;
|
||||
}
|
||||
|
||||
status_t EmulatedCameraDevice::stopWorkerThread()
|
||||
{
|
||||
ALOGV("%s", __FUNCTION__);
|
||||
|
||||
if (!isInitialized()) {
|
||||
ALOGE("%s: Emulated camera device is not initialized", __FUNCTION__);
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
const status_t res = getWorkerThread()->stopThread();
|
||||
ALOGE_IF(res != NO_ERROR, "%s: Unable to stop worker thread", __FUNCTION__);
|
||||
return res;
|
||||
}
|
||||
|
||||
bool EmulatedCameraDevice::inWorkerThread()
|
||||
{
|
||||
/* This will end the thread loop, and will terminate the thread. Derived
|
||||
* classes must override this method. */
|
||||
return false;
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Worker thread implementation.
|
||||
***************************************************************************/
|
||||
|
||||
status_t EmulatedCameraDevice::WorkerThread::readyToRun()
|
||||
{
|
||||
ALOGV("Starting emulated camera device worker thread...");
|
||||
|
||||
ALOGW_IF(mThreadControl >= 0 || mControlFD >= 0,
|
||||
"%s: Thread control FDs are opened", __FUNCTION__);
|
||||
/* Create a pair of FDs that would be used to control the thread. */
|
||||
int thread_fds[2];
|
||||
status_t ret;
|
||||
Mutex::Autolock lock(mCameraDevice->mObjectLock);
|
||||
if (pipe(thread_fds) == 0) {
|
||||
mThreadControl = thread_fds[1];
|
||||
mControlFD = thread_fds[0];
|
||||
ALOGV("Emulated device's worker thread has been started.");
|
||||
ret = NO_ERROR;
|
||||
} else {
|
||||
ALOGE("%s: Unable to create thread control FDs: %d -> %s",
|
||||
__FUNCTION__, errno, strerror(errno));
|
||||
ret = errno;
|
||||
}
|
||||
|
||||
mSetup.signal();
|
||||
return ret;
|
||||
}
|
||||
|
||||
status_t EmulatedCameraDevice::WorkerThread::stopThread()
|
||||
{
|
||||
ALOGV("Stopping emulated camera device's worker thread...");
|
||||
|
||||
status_t res = EINVAL;
|
||||
|
||||
// Limit the scope of the Autolock
|
||||
{
|
||||
// If thread is running and readyToRun() has not finished running,
|
||||
// then wait until it is done.
|
||||
Mutex::Autolock lock(mCameraDevice->mObjectLock);
|
||||
if (isRunning() && (mThreadControl < 0 || mControlFD < 0)) {
|
||||
mSetup.wait(mCameraDevice->mObjectLock);
|
||||
}
|
||||
}
|
||||
|
||||
if (mThreadControl >= 0) {
|
||||
/* Send "stop" message to the thread loop. */
|
||||
const ControlMessage msg = THREAD_STOP;
|
||||
const int wres =
|
||||
TEMP_FAILURE_RETRY(write(mThreadControl, &msg, sizeof(msg)));
|
||||
if (wres == sizeof(msg)) {
|
||||
/* Stop the thread, and wait till it's terminated. */
|
||||
res = requestExitAndWait();
|
||||
if (res == NO_ERROR) {
|
||||
/* Close control FDs. */
|
||||
if (mThreadControl >= 0) {
|
||||
close(mThreadControl);
|
||||
mThreadControl = -1;
|
||||
}
|
||||
if (mControlFD >= 0) {
|
||||
close(mControlFD);
|
||||
mControlFD = -1;
|
||||
}
|
||||
ALOGV("Emulated camera device's worker thread has been stopped.");
|
||||
} else {
|
||||
ALOGE("%s: requestExitAndWait failed: %d -> %s",
|
||||
__FUNCTION__, res, strerror(-res));
|
||||
}
|
||||
} else {
|
||||
ALOGE("%s: Unable to send THREAD_STOP message: %d -> %s",
|
||||
__FUNCTION__, errno, strerror(errno));
|
||||
res = errno ? errno : EINVAL;
|
||||
}
|
||||
} else {
|
||||
ALOGE("%s: Thread control FDs are not opened", __FUNCTION__);
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
EmulatedCameraDevice::WorkerThread::SelectRes
|
||||
EmulatedCameraDevice::WorkerThread::Select(int fd, int timeout)
|
||||
{
|
||||
fd_set fds[1];
|
||||
struct timeval tv, *tvp = NULL;
|
||||
|
||||
const int fd_num = (fd >= 0) ? max(fd, mControlFD) + 1 :
|
||||
mControlFD + 1;
|
||||
FD_ZERO(fds);
|
||||
FD_SET(mControlFD, fds);
|
||||
if (fd >= 0) {
|
||||
FD_SET(fd, fds);
|
||||
}
|
||||
if (timeout) {
|
||||
tv.tv_sec = timeout / 1000000;
|
||||
tv.tv_usec = timeout % 1000000;
|
||||
tvp = &tv;
|
||||
}
|
||||
int res = TEMP_FAILURE_RETRY(select(fd_num, fds, NULL, NULL, tvp));
|
||||
if (res < 0) {
|
||||
ALOGE("%s: select returned %d and failed: %d -> %s",
|
||||
__FUNCTION__, res, errno, strerror(errno));
|
||||
return ERROR;
|
||||
} else if (res == 0) {
|
||||
/* Timeout. */
|
||||
return TIMEOUT;
|
||||
} else if (FD_ISSET(mControlFD, fds)) {
|
||||
/* A control event. Lets read the message. */
|
||||
ControlMessage msg;
|
||||
res = TEMP_FAILURE_RETRY(read(mControlFD, &msg, sizeof(msg)));
|
||||
if (res != sizeof(msg)) {
|
||||
ALOGE("%s: Unexpected message size %d, or an error %d -> %s",
|
||||
__FUNCTION__, res, errno, strerror(errno));
|
||||
return ERROR;
|
||||
}
|
||||
/* THREAD_STOP is the only message expected here. */
|
||||
if (msg == THREAD_STOP) {
|
||||
ALOGV("%s: THREAD_STOP message is received", __FUNCTION__);
|
||||
return EXIT_THREAD;
|
||||
} else {
|
||||
ALOGE("Unknown worker thread message %d", msg);
|
||||
return ERROR;
|
||||
}
|
||||
} else {
|
||||
/* Must be an FD. */
|
||||
ALOGW_IF(fd < 0 || !FD_ISSET(fd, fds), "%s: Undefined 'select' result",
|
||||
__FUNCTION__);
|
||||
return READY;
|
||||
}
|
||||
}
|
||||
|
||||
}; /* namespace android */
|
||||
546
android/camera/EmulatedCameraDevice.h
Executable file
546
android/camera/EmulatedCameraDevice.h
Executable file
|
|
@ -0,0 +1,546 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H
|
||||
|
||||
/*
|
||||
* Contains declaration of an abstract class EmulatedCameraDevice that defines
|
||||
* functionality expected from an emulated physical camera device:
|
||||
* - Obtaining and setting camera device parameters
|
||||
* - Capturing frames
|
||||
* - Streaming video
|
||||
* - etc.
|
||||
*/
|
||||
|
||||
#include <utils/threads.h>
|
||||
#include <utils/KeyedVector.h>
|
||||
#include <utils/String8.h>
|
||||
#include "EmulatedCameraCommon.h"
|
||||
#include "Converters.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
class EmulatedCamera;
|
||||
|
||||
/* Encapsulates an abstract class EmulatedCameraDevice that defines
|
||||
* functionality expected from an emulated physical camera device:
|
||||
* - Obtaining and setting camera device parameters
|
||||
* - Capturing frames
|
||||
* - Streaming video
|
||||
* - etc.
|
||||
*/
|
||||
class EmulatedCameraDevice {
|
||||
public:
|
||||
/* Constructs EmulatedCameraDevice instance.
|
||||
* Param:
|
||||
* camera_hal - Emulated camera that implements the camera HAL API, and
|
||||
* manages (contains) this object.
|
||||
*/
|
||||
explicit EmulatedCameraDevice(EmulatedCamera* camera_hal);
|
||||
|
||||
/* Destructs EmulatedCameraDevice instance. */
|
||||
virtual ~EmulatedCameraDevice();
|
||||
|
||||
/***************************************************************************
|
||||
* Emulated camera device abstract interface
|
||||
**************************************************************************/
|
||||
|
||||
public:
|
||||
/* Connects to the camera device.
|
||||
* This method must be called on an initialized instance of this class.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status.
|
||||
*/
|
||||
virtual status_t connectDevice() = 0;
|
||||
|
||||
/* Disconnects from the camera device.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status. If this method is
|
||||
* called for already disconnected, or uninitialized instance of this class,
|
||||
* a successful status must be returned from this method. If this method is
|
||||
* called for an instance that is in the "started" state, this method must
|
||||
* return a failure.
|
||||
*/
|
||||
virtual status_t disconnectDevice() = 0;
|
||||
|
||||
/* Starts the camera device.
|
||||
* This method tells the camera device to start capturing frames of the given
|
||||
* dimensions for the given pixel format. Note that this method doesn't start
|
||||
* the delivery of the captured frames to the emulated camera. Call
|
||||
* startDeliveringFrames method to start delivering frames. This method must
|
||||
* be called on a connected instance of this class. If it is called on a
|
||||
* disconnected instance, this method must return a failure.
|
||||
* Param:
|
||||
* width, height - Frame dimensions to use when capturing video frames.
|
||||
* pix_fmt - Pixel format to use when capturing video frames.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status.
|
||||
*/
|
||||
virtual status_t startDevice(int width, int height, uint32_t pix_fmt) = 0;
|
||||
|
||||
/* Stops the camera device.
|
||||
* This method tells the camera device to stop capturing frames. Note that
|
||||
* this method doesn't stop delivering frames to the emulated camera. Always
|
||||
* call stopDeliveringFrames prior to calling this method.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status. If this method is
|
||||
* called for an object that is not capturing frames, or is disconnected,
|
||||
* or is uninitialized, a successful status must be returned from this
|
||||
* method.
|
||||
*/
|
||||
virtual status_t stopDevice() = 0;
|
||||
|
||||
/***************************************************************************
|
||||
* Emulated camera device public API
|
||||
**************************************************************************/
|
||||
|
||||
public:
|
||||
/* Initializes EmulatedCameraDevice instance.
|
||||
* Derived classes should override this method in order to cache static
|
||||
* properties of the physical device (list of supported pixel formats, frame
|
||||
* sizes, etc.) If this method is called on an already initialized instance,
|
||||
* it must return a successful status.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status.
|
||||
*/
|
||||
virtual status_t Initialize();
|
||||
|
||||
/* Initializes the white balance modes parameters.
|
||||
* The parameters are passed by each individual derived camera API to
|
||||
* represent that different camera manufacturers may have different
|
||||
* preferences on the white balance parameters. Green channel in the RGB
|
||||
* color space is fixed to keep the luminance to be reasonably constant.
|
||||
*
|
||||
* Param:
|
||||
* mode the text describing the current white balance mode
|
||||
* r_scale the scale factor for the R channel in RGB space
|
||||
* b_scale the scale factor for the B channel in RGB space.
|
||||
*/
|
||||
void initializeWhiteBalanceModes(const char* mode,
|
||||
const float r_scale,
|
||||
const float b_scale);
|
||||
|
||||
/* Starts delivering frames captured from the camera device.
|
||||
* This method will start the worker thread that would be pulling frames from
|
||||
* the camera device, and will deliver the pulled frames back to the emulated
|
||||
* camera via onNextFrameAvailable callback. This method must be called on a
|
||||
* connected instance of this class with a started camera device. If it is
|
||||
* called on a disconnected instance, or camera device has not been started,
|
||||
* this method must return a failure.
|
||||
* Param:
|
||||
* one_burst - Controls how many frames should be delivered. If this
|
||||
* parameter is 'true', only one captured frame will be delivered to the
|
||||
* emulated camera. If this parameter is 'false', frames will keep
|
||||
* coming until stopDeliveringFrames method is called. Typically, this
|
||||
* parameter is set to 'true' only in order to obtain a single frame
|
||||
* that will be used as a "picture" in takePicture method of the
|
||||
* emulated camera.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status.
|
||||
*/
|
||||
virtual status_t startDeliveringFrames(bool one_burst);
|
||||
|
||||
/* Stops delivering frames captured from the camera device.
|
||||
* This method will stop the worker thread started by startDeliveringFrames.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status.
|
||||
*/
|
||||
virtual status_t stopDeliveringFrames();
|
||||
|
||||
/* Sets the exposure compensation for the camera device.
|
||||
*/
|
||||
void setExposureCompensation(const float ev);
|
||||
|
||||
/* Sets the white balance mode for the device.
|
||||
*/
|
||||
void setWhiteBalanceMode(const char* mode);
|
||||
|
||||
/* Gets current framebuffer, converted into preview frame format.
|
||||
* This method must be called on a connected instance of this class with a
|
||||
* started camera device. If it is called on a disconnected instance, or
|
||||
* camera device has not been started, this method must return a failure.
|
||||
* Note that this method should be called only after at least one frame has
|
||||
* been captured and delivered. Otherwise it will return garbage in the
|
||||
* preview frame buffer. Typically, this method shuld be called from
|
||||
* onNextFrameAvailable callback.
|
||||
* Param:
|
||||
* buffer - Buffer, large enough to contain the entire preview frame.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status.
|
||||
*/
|
||||
virtual status_t getCurrentPreviewFrame(void* buffer);
|
||||
|
||||
/* Gets width of the frame obtained from the physical device.
|
||||
* Return:
|
||||
* Width of the frame obtained from the physical device. Note that value
|
||||
* returned from this method is valid only in case if camera device has been
|
||||
* started.
|
||||
*/
|
||||
inline int getFrameWidth() const
|
||||
{
|
||||
ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
|
||||
return mFrameWidth;
|
||||
}
|
||||
|
||||
/* Gets height of the frame obtained from the physical device.
|
||||
* Return:
|
||||
* Height of the frame obtained from the physical device. Note that value
|
||||
* returned from this method is valid only in case if camera device has been
|
||||
* started.
|
||||
*/
|
||||
inline int getFrameHeight() const
|
||||
{
|
||||
ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
|
||||
return mFrameHeight;
|
||||
}
|
||||
|
||||
/* Gets byte size of the current frame buffer.
|
||||
* Return:
|
||||
* Byte size of the frame buffer. Note that value returned from this method
|
||||
* is valid only in case if camera device has been started.
|
||||
*/
|
||||
inline size_t getFrameBufferSize() const
|
||||
{
|
||||
ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
|
||||
return mFrameBufferSize;
|
||||
}
|
||||
|
||||
/* Gets number of pixels in the current frame buffer.
|
||||
* Return:
|
||||
* Number of pixels in the frame buffer. Note that value returned from this
|
||||
* method is valid only in case if camera device has been started.
|
||||
*/
|
||||
inline int getPixelNum() const
|
||||
{
|
||||
ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
|
||||
return mTotalPixels;
|
||||
}
|
||||
|
||||
/* Gets pixel format of the frame that camera device streams to this class.
|
||||
* Throughout camera framework, there are three different forms of pixel
|
||||
* format representation:
|
||||
* - Original format, as reported by the actual camera device. Values for
|
||||
* this format are declared in bionic/libc/kernel/common/linux/videodev2.h
|
||||
* - String representation as defined in CameraParameters::PIXEL_FORMAT_XXX
|
||||
* strings in frameworks/base/include/camera/CameraParameters.h
|
||||
* - HAL_PIXEL_FORMAT_XXX format, as defined in system/core/include/system/graphics.h
|
||||
* Since emulated camera device gets its data from the actual device, it gets
|
||||
* pixel format in the original form. And that's the pixel format
|
||||
* representation that will be returned from this method. HAL components will
|
||||
* need to translate value returned from this method to the appropriate form.
|
||||
* This method must be called only on started instance of this class, since
|
||||
* it's applicable only when camera device is ready to stream frames.
|
||||
* Param:
|
||||
* pix_fmt - Upon success contains the original pixel format.
|
||||
* Return:
|
||||
* Current framebuffer's pixel format. Note that value returned from this
|
||||
* method is valid only in case if camera device has been started.
|
||||
*/
|
||||
inline uint32_t getOriginalPixelFormat() const
|
||||
{
|
||||
ALOGE_IF(!isStarted(), "%s: Device is not started", __FUNCTION__);
|
||||
return mPixelFormat;
|
||||
}
|
||||
|
||||
/*
|
||||
* State checkers.
|
||||
*/
|
||||
|
||||
inline bool isInitialized() const {
|
||||
/* Instance is initialized when the worker thread has been successfuly
|
||||
* created (but not necessarily started). */
|
||||
return mWorkerThread.get() != NULL && mState != ECDS_CONSTRUCTED;
|
||||
}
|
||||
inline bool isConnected() const {
|
||||
/* Instance is connected when its status is either"connected", or
|
||||
* "started". */
|
||||
return mState == ECDS_CONNECTED || mState == ECDS_STARTED;
|
||||
}
|
||||
inline bool isStarted() const {
|
||||
return mState == ECDS_STARTED;
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Emulated camera device private API
|
||||
***************************************************************************/
|
||||
protected:
|
||||
/* Performs common validation and calculation of startDevice parameters.
|
||||
* Param:
|
||||
* width, height, pix_fmt - Parameters passed to the startDevice method.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status.
|
||||
*/
|
||||
virtual status_t commonStartDevice(int width, int height, uint32_t pix_fmt);
|
||||
|
||||
/* Performs common cleanup on stopDevice.
|
||||
* This method will undo what commonStartDevice had done.
|
||||
*/
|
||||
virtual void commonStopDevice();
|
||||
|
||||
/** Computes a luminance value after taking the exposure compensation.
|
||||
* value into account.
|
||||
*
|
||||
* Param:
|
||||
* inputY - The input luminance value.
|
||||
* Return:
|
||||
* The luminance value after adjusting the exposure compensation.
|
||||
*/
|
||||
inline uint8_t changeExposure(const uint8_t& inputY) const {
|
||||
return static_cast<uint8_t>(clamp(static_cast<float>(inputY) *
|
||||
mExposureCompensation));
|
||||
}
|
||||
|
||||
/** Computes the pixel value in YUV space after adjusting to the current
|
||||
* white balance mode.
|
||||
*/
|
||||
void changeWhiteBalance(uint8_t& y, uint8_t& u, uint8_t& v) const;
|
||||
|
||||
/****************************************************************************
|
||||
* Worker thread management.
|
||||
* Typicaly when emulated camera device starts capturing frames from the
|
||||
* actual device, it does that in a worker thread created in StartCapturing,
|
||||
* and terminated in StopCapturing. Since this is such a typical scenario,
|
||||
* it makes sence to encapsulate worker thread management in the base class
|
||||
* for all emulated camera devices.
|
||||
***************************************************************************/
|
||||
|
||||
protected:
|
||||
/* Starts the worker thread.
|
||||
* Typically, worker thread is started from startDeliveringFrames method of
|
||||
* this class.
|
||||
* Param:
|
||||
* one_burst - Controls how many times thread loop should run. If this
|
||||
* parameter is 'true', thread routine will run only once If this
|
||||
* parameter is 'false', thread routine will run until stopWorkerThread
|
||||
* method is called. See startDeliveringFrames for more info.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status.
|
||||
*/
|
||||
virtual status_t startWorkerThread(bool one_burst);
|
||||
|
||||
/* Stops the worker thread.
|
||||
* Note that this method will always wait for the worker thread to terminate.
|
||||
* Typically, worker thread is started from stopDeliveringFrames method of
|
||||
* this class.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status.
|
||||
*/
|
||||
virtual status_t stopWorkerThread();
|
||||
|
||||
/* Implementation of the worker thread routine.
|
||||
* In the default implementation of the worker thread routine we simply
|
||||
* return 'false' forcing the thread loop to exit, and the thread to
|
||||
* terminate. Derived class should override that method to provide there the
|
||||
* actual frame delivery.
|
||||
* Return:
|
||||
* true To continue thread loop (this method will be called again), or false
|
||||
* to exit the thread loop and to terminate the thread.
|
||||
*/
|
||||
virtual bool inWorkerThread();
|
||||
|
||||
/* Encapsulates a worker thread used by the emulated camera device.
|
||||
*/
|
||||
friend class WorkerThread;
|
||||
class WorkerThread : public Thread {
|
||||
|
||||
/****************************************************************************
|
||||
* Public API
|
||||
***************************************************************************/
|
||||
|
||||
public:
|
||||
inline explicit WorkerThread(EmulatedCameraDevice* camera_dev)
|
||||
: Thread(true), // Callbacks may involve Java calls.
|
||||
mCameraDevice(camera_dev),
|
||||
mThreadControl(-1),
|
||||
mControlFD(-1)
|
||||
{
|
||||
}
|
||||
|
||||
inline ~WorkerThread()
|
||||
{
|
||||
ALOGW_IF(mThreadControl >= 0 || mControlFD >= 0,
|
||||
"%s: Control FDs are opened in the destructor",
|
||||
__FUNCTION__);
|
||||
if (mThreadControl >= 0) {
|
||||
close(mThreadControl);
|
||||
}
|
||||
if (mControlFD >= 0) {
|
||||
close(mControlFD);
|
||||
}
|
||||
}
|
||||
|
||||
/* Starts the thread
|
||||
* Param:
|
||||
* one_burst - Controls how many times thread loop should run. If
|
||||
* this parameter is 'true', thread routine will run only once
|
||||
* If this parameter is 'false', thread routine will run until
|
||||
* stopThread method is called. See startWorkerThread for more
|
||||
* info.
|
||||
* Return:
|
||||
* NO_ERROR on success, or an appropriate error status.
|
||||
*/
|
||||
inline status_t startThread(bool one_burst)
|
||||
{
|
||||
mOneBurst = one_burst;
|
||||
return run("Camera_startThread", ANDROID_PRIORITY_URGENT_DISPLAY, 0);
|
||||
}
|
||||
|
||||
/* Overriden base class method.
|
||||
* It is overriden in order to provide one-time initialization just
|
||||
* prior to starting the thread routine.
|
||||
*/
|
||||
status_t readyToRun();
|
||||
|
||||
/* Stops the thread. */
|
||||
status_t stopThread();
|
||||
|
||||
/* Values returned from the Select method of this class. */
|
||||
enum SelectRes {
|
||||
/* A timeout has occurred. */
|
||||
TIMEOUT,
|
||||
/* Data are available for read on the provided FD. */
|
||||
READY,
|
||||
/* Thread exit request has been received. */
|
||||
EXIT_THREAD,
|
||||
/* An error has occurred. */
|
||||
ERROR
|
||||
};
|
||||
|
||||
/* Select on an FD event, keeping in mind thread exit message.
|
||||
* Param:
|
||||
* fd - File descriptor on which to wait for an event. This
|
||||
* parameter may be negative. If it is negative this method will
|
||||
* only wait on a control message to the thread.
|
||||
* timeout - Timeout in microseconds. 0 indicates no timeout (wait
|
||||
* forever).
|
||||
* Return:
|
||||
* See SelectRes enum comments.
|
||||
*/
|
||||
SelectRes Select(int fd, int timeout);
|
||||
|
||||
/****************************************************************************
|
||||
* Private API
|
||||
***************************************************************************/
|
||||
|
||||
private:
|
||||
/* Implements abstract method of the base Thread class. */
|
||||
bool threadLoop()
|
||||
{
|
||||
/* Simply dispatch the call to the containing camera device. */
|
||||
if (mCameraDevice->inWorkerThread()) {
|
||||
/* Respect "one burst" parameter (see startThread). */
|
||||
return !mOneBurst;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/* Containing camera device object. */
|
||||
EmulatedCameraDevice* mCameraDevice;
|
||||
|
||||
/* FD that is used to send control messages into the thread. */
|
||||
int mThreadControl;
|
||||
|
||||
/* FD that thread uses to receive control messages. */
|
||||
int mControlFD;
|
||||
|
||||
/* Controls number of times the thread loop runs.
|
||||
* See startThread for more information. */
|
||||
bool mOneBurst;
|
||||
|
||||
/* Enumerates control messages that can be sent into the thread. */
|
||||
enum ControlMessage {
|
||||
/* Stop the thread. */
|
||||
THREAD_STOP
|
||||
};
|
||||
|
||||
Condition mSetup;
|
||||
};
|
||||
|
||||
/* Worker thread accessor. */
|
||||
inline WorkerThread* getWorkerThread() const
|
||||
{
|
||||
return mWorkerThread.get();
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Data members
|
||||
***************************************************************************/
|
||||
|
||||
protected:
|
||||
/* Locks this instance for parameters, state, etc. change. */
|
||||
Mutex mObjectLock;
|
||||
|
||||
/* Worker thread that is used in frame capturing. */
|
||||
sp<WorkerThread> mWorkerThread;
|
||||
|
||||
/* Timestamp of the current frame. */
|
||||
nsecs_t mCurFrameTimestamp;
|
||||
|
||||
/* Emulated camera object containing this instance. */
|
||||
EmulatedCamera* mCameraHAL;
|
||||
|
||||
/* Framebuffer containing the current frame. */
|
||||
uint8_t* mCurrentFrame;
|
||||
|
||||
/*
|
||||
* Framebuffer properties.
|
||||
*/
|
||||
|
||||
/* Byte size of the framebuffer. */
|
||||
size_t mFrameBufferSize;
|
||||
|
||||
/* Original pixel format (one of the V4L2_PIX_FMT_XXX values, as defined in
|
||||
* bionic/libc/kernel/common/linux/videodev2.h */
|
||||
uint32_t mPixelFormat;
|
||||
|
||||
/* Frame width */
|
||||
int mFrameWidth;
|
||||
|
||||
/* Frame height */
|
||||
int mFrameHeight;
|
||||
|
||||
/* Total number of pixels */
|
||||
int mTotalPixels;
|
||||
|
||||
/* Exposure compensation value */
|
||||
float mExposureCompensation;
|
||||
|
||||
float* mWhiteBalanceScale;
|
||||
|
||||
DefaultKeyedVector<String8, float*> mSupportedWhiteBalanceScale;
|
||||
|
||||
/* Defines possible states of the emulated camera device object.
|
||||
*/
|
||||
enum EmulatedCameraDeviceState {
|
||||
/* Object has been constructed. */
|
||||
ECDS_CONSTRUCTED,
|
||||
/* Object has been initialized. */
|
||||
ECDS_INITIALIZED,
|
||||
/* Object has been connected to the physical device. */
|
||||
ECDS_CONNECTED,
|
||||
/* Camera device has been started. */
|
||||
ECDS_STARTED,
|
||||
};
|
||||
|
||||
/* Object state. */
|
||||
EmulatedCameraDeviceState mState;
|
||||
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA_DEVICE_H */
|
||||
542
android/camera/EmulatedCameraFactory.cpp
Executable file
542
android/camera/EmulatedCameraFactory.cpp
Executable file
|
|
@ -0,0 +1,542 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of a class EmulatedCameraFactory that manages cameras
|
||||
* available for emulation.
|
||||
*/
|
||||
|
||||
//#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_Factory"
|
||||
#include <cutils/log.h>
|
||||
#include <cutils/properties.h>
|
||||
#include "EmulatedQemuCamera.h"
|
||||
#include "EmulatedFakeCamera.h"
|
||||
#include "EmulatedFakeCamera2.h"
|
||||
#include "EmulatedFakeCamera3.h"
|
||||
#include "EmulatedCameraHotplugThread.h"
|
||||
#include "EmulatedCameraFactory.h"
|
||||
|
||||
extern camera_module_t HAL_MODULE_INFO_SYM;
|
||||
|
||||
/* A global instance of EmulatedCameraFactory is statically instantiated and
|
||||
* initialized when camera emulation HAL is loaded.
|
||||
*/
|
||||
android::EmulatedCameraFactory gEmulatedCameraFactory;
|
||||
|
||||
namespace android {
|
||||
|
||||
EmulatedCameraFactory::EmulatedCameraFactory()
|
||||
: mQemuClient(),
|
||||
mEmulatedCameras(NULL),
|
||||
mEmulatedCameraNum(0),
|
||||
mFakeCameraNum(0),
|
||||
mConstructedOK(false),
|
||||
mCallbacks(NULL)
|
||||
{
|
||||
status_t res;
|
||||
/* Connect to the factory service in the emulator, and create Qemu cameras. */
|
||||
if (mQemuClient.connectClient(NULL) == NO_ERROR) {
|
||||
/* Connection has succeeded. Create emulated cameras for each camera
|
||||
* device, reported by the service. */
|
||||
createQemuCameras();
|
||||
}
|
||||
|
||||
if (isBackFakeCameraEmulationOn()) {
|
||||
/* Camera ID. */
|
||||
const int camera_id = mEmulatedCameraNum;
|
||||
/* Use fake camera to emulate back-facing camera. */
|
||||
mEmulatedCameraNum++;
|
||||
|
||||
/* Make sure that array is allocated (in case there were no 'qemu'
|
||||
* cameras created. Note that we preallocate the array so it may contain
|
||||
* two fake cameras: one facing back, and another facing front. */
|
||||
if (mEmulatedCameras == NULL) {
|
||||
mEmulatedCameras = new EmulatedBaseCamera*[mEmulatedCameraNum + 1];
|
||||
if (mEmulatedCameras == NULL) {
|
||||
ALOGE("%s: Unable to allocate emulated camera array for %d entries",
|
||||
__FUNCTION__, mEmulatedCameraNum);
|
||||
return;
|
||||
}
|
||||
memset(mEmulatedCameras, 0,
|
||||
(mEmulatedCameraNum + 1) * sizeof(EmulatedBaseCamera*));
|
||||
}
|
||||
|
||||
/* Create, and initialize the fake camera */
|
||||
switch (getBackCameraHalVersion()) {
|
||||
case 1:
|
||||
mEmulatedCameras[camera_id] =
|
||||
new EmulatedFakeCamera(camera_id, true,
|
||||
&HAL_MODULE_INFO_SYM.common);
|
||||
break;
|
||||
case 2:
|
||||
mEmulatedCameras[camera_id] =
|
||||
new EmulatedFakeCamera2(camera_id, true,
|
||||
&HAL_MODULE_INFO_SYM.common);
|
||||
break;
|
||||
case 3:
|
||||
mEmulatedCameras[camera_id] =
|
||||
new EmulatedFakeCamera3(camera_id, true,
|
||||
&HAL_MODULE_INFO_SYM.common);
|
||||
break;
|
||||
default:
|
||||
ALOGE("%s: Unknown back camera hal version requested: %d", __FUNCTION__,
|
||||
getBackCameraHalVersion());
|
||||
}
|
||||
if (mEmulatedCameras[camera_id] != NULL) {
|
||||
ALOGV("%s: Back camera device version is %d", __FUNCTION__,
|
||||
getBackCameraHalVersion());
|
||||
res = mEmulatedCameras[camera_id]->Initialize();
|
||||
if (res != NO_ERROR) {
|
||||
ALOGE("%s: Unable to intialize back camera %d: %s (%d)",
|
||||
__FUNCTION__, camera_id, strerror(-res), res);
|
||||
delete mEmulatedCameras[camera_id];
|
||||
mEmulatedCameraNum--;
|
||||
}
|
||||
} else {
|
||||
mEmulatedCameraNum--;
|
||||
ALOGE("%s: Unable to instantiate fake camera class", __FUNCTION__);
|
||||
}
|
||||
}
|
||||
|
||||
if (isFrontFakeCameraEmulationOn()) {
|
||||
/* Camera ID. */
|
||||
const int camera_id = mEmulatedCameraNum;
|
||||
/* Use fake camera to emulate front-facing camera. */
|
||||
mEmulatedCameraNum++;
|
||||
|
||||
/* Make sure that array is allocated (in case there were no 'qemu'
|
||||
* cameras created. */
|
||||
if (mEmulatedCameras == NULL) {
|
||||
mEmulatedCameras = new EmulatedBaseCamera*[mEmulatedCameraNum];
|
||||
if (mEmulatedCameras == NULL) {
|
||||
ALOGE("%s: Unable to allocate emulated camera array for %d entries",
|
||||
__FUNCTION__, mEmulatedCameraNum);
|
||||
return;
|
||||
}
|
||||
memset(mEmulatedCameras, 0,
|
||||
mEmulatedCameraNum * sizeof(EmulatedBaseCamera*));
|
||||
}
|
||||
|
||||
/* Create, and initialize the fake camera */
|
||||
switch (getFrontCameraHalVersion()) {
|
||||
case 1:
|
||||
mEmulatedCameras[camera_id] =
|
||||
new EmulatedFakeCamera(camera_id, false,
|
||||
&HAL_MODULE_INFO_SYM.common);
|
||||
break;
|
||||
case 2:
|
||||
mEmulatedCameras[camera_id] =
|
||||
new EmulatedFakeCamera2(camera_id, false,
|
||||
&HAL_MODULE_INFO_SYM.common);
|
||||
break;
|
||||
case 3:
|
||||
mEmulatedCameras[camera_id] =
|
||||
new EmulatedFakeCamera3(camera_id, false,
|
||||
&HAL_MODULE_INFO_SYM.common);
|
||||
break;
|
||||
default:
|
||||
ALOGE("%s: Unknown front camera hal version requested: %d",
|
||||
__FUNCTION__,
|
||||
getFrontCameraHalVersion());
|
||||
}
|
||||
if (mEmulatedCameras[camera_id] != NULL) {
|
||||
ALOGV("%s: Front camera device version is %d", __FUNCTION__,
|
||||
getFrontCameraHalVersion());
|
||||
res = mEmulatedCameras[camera_id]->Initialize();
|
||||
if (res != NO_ERROR) {
|
||||
ALOGE("%s: Unable to intialize front camera %d: %s (%d)",
|
||||
__FUNCTION__, camera_id, strerror(-res), res);
|
||||
delete mEmulatedCameras[camera_id];
|
||||
mEmulatedCameraNum--;
|
||||
}
|
||||
} else {
|
||||
mEmulatedCameraNum--;
|
||||
ALOGE("%s: Unable to instantiate fake camera class", __FUNCTION__);
|
||||
}
|
||||
}
|
||||
|
||||
ALOGV("%d cameras are being emulated. %d of them are fake cameras.",
|
||||
mEmulatedCameraNum, mFakeCameraNum);
|
||||
|
||||
/* Create hotplug thread */
|
||||
{
|
||||
Vector<int> cameraIdVector;
|
||||
for (int i = 0; i < mEmulatedCameraNum; ++i) {
|
||||
cameraIdVector.push_back(i);
|
||||
}
|
||||
mHotplugThread = new EmulatedCameraHotplugThread(&cameraIdVector[0],
|
||||
mEmulatedCameraNum);
|
||||
mHotplugThread->run("EmulatedCameraHotplugThread");
|
||||
}
|
||||
|
||||
mConstructedOK = true;
|
||||
}
|
||||
|
||||
EmulatedCameraFactory::~EmulatedCameraFactory()
|
||||
{
|
||||
if (mEmulatedCameras != NULL) {
|
||||
for (int n = 0; n < mEmulatedCameraNum; n++) {
|
||||
if (mEmulatedCameras[n] != NULL) {
|
||||
delete mEmulatedCameras[n];
|
||||
}
|
||||
}
|
||||
delete[] mEmulatedCameras;
|
||||
}
|
||||
|
||||
if (mHotplugThread != NULL) {
|
||||
mHotplugThread->requestExit();
|
||||
mHotplugThread->join();
|
||||
}
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Camera HAL API handlers.
|
||||
*
|
||||
* Each handler simply verifies existence of an appropriate EmulatedBaseCamera
|
||||
* instance, and dispatches the call to that instance.
|
||||
*
|
||||
***************************************************************************/
|
||||
|
||||
int EmulatedCameraFactory::cameraDeviceOpen(int camera_id, hw_device_t** device)
|
||||
{
|
||||
ALOGV("%s: id = %d", __FUNCTION__, camera_id);
|
||||
|
||||
*device = NULL;
|
||||
|
||||
if (!isConstructedOK()) {
|
||||
ALOGE("%s: EmulatedCameraFactory has failed to initialize", __FUNCTION__);
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
if (camera_id < 0 || camera_id >= getEmulatedCameraNum()) {
|
||||
ALOGE("%s: Camera id %d is out of bounds (%d)",
|
||||
__FUNCTION__, camera_id, getEmulatedCameraNum());
|
||||
return -ENODEV;
|
||||
}
|
||||
|
||||
return mEmulatedCameras[camera_id]->connectCamera(device);
|
||||
}
|
||||
|
||||
int EmulatedCameraFactory::getCameraInfo(int camera_id, struct camera_info* info)
|
||||
{
|
||||
ALOGV("%s: id = %d", __FUNCTION__, camera_id);
|
||||
|
||||
if (!isConstructedOK()) {
|
||||
ALOGE("%s: EmulatedCameraFactory has failed to initialize", __FUNCTION__);
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
if (camera_id < 0 || camera_id >= getEmulatedCameraNum()) {
|
||||
ALOGE("%s: Camera id %d is out of bounds (%d)",
|
||||
__FUNCTION__, camera_id, getEmulatedCameraNum());
|
||||
return -ENODEV;
|
||||
}
|
||||
|
||||
return mEmulatedCameras[camera_id]->getCameraInfo(info);
|
||||
}
|
||||
|
||||
int EmulatedCameraFactory::setCallbacks(
|
||||
const camera_module_callbacks_t *callbacks)
|
||||
{
|
||||
ALOGV("%s: callbacks = %p", __FUNCTION__, callbacks);
|
||||
|
||||
mCallbacks = callbacks;
|
||||
|
||||
return OK;
|
||||
}
|
||||
|
||||
void EmulatedCameraFactory::getVendorTagOps(vendor_tag_ops_t* ops) {
|
||||
ALOGV("%s: ops = %p", __FUNCTION__, ops);
|
||||
|
||||
// No vendor tags defined for emulator yet, so not touching ops
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Camera HAL API callbacks.
|
||||
***************************************************************************/
|
||||
|
||||
int EmulatedCameraFactory::device_open(const hw_module_t* module,
|
||||
const char* name,
|
||||
hw_device_t** device)
|
||||
{
|
||||
/*
|
||||
* Simply verify the parameters, and dispatch the call inside the
|
||||
* EmulatedCameraFactory instance.
|
||||
*/
|
||||
|
||||
if (module != &HAL_MODULE_INFO_SYM.common) {
|
||||
ALOGE("%s: Invalid module %p expected %p",
|
||||
__FUNCTION__, module, &HAL_MODULE_INFO_SYM.common);
|
||||
return -EINVAL;
|
||||
}
|
||||
if (name == NULL) {
|
||||
ALOGE("%s: NULL name is not expected here", __FUNCTION__);
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
return gEmulatedCameraFactory.cameraDeviceOpen(atoi(name), device);
|
||||
}
|
||||
|
||||
int EmulatedCameraFactory::get_number_of_cameras(void)
|
||||
{
|
||||
return gEmulatedCameraFactory.getEmulatedCameraNum();
|
||||
}
|
||||
|
||||
int EmulatedCameraFactory::get_camera_info(int camera_id,
|
||||
struct camera_info* info)
|
||||
{
|
||||
return gEmulatedCameraFactory.getCameraInfo(camera_id, info);
|
||||
}
|
||||
|
||||
int EmulatedCameraFactory::set_callbacks(
|
||||
const camera_module_callbacks_t *callbacks)
|
||||
{
|
||||
return gEmulatedCameraFactory.setCallbacks(callbacks);
|
||||
}
|
||||
|
||||
void EmulatedCameraFactory::get_vendor_tag_ops(vendor_tag_ops_t* ops)
|
||||
{
|
||||
gEmulatedCameraFactory.getVendorTagOps(ops);
|
||||
}
|
||||
|
||||
int EmulatedCameraFactory::open_legacy(const struct hw_module_t* module,
|
||||
const char* id, uint32_t halVersion, struct hw_device_t** device) {
|
||||
// Not supporting legacy open
|
||||
return -ENOSYS;
|
||||
}
|
||||
|
||||
/********************************************************************************
|
||||
* Internal API
|
||||
*******************************************************************************/
|
||||
|
||||
/*
|
||||
* Camera information tokens passed in response to the "list" factory query.
|
||||
*/
|
||||
|
||||
/* Device name token. */
|
||||
static const char lListNameToken[] = "name=";
|
||||
/* Frame dimensions token. */
|
||||
static const char lListDimsToken[] = "framedims=";
|
||||
/* Facing direction token. */
|
||||
static const char lListDirToken[] = "dir=";
|
||||
|
||||
void EmulatedCameraFactory::createQemuCameras()
|
||||
{
|
||||
/* Obtain camera list. */
|
||||
char* camera_list = NULL;
|
||||
status_t res = mQemuClient.listCameras(&camera_list);
|
||||
/* Empty list, or list containing just an EOL means that there were no
|
||||
* connected cameras found. */
|
||||
if (res != NO_ERROR || camera_list == NULL || *camera_list == '\0' ||
|
||||
*camera_list == '\n') {
|
||||
if (camera_list != NULL) {
|
||||
free(camera_list);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
/*
|
||||
* Calculate number of connected cameras. Number of EOLs in the camera list
|
||||
* is the number of the connected cameras.
|
||||
*/
|
||||
|
||||
int num = 0;
|
||||
const char* eol = strchr(camera_list, '\n');
|
||||
while (eol != NULL) {
|
||||
num++;
|
||||
eol = strchr(eol + 1, '\n');
|
||||
}
|
||||
|
||||
/* Allocate the array for emulated camera instances. Note that we allocate
|
||||
* two more entries for back and front fake camera emulation. */
|
||||
mEmulatedCameras = new EmulatedBaseCamera*[num + 2];
|
||||
if (mEmulatedCameras == NULL) {
|
||||
ALOGE("%s: Unable to allocate emulated camera array for %d entries",
|
||||
__FUNCTION__, num + 1);
|
||||
free(camera_list);
|
||||
return;
|
||||
}
|
||||
memset(mEmulatedCameras, 0, sizeof(EmulatedBaseCamera*) * (num + 1));
|
||||
|
||||
/*
|
||||
* Iterate the list, creating, and initializin emulated qemu cameras for each
|
||||
* entry (line) in the list.
|
||||
*/
|
||||
|
||||
int index = 0;
|
||||
char* cur_entry = camera_list;
|
||||
while (cur_entry != NULL && *cur_entry != '\0' && index < num) {
|
||||
/* Find the end of the current camera entry, and terminate it with zero
|
||||
* for simpler string manipulation. */
|
||||
char* next_entry = strchr(cur_entry, '\n');
|
||||
if (next_entry != NULL) {
|
||||
*next_entry = '\0';
|
||||
next_entry++; // Start of the next entry.
|
||||
}
|
||||
|
||||
/* Find 'name', 'framedims', and 'dir' tokens that are required here. */
|
||||
char* name_start = strstr(cur_entry, lListNameToken);
|
||||
char* dim_start = strstr(cur_entry, lListDimsToken);
|
||||
char* dir_start = strstr(cur_entry, lListDirToken);
|
||||
if (name_start != NULL && dim_start != NULL && dir_start != NULL) {
|
||||
/* Advance to the token values. */
|
||||
name_start += strlen(lListNameToken);
|
||||
dim_start += strlen(lListDimsToken);
|
||||
dir_start += strlen(lListDirToken);
|
||||
|
||||
/* Terminate token values with zero. */
|
||||
char* s = strchr(name_start, ' ');
|
||||
if (s != NULL) {
|
||||
*s = '\0';
|
||||
}
|
||||
s = strchr(dim_start, ' ');
|
||||
if (s != NULL) {
|
||||
*s = '\0';
|
||||
}
|
||||
s = strchr(dir_start, ' ');
|
||||
if (s != NULL) {
|
||||
*s = '\0';
|
||||
}
|
||||
|
||||
/* Create and initialize qemu camera. */
|
||||
EmulatedQemuCamera* qemu_cam =
|
||||
new EmulatedQemuCamera(index, &HAL_MODULE_INFO_SYM.common);
|
||||
if (NULL != qemu_cam) {
|
||||
res = qemu_cam->Initialize(name_start, dim_start, dir_start);
|
||||
if (res == NO_ERROR) {
|
||||
mEmulatedCameras[index] = qemu_cam;
|
||||
index++;
|
||||
} else {
|
||||
delete qemu_cam;
|
||||
}
|
||||
} else {
|
||||
ALOGE("%s: Unable to instantiate EmulatedQemuCamera",
|
||||
__FUNCTION__);
|
||||
}
|
||||
} else {
|
||||
ALOGW("%s: Bad camera information: %s", __FUNCTION__, cur_entry);
|
||||
}
|
||||
|
||||
cur_entry = next_entry;
|
||||
}
|
||||
|
||||
mEmulatedCameraNum = index;
|
||||
}
|
||||
|
||||
bool EmulatedCameraFactory::isBackFakeCameraEmulationOn()
|
||||
{
|
||||
/* Defined by 'qemu.sf.fake_camera' boot property: if property exist, and
|
||||
* is set to 'both', or 'back', then fake camera is used to emulate back
|
||||
* camera. */
|
||||
char prop[PROPERTY_VALUE_MAX];
|
||||
if ((property_get("qemu.sf.fake_camera", prop, NULL) > 0) &&
|
||||
(!strcmp(prop, "both") || !strcmp(prop, "back"))) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
int EmulatedCameraFactory::getBackCameraHalVersion()
|
||||
{
|
||||
/* Defined by 'qemu.sf.back_camera_hal_version' boot property: if the
|
||||
* property doesn't exist, it is assumed to be 1. */
|
||||
char prop[PROPERTY_VALUE_MAX];
|
||||
if (property_get("qemu.sf.back_camera_hal", prop, NULL) > 0) {
|
||||
char *prop_end = prop;
|
||||
int val = strtol(prop, &prop_end, 10);
|
||||
if (*prop_end == '\0') {
|
||||
return val;
|
||||
}
|
||||
// Badly formatted property, should just be a number
|
||||
ALOGE("qemu.sf.back_camera_hal is not a number: %s", prop);
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
bool EmulatedCameraFactory::isFrontFakeCameraEmulationOn()
|
||||
{
|
||||
/* Defined by 'qemu.sf.fake_camera' boot property: if property exist, and
|
||||
* is set to 'both', or 'front', then fake camera is used to emulate front
|
||||
* camera. */
|
||||
char prop[PROPERTY_VALUE_MAX];
|
||||
if ((property_get("qemu.sf.fake_camera", prop, NULL) > 0) &&
|
||||
(!strcmp(prop, "both") || !strcmp(prop, "front"))) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
int EmulatedCameraFactory::getFrontCameraHalVersion()
|
||||
{
|
||||
/* Defined by 'qemu.sf.front_camera_hal_version' boot property: if the
|
||||
* property doesn't exist, it is assumed to be 1. */
|
||||
char prop[PROPERTY_VALUE_MAX];
|
||||
if (property_get("qemu.sf.front_camera_hal", prop, NULL) > 0) {
|
||||
char *prop_end = prop;
|
||||
int val = strtol(prop, &prop_end, 10);
|
||||
if (*prop_end == '\0') {
|
||||
return val;
|
||||
}
|
||||
// Badly formatted property, should just be a number
|
||||
ALOGE("qemu.sf.front_camera_hal is not a number: %s", prop);
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
void EmulatedCameraFactory::onStatusChanged(int cameraId, int newStatus) {
|
||||
|
||||
EmulatedBaseCamera *cam = mEmulatedCameras[cameraId];
|
||||
if (!cam) {
|
||||
ALOGE("%s: Invalid camera ID %d", __FUNCTION__, cameraId);
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* (Order is important)
|
||||
* Send the callback first to framework, THEN close the camera.
|
||||
*/
|
||||
|
||||
if (newStatus == cam->getHotplugStatus()) {
|
||||
ALOGW("%s: Ignoring transition to the same status", __FUNCTION__);
|
||||
return;
|
||||
}
|
||||
|
||||
const camera_module_callbacks_t* cb = mCallbacks;
|
||||
if (cb != NULL && cb->camera_device_status_change != NULL) {
|
||||
cb->camera_device_status_change(cb, cameraId, newStatus);
|
||||
}
|
||||
|
||||
if (newStatus == CAMERA_DEVICE_STATUS_NOT_PRESENT) {
|
||||
cam->unplugCamera();
|
||||
} else if (newStatus == CAMERA_DEVICE_STATUS_PRESENT) {
|
||||
cam->plugCamera();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/********************************************************************************
|
||||
* Initializer for the static member structure.
|
||||
*******************************************************************************/
|
||||
|
||||
/* Entry point for camera HAL API. */
|
||||
struct hw_module_methods_t EmulatedCameraFactory::mCameraModuleMethods = {
|
||||
open: EmulatedCameraFactory::device_open
|
||||
};
|
||||
|
||||
}; /* namespace android */
|
||||
207
android/camera/EmulatedCameraFactory.h
Executable file
207
android/camera/EmulatedCameraFactory.h
Executable file
|
|
@ -0,0 +1,207 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_FACTORY_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_FACTORY_H
|
||||
|
||||
#include <utils/RefBase.h>
|
||||
#include "EmulatedBaseCamera.h"
|
||||
#include "QemuClient.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
struct EmulatedCameraHotplugThread;
|
||||
|
||||
/*
|
||||
* Contains declaration of a class EmulatedCameraFactory that manages cameras
|
||||
* available for the emulation. A global instance of this class is statically
|
||||
* instantiated and initialized when camera emulation HAL is loaded.
|
||||
*/
|
||||
|
||||
/* Class EmulatedCameraFactoryManages cameras available for the emulation.
|
||||
*
|
||||
* When the global static instance of this class is created on the module load,
|
||||
* it enumerates cameras available for the emulation by connecting to the
|
||||
* emulator's 'camera' service. For every camera found out there it creates an
|
||||
* instance of an appropriate class, and stores it an in array of emulated
|
||||
* cameras. In addition to the cameras reported by the emulator, a fake camera
|
||||
* emulator is always created, so there is always at least one camera that is
|
||||
* available.
|
||||
*
|
||||
* Instance of this class is also used as the entry point for the camera HAL API,
|
||||
* including:
|
||||
* - hw_module_methods_t::open entry point
|
||||
* - camera_module_t::get_number_of_cameras entry point
|
||||
* - camera_module_t::get_camera_info entry point
|
||||
*
|
||||
*/
|
||||
class EmulatedCameraFactory {
|
||||
public:
|
||||
/* Constructs EmulatedCameraFactory instance.
|
||||
* In this constructor the factory will create and initialize a list of
|
||||
* emulated cameras. All errors that occur on this constructor are reported
|
||||
* via mConstructedOK data member of this class.
|
||||
*/
|
||||
EmulatedCameraFactory();
|
||||
|
||||
/* Destructs EmulatedCameraFactory instance. */
|
||||
~EmulatedCameraFactory();
|
||||
|
||||
/****************************************************************************
|
||||
* Camera HAL API handlers.
|
||||
***************************************************************************/
|
||||
|
||||
public:
|
||||
/* Opens (connects to) a camera device.
|
||||
* This method is called in response to hw_module_methods_t::open callback.
|
||||
*/
|
||||
int cameraDeviceOpen(int camera_id, hw_device_t** device);
|
||||
|
||||
/* Gets emulated camera information.
|
||||
* This method is called in response to camera_module_t::get_camera_info callback.
|
||||
*/
|
||||
int getCameraInfo(int camera_id, struct camera_info *info);
|
||||
|
||||
/* Sets emulated camera callbacks.
|
||||
* This method is called in response to camera_module_t::set_callbacks callback.
|
||||
*/
|
||||
int setCallbacks(const camera_module_callbacks_t *callbacks);
|
||||
|
||||
/* Fill in vendor tags for the module
|
||||
* This method is called in response to camera_module_t::get_vendor_tag_ops callback.
|
||||
*/
|
||||
void getVendorTagOps(vendor_tag_ops_t* ops);
|
||||
|
||||
/****************************************************************************
|
||||
* Camera HAL API callbacks.
|
||||
***************************************************************************/
|
||||
|
||||
public:
|
||||
/* camera_module_t::get_number_of_cameras callback entry point. */
|
||||
static int get_number_of_cameras(void);
|
||||
|
||||
/* camera_module_t::get_camera_info callback entry point. */
|
||||
static int get_camera_info(int camera_id, struct camera_info *info);
|
||||
|
||||
/* camera_module_t::set_callbacks callback entry point. */
|
||||
static int set_callbacks(const camera_module_callbacks_t *callbacks);
|
||||
|
||||
/* camera_module_t::get_vendor_tag_ops callback entry point */
|
||||
static void get_vendor_tag_ops(vendor_tag_ops_t* ops);
|
||||
|
||||
/* camera_module_t::open_legacy callback entry point */
|
||||
static int open_legacy(const struct hw_module_t* module, const char* id,
|
||||
uint32_t halVersion, struct hw_device_t** device);
|
||||
|
||||
private:
|
||||
/* hw_module_methods_t::open callback entry point. */
|
||||
static int device_open(const hw_module_t* module,
|
||||
const char* name,
|
||||
hw_device_t** device);
|
||||
|
||||
/****************************************************************************
|
||||
* Public API.
|
||||
***************************************************************************/
|
||||
|
||||
public:
|
||||
|
||||
/* Gets fake camera orientation. */
|
||||
int getFakeCameraOrientation() {
|
||||
/* TODO: Have a boot property that controls that. */
|
||||
return 90;
|
||||
}
|
||||
|
||||
/* Gets qemu camera orientation. */
|
||||
int getQemuCameraOrientation() {
|
||||
/* TODO: Have a boot property that controls that. */
|
||||
return 270;
|
||||
}
|
||||
|
||||
/* Gets number of emulated cameras.
|
||||
*/
|
||||
int getEmulatedCameraNum() const {
|
||||
return mEmulatedCameraNum;
|
||||
}
|
||||
|
||||
/* Checks whether or not the constructor has succeeded.
|
||||
*/
|
||||
bool isConstructedOK() const {
|
||||
return mConstructedOK;
|
||||
}
|
||||
|
||||
void onStatusChanged(int cameraId, int newStatus);
|
||||
|
||||
/****************************************************************************
|
||||
* Private API
|
||||
***************************************************************************/
|
||||
|
||||
private:
|
||||
/* Populates emulated cameras array with cameras that are available via
|
||||
* 'camera' service in the emulator. For each such camera and instance of
|
||||
* the EmulatedCameraQemud will be created and added to the mEmulatedCameras
|
||||
* array.
|
||||
*/
|
||||
void createQemuCameras();
|
||||
|
||||
/* Checks if fake camera emulation is on for the camera facing back. */
|
||||
bool isBackFakeCameraEmulationOn();
|
||||
|
||||
/* Gets camera device version number to use for back camera emulation */
|
||||
int getBackCameraHalVersion();
|
||||
|
||||
/* Checks if fake camera emulation is on for the camera facing front. */
|
||||
bool isFrontFakeCameraEmulationOn();
|
||||
|
||||
/* Gets camera device version number to use for front camera emulation */
|
||||
int getFrontCameraHalVersion();
|
||||
|
||||
/****************************************************************************
|
||||
* Data members.
|
||||
***************************************************************************/
|
||||
|
||||
private:
|
||||
/* Connection to the camera service in the emulator. */
|
||||
FactoryQemuClient mQemuClient;
|
||||
|
||||
/* Array of cameras available for the emulation. */
|
||||
EmulatedBaseCamera** mEmulatedCameras;
|
||||
|
||||
/* Number of emulated cameras (including the fake ones). */
|
||||
int mEmulatedCameraNum;
|
||||
|
||||
/* Number of emulated fake cameras. */
|
||||
int mFakeCameraNum;
|
||||
|
||||
/* Flags whether or not constructor has succeeded. */
|
||||
bool mConstructedOK;
|
||||
|
||||
/* Camera callbacks (for status changing) */
|
||||
const camera_module_callbacks_t* mCallbacks;
|
||||
|
||||
/* Hotplug thread (to call onStatusChanged) */
|
||||
sp<EmulatedCameraHotplugThread> mHotplugThread;
|
||||
|
||||
public:
|
||||
/* Contains device open entry point, as required by HAL API. */
|
||||
static struct hw_module_methods_t mCameraModuleMethods;
|
||||
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
/* References the global EmulatedCameraFactory instance. */
|
||||
extern android::EmulatedCameraFactory gEmulatedCameraFactory;
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_CAMERA_FACTORY_H */
|
||||
48
android/camera/EmulatedCameraHal.cpp
Executable file
48
android/camera/EmulatedCameraHal.cpp
Executable file
|
|
@ -0,0 +1,48 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of the camera HAL layer in the system running
|
||||
* under the emulator.
|
||||
*
|
||||
* This file contains only required HAL header, which directs all the API calls
|
||||
* to the EmulatedCameraFactory class implementation, wich is responsible for
|
||||
* managing emulated cameras.
|
||||
*/
|
||||
|
||||
#include "EmulatedCameraFactory.h"
|
||||
|
||||
/*
|
||||
* Required HAL header.
|
||||
*/
|
||||
camera_module_t HAL_MODULE_INFO_SYM = {
|
||||
common: {
|
||||
tag: HARDWARE_MODULE_TAG,
|
||||
module_api_version: CAMERA_MODULE_API_VERSION_2_3,
|
||||
hal_api_version: HARDWARE_HAL_API_VERSION,
|
||||
id: CAMERA_HARDWARE_MODULE_ID,
|
||||
name: "Emulated Camera Module",
|
||||
author: "The Android Open Source Project",
|
||||
methods: &android::EmulatedCameraFactory::mCameraModuleMethods,
|
||||
dso: NULL,
|
||||
reserved: {0},
|
||||
},
|
||||
get_number_of_cameras: android::EmulatedCameraFactory::get_number_of_cameras,
|
||||
get_camera_info: android::EmulatedCameraFactory::get_camera_info,
|
||||
set_callbacks: android::EmulatedCameraFactory::set_callbacks,
|
||||
get_vendor_tag_ops: android::EmulatedCameraFactory::get_vendor_tag_ops,
|
||||
open_legacy: android::EmulatedCameraFactory::open_legacy
|
||||
};
|
||||
372
android/camera/EmulatedCameraHotplugThread.cpp
Normal file
372
android/camera/EmulatedCameraHotplugThread.cpp
Normal file
|
|
@ -0,0 +1,372 @@
|
|||
/*
|
||||
* Copyright (C) 2013 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
//#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_HotplugThread"
|
||||
#include <cutils/log.h>
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <fcntl.h>
|
||||
#include <sys/inotify.h>
|
||||
|
||||
#include "EmulatedCameraHotplugThread.h"
|
||||
#include "EmulatedCameraFactory.h"
|
||||
|
||||
#define FAKE_HOTPLUG_FILE "/data/misc/media/emulator.camera.hotplug"
|
||||
|
||||
#define EVENT_SIZE (sizeof(struct inotify_event))
|
||||
#define EVENT_BUF_LEN (1024*(EVENT_SIZE+16))
|
||||
|
||||
#define SubscriberInfo EmulatedCameraHotplugThread::SubscriberInfo
|
||||
|
||||
namespace android {
|
||||
|
||||
EmulatedCameraHotplugThread::EmulatedCameraHotplugThread(
|
||||
const int* cameraIdArray,
|
||||
size_t size) :
|
||||
Thread(/*canCallJava*/false) {
|
||||
|
||||
mRunning = true;
|
||||
mInotifyFd = 0;
|
||||
|
||||
for (size_t i = 0; i < size; ++i) {
|
||||
int id = cameraIdArray[i];
|
||||
|
||||
if (createFileIfNotExists(id)) {
|
||||
mSubscribedCameraIds.push_back(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
EmulatedCameraHotplugThread::~EmulatedCameraHotplugThread() {
|
||||
}
|
||||
|
||||
status_t EmulatedCameraHotplugThread::requestExitAndWait() {
|
||||
ALOGE("%s: Not implemented. Use requestExit + join instead",
|
||||
__FUNCTION__);
|
||||
return INVALID_OPERATION;
|
||||
}
|
||||
|
||||
void EmulatedCameraHotplugThread::requestExit() {
|
||||
Mutex::Autolock al(mMutex);
|
||||
|
||||
ALOGV("%s: Requesting thread exit", __FUNCTION__);
|
||||
mRunning = false;
|
||||
|
||||
bool rmWatchFailed = false;
|
||||
Vector<SubscriberInfo>::iterator it;
|
||||
for (it = mSubscribers.begin(); it != mSubscribers.end(); ++it) {
|
||||
|
||||
if (inotify_rm_watch(mInotifyFd, it->WatchID) == -1) {
|
||||
|
||||
ALOGE("%s: Could not remove watch for camID '%d',"
|
||||
" error: '%s' (%d)",
|
||||
__FUNCTION__, it->CameraID, strerror(errno),
|
||||
errno);
|
||||
|
||||
rmWatchFailed = true ;
|
||||
} else {
|
||||
ALOGV("%s: Removed watch for camID '%d'",
|
||||
__FUNCTION__, it->CameraID);
|
||||
}
|
||||
}
|
||||
|
||||
if (rmWatchFailed) { // unlikely
|
||||
// Give the thread a fighting chance to error out on the next
|
||||
// read
|
||||
if (close(mInotifyFd) == -1) {
|
||||
ALOGE("%s: close failure error: '%s' (%d)",
|
||||
__FUNCTION__, strerror(errno), errno);
|
||||
}
|
||||
}
|
||||
|
||||
ALOGV("%s: Request exit complete.", __FUNCTION__);
|
||||
}
|
||||
|
||||
status_t EmulatedCameraHotplugThread::readyToRun() {
|
||||
Mutex::Autolock al(mMutex);
|
||||
|
||||
mInotifyFd = -1;
|
||||
|
||||
do {
|
||||
ALOGV("%s: Initializing inotify", __FUNCTION__);
|
||||
|
||||
mInotifyFd = inotify_init();
|
||||
if (mInotifyFd == -1) {
|
||||
ALOGE("%s: inotify_init failure error: '%s' (%d)",
|
||||
__FUNCTION__, strerror(errno), errno);
|
||||
mRunning = false;
|
||||
break;
|
||||
}
|
||||
|
||||
/**
|
||||
* For each fake camera file, add a watch for when
|
||||
* the file is closed (if it was written to)
|
||||
*/
|
||||
Vector<int>::const_iterator it, end;
|
||||
it = mSubscribedCameraIds.begin();
|
||||
end = mSubscribedCameraIds.end();
|
||||
for (; it != end; ++it) {
|
||||
int cameraId = *it;
|
||||
if (!addWatch(cameraId)) {
|
||||
mRunning = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} while(false);
|
||||
|
||||
if (!mRunning) {
|
||||
status_t err = -errno;
|
||||
|
||||
if (mInotifyFd != -1) {
|
||||
close(mInotifyFd);
|
||||
}
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
return OK;
|
||||
}
|
||||
|
||||
bool EmulatedCameraHotplugThread::threadLoop() {
|
||||
|
||||
// If requestExit was already called, mRunning will be false
|
||||
while (mRunning) {
|
||||
char buffer[EVENT_BUF_LEN];
|
||||
int length = TEMP_FAILURE_RETRY(
|
||||
read(mInotifyFd, buffer, EVENT_BUF_LEN));
|
||||
|
||||
if (length < 0) {
|
||||
ALOGE("%s: Error reading from inotify FD, error: '%s' (%d)",
|
||||
__FUNCTION__, strerror(errno),
|
||||
errno);
|
||||
mRunning = false;
|
||||
break;
|
||||
}
|
||||
|
||||
ALOGV("%s: Read %d bytes from inotify FD", __FUNCTION__, length);
|
||||
|
||||
int i = 0;
|
||||
while (i < length) {
|
||||
inotify_event* event = (inotify_event*) &buffer[i];
|
||||
|
||||
if (event->mask & IN_IGNORED) {
|
||||
Mutex::Autolock al(mMutex);
|
||||
if (!mRunning) {
|
||||
ALOGV("%s: Shutting down thread", __FUNCTION__);
|
||||
break;
|
||||
} else {
|
||||
ALOGE("%s: File was deleted, aborting",
|
||||
__FUNCTION__);
|
||||
mRunning = false;
|
||||
break;
|
||||
}
|
||||
} else if (event->mask & IN_CLOSE_WRITE) {
|
||||
int cameraId = getCameraId(event->wd);
|
||||
|
||||
if (cameraId < 0) {
|
||||
ALOGE("%s: Got bad camera ID from WD '%d",
|
||||
__FUNCTION__, event->wd);
|
||||
} else {
|
||||
// Check the file for the new hotplug event
|
||||
String8 filePath = getFilePath(cameraId);
|
||||
/**
|
||||
* NOTE: we carefully avoid getting an inotify
|
||||
* for the same exact file because it's opened for
|
||||
* read-only, but our inotify is for write-only
|
||||
*/
|
||||
int newStatus = readFile(filePath);
|
||||
|
||||
if (newStatus < 0) {
|
||||
mRunning = false;
|
||||
break;
|
||||
}
|
||||
|
||||
int halStatus = newStatus ?
|
||||
CAMERA_DEVICE_STATUS_PRESENT :
|
||||
CAMERA_DEVICE_STATUS_NOT_PRESENT;
|
||||
gEmulatedCameraFactory.onStatusChanged(cameraId,
|
||||
halStatus);
|
||||
}
|
||||
|
||||
} else {
|
||||
ALOGW("%s: Unknown mask 0x%x",
|
||||
__FUNCTION__, event->mask);
|
||||
}
|
||||
|
||||
i += EVENT_SIZE + event->len;
|
||||
}
|
||||
}
|
||||
|
||||
if (!mRunning) {
|
||||
close(mInotifyFd);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
String8 EmulatedCameraHotplugThread::getFilePath(int cameraId) const {
|
||||
return String8::format(FAKE_HOTPLUG_FILE ".%d", cameraId);
|
||||
}
|
||||
|
||||
bool EmulatedCameraHotplugThread::createFileIfNotExists(int cameraId) const
|
||||
{
|
||||
String8 filePath = getFilePath(cameraId);
|
||||
// make sure this file exists and we have access to it
|
||||
int fd = TEMP_FAILURE_RETRY(
|
||||
open(filePath.string(), O_WRONLY | O_CREAT | O_TRUNC,
|
||||
/* mode = ug+rwx */ S_IRWXU | S_IRWXG ));
|
||||
if (fd == -1) {
|
||||
ALOGE("%s: Could not create file '%s', error: '%s' (%d)",
|
||||
__FUNCTION__, filePath.string(), strerror(errno), errno);
|
||||
return false;
|
||||
}
|
||||
|
||||
// File has '1' by default since we are plugged in by default
|
||||
if (TEMP_FAILURE_RETRY(write(fd, "1\n", /*count*/2)) == -1) {
|
||||
ALOGE("%s: Could not write '1' to file '%s', error: '%s' (%d)",
|
||||
__FUNCTION__, filePath.string(), strerror(errno), errno);
|
||||
return false;
|
||||
}
|
||||
|
||||
close(fd);
|
||||
return true;
|
||||
}
|
||||
|
||||
int EmulatedCameraHotplugThread::getCameraId(String8 filePath) const {
|
||||
Vector<int>::const_iterator it, end;
|
||||
it = mSubscribedCameraIds.begin();
|
||||
end = mSubscribedCameraIds.end();
|
||||
for (; it != end; ++it) {
|
||||
String8 camPath = getFilePath(*it);
|
||||
|
||||
if (camPath == filePath) {
|
||||
return *it;
|
||||
}
|
||||
}
|
||||
|
||||
return NAME_NOT_FOUND;
|
||||
}
|
||||
|
||||
int EmulatedCameraHotplugThread::getCameraId(int wd) const {
|
||||
for (size_t i = 0; i < mSubscribers.size(); ++i) {
|
||||
if (mSubscribers[i].WatchID == wd) {
|
||||
return mSubscribers[i].CameraID;
|
||||
}
|
||||
}
|
||||
|
||||
return NAME_NOT_FOUND;
|
||||
}
|
||||
|
||||
SubscriberInfo* EmulatedCameraHotplugThread::getSubscriberInfo(int cameraId)
|
||||
{
|
||||
for (size_t i = 0; i < mSubscribers.size(); ++i) {
|
||||
if (mSubscribers[i].CameraID == cameraId) {
|
||||
return (SubscriberInfo*)&mSubscribers[i];
|
||||
}
|
||||
}
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
bool EmulatedCameraHotplugThread::addWatch(int cameraId) {
|
||||
String8 camPath = getFilePath(cameraId);
|
||||
int wd = inotify_add_watch(mInotifyFd,
|
||||
camPath.string(),
|
||||
IN_CLOSE_WRITE);
|
||||
|
||||
if (wd == -1) {
|
||||
ALOGE("%s: Could not add watch for '%s', error: '%s' (%d)",
|
||||
__FUNCTION__, camPath.string(), strerror(errno),
|
||||
errno);
|
||||
|
||||
mRunning = false;
|
||||
return false;
|
||||
}
|
||||
|
||||
ALOGV("%s: Watch added for camID='%d', wd='%d'",
|
||||
__FUNCTION__, cameraId, wd);
|
||||
|
||||
SubscriberInfo si = { cameraId, wd };
|
||||
mSubscribers.push_back(si);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool EmulatedCameraHotplugThread::removeWatch(int cameraId) {
|
||||
SubscriberInfo* si = getSubscriberInfo(cameraId);
|
||||
|
||||
if (!si) return false;
|
||||
|
||||
if (inotify_rm_watch(mInotifyFd, si->WatchID) == -1) {
|
||||
|
||||
ALOGE("%s: Could not remove watch for camID '%d', error: '%s' (%d)",
|
||||
__FUNCTION__, cameraId, strerror(errno),
|
||||
errno);
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
Vector<SubscriberInfo>::iterator it;
|
||||
for (it = mSubscribers.begin(); it != mSubscribers.end(); ++it) {
|
||||
if (it->CameraID == cameraId) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (it != mSubscribers.end()) {
|
||||
mSubscribers.erase(it);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
int EmulatedCameraHotplugThread::readFile(String8 filePath) const {
|
||||
|
||||
int fd = TEMP_FAILURE_RETRY(
|
||||
open(filePath.string(), O_RDONLY, /*mode*/0));
|
||||
if (fd == -1) {
|
||||
ALOGE("%s: Could not open file '%s', error: '%s' (%d)",
|
||||
__FUNCTION__, filePath.string(), strerror(errno), errno);
|
||||
return -1;
|
||||
}
|
||||
|
||||
char buffer[1];
|
||||
int length;
|
||||
|
||||
length = TEMP_FAILURE_RETRY(
|
||||
read(fd, buffer, sizeof(buffer)));
|
||||
|
||||
int retval;
|
||||
|
||||
ALOGV("%s: Read file '%s', length='%d', buffer='%c'",
|
||||
__FUNCTION__, filePath.string(), length, buffer[0]);
|
||||
|
||||
if (length == 0) { // EOF
|
||||
retval = 0; // empty file is the same thing as 0
|
||||
} else if (buffer[0] == '0') {
|
||||
retval = 0;
|
||||
} else { // anything non-empty that's not beginning with '0'
|
||||
retval = 1;
|
||||
}
|
||||
|
||||
close(fd);
|
||||
|
||||
return retval;
|
||||
}
|
||||
|
||||
} //namespace android
|
||||
77
android/camera/EmulatedCameraHotplugThread.h
Normal file
77
android/camera/EmulatedCameraHotplugThread.h
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
/*
|
||||
* Copyright (C) 2013 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_CAMERA_HOTPLUG_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_CAMERA_HOTPLUG_H
|
||||
|
||||
/**
|
||||
* This class emulates hotplug events by inotifying on a file, specific
|
||||
* to a camera ID. When the file changes between 1/0 the hotplug
|
||||
* status goes between PRESENT and NOT_PRESENT.
|
||||
*
|
||||
* Refer to FAKE_HOTPLUG_FILE in EmulatedCameraHotplugThread.cpp
|
||||
*/
|
||||
|
||||
#include "EmulatedCamera2.h"
|
||||
#include <utils/String8.h>
|
||||
#include <utils/Vector.h>
|
||||
|
||||
namespace android {
|
||||
class EmulatedCameraHotplugThread : public Thread {
|
||||
public:
|
||||
EmulatedCameraHotplugThread(const int* cameraIdArray, size_t size);
|
||||
~EmulatedCameraHotplugThread();
|
||||
|
||||
virtual void requestExit();
|
||||
virtual status_t requestExitAndWait();
|
||||
|
||||
private:
|
||||
|
||||
|
||||
virtual status_t readyToRun();
|
||||
virtual bool threadLoop();
|
||||
|
||||
struct SubscriberInfo {
|
||||
int CameraID;
|
||||
int WatchID;
|
||||
};
|
||||
|
||||
bool addWatch(int cameraId);
|
||||
bool removeWatch(int cameraId);
|
||||
SubscriberInfo* getSubscriberInfo(int cameraId);
|
||||
|
||||
int getCameraId(String8 filePath) const;
|
||||
int getCameraId(int wd) const;
|
||||
|
||||
String8 getFilePath(int cameraId) const;
|
||||
int readFile(String8 filePath) const;
|
||||
|
||||
bool createFileIfNotExists(int cameraId) const;
|
||||
|
||||
int mInotifyFd;
|
||||
Vector<int> mSubscribedCameraIds;
|
||||
Vector<SubscriberInfo> mSubscribers;
|
||||
|
||||
// variables above are unguarded:
|
||||
// -- accessed in thread loop or in constructor only
|
||||
|
||||
Mutex mMutex;
|
||||
|
||||
bool mRunning; // guarding only when it's important
|
||||
};
|
||||
} // namespace android
|
||||
|
||||
#endif
|
||||
90
android/camera/EmulatedFakeCamera.cpp
Executable file
90
android/camera/EmulatedFakeCamera.cpp
Executable file
|
|
@ -0,0 +1,90 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of a class EmulatedFakeCamera that encapsulates
|
||||
* functionality of a fake camera.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_FakeCamera"
|
||||
#include <cutils/log.h>
|
||||
#include <cutils/properties.h>
|
||||
#include "EmulatedFakeCamera.h"
|
||||
#include "EmulatedCameraFactory.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
EmulatedFakeCamera::EmulatedFakeCamera(int cameraId,
|
||||
bool facingBack,
|
||||
struct hw_module_t* module)
|
||||
: EmulatedCamera(cameraId, module),
|
||||
mFacingBack(facingBack),
|
||||
mFakeCameraDevice(this)
|
||||
{
|
||||
}
|
||||
|
||||
EmulatedFakeCamera::~EmulatedFakeCamera()
|
||||
{
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Public API overrides
|
||||
***************************************************************************/
|
||||
|
||||
status_t EmulatedFakeCamera::Initialize()
|
||||
{
|
||||
status_t res = mFakeCameraDevice.Initialize();
|
||||
if (res != NO_ERROR) {
|
||||
return res;
|
||||
}
|
||||
|
||||
const char* facing = mFacingBack ? EmulatedCamera::FACING_BACK :
|
||||
EmulatedCamera::FACING_FRONT;
|
||||
|
||||
mParameters.set(EmulatedCamera::FACING_KEY, facing);
|
||||
ALOGD("%s: Fake camera is facing %s", __FUNCTION__, facing);
|
||||
|
||||
mParameters.set(EmulatedCamera::ORIENTATION_KEY,
|
||||
gEmulatedCameraFactory.getFakeCameraOrientation());
|
||||
|
||||
res = EmulatedCamera::Initialize();
|
||||
if (res != NO_ERROR) {
|
||||
return res;
|
||||
}
|
||||
|
||||
/*
|
||||
* Parameters provided by the camera device.
|
||||
*/
|
||||
|
||||
/* 352x288 and 320x240 frame dimensions are required by the framework for
|
||||
* video mode preview and video recording. */
|
||||
mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
|
||||
"640x480,352x288,320x240");
|
||||
mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
|
||||
"640x480,352x288,320x240");
|
||||
mParameters.setPreviewSize(640, 480);
|
||||
mParameters.setPictureSize(640, 480);
|
||||
|
||||
return NO_ERROR;
|
||||
}
|
||||
|
||||
EmulatedCameraDevice* EmulatedFakeCamera::getCameraDevice()
|
||||
{
|
||||
return &mFakeCameraDevice;
|
||||
}
|
||||
|
||||
}; /* namespace android */
|
||||
74
android/camera/EmulatedFakeCamera.h
Executable file
74
android/camera/EmulatedFakeCamera.h
Executable file
|
|
@ -0,0 +1,74 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_H
|
||||
|
||||
/*
|
||||
* Contains declaration of a class EmulatedFakeCamera that encapsulates
|
||||
* functionality of a fake camera. This class is nothing more than a placeholder
|
||||
* for EmulatedFakeCameraDevice instance.
|
||||
*/
|
||||
|
||||
#include "EmulatedCamera.h"
|
||||
#include "EmulatedFakeCameraDevice.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Encapsulates functionality of a fake camera.
|
||||
* This class is nothing more than a placeholder for EmulatedFakeCameraDevice
|
||||
* instance that emulates a fake camera device.
|
||||
*/
|
||||
class EmulatedFakeCamera : public EmulatedCamera {
|
||||
public:
|
||||
/* Constructs EmulatedFakeCamera instance. */
|
||||
EmulatedFakeCamera(int cameraId, bool facingBack, struct hw_module_t* module);
|
||||
|
||||
/* Destructs EmulatedFakeCamera instance. */
|
||||
~EmulatedFakeCamera();
|
||||
|
||||
/****************************************************************************
|
||||
* EmulatedCamera virtual overrides.
|
||||
***************************************************************************/
|
||||
|
||||
public:
|
||||
/* Initializes EmulatedFakeCamera instance. */
|
||||
status_t Initialize();
|
||||
|
||||
/****************************************************************************
|
||||
* EmulatedCamera abstract API implementation.
|
||||
***************************************************************************/
|
||||
|
||||
protected:
|
||||
/* Gets emulated camera device ised by this instance of the emulated camera.
|
||||
*/
|
||||
EmulatedCameraDevice* getCameraDevice();
|
||||
|
||||
/****************************************************************************
|
||||
* Data memebers.
|
||||
***************************************************************************/
|
||||
|
||||
protected:
|
||||
/* Facing back (true) or front (false) switch. */
|
||||
bool mFacingBack;
|
||||
|
||||
/* Contained fake camera device object. */
|
||||
EmulatedFakeCameraDevice mFakeCameraDevice;
|
||||
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_H */
|
||||
2717
android/camera/EmulatedFakeCamera2.cpp
Normal file
2717
android/camera/EmulatedFakeCamera2.cpp
Normal file
File diff suppressed because it is too large
Load diff
429
android/camera/EmulatedFakeCamera2.h
Normal file
429
android/camera/EmulatedFakeCamera2.h
Normal file
|
|
@ -0,0 +1,429 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA2_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA2_H
|
||||
|
||||
/*
|
||||
* Contains declaration of a class EmulatedFakeCamera2 that encapsulates
|
||||
* functionality of a fake camera that implements version 2 of the camera device
|
||||
* interface.
|
||||
*/
|
||||
|
||||
#include "EmulatedCamera2.h"
|
||||
#include "fake-pipeline2/Base.h"
|
||||
#include "fake-pipeline2/Sensor.h"
|
||||
#include "fake-pipeline2/JpegCompressor.h"
|
||||
#include <utils/Condition.h>
|
||||
#include <utils/KeyedVector.h>
|
||||
#include <utils/String8.h>
|
||||
#include <utils/String16.h>
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Encapsulates functionality of an advanced fake camera. This camera contains
|
||||
* a simple simulation of a scene, sensor, and image processing pipeline.
|
||||
*/
|
||||
class EmulatedFakeCamera2 : public EmulatedCamera2 {
|
||||
public:
|
||||
/* Constructs EmulatedFakeCamera instance. */
|
||||
EmulatedFakeCamera2(int cameraId, bool facingBack, struct hw_module_t* module);
|
||||
|
||||
/* Destructs EmulatedFakeCamera instance. */
|
||||
~EmulatedFakeCamera2();
|
||||
|
||||
/****************************************************************************
|
||||
* EmulatedCamera2 virtual overrides.
|
||||
***************************************************************************/
|
||||
|
||||
public:
|
||||
/* Initializes EmulatedFakeCamera2 instance. */
|
||||
status_t Initialize();
|
||||
|
||||
/****************************************************************************
|
||||
* Camera Module API and generic hardware device API implementation
|
||||
***************************************************************************/
|
||||
public:
|
||||
|
||||
virtual status_t connectCamera(hw_device_t** device);
|
||||
|
||||
virtual status_t plugCamera();
|
||||
virtual status_t unplugCamera();
|
||||
virtual camera_device_status_t getHotplugStatus();
|
||||
|
||||
virtual status_t closeCamera();
|
||||
|
||||
virtual status_t getCameraInfo(struct camera_info *info);
|
||||
|
||||
/****************************************************************************
|
||||
* EmulatedCamera2 abstract API implementation.
|
||||
***************************************************************************/
|
||||
protected:
|
||||
/** Request input queue */
|
||||
|
||||
virtual int requestQueueNotify();
|
||||
|
||||
/** Count of requests in flight */
|
||||
virtual int getInProgressCount();
|
||||
|
||||
/** Cancel all captures in flight */
|
||||
//virtual int flushCapturesInProgress();
|
||||
|
||||
/** Construct default request */
|
||||
virtual int constructDefaultRequest(
|
||||
int request_template,
|
||||
camera_metadata_t **request);
|
||||
|
||||
virtual int allocateStream(
|
||||
uint32_t width,
|
||||
uint32_t height,
|
||||
int format,
|
||||
const camera2_stream_ops_t *stream_ops,
|
||||
uint32_t *stream_id,
|
||||
uint32_t *format_actual,
|
||||
uint32_t *usage,
|
||||
uint32_t *max_buffers);
|
||||
|
||||
virtual int registerStreamBuffers(
|
||||
uint32_t stream_id,
|
||||
int num_buffers,
|
||||
buffer_handle_t *buffers);
|
||||
|
||||
virtual int releaseStream(uint32_t stream_id);
|
||||
|
||||
// virtual int allocateReprocessStream(
|
||||
// uint32_t width,
|
||||
// uint32_t height,
|
||||
// uint32_t format,
|
||||
// const camera2_stream_ops_t *stream_ops,
|
||||
// uint32_t *stream_id,
|
||||
// uint32_t *format_actual,
|
||||
// uint32_t *usage,
|
||||
// uint32_t *max_buffers);
|
||||
|
||||
virtual int allocateReprocessStreamFromStream(
|
||||
uint32_t output_stream_id,
|
||||
const camera2_stream_in_ops_t *stream_ops,
|
||||
uint32_t *stream_id);
|
||||
|
||||
virtual int releaseReprocessStream(uint32_t stream_id);
|
||||
|
||||
virtual int triggerAction(uint32_t trigger_id,
|
||||
int32_t ext1,
|
||||
int32_t ext2);
|
||||
|
||||
/** Debug methods */
|
||||
|
||||
virtual int dump(int fd);
|
||||
|
||||
public:
|
||||
/****************************************************************************
|
||||
* Utility methods called by configure/readout threads and pipeline
|
||||
***************************************************************************/
|
||||
|
||||
// Get information about a given stream. Will lock mMutex
|
||||
const Stream &getStreamInfo(uint32_t streamId);
|
||||
const ReprocessStream &getReprocessStreamInfo(uint32_t streamId);
|
||||
|
||||
// Notifies rest of camera subsystem of serious error
|
||||
void signalError();
|
||||
|
||||
private:
|
||||
/****************************************************************************
|
||||
* Utility methods
|
||||
***************************************************************************/
|
||||
/** Construct static camera metadata, two-pass */
|
||||
status_t constructStaticInfo(
|
||||
camera_metadata_t **info,
|
||||
bool sizeRequest) const;
|
||||
|
||||
/** Two-pass implementation of constructDefaultRequest */
|
||||
status_t constructDefaultRequest(
|
||||
int request_template,
|
||||
camera_metadata_t **request,
|
||||
bool sizeRequest) const;
|
||||
/** Helper function for constructDefaultRequest */
|
||||
static status_t addOrSize( camera_metadata_t *request,
|
||||
bool sizeRequest,
|
||||
size_t *entryCount,
|
||||
size_t *dataCount,
|
||||
uint32_t tag,
|
||||
const void *entry_data,
|
||||
size_t entry_count);
|
||||
|
||||
/** Determine if the stream id is listed in any currently-in-flight
|
||||
* requests. Assumes mMutex is locked */
|
||||
bool isStreamInUse(uint32_t streamId);
|
||||
|
||||
/** Determine if the reprocess stream id is listed in any
|
||||
* currently-in-flight requests. Assumes mMutex is locked */
|
||||
bool isReprocessStreamInUse(uint32_t streamId);
|
||||
|
||||
/****************************************************************************
|
||||
* Pipeline controller threads
|
||||
***************************************************************************/
|
||||
|
||||
class ConfigureThread: public Thread {
|
||||
public:
|
||||
ConfigureThread(EmulatedFakeCamera2 *parent);
|
||||
~ConfigureThread();
|
||||
|
||||
status_t waitUntilRunning();
|
||||
status_t newRequestAvailable();
|
||||
status_t readyToRun();
|
||||
|
||||
bool isStreamInUse(uint32_t id);
|
||||
int getInProgressCount();
|
||||
private:
|
||||
EmulatedFakeCamera2 *mParent;
|
||||
static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
|
||||
|
||||
bool mRunning;
|
||||
bool threadLoop();
|
||||
|
||||
bool setupCapture();
|
||||
bool setupReprocess();
|
||||
|
||||
bool configureNextCapture();
|
||||
bool configureNextReprocess();
|
||||
|
||||
bool getBuffers();
|
||||
|
||||
Mutex mInputMutex; // Protects mActive, mRequestCount
|
||||
Condition mInputSignal;
|
||||
bool mActive; // Whether we're waiting for input requests or actively
|
||||
// working on them
|
||||
size_t mRequestCount;
|
||||
|
||||
camera_metadata_t *mRequest;
|
||||
|
||||
Mutex mInternalsMutex; // Lock before accessing below members.
|
||||
bool mWaitingForReadout;
|
||||
bool mNextNeedsJpeg;
|
||||
bool mNextIsCapture;
|
||||
int32_t mNextFrameNumber;
|
||||
int64_t mNextExposureTime;
|
||||
int64_t mNextFrameDuration;
|
||||
int32_t mNextSensitivity;
|
||||
Buffers *mNextBuffers;
|
||||
};
|
||||
|
||||
class ReadoutThread: public Thread, private JpegCompressor::JpegListener {
|
||||
public:
|
||||
ReadoutThread(EmulatedFakeCamera2 *parent);
|
||||
~ReadoutThread();
|
||||
|
||||
status_t readyToRun();
|
||||
|
||||
// Input
|
||||
status_t waitUntilRunning();
|
||||
bool waitForReady(nsecs_t timeout);
|
||||
void setNextOperation(bool isCapture,
|
||||
camera_metadata_t *request,
|
||||
Buffers *buffers);
|
||||
bool isStreamInUse(uint32_t id);
|
||||
int getInProgressCount();
|
||||
private:
|
||||
EmulatedFakeCamera2 *mParent;
|
||||
|
||||
bool mRunning;
|
||||
bool threadLoop();
|
||||
|
||||
bool readyForNextCapture();
|
||||
status_t collectStatisticsMetadata(camera_metadata_t *frame);
|
||||
|
||||
// Inputs
|
||||
Mutex mInputMutex; // Protects mActive, mInFlightQueue, mRequestCount
|
||||
Condition mInputSignal;
|
||||
Condition mReadySignal;
|
||||
|
||||
bool mActive;
|
||||
|
||||
static const int kInFlightQueueSize = 4;
|
||||
struct InFlightQueue {
|
||||
bool isCapture;
|
||||
camera_metadata_t *request;
|
||||
Buffers *buffers;
|
||||
} *mInFlightQueue;
|
||||
|
||||
size_t mInFlightHead;
|
||||
size_t mInFlightTail;
|
||||
|
||||
size_t mRequestCount;
|
||||
|
||||
// Internals
|
||||
Mutex mInternalsMutex;
|
||||
|
||||
bool mIsCapture;
|
||||
camera_metadata_t *mRequest;
|
||||
Buffers *mBuffers;
|
||||
|
||||
// Jpeg completion listeners
|
||||
void onJpegDone(const StreamBuffer &jpegBuffer, bool success);
|
||||
void onJpegInputDone(const StreamBuffer &inputBuffer);
|
||||
nsecs_t mJpegTimestamp;
|
||||
};
|
||||
|
||||
// 3A management thread (auto-exposure, focus, white balance)
|
||||
class ControlThread: public Thread {
|
||||
public:
|
||||
ControlThread(EmulatedFakeCamera2 *parent);
|
||||
~ControlThread();
|
||||
|
||||
status_t readyToRun();
|
||||
|
||||
status_t waitUntilRunning();
|
||||
|
||||
// Interpret request's control parameters and override
|
||||
// capture settings as needed
|
||||
status_t processRequest(camera_metadata_t *request);
|
||||
|
||||
status_t triggerAction(uint32_t msgType,
|
||||
int32_t ext1, int32_t ext2);
|
||||
private:
|
||||
ControlThread(const ControlThread &t);
|
||||
ControlThread& operator=(const ControlThread &t);
|
||||
|
||||
// Constants controlling fake 3A behavior
|
||||
static const nsecs_t kControlCycleDelay;
|
||||
static const nsecs_t kMinAfDuration;
|
||||
static const nsecs_t kMaxAfDuration;
|
||||
static const float kAfSuccessRate;
|
||||
static const float kContinuousAfStartRate;
|
||||
|
||||
static const float kAeScanStartRate;
|
||||
static const nsecs_t kMinAeDuration;
|
||||
static const nsecs_t kMaxAeDuration;
|
||||
static const nsecs_t kMinPrecaptureAeDuration;
|
||||
static const nsecs_t kMaxPrecaptureAeDuration;
|
||||
|
||||
static const nsecs_t kNormalExposureTime;
|
||||
static const nsecs_t kExposureJump;
|
||||
static const nsecs_t kMinExposureTime;
|
||||
|
||||
EmulatedFakeCamera2 *mParent;
|
||||
|
||||
bool mRunning;
|
||||
bool threadLoop();
|
||||
|
||||
Mutex mInputMutex; // Protects input methods
|
||||
Condition mInputSignal;
|
||||
|
||||
// Trigger notifications
|
||||
bool mStartAf;
|
||||
bool mCancelAf;
|
||||
bool mStartPrecapture;
|
||||
|
||||
// Latest state for 3A request fields
|
||||
uint8_t mControlMode;
|
||||
|
||||
uint8_t mEffectMode;
|
||||
uint8_t mSceneMode;
|
||||
|
||||
uint8_t mAfMode;
|
||||
bool mAfModeChange;
|
||||
|
||||
uint8_t mAwbMode;
|
||||
uint8_t mAeMode;
|
||||
|
||||
// Latest trigger IDs
|
||||
int32_t mAfTriggerId;
|
||||
int32_t mPrecaptureTriggerId;
|
||||
|
||||
// Current state for 3A algorithms
|
||||
uint8_t mAfState;
|
||||
uint8_t mAeState;
|
||||
uint8_t mAwbState;
|
||||
bool mAeLock;
|
||||
|
||||
// Current control parameters
|
||||
nsecs_t mExposureTime;
|
||||
|
||||
// Private to threadLoop and its utility methods
|
||||
|
||||
nsecs_t mAfScanDuration;
|
||||
nsecs_t mAeScanDuration;
|
||||
bool mLockAfterPassiveScan;
|
||||
|
||||
// Utility methods for AF
|
||||
int processAfTrigger(uint8_t afMode, uint8_t afState);
|
||||
int maybeStartAfScan(uint8_t afMode, uint8_t afState);
|
||||
int updateAfScan(uint8_t afMode, uint8_t afState, nsecs_t *maxSleep);
|
||||
void updateAfState(uint8_t newState, int32_t triggerId);
|
||||
|
||||
// Utility methods for precapture trigger
|
||||
int processPrecaptureTrigger(uint8_t aeMode, uint8_t aeState);
|
||||
int maybeStartAeScan(uint8_t aeMode, bool aeLock, uint8_t aeState);
|
||||
int updateAeScan(uint8_t aeMode, bool aeLock, uint8_t aeState,
|
||||
nsecs_t *maxSleep);
|
||||
void updateAeState(uint8_t newState, int32_t triggerId);
|
||||
};
|
||||
|
||||
/****************************************************************************
|
||||
* Static configuration information
|
||||
***************************************************************************/
|
||||
private:
|
||||
static const uint32_t kMaxRawStreamCount = 1;
|
||||
static const uint32_t kMaxProcessedStreamCount = 3;
|
||||
static const uint32_t kMaxJpegStreamCount = 1;
|
||||
static const uint32_t kMaxReprocessStreamCount = 2;
|
||||
static const uint32_t kMaxBufferCount = 4;
|
||||
static const uint32_t kAvailableFormats[];
|
||||
static const uint32_t kAvailableRawSizes[];
|
||||
static const uint64_t kAvailableRawMinDurations[];
|
||||
static const uint32_t kAvailableProcessedSizesBack[];
|
||||
static const uint32_t kAvailableProcessedSizesFront[];
|
||||
static const uint64_t kAvailableProcessedMinDurations[];
|
||||
static const uint32_t kAvailableJpegSizesBack[];
|
||||
static const uint32_t kAvailableJpegSizesFront[];
|
||||
static const uint64_t kAvailableJpegMinDurations[];
|
||||
|
||||
/****************************************************************************
|
||||
* Data members.
|
||||
***************************************************************************/
|
||||
|
||||
protected:
|
||||
/* Facing back (true) or front (false) switch. */
|
||||
bool mFacingBack;
|
||||
|
||||
private:
|
||||
bool mIsConnected;
|
||||
|
||||
/** Stream manipulation */
|
||||
uint32_t mNextStreamId;
|
||||
uint32_t mRawStreamCount;
|
||||
uint32_t mProcessedStreamCount;
|
||||
uint32_t mJpegStreamCount;
|
||||
|
||||
uint32_t mNextReprocessStreamId;
|
||||
uint32_t mReprocessStreamCount;
|
||||
|
||||
KeyedVector<uint32_t, Stream> mStreams;
|
||||
KeyedVector<uint32_t, ReprocessStream> mReprocessStreams;
|
||||
|
||||
/** Simulated hardware interfaces */
|
||||
sp<Sensor> mSensor;
|
||||
sp<JpegCompressor> mJpegCompressor;
|
||||
|
||||
/** Pipeline control threads */
|
||||
sp<ConfigureThread> mConfigureThread;
|
||||
sp<ReadoutThread> mReadoutThread;
|
||||
sp<ControlThread> mControlThread;
|
||||
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA2_H */
|
||||
2519
android/camera/EmulatedFakeCamera3.cpp
Normal file
2519
android/camera/EmulatedFakeCamera3.cpp
Normal file
File diff suppressed because it is too large
Load diff
289
android/camera/EmulatedFakeCamera3.h
Normal file
289
android/camera/EmulatedFakeCamera3.h
Normal file
|
|
@ -0,0 +1,289 @@
|
|||
/*
|
||||
* Copyright (C) 2013 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA3_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA3_H
|
||||
|
||||
/**
|
||||
* Contains declaration of a class EmulatedCamera that encapsulates
|
||||
* functionality of a fake camera that implements version 3 of the camera device
|
||||
* interace.
|
||||
*/
|
||||
|
||||
#include "EmulatedCamera3.h"
|
||||
#include "fake-pipeline2/Base.h"
|
||||
#include "fake-pipeline2/Sensor.h"
|
||||
#include "fake-pipeline2/JpegCompressor.h"
|
||||
#include <camera/CameraMetadata.h>
|
||||
#include <utils/SortedVector.h>
|
||||
#include <utils/List.h>
|
||||
#include <utils/Mutex.h>
|
||||
|
||||
namespace android {
|
||||
|
||||
/**
|
||||
* Encapsulates functionality for a v3 HAL camera which produces synthetic data.
|
||||
*
|
||||
* Note that EmulatedCameraFactory instantiates an object of this class just
|
||||
* once, when EmulatedCameraFactory instance gets constructed. Connection to /
|
||||
* disconnection from the actual camera device is handled by calls to
|
||||
* connectDevice(), and closeCamera() methods of this class that are invoked in
|
||||
* response to hw_module_methods_t::open, and camera_device::close callbacks.
|
||||
*/
|
||||
class EmulatedFakeCamera3 : public EmulatedCamera3,
|
||||
private Sensor::SensorListener {
|
||||
public:
|
||||
|
||||
EmulatedFakeCamera3(int cameraId, bool facingBack,
|
||||
struct hw_module_t* module);
|
||||
|
||||
virtual ~EmulatedFakeCamera3();
|
||||
|
||||
/****************************************************************************
|
||||
* EmulatedCamera3 virtual overrides
|
||||
***************************************************************************/
|
||||
|
||||
public:
|
||||
|
||||
virtual status_t Initialize();
|
||||
|
||||
/****************************************************************************
|
||||
* Camera module API and generic hardware device API implementation
|
||||
***************************************************************************/
|
||||
|
||||
public:
|
||||
virtual status_t connectCamera(hw_device_t** device);
|
||||
|
||||
virtual status_t closeCamera();
|
||||
|
||||
virtual status_t getCameraInfo(struct camera_info *info);
|
||||
|
||||
/****************************************************************************
|
||||
* EmulatedCamera3 abstract API implementation
|
||||
***************************************************************************/
|
||||
|
||||
protected:
|
||||
|
||||
virtual status_t configureStreams(
|
||||
camera3_stream_configuration *streamList);
|
||||
|
||||
virtual status_t registerStreamBuffers(
|
||||
const camera3_stream_buffer_set *bufferSet) ;
|
||||
|
||||
virtual const camera_metadata_t* constructDefaultRequestSettings(
|
||||
int type);
|
||||
|
||||
virtual status_t processCaptureRequest(camera3_capture_request *request);
|
||||
|
||||
virtual status_t flush();
|
||||
|
||||
/** Debug methods */
|
||||
|
||||
virtual void dump(int fd);
|
||||
|
||||
private:
|
||||
|
||||
/**
|
||||
* Get the requested capability set for this camera
|
||||
*/
|
||||
status_t getCameraCapabilities();
|
||||
|
||||
bool hasCapability(AvailableCapabilities cap);
|
||||
|
||||
/**
|
||||
* Build the static info metadata buffer for this device
|
||||
*/
|
||||
status_t constructStaticInfo();
|
||||
|
||||
/**
|
||||
* Run the fake 3A algorithms as needed. May override/modify settings
|
||||
* values.
|
||||
*/
|
||||
status_t process3A(CameraMetadata &settings);
|
||||
|
||||
status_t doFakeAE(CameraMetadata &settings);
|
||||
status_t doFakeAF(CameraMetadata &settings);
|
||||
status_t doFakeAWB(CameraMetadata &settings);
|
||||
void update3A(CameraMetadata &settings);
|
||||
|
||||
/** Signal from readout thread that it doesn't have anything to do */
|
||||
void signalReadoutIdle();
|
||||
|
||||
/** Handle interrupt events from the sensor */
|
||||
void onSensorEvent(uint32_t frameNumber, Event e, nsecs_t timestamp);
|
||||
|
||||
/****************************************************************************
|
||||
* Static configuration information
|
||||
***************************************************************************/
|
||||
private:
|
||||
static const uint32_t kMaxRawStreamCount = 1;
|
||||
static const uint32_t kMaxProcessedStreamCount = 3;
|
||||
static const uint32_t kMaxJpegStreamCount = 1;
|
||||
static const uint32_t kMaxReprocessStreamCount = 2;
|
||||
static const uint32_t kMaxBufferCount = 4;
|
||||
// We need a positive stream ID to distinguish external buffers from
|
||||
// sensor-generated buffers which use a nonpositive ID. Otherwise, HAL3 has
|
||||
// no concept of a stream id.
|
||||
static const uint32_t kGenericStreamId = 1;
|
||||
static const int32_t kAvailableFormats[];
|
||||
|
||||
static const int64_t kSyncWaitTimeout = 10000000; // 10 ms
|
||||
static const int32_t kMaxSyncTimeoutCount = 1000; // 1000 kSyncWaitTimeouts
|
||||
static const uint32_t kFenceTimeoutMs = 2000; // 2 s
|
||||
|
||||
/****************************************************************************
|
||||
* Data members.
|
||||
***************************************************************************/
|
||||
|
||||
/* HAL interface serialization lock. */
|
||||
Mutex mLock;
|
||||
|
||||
/* Facing back (true) or front (false) switch. */
|
||||
bool mFacingBack;
|
||||
|
||||
SortedVector<AvailableCapabilities> mCapabilities;
|
||||
|
||||
/**
|
||||
* Cache for default templates. Once one is requested, the pointer must be
|
||||
* valid at least until close() is called on the device
|
||||
*/
|
||||
camera_metadata_t *mDefaultTemplates[CAMERA3_TEMPLATE_COUNT];
|
||||
|
||||
/**
|
||||
* Private stream information, stored in camera3_stream_t->priv.
|
||||
*/
|
||||
struct PrivateStreamInfo {
|
||||
bool alive;
|
||||
};
|
||||
|
||||
// Shortcut to the input stream
|
||||
camera3_stream_t* mInputStream;
|
||||
|
||||
typedef List<camera3_stream_t*> StreamList;
|
||||
typedef List<camera3_stream_t*>::iterator StreamIterator;
|
||||
typedef Vector<camera3_stream_buffer> HalBufferVector;
|
||||
|
||||
// All streams, including input stream
|
||||
StreamList mStreams;
|
||||
|
||||
// Cached settings from latest submitted request
|
||||
CameraMetadata mPrevSettings;
|
||||
|
||||
/** Fake hardware interfaces */
|
||||
sp<Sensor> mSensor;
|
||||
sp<JpegCompressor> mJpegCompressor;
|
||||
friend class JpegCompressor;
|
||||
|
||||
/** Processing thread for sending out results */
|
||||
|
||||
class ReadoutThread : public Thread, private JpegCompressor::JpegListener {
|
||||
public:
|
||||
ReadoutThread(EmulatedFakeCamera3 *parent);
|
||||
~ReadoutThread();
|
||||
|
||||
struct Request {
|
||||
uint32_t frameNumber;
|
||||
CameraMetadata settings;
|
||||
HalBufferVector *buffers;
|
||||
Buffers *sensorBuffers;
|
||||
};
|
||||
|
||||
/**
|
||||
* Interface to parent class
|
||||
*/
|
||||
|
||||
// Place request in the in-flight queue to wait for sensor capture
|
||||
void queueCaptureRequest(const Request &r);
|
||||
|
||||
// Test if the readout thread is idle (no in-flight requests, not
|
||||
// currently reading out anything
|
||||
bool isIdle();
|
||||
|
||||
// Wait until isIdle is true
|
||||
status_t waitForReadout();
|
||||
|
||||
private:
|
||||
static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
|
||||
static const nsecs_t kMaxWaitLoops = 1000;
|
||||
static const size_t kMaxQueueSize = 2;
|
||||
|
||||
EmulatedFakeCamera3 *mParent;
|
||||
Mutex mLock;
|
||||
|
||||
List<Request> mInFlightQueue;
|
||||
Condition mInFlightSignal;
|
||||
bool mThreadActive;
|
||||
|
||||
virtual bool threadLoop();
|
||||
|
||||
// Only accessed by threadLoop
|
||||
|
||||
Request mCurrentRequest;
|
||||
|
||||
// Jpeg completion callbacks
|
||||
|
||||
Mutex mJpegLock;
|
||||
bool mJpegWaiting;
|
||||
camera3_stream_buffer mJpegHalBuffer;
|
||||
uint32_t mJpegFrameNumber;
|
||||
virtual void onJpegDone(const StreamBuffer &jpegBuffer, bool success);
|
||||
virtual void onJpegInputDone(const StreamBuffer &inputBuffer);
|
||||
};
|
||||
|
||||
sp<ReadoutThread> mReadoutThread;
|
||||
|
||||
/** Fake 3A constants */
|
||||
|
||||
static const nsecs_t kNormalExposureTime;
|
||||
static const nsecs_t kFacePriorityExposureTime;
|
||||
static const int kNormalSensitivity;
|
||||
static const int kFacePrioritySensitivity;
|
||||
// Rate of converging AE to new target value, as fraction of difference between
|
||||
// current and target value.
|
||||
static const float kExposureTrackRate;
|
||||
// Minimum duration for precapture state. May be longer if slow to converge
|
||||
// to target exposure
|
||||
static const int kPrecaptureMinFrames;
|
||||
// How often to restart AE 'scanning'
|
||||
static const int kStableAeMaxFrames;
|
||||
// Maximum stop below 'normal' exposure time that we'll wander to while
|
||||
// pretending to converge AE. In powers of 2. (-2 == 1/4 as bright)
|
||||
static const float kExposureWanderMin;
|
||||
// Maximum stop above 'normal' exposure time that we'll wander to while
|
||||
// pretending to converge AE. In powers of 2. (2 == 4x as bright)
|
||||
static const float kExposureWanderMax;
|
||||
|
||||
/** Fake 3A state */
|
||||
|
||||
uint8_t mControlMode;
|
||||
bool mFacePriority;
|
||||
uint8_t mAeState;
|
||||
uint8_t mAfState;
|
||||
uint8_t mAwbState;
|
||||
uint8_t mAeMode;
|
||||
uint8_t mAfMode;
|
||||
uint8_t mAwbMode;
|
||||
|
||||
int mAeCounter;
|
||||
nsecs_t mAeCurrentExposureTime;
|
||||
nsecs_t mAeTargetExposureTime;
|
||||
int mAeCurrentSensitivity;
|
||||
|
||||
};
|
||||
|
||||
} // namespace android
|
||||
|
||||
#endif // HW_EMULATOR_CAMERA_EMULATED_CAMERA3_H
|
||||
437
android/camera/EmulatedFakeCameraDevice.cpp
Executable file
437
android/camera/EmulatedFakeCameraDevice.cpp
Executable file
|
|
@ -0,0 +1,437 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of a class EmulatedFakeCameraDevice that encapsulates
|
||||
* fake camera device.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_FakeDevice"
|
||||
#include <cutils/log.h>
|
||||
#include "EmulatedFakeCamera.h"
|
||||
#include "EmulatedFakeCameraDevice.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
EmulatedFakeCameraDevice::EmulatedFakeCameraDevice(EmulatedFakeCamera* camera_hal)
|
||||
: EmulatedCameraDevice(camera_hal),
|
||||
mBlackYUV(kBlack32),
|
||||
mWhiteYUV(kWhite32),
|
||||
mRedYUV(kRed8),
|
||||
mGreenYUV(kGreen8),
|
||||
mBlueYUV(kBlue8),
|
||||
mLastRedrawn(0),
|
||||
mCheckX(0),
|
||||
mCheckY(0),
|
||||
mCcounter(0)
|
||||
#if EFCD_ROTATE_FRAME
|
||||
, mLastRotatedAt(0),
|
||||
mCurrentFrameType(0),
|
||||
mCurrentColor(&mWhiteYUV)
|
||||
#endif // EFCD_ROTATE_FRAME
|
||||
{
|
||||
// Makes the image with the original exposure compensation darker.
|
||||
// So the effects of changing the exposure compensation can be seen.
|
||||
mBlackYUV.Y = mBlackYUV.Y / 2;
|
||||
mWhiteYUV.Y = mWhiteYUV.Y / 2;
|
||||
mRedYUV.Y = mRedYUV.Y / 2;
|
||||
mGreenYUV.Y = mGreenYUV.Y / 2;
|
||||
mBlueYUV.Y = mBlueYUV.Y / 2;
|
||||
}
|
||||
|
||||
EmulatedFakeCameraDevice::~EmulatedFakeCameraDevice()
|
||||
{
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Emulated camera device abstract interface implementation.
|
||||
***************************************************************************/
|
||||
|
||||
status_t EmulatedFakeCameraDevice::connectDevice()
|
||||
{
|
||||
ALOGV("%s", __FUNCTION__);
|
||||
|
||||
Mutex::Autolock locker(&mObjectLock);
|
||||
if (!isInitialized()) {
|
||||
ALOGE("%s: Fake camera device is not initialized.", __FUNCTION__);
|
||||
return EINVAL;
|
||||
}
|
||||
if (isConnected()) {
|
||||
ALOGW("%s: Fake camera device is already connected.", __FUNCTION__);
|
||||
return NO_ERROR;
|
||||
}
|
||||
|
||||
/* There is no device to connect to. */
|
||||
mState = ECDS_CONNECTED;
|
||||
|
||||
return NO_ERROR;
|
||||
}
|
||||
|
||||
status_t EmulatedFakeCameraDevice::disconnectDevice()
|
||||
{
|
||||
ALOGV("%s", __FUNCTION__);
|
||||
|
||||
Mutex::Autolock locker(&mObjectLock);
|
||||
if (!isConnected()) {
|
||||
ALOGW("%s: Fake camera device is already disconnected.", __FUNCTION__);
|
||||
return NO_ERROR;
|
||||
}
|
||||
if (isStarted()) {
|
||||
ALOGE("%s: Cannot disconnect from the started device.", __FUNCTION__);
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
/* There is no device to disconnect from. */
|
||||
mState = ECDS_INITIALIZED;
|
||||
|
||||
return NO_ERROR;
|
||||
}
|
||||
|
||||
status_t EmulatedFakeCameraDevice::startDevice(int width,
|
||||
int height,
|
||||
uint32_t pix_fmt)
|
||||
{
|
||||
ALOGV("%s", __FUNCTION__);
|
||||
|
||||
Mutex::Autolock locker(&mObjectLock);
|
||||
if (!isConnected()) {
|
||||
ALOGE("%s: Fake camera device is not connected.", __FUNCTION__);
|
||||
return EINVAL;
|
||||
}
|
||||
if (isStarted()) {
|
||||
ALOGE("%s: Fake camera device is already started.", __FUNCTION__);
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
/* Initialize the base class. */
|
||||
const status_t res =
|
||||
EmulatedCameraDevice::commonStartDevice(width, height, pix_fmt);
|
||||
if (res == NO_ERROR) {
|
||||
/* Calculate U/V panes inside the framebuffer. */
|
||||
switch (mPixelFormat) {
|
||||
case V4L2_PIX_FMT_YVU420:
|
||||
mFrameV = mCurrentFrame + mTotalPixels;
|
||||
mFrameU = mFrameU + mTotalPixels / 4;
|
||||
mUVStep = 1;
|
||||
mUVTotalNum = mTotalPixels / 4;
|
||||
break;
|
||||
|
||||
case V4L2_PIX_FMT_YUV420:
|
||||
mFrameU = mCurrentFrame + mTotalPixels;
|
||||
mFrameV = mFrameU + mTotalPixels / 4;
|
||||
mUVStep = 1;
|
||||
mUVTotalNum = mTotalPixels / 4;
|
||||
break;
|
||||
|
||||
case V4L2_PIX_FMT_NV21:
|
||||
/* Interleaved UV pane, V first. */
|
||||
mFrameV = mCurrentFrame + mTotalPixels;
|
||||
mFrameU = mFrameV + 1;
|
||||
mUVStep = 2;
|
||||
mUVTotalNum = mTotalPixels / 4;
|
||||
break;
|
||||
|
||||
case V4L2_PIX_FMT_NV12:
|
||||
/* Interleaved UV pane, U first. */
|
||||
mFrameU = mCurrentFrame + mTotalPixels;
|
||||
mFrameV = mFrameU + 1;
|
||||
mUVStep = 2;
|
||||
mUVTotalNum = mTotalPixels / 4;
|
||||
break;
|
||||
|
||||
default:
|
||||
ALOGE("%s: Unknown pixel format %.4s", __FUNCTION__,
|
||||
reinterpret_cast<const char*>(&mPixelFormat));
|
||||
return EINVAL;
|
||||
}
|
||||
/* Number of items in a single row inside U/V panes. */
|
||||
mUVInRow = (width / 2) * mUVStep;
|
||||
mState = ECDS_STARTED;
|
||||
} else {
|
||||
ALOGE("%s: commonStartDevice failed", __FUNCTION__);
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
status_t EmulatedFakeCameraDevice::stopDevice()
|
||||
{
|
||||
ALOGV("%s", __FUNCTION__);
|
||||
|
||||
Mutex::Autolock locker(&mObjectLock);
|
||||
if (!isStarted()) {
|
||||
ALOGW("%s: Fake camera device is not started.", __FUNCTION__);
|
||||
return NO_ERROR;
|
||||
}
|
||||
|
||||
mFrameU = mFrameV = NULL;
|
||||
EmulatedCameraDevice::commonStopDevice();
|
||||
mState = ECDS_CONNECTED;
|
||||
|
||||
return NO_ERROR;
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Worker thread management overrides.
|
||||
***************************************************************************/
|
||||
|
||||
bool EmulatedFakeCameraDevice::inWorkerThread()
|
||||
{
|
||||
/* Wait till FPS timeout expires, or thread exit message is received. */
|
||||
WorkerThread::SelectRes res =
|
||||
getWorkerThread()->Select(-1, 1000000 / mEmulatedFPS);
|
||||
if (res == WorkerThread::EXIT_THREAD) {
|
||||
ALOGV("%s: Worker thread has been terminated.", __FUNCTION__);
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Lets see if we need to generate a new frame. */
|
||||
if ((systemTime(SYSTEM_TIME_MONOTONIC) - mLastRedrawn) >= mRedrawAfter) {
|
||||
/*
|
||||
* Time to generate a new frame.
|
||||
*/
|
||||
|
||||
#if EFCD_ROTATE_FRAME
|
||||
const int frame_type = rotateFrame();
|
||||
switch (frame_type) {
|
||||
case 0:
|
||||
drawCheckerboard();
|
||||
break;
|
||||
case 1:
|
||||
drawStripes();
|
||||
break;
|
||||
case 2:
|
||||
drawSolid(mCurrentColor);
|
||||
break;
|
||||
}
|
||||
#else
|
||||
/* Draw the checker board. */
|
||||
drawCheckerboard();
|
||||
|
||||
#endif // EFCD_ROTATE_FRAME
|
||||
|
||||
mLastRedrawn = systemTime(SYSTEM_TIME_MONOTONIC);
|
||||
}
|
||||
|
||||
/* Timestamp the current frame, and notify the camera HAL about new frame. */
|
||||
mCurFrameTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
|
||||
mCameraHAL->onNextFrameAvailable(mCurrentFrame, mCurFrameTimestamp, this);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Fake camera device private API
|
||||
***************************************************************************/
|
||||
|
||||
void EmulatedFakeCameraDevice::drawCheckerboard()
|
||||
{
|
||||
const int size = mFrameWidth / 10;
|
||||
bool black = true;
|
||||
|
||||
if (size == 0) {
|
||||
// When this happens, it happens at a very high rate,
|
||||
// so don't log any messages and just return.
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
if((mCheckX / size) & 1)
|
||||
black = false;
|
||||
if((mCheckY / size) & 1)
|
||||
black = !black;
|
||||
|
||||
int county = mCheckY % size;
|
||||
int checkxremainder = mCheckX % size;
|
||||
uint8_t* Y = mCurrentFrame;
|
||||
uint8_t* U_pos = mFrameU;
|
||||
uint8_t* V_pos = mFrameV;
|
||||
uint8_t* U = U_pos;
|
||||
uint8_t* V = V_pos;
|
||||
|
||||
YUVPixel adjustedWhite = YUVPixel(mWhiteYUV);
|
||||
changeWhiteBalance(adjustedWhite.Y, adjustedWhite.U, adjustedWhite.V);
|
||||
|
||||
for(int y = 0; y < mFrameHeight; y++) {
|
||||
int countx = checkxremainder;
|
||||
bool current = black;
|
||||
for(int x = 0; x < mFrameWidth; x += 2) {
|
||||
if (current) {
|
||||
mBlackYUV.get(Y, U, V);
|
||||
} else {
|
||||
adjustedWhite.get(Y, U, V);
|
||||
}
|
||||
*Y = changeExposure(*Y);
|
||||
Y[1] = *Y;
|
||||
Y += 2; U += mUVStep; V += mUVStep;
|
||||
countx += 2;
|
||||
if(countx >= size) {
|
||||
countx = 0;
|
||||
current = !current;
|
||||
}
|
||||
}
|
||||
if (y & 0x1) {
|
||||
U_pos = U;
|
||||
V_pos = V;
|
||||
} else {
|
||||
U = U_pos;
|
||||
V = V_pos;
|
||||
}
|
||||
if(county++ >= size) {
|
||||
county = 0;
|
||||
black = !black;
|
||||
}
|
||||
}
|
||||
mCheckX += 3;
|
||||
mCheckY++;
|
||||
|
||||
/* Run the square. */
|
||||
int sqx = ((mCcounter * 3) & 255);
|
||||
if(sqx > 128) sqx = 255 - sqx;
|
||||
int sqy = ((mCcounter * 5) & 255);
|
||||
if(sqy > 128) sqy = 255 - sqy;
|
||||
const int sqsize = mFrameWidth / 10;
|
||||
drawSquare(sqx * sqsize / 32, sqy * sqsize / 32, (sqsize * 5) >> 1,
|
||||
(mCcounter & 0x100) ? &mRedYUV : &mGreenYUV);
|
||||
mCcounter++;
|
||||
}
|
||||
|
||||
void EmulatedFakeCameraDevice::drawSquare(int x,
|
||||
int y,
|
||||
int size,
|
||||
const YUVPixel* color)
|
||||
{
|
||||
const int square_xstop = min(mFrameWidth, x + size);
|
||||
const int square_ystop = min(mFrameHeight, y + size);
|
||||
uint8_t* Y_pos = mCurrentFrame + y * mFrameWidth + x;
|
||||
|
||||
YUVPixel adjustedColor = *color;
|
||||
changeWhiteBalance(adjustedColor.Y, adjustedColor.U, adjustedColor.V);
|
||||
|
||||
// Draw the square.
|
||||
for (; y < square_ystop; y++) {
|
||||
const int iUV = (y / 2) * mUVInRow + (x / 2) * mUVStep;
|
||||
uint8_t* sqU = mFrameU + iUV;
|
||||
uint8_t* sqV = mFrameV + iUV;
|
||||
uint8_t* sqY = Y_pos;
|
||||
for (int i = x; i < square_xstop; i += 2) {
|
||||
adjustedColor.get(sqY, sqU, sqV);
|
||||
*sqY = changeExposure(*sqY);
|
||||
sqY[1] = *sqY;
|
||||
sqY += 2; sqU += mUVStep; sqV += mUVStep;
|
||||
}
|
||||
Y_pos += mFrameWidth;
|
||||
}
|
||||
}
|
||||
|
||||
#if EFCD_ROTATE_FRAME
|
||||
|
||||
void EmulatedFakeCameraDevice::drawSolid(YUVPixel* color)
|
||||
{
|
||||
YUVPixel adjustedColor = *color;
|
||||
changeWhiteBalance(adjustedColor.Y, adjustedColor.U, adjustedColor.V);
|
||||
|
||||
/* All Ys are the same. */
|
||||
memset(mCurrentFrame, changeExposure(adjustedColor.Y), mTotalPixels);
|
||||
|
||||
/* Fill U, and V panes. */
|
||||
uint8_t* U = mFrameU;
|
||||
uint8_t* V = mFrameV;
|
||||
for (int k = 0; k < mUVTotalNum; k++, U += mUVStep, V += mUVStep) {
|
||||
*U = color->U;
|
||||
*V = color->V;
|
||||
}
|
||||
}
|
||||
|
||||
void EmulatedFakeCameraDevice::drawStripes()
|
||||
{
|
||||
/* Divide frame into 4 stripes. */
|
||||
const int change_color_at = mFrameHeight / 4;
|
||||
const int each_in_row = mUVInRow / mUVStep;
|
||||
uint8_t* pY = mCurrentFrame;
|
||||
for (int y = 0; y < mFrameHeight; y++, pY += mFrameWidth) {
|
||||
/* Select the color. */
|
||||
YUVPixel* color;
|
||||
const int color_index = y / change_color_at;
|
||||
if (color_index == 0) {
|
||||
/* White stripe on top. */
|
||||
color = &mWhiteYUV;
|
||||
} else if (color_index == 1) {
|
||||
/* Then the red stripe. */
|
||||
color = &mRedYUV;
|
||||
} else if (color_index == 2) {
|
||||
/* Then the green stripe. */
|
||||
color = &mGreenYUV;
|
||||
} else {
|
||||
/* And the blue stripe at the bottom. */
|
||||
color = &mBlueYUV;
|
||||
}
|
||||
changeWhiteBalance(color->Y, color->U, color->V);
|
||||
|
||||
/* All Ys at the row are the same. */
|
||||
memset(pY, changeExposure(color->Y), mFrameWidth);
|
||||
|
||||
/* Offset of the current row inside U/V panes. */
|
||||
const int uv_off = (y / 2) * mUVInRow;
|
||||
/* Fill U, and V panes. */
|
||||
uint8_t* U = mFrameU + uv_off;
|
||||
uint8_t* V = mFrameV + uv_off;
|
||||
for (int k = 0; k < each_in_row; k++, U += mUVStep, V += mUVStep) {
|
||||
*U = color->U;
|
||||
*V = color->V;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int EmulatedFakeCameraDevice::rotateFrame()
|
||||
{
|
||||
if ((systemTime(SYSTEM_TIME_MONOTONIC) - mLastRotatedAt) >= mRotateFreq) {
|
||||
mLastRotatedAt = systemTime(SYSTEM_TIME_MONOTONIC);
|
||||
mCurrentFrameType++;
|
||||
if (mCurrentFrameType > 2) {
|
||||
mCurrentFrameType = 0;
|
||||
}
|
||||
if (mCurrentFrameType == 2) {
|
||||
ALOGD("********** Rotated to the SOLID COLOR frame **********");
|
||||
/* Solid color: lets rotate color too. */
|
||||
if (mCurrentColor == &mWhiteYUV) {
|
||||
ALOGD("----- Painting a solid RED frame -----");
|
||||
mCurrentColor = &mRedYUV;
|
||||
} else if (mCurrentColor == &mRedYUV) {
|
||||
ALOGD("----- Painting a solid GREEN frame -----");
|
||||
mCurrentColor = &mGreenYUV;
|
||||
} else if (mCurrentColor == &mGreenYUV) {
|
||||
ALOGD("----- Painting a solid BLUE frame -----");
|
||||
mCurrentColor = &mBlueYUV;
|
||||
} else {
|
||||
/* Back to white. */
|
||||
ALOGD("----- Painting a solid WHITE frame -----");
|
||||
mCurrentColor = &mWhiteYUV;
|
||||
}
|
||||
} else if (mCurrentFrameType == 0) {
|
||||
ALOGD("********** Rotated to the CHECKERBOARD frame **********");
|
||||
} else if (mCurrentFrameType == 1) {
|
||||
ALOGD("********** Rotated to the STRIPED frame **********");
|
||||
}
|
||||
}
|
||||
|
||||
return mCurrentFrameType;
|
||||
}
|
||||
|
||||
#endif // EFCD_ROTATE_FRAME
|
||||
|
||||
}; /* namespace android */
|
||||
197
android/camera/EmulatedFakeCameraDevice.h
Executable file
197
android/camera/EmulatedFakeCameraDevice.h
Executable file
|
|
@ -0,0 +1,197 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_DEVICE_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_DEVICE_H
|
||||
|
||||
/*
|
||||
* Contains declaration of a class EmulatedFakeCameraDevice that encapsulates
|
||||
* a fake camera device.
|
||||
*/
|
||||
|
||||
#include "Converters.h"
|
||||
#include "EmulatedCameraDevice.h"
|
||||
|
||||
/* This is used for debugging format / conversion issues. If EFCD_ROTATE_FRAME is
|
||||
* set to 0, the frame content will be always the "checkerboard". Otherwise, if
|
||||
* EFCD_ROTATE_FRAME is set to a non-zero value, the frame content will "rotate"
|
||||
* from a "checkerboard" frame to a "white/red/green/blue stripes" frame, to a
|
||||
* "white/red/green/blue" frame. Frame content rotation helps finding bugs in
|
||||
* format conversions.
|
||||
*/
|
||||
#define EFCD_ROTATE_FRAME 0
|
||||
|
||||
namespace android {
|
||||
|
||||
class EmulatedFakeCamera;
|
||||
|
||||
/* Encapsulates a fake camera device.
|
||||
* Fake camera device emulates a camera device by providing frames containing
|
||||
* a black and white checker board, moving diagonally towards the 0,0 corner.
|
||||
* There is also a green, or red square that bounces inside the frame, changing
|
||||
* its color when bouncing off the 0,0 corner.
|
||||
*/
|
||||
class EmulatedFakeCameraDevice : public EmulatedCameraDevice {
|
||||
public:
|
||||
/* Constructs EmulatedFakeCameraDevice instance. */
|
||||
explicit EmulatedFakeCameraDevice(EmulatedFakeCamera* camera_hal);
|
||||
|
||||
/* Destructs EmulatedFakeCameraDevice instance. */
|
||||
~EmulatedFakeCameraDevice();
|
||||
|
||||
/***************************************************************************
|
||||
* Emulated camera device abstract interface implementation.
|
||||
* See declarations of these methods in EmulatedCameraDevice class for
|
||||
* information on each of these methods.
|
||||
**************************************************************************/
|
||||
|
||||
public:
|
||||
/* Connects to the camera device.
|
||||
* Since there is no real device to connect to, this method does nothing,
|
||||
* but changes the state.
|
||||
*/
|
||||
status_t connectDevice();
|
||||
|
||||
/* Disconnects from the camera device.
|
||||
* Since there is no real device to disconnect from, this method does
|
||||
* nothing, but changes the state.
|
||||
*/
|
||||
status_t disconnectDevice();
|
||||
|
||||
/* Starts the camera device. */
|
||||
status_t startDevice(int width, int height, uint32_t pix_fmt);
|
||||
|
||||
/* Stops the camera device. */
|
||||
status_t stopDevice();
|
||||
|
||||
/* Gets current preview fame into provided buffer. */
|
||||
status_t getPreviewFrame(void* buffer);
|
||||
|
||||
/***************************************************************************
|
||||
* Worker thread management overrides.
|
||||
* See declarations of these methods in EmulatedCameraDevice class for
|
||||
* information on each of these methods.
|
||||
**************************************************************************/
|
||||
|
||||
protected:
|
||||
/* Implementation of the worker thread routine.
|
||||
* This method simply sleeps for a period of time defined by the FPS property
|
||||
* of the fake camera (simulating frame frequency), and then calls emulated
|
||||
* camera's onNextFrameAvailable method.
|
||||
*/
|
||||
bool inWorkerThread();
|
||||
|
||||
/****************************************************************************
|
||||
* Fake camera device private API
|
||||
***************************************************************************/
|
||||
|
||||
private:
|
||||
|
||||
/* Draws a black and white checker board in the current frame buffer. */
|
||||
void drawCheckerboard();
|
||||
|
||||
/* Draws a square of the given color in the current frame buffer.
|
||||
* Param:
|
||||
* x, y - Coordinates of the top left corner of the square in the buffer.
|
||||
* size - Size of the square's side.
|
||||
* color - Square's color.
|
||||
*/
|
||||
void drawSquare(int x, int y, int size, const YUVPixel* color);
|
||||
|
||||
#if EFCD_ROTATE_FRAME
|
||||
void drawSolid(YUVPixel* color);
|
||||
void drawStripes();
|
||||
int rotateFrame();
|
||||
#endif // EFCD_ROTATE_FRAME
|
||||
|
||||
/****************************************************************************
|
||||
* Fake camera device data members
|
||||
***************************************************************************/
|
||||
|
||||
private:
|
||||
/*
|
||||
* Pixel colors in YUV format used when drawing the checker board.
|
||||
*/
|
||||
|
||||
YUVPixel mBlackYUV;
|
||||
YUVPixel mWhiteYUV;
|
||||
YUVPixel mRedYUV;
|
||||
YUVPixel mGreenYUV;
|
||||
YUVPixel mBlueYUV;
|
||||
|
||||
/* Last time the frame has been redrawn. */
|
||||
nsecs_t mLastRedrawn;
|
||||
|
||||
/*
|
||||
* Precalculated values related to U/V panes.
|
||||
*/
|
||||
|
||||
/* U pane inside the framebuffer. */
|
||||
uint8_t* mFrameU;
|
||||
|
||||
/* V pane inside the framebuffer. */
|
||||
uint8_t* mFrameV;
|
||||
|
||||
/* Defines byte distance between adjacent U, and V values. */
|
||||
int mUVStep;
|
||||
|
||||
/* Defines number of Us and Vs in a row inside the U/V panes.
|
||||
* Note that if U/V panes are interleaved, this value reflects the total
|
||||
* number of both, Us and Vs in a single row in the interleaved UV pane. */
|
||||
int mUVInRow;
|
||||
|
||||
/* Total number of each, U, and V elements in the framebuffer. */
|
||||
int mUVTotalNum;
|
||||
|
||||
/*
|
||||
* Checkerboard drawing related stuff
|
||||
*/
|
||||
|
||||
int mCheckX;
|
||||
int mCheckY;
|
||||
int mCcounter;
|
||||
|
||||
/* Emulated FPS (frames per second).
|
||||
* We will emulate 50 FPS. */
|
||||
static const int mEmulatedFPS = 50;
|
||||
|
||||
/* Defines time (in nanoseconds) between redrawing the checker board.
|
||||
* We will redraw the checker board every 15 milliseconds. */
|
||||
static const nsecs_t mRedrawAfter = 15000000LL;
|
||||
|
||||
#if EFCD_ROTATE_FRAME
|
||||
/* Frame rotation frequency in nanosec (currently - 3 sec) */
|
||||
static const nsecs_t mRotateFreq = 3000000000LL;
|
||||
|
||||
/* Last time the frame has rotated. */
|
||||
nsecs_t mLastRotatedAt;
|
||||
|
||||
/* Type of the frame to display in the current rotation:
|
||||
* 0 - Checkerboard.
|
||||
* 1 - White/Red/Green/Blue horisontal stripes
|
||||
* 2 - Solid color. */
|
||||
int mCurrentFrameType;
|
||||
|
||||
/* Color to use to paint the solid color frame. Colors will rotate between
|
||||
* white, red, gree, and blue each time rotation comes to the solid color
|
||||
* frame. */
|
||||
YUVPixel* mCurrentColor;
|
||||
#endif // EFCD_ROTATE_FRAME
|
||||
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_FAKE_CAMERA_DEVICE_H */
|
||||
119
android/camera/EmulatedQemuCamera.cpp
Executable file
119
android/camera/EmulatedQemuCamera.cpp
Executable file
|
|
@ -0,0 +1,119 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of a class EmulatedQemuCamera that encapsulates
|
||||
* functionality of an emulated camera connected to the host.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_QemuCamera"
|
||||
#include <cutils/log.h>
|
||||
#include "EmulatedQemuCamera.h"
|
||||
#include "EmulatedCameraFactory.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Constructs an emulated camera backed by a real camera on the host.
 * Param:
 *  cameraId - Zero-based ID of this camera, assigned by the camera factory.
 *  module - Camera HAL module descriptor this camera belongs to.
 * Note: the contained QEMU camera device is constructed here with a back
 * reference to this HAL object; actual setup happens later in Initialize(). */
EmulatedQemuCamera::EmulatedQemuCamera(int cameraId, struct hw_module_t* module)
    : EmulatedCamera(cameraId, module),
      mQemuCameraDevice(this)
{
}
|
||||
|
||||
/* Destructor: nothing to release explicitly; members clean up themselves. */
EmulatedQemuCamera::~EmulatedQemuCamera()
{
}
|
||||
|
||||
/****************************************************************************
|
||||
* EmulatedCamera virtual overrides.
|
||||
***************************************************************************/
|
||||
|
||||
/* Initializes the emulated camera from the host-reported properties.
 * Param:
 *  device_name - Host camera device name (as listed by the 'factory' service).
 *  frame_dims - Comma-separated list of supported 'WxH' frame dimensions.
 *  facing_dir - Camera facing direction string.
 * Return:
 *  NO_ERROR on success, EINVAL on a malformed dimension list, or the error
 *  reported by device / base-class initialization. */
status_t EmulatedQemuCamera::Initialize(const char* device_name,
                                        const char* frame_dims,
                                        const char* facing_dir)
{
    ALOGV("%s:\n Name=%s\n Facing '%s'\n Dimensions=%s",
          __FUNCTION__, device_name, facing_dir, frame_dims);
    /* Remember the supported-dimensions string. */
    mFrameDims = frame_dims;

    /* Bring up the contained camera device first. */
    status_t res = mQemuCameraDevice.Initialize(device_name);
    if (res != NO_ERROR) {
        return res;
    }

    /* Then the common emulated-camera machinery. */
    res = EmulatedCamera::Initialize();
    if (res != NO_ERROR) {
        return res;
    }

    /*
     * Set customizable parameters.
     */

    mParameters.set(EmulatedCamera::FACING_KEY, facing_dir);
    mParameters.set(EmulatedCamera::ORIENTATION_KEY,
                    gEmulatedCameraFactory.getQemuCameraOrientation());
    mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, frame_dims);
    mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, frame_dims);

    /*
     * Use the first dimension reported by the device for the initial preview
     * and picture sizes. Dimensions are separated with ','; copy the first
     * token into a local buffer, truncating if it doesn't fit.
     */

    char first_dim[128];
    const char* comma = strchr(frame_dims, ',');
    size_t token_len =
        (comma == NULL) ? strlen(frame_dims) : (size_t)(comma - frame_dims);
    if (token_len > sizeof(first_dim) - 1) {
        token_len = sizeof(first_dim) - 1;
    }
    memcpy(first_dim, frame_dims, token_len);
    first_dim[token_len] = '\0';

    /* Width and height are separated with 'x'. */
    char* sep = strchr(first_dim, 'x');
    if (sep == NULL) {
        ALOGE("%s: Invalid first dimension format in %s",
              __FUNCTION__, frame_dims);
        return EINVAL;
    }

    *sep = '\0';
    const int x = atoi(first_dim);
    const int y = atoi(sep + 1);
    mParameters.setPreviewSize(x, y);
    mParameters.setPictureSize(x, y);

    ALOGV("%s: Qemu camera %s is initialized. Current frame is %dx%d",
          __FUNCTION__, device_name, x, y);

    return NO_ERROR;
}
|
||||
|
||||
/* Returns the contained QEMU camera device object
 * (EmulatedCamera abstract-API implementation). Ownership stays with this
 * object. */
EmulatedCameraDevice* EmulatedQemuCamera::getCameraDevice()
{
    return &mQemuCameraDevice;
}
|
||||
|
||||
}; /* namespace android */
|
||||
73
android/camera/EmulatedQemuCamera.h
Executable file
73
android/camera/EmulatedQemuCamera.h
Executable file
|
|
@ -0,0 +1,73 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_H
|
||||
|
||||
/*
|
||||
* Contains declaration of a class EmulatedQemuCamera that encapsulates
|
||||
* functionality of an emulated camera connected to the host.
|
||||
*/
|
||||
|
||||
#include "EmulatedCamera.h"
|
||||
#include "EmulatedQemuCameraDevice.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Encapsulates functionality of an emulated camera connected to the host.
 */
class EmulatedQemuCamera : public EmulatedCamera {
public:
    /* Constructs EmulatedQemuCamera instance. */
    EmulatedQemuCamera(int cameraId, struct hw_module_t* module);

    /* Destructs EmulatedQemuCamera instance. */
    ~EmulatedQemuCamera();

    /***************************************************************************
     * EmulatedCamera virtual overrides.
     **************************************************************************/

public:
    /* Initializes EmulatedQemuCamera instance.
     * Param:
     *  device_name - Host camera device name (as listed by the 'factory'
     *      camera service).
     *  frame_dims - Comma-separated list of supported 'WxH' frame dimensions.
     *  facing_dir - Camera facing direction string.
     * Return:
     *  NO_ERROR on success, or an appropriate error status. */
    status_t Initialize(const char* device_name,
                        const char* frame_dims,
                        const char* facing_dir);

    /***************************************************************************
     * EmulatedCamera abstract API implementation.
     **************************************************************************/

protected:
    /* Gets the emulated camera device used by this instance of the emulated
     * camera.
     */
    EmulatedCameraDevice* getCameraDevice();

    /***************************************************************************
     * Data members.
     **************************************************************************/

protected:
    /* Contained qemu camera device object. */
    EmulatedQemuCameraDevice mQemuCameraDevice;

    /* Supported frame dimensions reported by the camera device. */
    String8 mFrameDims;
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_H */
|
||||
55
android/camera/EmulatedQemuCamera2.cpp
Normal file
55
android/camera/EmulatedQemuCamera2.cpp
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of a class EmulatedQemuCamera2 that encapsulates
|
||||
* functionality of a host webcam with further processing to simulate the
|
||||
* capabilities of a v2 camera device.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_QemuCamera2"
|
||||
#include <cutils/log.h>
|
||||
#include <cutils/properties.h>
|
||||
#include "EmulatedQemuCamera2.h"
|
||||
#include "EmulatedCameraFactory.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Constructs a version-2 emulated camera backed by a host webcam.
 * Param:
 *  cameraId - Zero-based ID of this camera, assigned by the camera factory.
 *  facingBack - true for a back-facing camera, false for front-facing.
 *  module - Camera HAL module descriptor this camera belongs to. */
EmulatedQemuCamera2::EmulatedQemuCamera2(int cameraId,
                                         bool facingBack,
                                         struct hw_module_t* module)
        : EmulatedCamera2(cameraId,module),
          mFacingBack(facingBack)
{
    ALOGD("Constructing emulated qemu camera 2 facing %s",
            facingBack ? "back" : "front");
}
|
||||
|
||||
/* Destructor: nothing to release explicitly. */
EmulatedQemuCamera2::~EmulatedQemuCamera2()
{
}
|
||||
|
||||
/****************************************************************************
|
||||
* Public API overrides
|
||||
***************************************************************************/
|
||||
|
||||
/* Initializes the v2 qemu camera.
 * Currently a stub: no host connection is established here and the method
 * unconditionally succeeds. */
status_t EmulatedQemuCamera2::Initialize()
{
    return NO_ERROR;
}
|
||||
|
||||
}; /* namespace android */
|
||||
66
android/camera/EmulatedQemuCamera2.h
Normal file
66
android/camera/EmulatedQemuCamera2.h
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA2_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA2_H
|
||||
|
||||
/*
|
||||
* Contains declaration of a class EmulatedQemuCamera2 that encapsulates
|
||||
* functionality of a host webcam with added processing to implement version 2
|
||||
* of the camera device interface.
|
||||
*/
|
||||
|
||||
#include "EmulatedCamera2.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Encapsulates functionality of an advanced fake camera based on real host
 * camera data, implementing version 2 of the camera device interface.
 */
class EmulatedQemuCamera2 : public EmulatedCamera2 {
public:
    /* Constructs EmulatedQemuCamera2 instance. */
    EmulatedQemuCamera2(int cameraId, bool facingBack, struct hw_module_t* module);

    /* Destructs EmulatedQemuCamera2 instance. */
    ~EmulatedQemuCamera2();

    /****************************************************************************
     * EmulatedCamera2 virtual overrides.
     ***************************************************************************/

public:
    /* Initializes EmulatedQemuCamera2 instance. */
    status_t Initialize();

    /****************************************************************************
     * EmulatedCamera abstract API implementation.
     ***************************************************************************/

protected:

    /****************************************************************************
     * Data members.
     ***************************************************************************/

protected:
    /* Facing back (true) or front (false) switch. */
    bool mFacingBack;

};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA2_H */
|
||||
265
android/camera/EmulatedQemuCameraDevice.cpp
Executable file
265
android/camera/EmulatedQemuCameraDevice.cpp
Executable file
|
|
@ -0,0 +1,265 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of a class EmulatedQemuCameraDevice that encapsulates
|
||||
* an emulated camera device connected to the host.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_QemuDevice"
|
||||
#include <cutils/log.h>
|
||||
#include "EmulatedQemuCamera.h"
|
||||
#include "EmulatedQemuCameraDevice.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Constructs the device wrapper for a host-connected camera.
 * Param:
 *  camera_hal - The EmulatedQemuCamera HAL object that owns this device.
 * The qemu client is constructed disconnected; the preview-frame cache is
 * allocated lazily in startDevice(). */
EmulatedQemuCameraDevice::EmulatedQemuCameraDevice(EmulatedQemuCamera* camera_hal)
    : EmulatedCameraDevice(camera_hal),
      mQemuClient(),
      mPreviewFrame(NULL)
{
}
|
||||
|
||||
/* Destructor: releases the cached preview frame, if one is still allocated.
 * delete[] on a null pointer is a well-defined no-op, so no guard is needed. */
EmulatedQemuCameraDevice::~EmulatedQemuCameraDevice()
{
    delete[] mPreviewFrame;
}
|
||||
|
||||
/****************************************************************************
|
||||
* Public API
|
||||
***************************************************************************/
|
||||
|
||||
/* Initializes the device by connecting its qemu client to the per-camera
 * emulator service.
 * Param:
 *  device_name - Host camera device name; embedded into the "name=<device>"
 *      connection string understood by the service.
 * Return:
 *  NO_ERROR on success, or the error from the client / base-class init.
 *  On base-class failure the just-established client connection is torn
 *  back down via queryDisconnect(). */
status_t EmulatedQemuCameraDevice::Initialize(const char* device_name)
{
    /* Connect to the service. */
    char connect_str[256];
    snprintf(connect_str, sizeof(connect_str), "name=%s", device_name);
    status_t res = mQemuClient.connectClient(connect_str);
    if (res != NO_ERROR) {
        return res;
    }

    /* Initialize base class. */
    res = EmulatedCameraDevice::Initialize();
    if (res == NO_ERROR) {
        ALOGV("%s: Connected to the emulated camera service '%s'",
              __FUNCTION__, device_name);
        mDeviceName = device_name;
    } else {
        /* Don't leave a dangling service connection behind. */
        mQemuClient.queryDisconnect();
    }

    return res;
}
|
||||
|
||||
/****************************************************************************
|
||||
* Emulated camera device abstract interface implementation.
|
||||
***************************************************************************/
|
||||
|
||||
/* Connects to the camera device via the emulator.
 * Requires the device to be initialized; connecting twice is a warning-level
 * no-op. On success the state machine advances to ECDS_CONNECTED.
 * Return:
 *  NO_ERROR on success (or if already connected), EINVAL if not initialized,
 *  or the error from the qemu client. */
status_t EmulatedQemuCameraDevice::connectDevice()
{
    ALOGV("%s", __FUNCTION__);

    /* State transitions are serialized on the object lock. */
    Mutex::Autolock locker(&mObjectLock);
    if (!isInitialized()) {
        ALOGE("%s: Qemu camera device is not initialized.", __FUNCTION__);
        return EINVAL;
    }
    if (isConnected()) {
        ALOGW("%s: Qemu camera device '%s' is already connected.",
              __FUNCTION__, (const char*)mDeviceName);
        return NO_ERROR;
    }

    /* Connect to the camera device via emulator. */
    const status_t res = mQemuClient.queryConnect();
    if (res == NO_ERROR) {
        ALOGV("%s: Connected to device '%s'",
              __FUNCTION__, (const char*)mDeviceName);
        mState = ECDS_CONNECTED;
    } else {
        ALOGE("%s: Connection to device '%s' failed",
              __FUNCTION__, (const char*)mDeviceName);
    }

    return res;
}
|
||||
|
||||
/* Disconnects from the camera device via the emulator.
 * Disconnecting an already-disconnected device is a warning-level no-op;
 * disconnecting while frames are being captured is an error. On success the
 * state machine falls back to ECDS_INITIALIZED.
 * Return:
 *  NO_ERROR on success (or if already disconnected), EINVAL if the device is
 *  still started, or the error from the qemu client. */
status_t EmulatedQemuCameraDevice::disconnectDevice()
{
    ALOGV("%s", __FUNCTION__);

    /* State transitions are serialized on the object lock. */
    Mutex::Autolock locker(&mObjectLock);
    if (!isConnected()) {
        ALOGW("%s: Qemu camera device '%s' is already disconnected.",
              __FUNCTION__, (const char*)mDeviceName);
        return NO_ERROR;
    }
    if (isStarted()) {
        /* Fixed log message: quote around '%s' was unbalanced. */
        ALOGE("%s: Cannot disconnect from the started device '%s'.",
              __FUNCTION__, (const char*)mDeviceName);
        return EINVAL;
    }

    /* Disconnect from the camera device via emulator. */
    const status_t res = mQemuClient.queryDisconnect();
    if (res == NO_ERROR) {
        /* Fixed log message: was misspelled "Disonnected". */
        ALOGV("%s: Disconnected from device '%s'",
              __FUNCTION__, (const char*)mDeviceName);
        mState = ECDS_INITIALIZED;
    } else {
        ALOGE("%s: Disconnection from device '%s' failed",
              __FUNCTION__, (const char*)mDeviceName);
    }

    return res;
}
|
||||
|
||||
/* Starts capturing frames from the camera device.
 * Param:
 *  width, height - Frame dimensions to capture.
 *  pix_fmt - Fourcc of the pixel format to capture in.
 * Return:
 *  NO_ERROR on success (or if already started), EINVAL if not connected,
 *  ENOMEM if the preview buffer can't be allocated, or the error from the
 *  qemu client. On success the state machine advances to ECDS_STARTED. */
status_t EmulatedQemuCameraDevice::startDevice(int width,
                                               int height,
                                               uint32_t pix_fmt)
{
    ALOGV("%s", __FUNCTION__);

    Mutex::Autolock locker(&mObjectLock);
    if (!isConnected()) {
        ALOGE("%s: Qemu camera device '%s' is not connected.",
              __FUNCTION__, (const char*)mDeviceName);
        return EINVAL;
    }
    if (isStarted()) {
        ALOGW("%s: Qemu camera device '%s' is already started.",
              __FUNCTION__, (const char*)mDeviceName);
        return NO_ERROR;
    }

    status_t res = EmulatedCameraDevice::commonStartDevice(width, height, pix_fmt);
    if (res != NO_ERROR) {
        ALOGE("%s: commonStartDevice failed", __FUNCTION__);
        return res;
    }

    /* Allocate preview frame buffer. */
    /* TODO: Watch out for preview format changes! At this point we implement
     * RGB32 only.*/
    mPreviewFrame = new uint32_t[mTotalPixels];
    if (mPreviewFrame == NULL) {
        /* Fixed: report the byte count (4 bytes per RGB32 pixel), not the
         * pixel count. */
        ALOGE("%s: Unable to allocate %d bytes for preview frame",
              __FUNCTION__, mTotalPixels * 4);
        return ENOMEM;
    }

    /* Start the actual camera device. */
    res = mQemuClient.queryStart(mPixelFormat, mFrameWidth, mFrameHeight);
    if (res == NO_ERROR) {
        ALOGV("%s: Qemu camera device '%s' is started for %.4s[%dx%d] frames",
              __FUNCTION__, (const char*)mDeviceName,
              reinterpret_cast<const char*>(&mPixelFormat),
              mFrameWidth, mFrameHeight);
        mState = ECDS_STARTED;
    } else {
        ALOGE("%s: Unable to start device '%s' for %.4s[%dx%d] frames",
              __FUNCTION__, (const char*)mDeviceName,
              reinterpret_cast<const char*>(&pix_fmt), width, height);
        /* Fixed leak: the preview buffer was not released when the device
         * failed to start, so a retry would allocate a fresh one on top. */
        delete[] mPreviewFrame;
        mPreviewFrame = NULL;
    }

    return res;
}
|
||||
|
||||
/* Stops capturing frames from the camera device.
 * Stopping a device that isn't started is a warning-level no-op. On success
 * the preview buffer is released and the state machine falls back to
 * ECDS_CONNECTED.
 * Return:
 *  NO_ERROR on success (or if not started), or the error from the qemu
 *  client. */
status_t EmulatedQemuCameraDevice::stopDevice()
{
    ALOGV("%s", __FUNCTION__);

    Mutex::Autolock locker(&mObjectLock);
    if (!isStarted()) {
        ALOGW("%s: Qemu camera device '%s' is not started.",
              __FUNCTION__, (const char*)mDeviceName);
        return NO_ERROR;
    }

    /* Stop the actual camera device. */
    status_t res = mQemuClient.queryStop();
    if (res == NO_ERROR) {
        /* Fixed inverted guard: this used to read 'mPreviewFrame == NULL',
         * which made the delete[] unreachable and leaked the preview buffer
         * on every stop/start cycle (startDevice allocates a new one). */
        if (mPreviewFrame != NULL) {
            delete[] mPreviewFrame;
            mPreviewFrame = NULL;
        }
        EmulatedCameraDevice::commonStopDevice();
        mState = ECDS_CONNECTED;
        ALOGV("%s: Qemu camera device '%s' is stopped",
              __FUNCTION__, (const char*)mDeviceName);
    } else {
        ALOGE("%s: Unable to stop device '%s'",
              __FUNCTION__, (const char*)mDeviceName);
    }

    return res;
}
|
||||
|
||||
/****************************************************************************
|
||||
* EmulatedCameraDevice virtual overrides
|
||||
***************************************************************************/
|
||||
|
||||
/* Copies the cached preview frame into the caller's buffer.
 * Param:
 *  buffer - Destination; must hold at least mTotalPixels * 4 bytes (RGB32).
 * Return:
 *  0 when the cached frame was copied; otherwise falls back to the base-class
 *  implementation (which converts from the raw capture frame).
 * NOTE(review): mPreviewFrame is read here without taking mObjectLock while
 * the worker thread writes it via queryFrame — confirm callers serialize
 * against stop/start. */
status_t EmulatedQemuCameraDevice::getCurrentPreviewFrame(void* buffer)
{
    ALOGW_IF(mPreviewFrame == NULL, "%s: No preview frame", __FUNCTION__);
    if (mPreviewFrame != NULL) {
        /* RGB32: 4 bytes per pixel. */
        memcpy(buffer, mPreviewFrame, mTotalPixels * 4);
        return 0;
    } else {
        return EmulatedCameraDevice::getCurrentPreviewFrame(buffer);
    }
}
|
||||
|
||||
/****************************************************************************
|
||||
* Worker thread management overrides.
|
||||
***************************************************************************/
|
||||
|
||||
/* Worker thread routine: paces frame delivery at mEmulatedFPS and pulls one
 * frame from the emulator service per iteration.
 * Return:
 *  true to keep the worker thread looping, false to terminate it (on an exit
 *  request or on a frame-query failure, which is reported to the HAL as a
 *  server-died error). */
bool EmulatedQemuCameraDevice::inWorkerThread()
{
    /* Wait till FPS timeout expires, or thread exit message is received.
     * NOTE(review): fd of -1 presumably means "wait on the control channel
     * only"; the second argument looks like a microsecond timeout
     * (1000000 / FPS) — confirm against WorkerThread::Select. */
    WorkerThread::SelectRes res =
        getWorkerThread()->Select(-1, 1000000 / mEmulatedFPS);
    if (res == WorkerThread::EXIT_THREAD) {
        ALOGV("%s: Worker thread has been terminated.", __FUNCTION__);
        return false;
    }

    /* Query frames from the service: fills both the raw frame buffer and the
     * RGB32 preview cache (mTotalPixels * 4 bytes) in one round trip, with
     * white balance and exposure compensation applied host-side. */
    status_t query_res = mQemuClient.queryFrame(mCurrentFrame, mPreviewFrame,
                                                mFrameBufferSize,
                                                mTotalPixels * 4,
                                                mWhiteBalanceScale[0],
                                                mWhiteBalanceScale[1],
                                                mWhiteBalanceScale[2],
                                                mExposureCompensation);
    if (query_res == NO_ERROR) {
        /* Timestamp the current frame, and notify the camera HAL. */
        mCurFrameTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
        mCameraHAL->onNextFrameAvailable(mCurrentFrame, mCurFrameTimestamp, this);
        return true;
    } else {
        ALOGE("%s: Unable to get current video frame: %s",
              __FUNCTION__, strerror(query_res));
        mCameraHAL->onCameraDeviceError(CAMERA_ERROR_SERVER_DIED);
        return false;
    }
}
|
||||
|
||||
}; /* namespace android */
|
||||
121
android/camera/EmulatedQemuCameraDevice.h
Executable file
121
android/camera/EmulatedQemuCameraDevice.h
Executable file
|
|
@ -0,0 +1,121 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_DEVICE_H
|
||||
#define HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_DEVICE_H
|
||||
|
||||
/*
|
||||
* Contains declaration of a class EmulatedQemuCameraDevice that encapsulates
|
||||
* an emulated camera device connected to the host.
|
||||
*/
|
||||
|
||||
#include "EmulatedCameraDevice.h"
|
||||
#include "QemuClient.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
class EmulatedQemuCamera;
|
||||
|
||||
/* Encapsulates an emulated camera device connected to the host.
 */
class EmulatedQemuCameraDevice : public EmulatedCameraDevice {
public:
    /* Constructs EmulatedQemuCameraDevice instance. */
    explicit EmulatedQemuCameraDevice(EmulatedQemuCamera* camera_hal);

    /* Destructs EmulatedQemuCameraDevice instance. */
    ~EmulatedQemuCameraDevice();

    /***************************************************************************
     * Public API
     **************************************************************************/

public:
    /* Initializes EmulatedQemuCameraDevice instance.
     * Param:
     *  device_name - Name of the camera device connected to the host. The name
     *      that is used here must have been reported by the 'factory' camera
     *      service when it listed camera devices connected to the host.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    status_t Initialize(const char* device_name);

    /***************************************************************************
     * Emulated camera device abstract interface implementation.
     * See declarations of these methods in EmulatedCameraDevice class for
     * information on each of these methods.
     **************************************************************************/

public:
    /* Connects to the camera device. */
    status_t connectDevice();

    /* Disconnects from the camera device. */
    status_t disconnectDevice();

    /* Starts capturing frames from the camera device. */
    status_t startDevice(int width, int height, uint32_t pix_fmt);

    /* Stops capturing frames from the camera device. */
    status_t stopDevice();

    /***************************************************************************
     * EmulatedCameraDevice virtual overrides
     * See declarations of these methods in EmulatedCameraDevice class for
     * information on each of these methods.
     **************************************************************************/

public:
    /* Gets current preview frame into provided buffer.
     * We override this method in order to provide preview frames cached in this
     * object.
     */
    status_t getCurrentPreviewFrame(void* buffer);

    /***************************************************************************
     * Worker thread management overrides.
     * See declarations of these methods in EmulatedCameraDevice class for
     * information on each of these methods.
     **************************************************************************/

protected:
    /* Implementation of the worker thread routine. */
    bool inWorkerThread();

    /***************************************************************************
     * Qemu camera device data members
     **************************************************************************/

private:
    /* Qemu client that is used to communicate with the 'emulated camera'
     * service, created for this instance in the emulator. */
    CameraQemuClient mQemuClient;

    /* Name of the camera device connected to the host. */
    String8 mDeviceName;

    /* Current preview framebuffer (RGB32); allocated in startDevice(),
     * released in stopDevice() / the destructor. */
    uint32_t* mPreviewFrame;

    /* Emulated FPS (frames per second).
     * We will emulate 50 FPS. */
    static const int mEmulatedFPS = 50;
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_EMULATED_QEMU_CAMERA_DEVICE_H */
|
||||
95
android/camera/JpegCompressor.cpp
Normal file
95
android/camera/JpegCompressor.cpp
Normal file
|
|
@ -0,0 +1,95 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of a class NV21JpegCompressor that encapsulates a
|
||||
* converter between NV21, and JPEG formats.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_JPEG"
|
||||
#include <cutils/log.h>
|
||||
#include <assert.h>
|
||||
#include <dlfcn.h>
|
||||
#include "JpegCompressor.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Handle of the dynamically loaded JPEG stub library, shared by all
 * compressor instances; loaded lazily by the first constructor call. */
void* NV21JpegCompressor::mDl = NULL;
|
||||
|
||||
/* Resolves a symbol from the loaded JPEG stub library.
 * Param:
 *  dl - Handle returned by dlopen().
 *  signature - Symbol name to resolve.
 * Return:
 *  Symbol address.
 * NOTE(review): assert() compiles out under NDEBUG, so a missing symbol would
 * silently return NULL in release builds — confirm whether this HAL is built
 * with assertions enabled. */
static void* getSymbol(void* dl, const char* signature) {
    void* res = dlsym(dl, signature);
    assert (res != NULL);

    return res;
}
|
||||
|
||||
typedef void (*InitFunc)(JpegStub* stub, int* strides);
|
||||
typedef void (*CleanupFunc)(JpegStub* stub);
|
||||
typedef int (*CompressFunc)(JpegStub* stub, const void* image,
|
||||
int width, int height, int quality);
|
||||
typedef void (*GetCompressedImageFunc)(JpegStub* stub, void* buff);
|
||||
typedef size_t (*GetCompressedSizeFunc)(JpegStub* stub);
|
||||
|
||||
/* Constructs the compressor: lazily dlopen()s the JPEG stub library (shared
 * across all instances via the static mDl) and initializes a per-instance
 * stub via JpegStub_init.
 * NOTE(review): the library path is hard-coded to /system/lib/hw — confirm
 * this HAL is 32-bit only. The lazy mDl initialization is not synchronized;
 * confirm compressors are only constructed from one thread. assert() is a
 * no-op under NDEBUG, so a failed dlopen would crash later in getSymbol. */
NV21JpegCompressor::NV21JpegCompressor()
{
    const char dlName[] = "/system/lib/hw/camera.goldfish.jpeg.so";
    if (mDl == NULL) {
        mDl = dlopen(dlName, RTLD_NOW);
    }
    assert(mDl != NULL);

    InitFunc f = (InitFunc)getSymbol(mDl, "JpegStub_init");
    (*f)(&mStub, mStrides);
}
|
||||
|
||||
/* Destructor: releases the per-instance stub state via JpegStub_cleanup.
 * The shared library handle (mDl) is intentionally left loaded for other
 * instances. */
NV21JpegCompressor::~NV21JpegCompressor()
{
    CleanupFunc f = (CleanupFunc)getSymbol(mDl, "JpegStub_cleanup");
    (*f)(&mStub);
}
|
||||
|
||||
/****************************************************************************
|
||||
* Public API
|
||||
***************************************************************************/
|
||||
|
||||
/* Compresses a raw NV21 image into a JPEG via the loaded stub library.
 * The result stays inside the stub; retrieve it with getCompressedSize() /
 * getCompressedImage().
 * Param:
 *  image - Raw NV21 image data.
 *  width, height - Image dimensions.
 *  quality - JPEG quality.
 * Return:
 *  Status code from JpegStub_compress, cast to status_t. */
status_t NV21JpegCompressor::compressRawImage(const void* image,
                                              int width,
                                              int height,
                                              int quality)
{
    /* NV21: luma plane and interleaved chroma plane share the same stride. */
    mStrides[0] = width;
    mStrides[1] = width;
    CompressFunc f = (CompressFunc)getSymbol(mDl, "JpegStub_compress");
    return (status_t)(*f)(&mStub, image, width, height, quality);
}
|
||||
|
||||
|
||||
/* Returns the byte size of the JPEG produced by the last compressRawImage()
 * call, as reported by the stub library. */
size_t NV21JpegCompressor::getCompressedSize()
{
    GetCompressedSizeFunc f = (GetCompressedSizeFunc)getSymbol(mDl,
                                                  "JpegStub_getCompressedSize");
    return (*f)(&mStub);
}
|
||||
|
||||
/* Copies the JPEG produced by the last compressRawImage() call into the
 * caller's buffer.
 * Param:
 *  buff - Destination; must hold at least getCompressedSize() bytes. */
void NV21JpegCompressor::getCompressedImage(void* buff)
{
    GetCompressedImageFunc f = (GetCompressedImageFunc)getSymbol(mDl,
                                                 "JpegStub_getCompressedImage");
    (*f)(&mStub, buff);
}
|
||||
|
||||
}; /* namespace android */
|
||||
95
android/camera/JpegCompressor.h
Normal file
95
android/camera/JpegCompressor.h
Normal file
|
|
@ -0,0 +1,95 @@
|
|||
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef HW_EMULATOR_CAMERA_JPEG_COMPRESSOR_H
#define HW_EMULATOR_CAMERA_JPEG_COMPRESSOR_H

/*
 * Contains declaration of a class NV21JpegCompressor that encapsulates a
 * converter between NV21, and JPEG formats.
 */

#include "JpegStub.h"
#include <utils/threads.h>

namespace android {

/* Encapsulates a converter between NV21, and JPEG formats.
 * The actual encoding is delegated to the dynamically loaded
 * camera.goldfish.jpeg library via the JpegStub entry points.
 */
class NV21JpegCompressor
{
public:
    /* Constructs JpegCompressor instance. */
    NV21JpegCompressor();
    /* Destructs JpegCompressor instance. */
    ~NV21JpegCompressor();

    /****************************************************************************
     * Public API
     ***************************************************************************/

public:
    /* Compresses raw NV21 image into a JPEG.
     * The compressed image will be saved in the stub's memory stream. Use
     * getCompressedSize method to obtain buffer size of the compressed image,
     * and getCompressedImage to copy out the compressed image.
     * Param:
     *  image - Raw NV21 image.
     *  width, height - Image dimensions.
     *  quality - JPEG quality.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     *
     */
    status_t compressRawImage(const void* image,
                              int width,
                              int height,
                              int quality);

    /* Get size of the compressed JPEG buffer.
     * This method must be called only after a successful completion of
     * compressRawImage call.
     * Return:
     *  Size of the compressed JPEG buffer.
     */
    size_t getCompressedSize();

    /* Copies out compressed JPEG buffer.
     * This method must be called only after a successful completion of
     * compressRawImage call.
     * Param:
     *  buff - Buffer where to copy the JPEG. Must be large enough to contain the
     *      entire image.
     */
    void getCompressedImage(void* buff);

    /****************************************************************************
     * Class data
     ***************************************************************************/

protected:
    /* Strides for Y (the first element), and UV (the second one) panes. */
    int mStrides[2];

private:
    // Handle of the dynamically loaded JPEG library; shared by all
    // instances and opened on first construction.
    static void* mDl;
    // Per-instance state handed to the library's JpegStub_* entry points.
    JpegStub mStub;
};

}; /* namespace android */

#endif /* HW_EMULATOR_CAMERA_JPEG_COMPRESSOR_H */
|
||||
69
android/camera/JpegStub.cpp
Normal file
69
android/camera/JpegStub.cpp
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
/*
|
||||
* Copyright (C) 2013 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_JPEGStub"
|
||||
#include <errno.h>
|
||||
#include <cutils/log.h>
|
||||
#include <YuvToJpegEncoder.h>
|
||||
|
||||
#include "JpegStub.h"
|
||||
|
||||
|
||||
/* Initializes the stub: allocates the Skia NV21 encoder and the in-memory
 * stream that receives the compressed output. JpegStub_cleanup releases
 * both. */
extern "C" void JpegStub_init(JpegStub* stub, int* strides) {
    Yuv420SpToJpegEncoder* encoder = new Yuv420SpToJpegEncoder(strides);
    SkDynamicMemoryWStream* stream = new SkDynamicMemoryWStream();
    stub->mInternalEncoder = encoder;
    stub->mInternalStream = stream;
}
/* Releases the encoder and stream created by JpegStub_init.
 * The pointers are reset to NULL so that an accidental second cleanup is a
 * harmless no-op (delete NULL) instead of a double delete. */
extern "C" void JpegStub_cleanup(JpegStub* stub) {
    delete (Yuv420SpToJpegEncoder*)stub->mInternalEncoder;
    stub->mInternalEncoder = NULL;
    delete (SkDynamicMemoryWStream*)stub->mInternalStream;
    stub->mInternalStream = NULL;
}
/* Compresses one NV21 frame into the stub's memory stream.
 * Param:
 *  image - NV21 frame: full-size Y plane followed by interleaved VU.
 *  width, height - Frame dimensions.
 *  quality - JPEG quality.
 * Return:
 *  0 on success, or an errno-style code on encoder failure.
 */
extern "C" int JpegStub_compress(JpegStub* stub, const void* image,
        int width, int height, int quality)
{
    Yuv420SpToJpegEncoder* encoder =
        (Yuv420SpToJpegEncoder*)stub->mInternalEncoder;
    SkDynamicMemoryWStream* stream =
        (SkDynamicMemoryWStream*)stub->mInternalStream;

    /* Plane offsets within the NV21 buffer: Y at 0, VU right after it. */
    int offsets[2] = { 0, width * height };
    void* yPlane = const_cast<void*>(image);

    if (!encoder->encode(stream, yPlane, width, height, offsets, quality)) {
        ALOGE("%s: JPEG compression failed", __FUNCTION__);
        return errno ? errno: EINVAL;
    }
    ALOGV("%s: Compressed JPEG: %d[%dx%d] -> %zu bytes",
          __FUNCTION__, (width * height * 12) / 8,
          width, height, stream->getOffset());
    return 0;
}
/* Copies the compressed JPEG out of the stub's memory stream. The caller
 * guarantees 'buff' holds at least JpegStub_getCompressedSize() bytes. */
extern "C" void JpegStub_getCompressedImage(JpegStub* stub, void* buff) {
    ((SkDynamicMemoryWStream*)stub->mInternalStream)->copyTo(buff);
}
/* Returns the number of bytes currently held in the stub's memory stream,
 * i.e. the size of the last compressed JPEG. */
extern "C" size_t JpegStub_getCompressedSize(JpegStub* stub) {
    return ((SkDynamicMemoryWStream*)stub->mInternalStream)->getOffset();
}
|
||||
35
android/camera/JpegStub.h
Normal file
35
android/camera/JpegStub.h
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef JPEGSTUB_H_
#define JPEGSTUB_H_

#include <stddef.h>  /* size_t: was used below without being declared. */

#ifdef __cplusplus
extern "C" {
#endif

/* Opaque holder for the NV21->JPEG encoder and its in-memory output stream.
 * Both pointers are owned by the stub: allocated in JpegStub_init and
 * released in JpegStub_cleanup. */
struct JpegStub {
    void* mInternalEncoder;  /* Yuv420SpToJpegEncoder* */
    void* mInternalStream;   /* SkDynamicMemoryWStream* */
};

/* Allocates the encoder/stream pair for 'stub' with the given strides. */
void JpegStub_init(struct JpegStub* stub, int* strides);
/* Frees the encoder/stream pair created by JpegStub_init. */
void JpegStub_cleanup(struct JpegStub* stub);
/* Compresses an NV21 frame; returns 0 on success, errno-style code on
 * failure. */
int JpegStub_compress(struct JpegStub* stub, const void* image,
        int width, int height, int quality);
/* Copies the last compressed JPEG into 'buff'. */
void JpegStub_getCompressedImage(struct JpegStub* stub, void* buff);
/* Returns the size, in bytes, of the last compressed JPEG. */
size_t JpegStub_getCompressedSize(struct JpegStub* stub);

#ifdef __cplusplus
};
#endif

#endif // JPEGSTUB_H_
|
||||
216
android/camera/PreviewWindow.cpp
Executable file
216
android/camera/PreviewWindow.cpp
Executable file
|
|
@ -0,0 +1,216 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of a class PreviewWindow that encapsulates
|
||||
* functionality of a preview window set via set_preview_window camera HAL API.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_Preview"
|
||||
#include <cutils/log.h>
|
||||
#include <ui/Rect.h>
|
||||
#include <ui/GraphicBufferMapper.h>
|
||||
#include "EmulatedCameraDevice.h"
|
||||
#include "PreviewWindow.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/* Constructs an idle preview window wrapper with no window attached.
 * Note: mPreviewAfter was previously left uninitialized here even though
 * isPreviewTime() reads it; it is now zero-initialized so the object is
 * fully defined before setPreviewWindow() runs. */
PreviewWindow::PreviewWindow()
    : mPreviewWindow(NULL),
      mLastPreviewed(0),
      mPreviewAfter(0),
      mPreviewFrameWidth(0),
      mPreviewFrameHeight(0),
      mPreviewEnabled(false)
{
}
/* Nothing to release: this class allocates no heap resources and only
 * borrows the preview_stream_ops pointer handed to setPreviewWindow(). */
PreviewWindow::~PreviewWindow()
{
}
||||
/****************************************************************************
|
||||
* Camera API
|
||||
***************************************************************************/
|
||||
|
||||
/* Handler for camera_device_ops_t::set_preview_window.
 * Param:
 *  window - New preview window, or NULL to reset.
 *  preview_fps - Desired preview frame rate; ignored when window is NULL.
 * Return:
 *  NO_ERROR on success, or the (positive) errno from set_usage on failure.
 */
status_t PreviewWindow::setPreviewWindow(struct preview_stream_ops* window,
                                         int preview_fps)
{
    ALOGV("%s: current: %p -> new: %p", __FUNCTION__, mPreviewWindow, window);

    status_t res = NO_ERROR;
    Mutex::Autolock lock(&mObjectLock);

    /* Forget everything about the previous window. */
    mPreviewFrameWidth = 0;
    mPreviewFrameHeight = 0;
    mPreviewAfter = 0;
    mLastPreviewed = 0;

    if (window != NULL) {
        /* Frames are written by the CPU; buffer geometry is set lazily when
         * the first frame arrives. */
        res = window->set_usage(window, GRALLOC_USAGE_SW_WRITE_OFTEN);
        if (res != NO_ERROR) {
            res = -res;  /* set_usage reports a negative errno. */
            ALOGE("%s: Error setting preview window usage %d -> %s",
                  __FUNCTION__, res, strerror(res));
            window = NULL;
        } else {
            /* Throttle interval (microseconds) between pushed frames. */
            mPreviewAfter = 1000000 / preview_fps;
        }
    }
    mPreviewWindow = window;

    return res;
}
/* Enables frame delivery to the preview window (handler for
 * camera_device_ops_t::start_preview). Always succeeds. */
status_t PreviewWindow::startPreview()
{
    ALOGV("%s", __FUNCTION__);

    Mutex::Autolock lock(&mObjectLock);
    mPreviewEnabled = true;
    return NO_ERROR;
}
void PreviewWindow::stopPreview()
|
||||
{
|
||||
ALOGV("%s", __FUNCTION__);
|
||||
|
||||
Mutex::Autolock locker(&mObjectLock);
|
||||
mPreviewEnabled = false;
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Public API
|
||||
***************************************************************************/
|
||||
|
||||
/* Pushes the camera device's current frame to the preview window.
 * Called from the device's frame-capture worker thread. The 'frame' argument
 * is unused here: the frame data is fetched through
 * camera_dev->getCurrentPreviewFrame() instead.
 * Param:
 *  frame - Captured frame (unused in this implementation).
 *  timestamp - Frame's timestamp, forwarded to the window.
 *  camera_dev - Camera device instance that delivered the frame.
 */
void PreviewWindow::onNextFrameAvailable(const void* frame,
                                         nsecs_t timestamp,
                                         EmulatedCameraDevice* camera_dev)
{
    int res;
    Mutex::Autolock locker(&mObjectLock);

    /* Drop the frame when preview is off, no window is attached, or the
     * frame-rate throttle says it's too early for another frame. */
    if (!isPreviewEnabled() || mPreviewWindow == NULL || !isPreviewTime()) {
        return;
    }

    /* Make sure that preview window dimensions are OK with the camera device */
    if (adjustPreviewDimensions(camera_dev)) {
        /* Need to set / adjust buffer geometry for the preview window.
         * Note that in the emulator preview window uses only RGB for pixel
         * formats. */
        ALOGV("%s: Adjusting preview windows %p geometry to %dx%d",
              __FUNCTION__, mPreviewWindow, mPreviewFrameWidth,
              mPreviewFrameHeight);
        res = mPreviewWindow->set_buffers_geometry(mPreviewWindow,
                                                   mPreviewFrameWidth,
                                                   mPreviewFrameHeight,
                                                   HAL_PIXEL_FORMAT_RGBA_8888);
        if (res != NO_ERROR) {
            ALOGE("%s: Error in set_buffers_geometry %d -> %s",
                  __FUNCTION__, -res, strerror(-res));
            return;
        }
    }

    /*
     * Push new frame to the preview window.
     */

    /* Dequeue preview window buffer for the frame. */
    buffer_handle_t* buffer = NULL;
    int stride = 0;
    res = mPreviewWindow->dequeue_buffer(mPreviewWindow, &buffer, &stride);
    if (res != NO_ERROR || buffer == NULL) {
        ALOGE("%s: Unable to dequeue preview window buffer: %d -> %s",
              __FUNCTION__, -res, strerror(-res));
        return;
    }

    /* Let the preview window to lock the buffer. */
    res = mPreviewWindow->lock_buffer(mPreviewWindow, buffer);
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to lock preview window buffer: %d -> %s",
              __FUNCTION__, -res, strerror(-res));
        /* Return the dequeued buffer so the window doesn't leak it. */
        mPreviewWindow->cancel_buffer(mPreviewWindow, buffer);
        return;
    }

    /* Now let the graphics framework to lock the buffer, and provide
     * us with the framebuffer data address. */
    void* img = NULL;
    const Rect rect(mPreviewFrameWidth, mPreviewFrameHeight);
    GraphicBufferMapper& grbuffer_mapper(GraphicBufferMapper::get());
    res = grbuffer_mapper.lock(*buffer, GRALLOC_USAGE_SW_WRITE_OFTEN, rect, &img);
    if (res != NO_ERROR) {
        ALOGE("%s: grbuffer_mapper.lock failure: %d -> %s",
              __FUNCTION__, res, strerror(res));
        mPreviewWindow->cancel_buffer(mPreviewWindow, buffer);
        return;
    }

    /* Frames come in in YV12/NV12/NV21 format. Since preview window doesn't
     * supports those formats, we need to obtain the frame in RGB565. */
    res = camera_dev->getCurrentPreviewFrame(img);
    if (res == NO_ERROR) {
        /* Show it. */
        mPreviewWindow->set_timestamp(mPreviewWindow, timestamp);
        mPreviewWindow->enqueue_buffer(mPreviewWindow, buffer);
    } else {
        ALOGE("%s: Unable to obtain preview frame: %d", __FUNCTION__, res);
        mPreviewWindow->cancel_buffer(mPreviewWindow, buffer);
    }
    /* The CPU mapping is released whether the frame was queued or canceled. */
    grbuffer_mapper.unlock(*buffer);
}
|
||||
/***************************************************************************
|
||||
* Private API
|
||||
**************************************************************************/
|
||||
|
||||
/* Synchronizes the cached preview frame dimensions with the camera device.
 * Must be called with mObjectLock held.
 * Return:
 *  true if the cache was updated (geometry changed), false if it already
 *  matched the device.
 */
bool PreviewWindow::adjustPreviewDimensions(EmulatedCameraDevice* camera_dev)
{
    const int devWidth = camera_dev->getFrameWidth();
    const int devHeight = camera_dev->getFrameHeight();

    /* Nothing to do when the cache already matches the device. */
    if (mPreviewFrameWidth == devWidth && mPreviewFrameHeight == devHeight) {
        return false;
    }

    mPreviewFrameWidth = devWidth;
    mPreviewFrameHeight = devHeight;
    return true;
}
bool PreviewWindow::isPreviewTime()
|
||||
{
|
||||
timeval cur_time;
|
||||
gettimeofday(&cur_time, NULL);
|
||||
const uint64_t cur_mks = cur_time.tv_sec * 1000000LL + cur_time.tv_usec;
|
||||
if ((cur_mks - mLastPreviewed) >= mPreviewAfter) {
|
||||
mLastPreviewed = cur_mks;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
}; /* namespace android */
|
||||
165
android/camera/PreviewWindow.h
Executable file
165
android/camera/PreviewWindow.h
Executable file
|
|
@ -0,0 +1,165 @@
|
|||
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef HW_EMULATOR_CAMERA_PREVIEW_WINDOW_H
#define HW_EMULATOR_CAMERA_PREVIEW_WINDOW_H

/*
 * Contains declaration of a class PreviewWindow that encapsulates functionality
 * of a preview window set via set_preview_window camera HAL API.
 */

namespace android {

class EmulatedCameraDevice;

/* Encapsulates functionality of a preview window set via set_preview_window
 * camera HAL API.
 *
 * Objects of this class are contained in EmulatedCamera objects, and handle
 * relevant camera API callbacks.
 */
class PreviewWindow {
public:
    /* Constructs PreviewWindow instance. */
    PreviewWindow();

    /* Destructs PreviewWindow instance. */
    ~PreviewWindow();

    /***************************************************************************
     * Camera API
     **************************************************************************/

public:
    /* Actual handler for camera_device_ops_t::set_preview_window callback.
     * This method is called by the containing emulated camera object when it is
     * handling the camera_device_ops_t::set_preview_window callback.
     * Param:
     *  window - Preview window to set. This parameter might be NULL, which
     *      indicates preview window reset.
     *  preview_fps - Preview's frame frequency. This parameter determines when
     *      a frame received via onNextFrameAvailable call will be pushed to
     *      the preview window. If 'window' parameter passed to this method is
     *      NULL, this parameter is ignored.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    status_t setPreviewWindow(struct preview_stream_ops* window,
                              int preview_fps);

    /* Starts the preview.
     * This method is called by the containing emulated camera object when it is
     * handling the camera_device_ops_t::start_preview callback.
     */
    status_t startPreview();

    /* Stops the preview.
     * This method is called by the containing emulated camera object when it is
     * handling the camera_device_ops_t::stop_preview callback.
     */
    void stopPreview();

    /* Checks if preview is enabled. */
    inline bool isPreviewEnabled()
    {
        return mPreviewEnabled;
    }

    /****************************************************************************
     * Public API
     ***************************************************************************/

public:
    /* Next frame is available in the camera device.
     * This is a notification callback that is invoked by the camera device when
     * a new frame is available.
     * Note that most likely this method is called in context of a worker thread
     * that camera device has created for frame capturing.
     * Param:
     *  frame - Captured frame, or NULL if camera device didn't pull the frame
     *      yet. If NULL is passed in this parameter use GetCurrentFrame method
     *      of the camera device class to obtain the next frame. Also note that
     *      the size of the frame that is passed here (as well as the frame
     *      returned from the GetCurrentFrame method) is defined by the current
     *      frame settings (width + height + pixel format) for the camera device.
     *  timestamp - Frame's timestamp.
     *  camera_dev - Camera device instance that delivered the frame.
     */
    void onNextFrameAvailable(const void* frame,
                              nsecs_t timestamp,
                              EmulatedCameraDevice* camera_dev);

    /***************************************************************************
     * Private API
     **************************************************************************/

protected:
    /* Adjusts cached dimensions of the preview window frame according to the
     * frame dimensions used by the camera device.
     *
     * When preview is started, it's not known (hard to define) what are going
     * to be the dimensions of the frames that are going to be displayed. Plus,
     * it might be possible, that such dimensions can be changed on the fly. So,
     * in order to be always in sync with frame dimensions, this method is
     * called for each frame passed to onNextFrameAvailable method, in order to
     * properly adjust frame dimensions, used by the preview window.
     * Note that this method must be called while object is locked.
     * Param:
     *  camera_dev - Camera device, providing frames displayed in the preview
     *      window.
     * Return:
     *  true if cached dimensions have been adjusted, or false if cached
     *  dimensions match device's frame dimensions.
     */
    bool adjustPreviewDimensions(EmulatedCameraDevice* camera_dev);

    /* Checks if it's the time to push new frame to the preview window.
     * Note that this method must be called while object is locked. */
    bool isPreviewTime();

    /***************************************************************************
     * Data members
     **************************************************************************/

protected:
    /* Locks this instance for data changes. */
    Mutex mObjectLock;

    /* Preview window instance. */
    preview_stream_ops* mPreviewWindow;

    /* Timestamp (abs. microseconds) when last frame has been pushed to the
     * preview window. */
    uint64_t mLastPreviewed;

    /* Preview frequency in microseconds. */
    uint32_t mPreviewAfter;

    /*
     * Cached preview window frame dimensions.
     */

    int mPreviewFrameWidth;
    int mPreviewFrameHeight;

    /* Preview status. */
    bool mPreviewEnabled;
};

}; /* namespace android */

#endif /* HW_EMULATOR_CAMERA_PREVIEW_WINDOW_H */
|
||||
559
android/camera/QemuClient.cpp
Executable file
559
android/camera/QemuClient.cpp
Executable file
|
|
@ -0,0 +1,559 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Contains implementation of classes that encapsulate connection to camera
|
||||
* services in the emulator via qemu pipe.
|
||||
*/
|
||||
|
||||
#define LOG_NDEBUG 1
|
||||
#define LOG_TAG "EmulatedCamera_QemuClient"
|
||||
#include <cutils/log.h>
|
||||
#include "EmulatedCamera.h"
|
||||
#include "QemuClient.h"
|
||||
|
||||
#define LOG_QUERIES 0
|
||||
#if LOG_QUERIES
|
||||
#define LOGQ(...) ALOGD(__VA_ARGS__)
|
||||
#else
|
||||
#define LOGQ(...) (void(0))
|
||||
|
||||
#endif // LOG_QUERIES
|
||||
namespace android {
|
||||
|
||||
/****************************************************************************
|
||||
* Qemu query
|
||||
***************************************************************************/
|
||||
|
||||
/* Constructs an empty query; createQuery() must be called before use. */
QemuQuery::QemuQuery()
    : mQuery(mQueryPrealloc),
      mQueryDeliveryStatus(NO_ERROR),
      mReplyBuffer(NULL),
      mReplyData(NULL),
      mReplySize(0),
      mReplyDataSize(0),
      mReplyStatus(0)
{
    *mQuery = '\0';
}

/* Constructs a query from a complete query string (no parameter appended).
 * Check mQueryDeliveryStatus for construction errors. */
QemuQuery::QemuQuery(const char* query_string)
    : mQuery(mQueryPrealloc),
      mQueryDeliveryStatus(NO_ERROR),
      mReplyBuffer(NULL),
      mReplyData(NULL),
      mReplySize(0),
      mReplyDataSize(0),
      mReplyStatus(0)
{
    mQueryDeliveryStatus = QemuQuery::createQuery(query_string, NULL);
}

/* Constructs a query from a name plus a parameter string ("name param").
 * Check mQueryDeliveryStatus for construction errors. */
QemuQuery::QemuQuery(const char* query_name, const char* query_param)
    : mQuery(mQueryPrealloc),
      mQueryDeliveryStatus(NO_ERROR),
      mReplyBuffer(NULL),
      mReplyData(NULL),
      mReplySize(0),
      mReplyDataSize(0),
      mReplyStatus(0)
{
    mQueryDeliveryStatus = QemuQuery::createQuery(query_name, query_param);
}

/* Frees any heap-allocated query/reply buffers via resetQuery(). */
QemuQuery::~QemuQuery()
{
    QemuQuery::resetQuery();
}
/* Builds the query string "name[ param]" into mQuery, reusing the
 * preallocated buffer when it is large enough.
 * Param:
 *  name - Query name; must be a non-empty string.
 *  param - Optional parameter string, or NULL.
 * Return:
 *  NO_ERROR on success; EINVAL/ENOMEM on failure (also recorded in
 *  mQueryDeliveryStatus).
 * Fixes over the original: reuses the already-computed name_len instead of
 * calling strlen(name) a second time, and formats with a bounded snprintf
 * instead of sprintf. */
status_t QemuQuery::createQuery(const char* name, const char* param)
{
    /* Reset from the previous use. */
    resetQuery();

    /* Query name cannot be NULL or an empty string. */
    if (name == NULL || *name == '\0') {
        ALOGE("%s: NULL or an empty string is passed as query name.",
              __FUNCTION__);
        mQueryDeliveryStatus = EINVAL;
        return EINVAL;
    }

    const size_t name_len = strlen(name);
    const size_t param_len = (param != NULL) ? strlen(param) : 0;
    /* "<name> <param>\0" when there is a parameter, "<name>\0" otherwise. */
    const size_t required = name_len + (param_len ? (param_len + 2) : 1);

    if (required > sizeof(mQueryPrealloc)) {
        /* Preallocated buffer was too small. Allocate a bigger query buffer. */
        mQuery = new char[required];
        if (mQuery == NULL) {
            ALOGE("%s: Unable to allocate %zu bytes for query buffer",
                  __FUNCTION__, required);
            mQueryDeliveryStatus = ENOMEM;
            return ENOMEM;
        }
    }

    /* At this point mQuery is guaranteed to hold 'required' bytes. */
    if (param_len) {
        snprintf(mQuery, required, "%s %s", name, param);
    } else {
        memcpy(mQuery, name, name_len + 1);
    }

    return NO_ERROR;
}
/* Finalizes the query by validating the reply received from the service.
 * Param:
 *  status - Delivery status reported by the transport; on delivery failure
 *      the reply buffer is not inspected.
 * Return:
 *  NO_ERROR when the reply is well-formed; EINVAL (also stored in
 *  mQueryDeliveryStatus) for a malformed reply. On success mReplyStatus is
 *  1 for 'ok' and 0 for 'ko', and mReplyData/mReplyDataSize describe any
 *  payload that followed the status prefix. */
status_t QemuQuery::completeQuery(status_t status)
{
    /* Save query completion status. */
    mQueryDeliveryStatus = status;
    if (mQueryDeliveryStatus != NO_ERROR) {
        return mQueryDeliveryStatus;
    }

    /* Make sure reply buffer contains at least 'ok', or 'ko'.
     * Note that 'ok', or 'ko' prefixes are always 3 characters long: in case
     * there are more data in the reply, that data will be separated from 'ok'/'ko'
     * with a ':'. If there is no more data in the reply, the prefix will be
     * zero-terminated, and the terminator will be included in the reply. */
    if (mReplyBuffer == NULL || mReplySize < 3) {
        ALOGE("%s: Invalid reply to the query", __FUNCTION__);
        mQueryDeliveryStatus = EINVAL;
        return EINVAL;
    }

    /* Lets see the reply status. */
    if (!memcmp(mReplyBuffer, "ok", 2)) {
        mReplyStatus = 1;
    } else if (!memcmp(mReplyBuffer, "ko", 2)) {
        mReplyStatus = 0;
    } else {
        ALOGE("%s: Invalid query reply: '%s'", __FUNCTION__, mReplyBuffer);
        mQueryDeliveryStatus = EINVAL;
        return EINVAL;
    }

    /* Lets see if there are reply data that follow. */
    if (mReplySize > 3) {
        /* There are extra data. Make sure they are separated from the status
         * with a ':' */
        if (mReplyBuffer[2] != ':') {
            ALOGE("%s: Invalid query reply: '%s'", __FUNCTION__, mReplyBuffer);
            mQueryDeliveryStatus = EINVAL;
            return EINVAL;
        }
        /* Reply data point into mReplyBuffer; no separate allocation. */
        mReplyData = mReplyBuffer + 3;
        mReplyDataSize = mReplySize - 3;
    } else {
        /* Make sure reply buffer containing just 'ok'/'ko' ends with
         * zero-terminator. */
        if (mReplyBuffer[2] != '\0') {
            ALOGE("%s: Invalid query reply: '%s'", __FUNCTION__, mReplyBuffer);
            mQueryDeliveryStatus = EINVAL;
            return EINVAL;
        }
    }

    return NO_ERROR;
}
void QemuQuery::resetQuery()
|
||||
{
|
||||
if (mQuery != NULL && mQuery != mQueryPrealloc) {
|
||||
delete[] mQuery;
|
||||
}
|
||||
mQuery = mQueryPrealloc;
|
||||
mQueryDeliveryStatus = NO_ERROR;
|
||||
if (mReplyBuffer != NULL) {
|
||||
free(mReplyBuffer);
|
||||
mReplyBuffer = NULL;
|
||||
}
|
||||
mReplyData = NULL;
|
||||
mReplySize = mReplyDataSize = 0;
|
||||
mReplyStatus = 0;
|
||||
}
|
||||
|
||||
/****************************************************************************
|
||||
* Qemu client base
|
||||
***************************************************************************/
|
||||
|
||||
/* Camera service name, used by connectClient() to build the
 * "qemud:camera[...]" pipe name. */
const char QemuClient::mCameraServiceName[] = "camera";
|
||||
/* Constructs a disconnected client; connectClient() must be called before
 * messages can be exchanged. */
QemuClient::QemuClient()
    : mPipeFD(-1)
{
}

/* Closes the qemu pipe if the client is still connected. */
QemuClient::~QemuClient()
{
    if (mPipeFD >= 0) {
        close(mPipeFD);
    }
}
|
||||
/****************************************************************************
|
||||
* Qemu client API
|
||||
***************************************************************************/
|
||||
|
||||
/* Connects the client to a camera service in the emulator over a qemu pipe.
 * Param:
 *  param - NULL/empty to connect to the camera factory service, otherwise a
 *      parameter string appended as "qemud:camera:<param>".
 * Return:
 *  NO_ERROR on success; EINVAL if already connected; otherwise an
 *  errno-style code from the pipe open.
 * Fix over the original: both pipe names are now formatted with a bounded
 * snprintf instead of an unchecked sprintf into an exact-size buffer. */
status_t QemuClient::connectClient(const char* param)
{
    ALOGV("%s: '%s'", __FUNCTION__, param ? param : "");

    /* Make sure that client is not connected already. */
    if (mPipeFD >= 0) {
        ALOGE("%s: Qemu client is already connected", __FUNCTION__);
        return EINVAL;
    }

    /* Select one of the two: 'factory', or 'emulated camera' service */
    if (param == NULL || *param == '\0') {
        /* No parameters: connect to the factory service. */
        char pipe_name[512];
        snprintf(pipe_name, sizeof(pipe_name), "qemud:%s", mCameraServiceName);
        mPipeFD = qemu_pipe_open(pipe_name);
    } else {
        /* One extra char ':' that separates service name and parameters, six
         * characters for 'qemud:', plus the terminator. This is required by
         * the qemu pipe protocol. */
        const size_t conn_len = strlen(mCameraServiceName) + strlen(param) + 8;
        char* connection_str = new char[conn_len];
        snprintf(connection_str, conn_len, "qemud:%s:%s",
                 mCameraServiceName, param);

        mPipeFD = qemu_pipe_open(connection_str);
        delete[] connection_str;
    }
    if (mPipeFD < 0) {
        ALOGE("%s: Unable to connect to the camera service '%s': %s",
              __FUNCTION__, param ? param : "Factory", strerror(errno));
        return errno ? errno : EINVAL;
    }

    return NO_ERROR;
}
|
||||
void QemuClient::disconnectClient()
|
||||
{
|
||||
ALOGV("%s", __FUNCTION__);
|
||||
|
||||
if (mPipeFD >= 0) {
|
||||
close(mPipeFD);
|
||||
mPipeFD = -1;
|
||||
}
|
||||
}
|
||||
|
||||
/* Sends 'data_size' bytes to the connected camera service.
 * Return:
 *  NO_ERROR on a complete write; EINVAL when not connected; otherwise an
 *  errno-style code.
 */
status_t QemuClient::sendMessage(const void* data, size_t data_size)
{
    if (mPipeFD < 0) {
        ALOGE("%s: Qemu client is not connected", __FUNCTION__);
        return EINVAL;
    }

    /* Note that we don't use here qemud_client_send, since with qemu pipes we
     * don't need to provide payload size prior to payload when we're writing to
     * the pipe. So, we can use simple write, and qemu pipe will take care of the
     * rest, calling the receiving end with the number of bytes transferred. */
    const size_t written = qemud_fd_write(mPipeFD, data, data_size);
    if (written != data_size) {
        ALOGE("%s: Error sending data via qemu pipe: '%s'",
              __FUNCTION__, strerror(errno));
        return errno ? errno : EIO;
    }
    return NO_ERROR;
}
|
||||
/* Receives a message from the camera service.
 * On success '*data' points to a malloc()-ed payload — ownership passes to
 * the caller, who must free() it — and '*data_size' holds its size. On
 * failure both are left NULL/0 and an errno-style code is returned. */
status_t QemuClient::receiveMessage(void** data, size_t* data_size)
{
    *data = NULL;
    *data_size = 0;

    if (mPipeFD < 0) {
        ALOGE("%s: Qemu client is not connected", __FUNCTION__);
        return EINVAL;
    }

    /* The way the service replies to a query, it sends payload size first, and
     * then it sends the payload itself. Note that payload size is sent as a
     * string, containing 8 characters representing a hexadecimal payload size
     * value. Note also, that the string doesn't contain zero-terminator. */
    size_t payload_size;
    char payload_size_str[9];
    int rd_res = qemud_fd_read(mPipeFD, payload_size_str, 8);
    if (rd_res != 8) {
        ALOGE("%s: Unable to obtain payload size: %s",
              __FUNCTION__, strerror(errno));
        return errno ? errno : EIO;
    }

    /* Convert payload size. */
    errno = 0;  /* strtol reports range errors only through errno. */
    payload_size_str[8] = '\0';
    payload_size = strtol(payload_size_str, NULL, 16);
    if (errno) {
        ALOGE("%s: Invalid payload size '%s'", __FUNCTION__, payload_size_str);
        return EIO;
    }

    /* Allocate payload data buffer, and read the payload there. */
    *data = malloc(payload_size);
    if (*data == NULL) {
        ALOGE("%s: Unable to allocate %zu bytes payload buffer",
              __FUNCTION__, payload_size);
        return ENOMEM;
    }
    rd_res = qemud_fd_read(mPipeFD, *data, payload_size);
    if (static_cast<size_t>(rd_res) == payload_size) {
        *data_size = payload_size;
        return NO_ERROR;
    } else {
        ALOGE("%s: Read size %d doesnt match expected payload size %zu: %s",
              __FUNCTION__, rd_res, payload_size, strerror(errno));
        /* Don't hand a partially filled buffer to the caller. */
        free(*data);
        *data = NULL;
        return errno ? errno : EIO;
    }
}
|
||||
/* Sends a query to the service and completes it with the received reply.
 * Param:
 *  query - Query to send. On return the query is completed: its delivery
 *      status, reply buffer and reply size are initialized.
 * Return:
 *  Status of query completion (completeQuery's result), which folds both the
 *  transport result and reply parsing. Note that a NO_ERROR return here does
 *  not by itself mean the service answered 'ok' — callers must check
 *  isQuerySucceeded() / getCompletionStatus() on the query object. */
status_t QemuClient::doQuery(QemuQuery* query)
{
    /* Make sure that query has been successfully constructed. */
    if (query->mQueryDeliveryStatus != NO_ERROR) {
        ALOGE("%s: Query is invalid", __FUNCTION__);
        return query->mQueryDeliveryStatus;
    }

    LOGQ("Send query '%s'", query->mQuery);

    /* Send the query. Include the zero-terminator so the service receives a
     * complete C string. */
    status_t res = sendMessage(query->mQuery, strlen(query->mQuery) + 1);
    if (res == NO_ERROR) {
        /* Read the response. */
        res = receiveMessage(reinterpret_cast<void**>(&query->mReplyBuffer),
                      &query->mReplySize);
        if (res == NO_ERROR) {
            LOGQ("Response to query '%s': Status = '%.2s', %d bytes in response",
                 query->mQuery, query->mReplyBuffer, query->mReplySize);
        } else {
            ALOGE("%s Response to query '%s' has failed: %s",
                 __FUNCTION__, query->mQuery, strerror(res));
        }
    } else {
        ALOGE("%s: Send query '%s' failed: %s",
             __FUNCTION__, query->mQuery, strerror(res));
    }

    /* Complete the query, and return its completion handling status.
     * completeQuery() runs even after a transport failure so the query object
     * always ends in a well-defined state. */
    const status_t res1 = query->completeQuery(res);
    ALOGE_IF(res1 != NO_ERROR && res1 != res,
             "%s: Error %d in query '%s' completion",
             __FUNCTION__, res1, query->mQuery);
    return res1;
}
|
||||
|
||||
/****************************************************************************
|
||||
* Qemu client for the 'factory' service.
|
||||
***************************************************************************/
|
||||
|
||||
/*
|
||||
* Factory service queries.
|
||||
*/
|
||||
|
||||
/* Queries list of cameras connected to the host. */
const char FactoryQemuClient::mQueryList[] = "list";

/* Constructs a client for the 'factory' service; the actual pipe connection
 * is established later via connectClient(). */
FactoryQemuClient::FactoryQemuClient()
    : QemuClient()
{
}

FactoryQemuClient::~FactoryQemuClient()
{
}
|
||||
|
||||
/* Obtains the list of cameras connected to the host via the 'list' query.
 * Param:
 *  list - On success receives a newly-allocated copy of the list string; the
 *      caller owns it and must free() it.
 * Return:
 *  NO_ERROR on success, or an appropriate error status on failure. */
status_t FactoryQemuClient::listCameras(char** list)
{
    ALOGV("%s", __FUNCTION__);

    QemuQuery query(mQueryList);
    const bool failed = doQuery(&query) != 0 || !query.isQuerySucceeded();
    if (failed) {
        ALOGE("%s: List cameras query failed: %s", __FUNCTION__,
             query.mReplyData ? query.mReplyData : "No error message");
        return query.getCompletionStatus();
    }

    /* An empty reply means no list came back. */
    if (query.mReplyDataSize == 0) {
        ALOGE("%s: No camera list is returned.", __FUNCTION__);
        return EINVAL;
    }

    /* Hand a heap copy of the list over to the caller. */
    *list = (char*)malloc(query.mReplyDataSize);
    if (*list == NULL) {
        ALOGE("%s: Unable to allocate %zu bytes",
             __FUNCTION__, query.mReplyDataSize);
        return ENOMEM;
    }
    memcpy(*list, query.mReplyData, query.mReplyDataSize);
    ALOGD("Emulated camera list: %s", *list);
    return NO_ERROR;
}
|
||||
|
||||
/****************************************************************************
|
||||
* Qemu client for an 'emulated camera' service.
|
||||
***************************************************************************/
|
||||
|
||||
/*
|
||||
* Emulated camera queries
|
||||
*/
|
||||
|
||||
/* Names of the queries understood by an 'emulated camera' service. These are
 * wire-protocol strings; do not change them without changing the emulator. */

/* Connect to the camera device. */
const char CameraQemuClient::mQueryConnect[] = "connect";
/* Disconnect from the camera device. */
const char CameraQemuClient::mQueryDisconnect[] = "disconnect";
/* Start capturing video from the camera device. */
const char CameraQemuClient::mQueryStart[] = "start";
/* Stop capturing video from the camera device. */
const char CameraQemuClient::mQueryStop[] = "stop";
/* Get next video frame from the camera device. */
const char CameraQemuClient::mQueryFrame[] = "frame";
|
||||
|
||||
/* Constructs a client for an 'emulated camera' service; the actual pipe
 * connection is established later via connectClient(). */
CameraQemuClient::CameraQemuClient()
    : QemuClient()
{
}

CameraQemuClient::~CameraQemuClient()
{
}
|
||||
|
||||
/* Issues the 'connect' query to attach to the camera device.
 * Return:
 *  NO_ERROR on success, or an appropriate error status on failure. */
status_t CameraQemuClient::queryConnect()
{
    ALOGV("%s", __FUNCTION__);

    QemuQuery query(mQueryConnect);
    doQuery(&query);

    const status_t status = query.getCompletionStatus();
    if (status != NO_ERROR) {
        ALOGE("%s: Query failed: %s",
             __FUNCTION__, query.mReplyData ? query.mReplyData :
                                              "No error message");
    }
    return status;
}
|
||||
|
||||
/* Issues the 'disconnect' query to detach from the camera device.
 * Return:
 *  NO_ERROR on success, or an appropriate error status on failure. */
status_t CameraQemuClient::queryDisconnect()
{
    ALOGV("%s", __FUNCTION__);

    QemuQuery query(mQueryDisconnect);
    doQuery(&query);

    const status_t status = query.getCompletionStatus();
    if (status != NO_ERROR) {
        ALOGE("%s: Query failed: %s",
             __FUNCTION__, query.mReplyData ? query.mReplyData :
                                              "No error message");
    }
    return status;
}
|
||||
|
||||
/* Issues the 'start' query, asking the device to begin capturing frames.
 * Param:
 *  pixel_format - Pixel format the client will use for video frames.
 *  width, height - Requested frame dimensions.
 * Return:
 *  NO_ERROR on success, or an appropriate error status on failure. */
status_t CameraQemuClient::queryStart(uint32_t pixel_format,
                                      int width,
                                      int height)
{
    ALOGV("%s", __FUNCTION__);

    /* Build the query with its dimension and pixel-format parameters. */
    char query_buf[256];
    snprintf(query_buf, sizeof(query_buf), "%s dim=%dx%d pix=%d",
             mQueryStart, width, height, pixel_format);

    QemuQuery query(query_buf);
    doQuery(&query);

    const status_t status = query.getCompletionStatus();
    if (status != NO_ERROR) {
        ALOGE("%s: Query failed: %s",
             __FUNCTION__, query.mReplyData ? query.mReplyData :
                                              "No error message");
    }
    return status;
}
|
||||
|
||||
/* Issues the 'stop' query, asking the device to stop capturing frames.
 * Return:
 *  NO_ERROR on success, or an appropriate error status on failure. */
status_t CameraQemuClient::queryStop()
{
    ALOGV("%s", __FUNCTION__);

    QemuQuery query(mQueryStop);
    doQuery(&query);

    const status_t status = query.getCompletionStatus();
    if (status != NO_ERROR) {
        ALOGE("%s: Query failed: %s",
             __FUNCTION__, query.mReplyData ? query.mReplyData :
                                              "No error message");
    }
    return status;
}
|
||||
|
||||
/* Issues the 'frame' query, fetching the next video and/or preview frame.
 * The reply payload contains the video frame first (if requested), followed
 * by the preview frame (if requested).
 * Param:
 *  vframe, vframe_size - Destination buffer for the video frame; pass
 *      NULL / 0 to skip the video frame.
 *  pframe, pframe_size - Destination buffer for the preview frame; pass
 *      NULL / 0 to skip the preview frame.
 *  r_scale, g_scale, b_scale - White balance scale factors.
 *  exposure_comp - Exposure compensation factor.
 * Return:
 *  NO_ERROR on success, or an appropriate error status on failure. */
status_t CameraQemuClient::queryFrame(void* vframe,
                                      void* pframe,
                                      size_t vframe_size,
                                      size_t pframe_size,
                                      float r_scale,
                                      float g_scale,
                                      float b_scale,
                                      float exposure_comp)
{
    ALOGV("%s", __FUNCTION__);

    /* Request only the frames the caller supplied buffers for: a zero size
     * in the query tells the service to omit that frame. */
    char query_str[256];
    snprintf(query_str, sizeof(query_str), "%s video=%zu preview=%zu whiteb=%g,%g,%g expcomp=%g",
             mQueryFrame, (vframe && vframe_size) ? vframe_size : 0,
             (pframe && pframe_size) ? pframe_size : 0, r_scale, g_scale, b_scale,
             exposure_comp);
    QemuQuery query(query_str);
    doQuery(&query);
    const status_t res = query.getCompletionStatus();
    if( res != NO_ERROR) {
        ALOGE("%s: Query failed: %s",
             __FUNCTION__, query.mReplyData ? query.mReplyData :
                                              "No error message");
        return res;
    }

    /* Copy requested frames out of the reply payload, validating that the
     * reply is large enough at each step. */
    size_t cur_offset = 0;
    const uint8_t* frame = reinterpret_cast<const uint8_t*>(query.mReplyData);
    /* Video frame is always first. */
    if (vframe != NULL && vframe_size != 0) {
        /* Make sure that video frame is in. */
        if ((query.mReplyDataSize - cur_offset) >= vframe_size) {
            memcpy(vframe, frame, vframe_size);
            cur_offset += vframe_size;
        } else {
            ALOGE("%s: Reply %zu bytes is to small to contain %zu bytes video frame",
                 __FUNCTION__, query.mReplyDataSize - cur_offset, vframe_size);
            return EINVAL;
        }
    }
    /* Preview frame follows the video frame at cur_offset. */
    if (pframe != NULL && pframe_size != 0) {
        /* Make sure that preview frame is in. */
        if ((query.mReplyDataSize - cur_offset) >= pframe_size) {
            memcpy(pframe, frame + cur_offset, pframe_size);
            cur_offset += pframe_size;
        } else {
            ALOGE("%s: Reply %zu bytes is to small to contain %zu bytes preview frame",
                 __FUNCTION__, query.mReplyDataSize - cur_offset, pframe_size);
            return EINVAL;
        }
    }

    return NO_ERROR;
}
|
||||
|
||||
}; /* namespace android */
|
||||
437
android/camera/QemuClient.h
Executable file
437
android/camera/QemuClient.h
Executable file
|
|
@ -0,0 +1,437 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA_QEMU_CLIENT_H
|
||||
#define HW_EMULATOR_CAMERA_QEMU_CLIENT_H
|
||||
|
||||
/*
|
||||
* Contains declaration of classes that encapsulate connection to camera services
|
||||
* in the emulator via qemu pipe.
|
||||
*/
|
||||
|
||||
#include <hardware/qemud.h>
|
||||
|
||||
namespace android {
|
||||
|
||||
/****************************************************************************
|
||||
* Qemu query
|
||||
***************************************************************************/
|
||||
|
||||
/* Encapsulates a query to the emulator.
|
||||
* Guest exchanges data with the emulator via queries sent over the qemu pipe.
|
||||
* The queries as well as replies to the queries are all strings (except for the
|
||||
* 'frame' query where reply is a framebuffer).
|
||||
* Each query is formatted as such:
|
||||
*
|
||||
* "<query name>[ <parameters>]",
|
||||
*
|
||||
* where <query name> is a string representing query name, and <parameters> are
|
||||
* optional parameters for the query. If parameters are present, they must be
|
||||
* separated from the query name with a single space, and they must be formatted
|
||||
* as such:
|
||||
*
|
||||
* "<name1>=<value1> <name2>=<value2> ... <nameN>=<valueN>"
|
||||
*
|
||||
* I.e.:
|
||||
* - Every parameter must have a name, and a value.
|
||||
* - Name and value must be separated with '='.
|
||||
* - No spaces are allowed around '=' separating name and value.
|
||||
* - Parameters must be separated with a single space character.
|
||||
* - No '=' character is allowed in name and in value.
|
||||
*
|
||||
* There are certain restrictions on strings used in the query:
|
||||
* - Spaces are allowed only as separators.
|
||||
* - '=' are allowed only to divide parameter names from parameter values.
|
||||
*
|
||||
* Emulator replies to each query in two chunks:
|
||||
* - 8 bytes encoding the payload size as a string containing hexadecimal
|
||||
* representation of the payload size value. This is done in order to simplify
|
||||
* dealing with different endianness on the host, and on the guest.
|
||||
* - Payload, whose size is defined by the first chunk.
|
||||
*
|
||||
* Every payload always begins with two characters, encoding the result of the
|
||||
* query:
|
||||
* - 'ok' Encoding the success
|
||||
* - 'ko' Encoding a failure.
|
||||
* After that payload may have optional data. If payload has more data following
|
||||
* the query result, there is a ':' character separating them. If payload carries
|
||||
* only the result, it always ends with a zero-terminator. So, payload 'ok'/'ko'
|
||||
* prefix is always 3 bytes long: it either includes a zero-terminator, if there
|
||||
* is no data, or a ':' separator.
|
||||
*/
|
||||
class QemuQuery {
public:
    /* Constructs an uninitialized QemuQuery instance. */
    QemuQuery();

    /* Constructs and initializes QemuQuery instance for a query.
     * Param:
     *  query_string - Query string. This constructor can also be used to
     *      construct a query that doesn't have parameters. In this case query
     *      name can be passed as a parameter here.
     */
    explicit QemuQuery(const char* query_string);

    /* Constructs and initializes QemuQuery instance for a query with parameters.
     * Param:
     *  query_name - Query name.
     *  query_param - Query parameters. Can be NULL.
     */
    QemuQuery(const char* query_name, const char* query_param);

    /* Destructs QemuQuery instance. */
    ~QemuQuery();

    /****************************************************************************
     * Public API
     ***************************************************************************/

    /* Creates new query.
     * Note: this method will reset this instance prior to creating a new query
     * in order to discard possible "leftovers" from the previous query.
     * Param:
     *  query_name - Query name.
     *  query_param - Query parameters. Can be NULL.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    status_t createQuery(const char* name, const char* param);

    /* Completes the query after a reply from the emulator.
     * This method will parse the reply buffer, and calculate the final query
     * status, which depends not only on the transport success / failure, but
     * also on 'ok' / 'ko' in the reply buffer.
     * Param:
     *  status - Query delivery status. This status doesn't necessarily reflects
     *      the final query status (which is defined by 'ok'/'ko' prefix in the
     *      reply buffer). This status simply states whether or not the query has
     *      been sent, and a reply has been received successfully. However, if
     *      this status indicates a failure, it means that the entire query has
     *      failed.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure. Note that
     *  status returned here just signals whether or not the method has succeeded.
     *  Use isQuerySucceeded() / getCompletionStatus() methods of this class to
     *  check the final query status.
     */
    status_t completeQuery(status_t status);

    /* Resets the query from a previous use. */
    void resetQuery();

    /* Checks if query has succeeded.
     * Note that this method must be called after completeQuery() method of this
     * class has been executed.
     */
    inline bool isQuerySucceeded() const {
        return mQueryDeliveryStatus == NO_ERROR && mReplyStatus != 0;
    }

    /* Gets final completion status of the query.
     * Note that this method must be called after completeQuery() method of this
     * class has been executed.
     * Return:
     *  NO_ERROR if query has succeeded, or an appropriate error status on query
     *  failure.
     */
    inline status_t getCompletionStatus() const {
        if (mQueryDeliveryStatus == NO_ERROR) {
            if (mReplyStatus) {
                return NO_ERROR;
            } else {
                /* Delivered fine, but the service answered 'ko'. */
                return EINVAL;
            }
        } else {
            return mQueryDeliveryStatus;
        }
    }

    /****************************************************************************
     * Public data members
     ***************************************************************************/

public:
    /* Query string. */
    char*       mQuery;
    /* Query delivery status. */
    status_t    mQueryDeliveryStatus;
    /* Reply buffer */
    char*       mReplyBuffer;
    /* Reply data (past 'ok'/'ko'). If NULL, there were no data in reply. */
    char*       mReplyData;
    /* Reply buffer size. */
    size_t      mReplySize;
    /* Reply data size. */
    size_t      mReplyDataSize;
    /* Reply status: 1 - ok, 0 - ko. */
    int         mReplyStatus;

    /****************************************************************************
     * Private data members
     ***************************************************************************/

protected:
    /* Preallocated buffer for small queries. */
    char mQueryPrealloc[256];
};
|
||||
|
||||
/****************************************************************************
|
||||
* Qemu client base
|
||||
***************************************************************************/
|
||||
|
||||
/* Encapsulates a connection to the 'camera' service in the emulator via qemu
|
||||
* pipe.
|
||||
*/
|
||||
class QemuClient {
public:
    /* Constructs QemuClient instance. */
    QemuClient();

    /* Destructs QemuClient instance. */
    virtual ~QemuClient();

    /****************************************************************************
     * Qemu client API
     ***************************************************************************/

public:
    /* Connects to the 'camera' service in the emulator via qemu pipe.
     * Param:
     *  param - Parameters to pass to the camera service. There are two types of
     *      camera services implemented by the emulator. The first one is a
     *      'camera factory' type of service that provides list of cameras
     *      connected to the host. Another one is an 'emulated camera' type of
     *      service that provides interface to a camera connected to the host. At
     *      the connection time emulator makes distinction between the two by
     *      looking at connection parameters: no parameters means connection to
     *      the 'factory' service, while connection with parameters means
     *      connection to an 'emulated camera' service, where camera is identified
     *      by one of the connection parameters. So, passing NULL, or an empty
     *      string to this method will establish a connection with the 'factory'
     *      service, while not empty string passed here will establish connection
     *      with an 'emulated camera' service. Parameters defining the emulated
     *      camera must be formatted as such:
     *
     *          "name=<device name> [inp_channel=<input channel #>]",
     *
     *      where 'device name' is a required parameter defining name of the
     *      camera device, and 'input channel' is an optional parameter (positive
     *      integer), defining the input channel to use on the camera device.
     *      Note that device name passed here must have been previously obtained
     *      from the factory service using 'list' query.
     * Return:
     *  NO_ERROR on success, or an appropriate error status.
     */
    virtual status_t connectClient(const char* param);

    /* Disconnects from the service. */
    virtual void disconnectClient();

    /* Sends data to the service.
     * Param:
     *  data, data_size - Data to send.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure.
     */
    virtual status_t sendMessage(const void* data, size_t data_size);

    /* Receives data from the service.
     * This method assumes that data to receive will come in two chunks: 8
     * characters encoding the payload size in hexadecimal string, followed by
     * the payload (if any).
     * This method will allocate data buffer where to receive the response.
     * Param:
     *  data - Upon success contains address of the allocated data buffer with
     *      the data received from the service. The caller is responsible for
     *      freeing allocated data buffer.
     *  data_size - Upon success contains size of the data received from the
     *      service.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure.
     */
    virtual status_t receiveMessage(void** data, size_t* data_size);

    /* Sends a query, and receives a response from the service.
     * Param:
     *  query - Query to send to the service. When this method returns, the query
     *  is completed, and all its relevant data members are properly initialized.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure. Note that
     *  status returned here is not the final query status. Use isQuerySucceeded(),
     *  or getCompletionStatus() method on the query object to see if it has
     *  succeeded. However, if this method returns a failure, it means that the
     *  query has failed, and there is no guarantee that its data members are
     *  properly initialized (except for the 'mQueryDeliveryStatus', which is
     *  always in the proper state).
     */
    virtual status_t doQuery(QemuQuery* query);

    /****************************************************************************
     * Data members
     ***************************************************************************/

protected:
    /* Qemu pipe handle. -1 when not connected. */
    int     mPipeFD;

private:
    /* Camera service name. */
    static const char mCameraServiceName[];
};
|
||||
|
||||
/****************************************************************************
|
||||
* Qemu client for the 'factory' service.
|
||||
***************************************************************************/
|
||||
|
||||
/* Encapsulates QemuClient for the 'factory' service. */
|
||||
class FactoryQemuClient : public QemuClient {
public:
    /* Constructs FactoryQemuClient instance. */
    FactoryQemuClient();

    /* Destructs FactoryQemuClient instance. */
    ~FactoryQemuClient();

    /****************************************************************************
     * Public API
     ***************************************************************************/

public:
    /* Lists camera devices connected to the host.
     * Param:
     *  list - Upon success contains a list of cameras connected to the host. The
     *      list returned here is represented as a string, containing multiple
     *      lines separated with '\n', where each line represents a camera. Each
     *      camera line is formatted as such:
     *
     *          "name=<device name> channel=<num> pix=<num> framedims=<dimensions>\n"
     *
     *      Where:
     *      - 'name' is the name of the camera device attached to the host. This
     *        name must be used for subsequent connection to the 'emulated camera'
     *        service for that camera.
     *      - 'channel' - input channel number (positive int) to use to communicate
     *        with the camera.
     *      - 'pix' - pixel format (a "fourcc" uint), chosen for the video frames
     *        by the camera service.
     *      - 'framedims' contains a list of frame dimensions supported by the
     *        camera for the chosen pixel format. Each entry in the list is in form
     *        '<width>x<height>', where 'width' and 'height' are numeric values
     *        for width and height of a supported frame dimension. Entries in
     *        this list are separated with ',' with no spaces between the entries.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure.
     */
    status_t listCameras(char** list);

    /****************************************************************************
     * Names of the queries available for the emulated camera factory.
     ***************************************************************************/

private:
    /* List cameras connected to the host. */
    static const char mQueryList[];
};
|
||||
|
||||
/****************************************************************************
|
||||
* Qemu client for an 'emulated camera' service.
|
||||
***************************************************************************/
|
||||
|
||||
/* Encapsulates QemuClient for an 'emulated camera' service.
|
||||
*/
|
||||
class CameraQemuClient : public QemuClient {
public:
    /* Constructs CameraQemuClient instance. */
    CameraQemuClient();

    /* Destructs CameraQemuClient instance. */
    ~CameraQemuClient();

    /****************************************************************************
     * Public API
     ***************************************************************************/

public:
    /* Queries camera connection.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure.
     */
    status_t queryConnect();

    /* Queries camera disconnection.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure.
     */
    status_t queryDisconnect();

    /* Queries camera to start capturing video.
     * Param:
     *  pixel_format - Pixel format that is used by the client to push video
     *      frames to the camera framework.
     *  width, height - Frame dimensions, requested by the framework.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure.
     */
    status_t queryStart(uint32_t pixel_format, int width, int height);

    /* Queries camera to stop capturing video.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure.
     */
    status_t queryStop();

    /* Queries camera for the next video frame.
     * Param:
     *  vframe, vframe_size - Define buffer, allocated to receive a video frame.
     *      Any of these parameters can be 0, indicating that the caller is
     *      interested only in preview frame.
     *  pframe, pframe_size - Define buffer, allocated to receive a preview frame.
     *      Any of these parameters can be 0, indicating that the caller is
     *      interested only in video frame.
     *  r_scale, g_scale, b_scale - White balance scale.
     *  exposure_comp - Exposure compensation.
     * Return:
     *  NO_ERROR on success, or an appropriate error status on failure.
     */
    status_t queryFrame(void* vframe,
                        void* pframe,
                        size_t vframe_size,
                        size_t pframe_size,
                        float r_scale,
                        float g_scale,
                        float b_scale,
                        float exposure_comp);

    /****************************************************************************
     * Names of the queries available for the emulated camera.
     ***************************************************************************/

private:
    /* Connect to the camera. */
    static const char mQueryConnect[];
    /* Disconnect from the camera. */
    static const char mQueryDisconnect[];
    /* Start video capturing. */
    static const char mQueryStart[];
    /* Stop video capturing. */
    static const char mQueryStop[];
    /* Query frame(s). */
    static const char mQueryFrame[];
};
|
||||
|
||||
}; /* namespace android */
|
||||
|
||||
#endif /* HW_EMULATOR_CAMERA_QEMU_CLIENT_H */
|
||||
65
android/camera/fake-pipeline2/Base.h
Normal file
65
android/camera/fake-pipeline2/Base.h
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This file includes various basic structures that are needed by multiple parts
|
||||
* of the fake camera 2 implementation.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA2_BASE_H
|
||||
#define HW_EMULATOR_CAMERA2_BASE_H
|
||||
|
||||
#include <system/window.h>
|
||||
#include <hardware/camera2.h>
|
||||
#include <utils/Vector.h>
|
||||
|
||||
namespace android {
|
||||
|
||||
|
||||
/* Internal structure for passing buffers across threads */
struct StreamBuffer {
    // Positive numbers are output streams
    // Negative numbers are input reprocess streams
    // Zero is an auxillary buffer
    int streamId;
    // Buffer geometry and pixel layout.
    uint32_t width, height;
    uint32_t format;
    uint32_t dataSpace;
    uint32_t stride;
    // Underlying gralloc handle for the buffer.
    buffer_handle_t *buffer;
    // CPU-accessible pointer to the buffer's pixel data.
    uint8_t *img;
};
typedef Vector<StreamBuffer> Buffers;

/* Configuration of a single output stream. */
struct Stream {
    const camera2_stream_ops_t *ops;
    uint32_t width, height;
    int32_t format;
    uint32_t stride;
};

/* Configuration of a single input (reprocess) stream. */
struct ReprocessStream {
    const camera2_stream_in_ops_t *ops;
    uint32_t width, height;
    int32_t format;
    uint32_t stride;
    // -1 if the reprocessing stream is independent
    int32_t sourceStreamId;
};
|
||||
|
||||
} // namespace android;
|
||||
|
||||
#endif
|
||||
286
android/camera/fake-pipeline2/JpegCompressor.cpp
Normal file
286
android/camera/fake-pipeline2/JpegCompressor.cpp
Normal file
|
|
@ -0,0 +1,286 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
//#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera2_JpegCompressor"
|
||||
|
||||
#include <utils/Log.h>
|
||||
#include <ui/GraphicBufferMapper.h>
|
||||
|
||||
#include "JpegCompressor.h"
|
||||
#include "../EmulatedFakeCamera2.h"
|
||||
#include "../EmulatedFakeCamera3.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
// Construct an idle compressor. Thread(false) means the worker thread is
// not started until start() calls run(); no buffers or listener are held yet.
JpegCompressor::JpegCompressor():
        Thread(false),
        mIsBusy(false),
        mSynchronous(false),
        mBuffers(NULL),
        mListener(NULL) {
}
|
||||
|
||||
// Destructor. Briefly takes mMutex so destruction serializes against a
// concurrent start()/compressSynchronous() caller holding the lock.
JpegCompressor::~JpegCompressor() {
    Mutex::Autolock lock(mMutex);
}
|
||||
|
||||
/**
 * Start asynchronous JPEG compression of `buffers` on the worker thread.
 * Takes ownership of the Buffers vector; completion (or failure) is
 * reported through listener->onJpegDone().
 *
 * Returns BAD_VALUE for a NULL listener, INVALID_OPERATION if a
 * compression is already in flight, or the error from Thread::run().
 */
status_t JpegCompressor::start(Buffers *buffers, JpegListener *listener) {
    if (listener == NULL) {
        ALOGE("%s: NULL listener not allowed!", __FUNCTION__);
        return BAD_VALUE;
    }
    Mutex::Autolock lock(mMutex);
    {
        Mutex::Autolock busyLock(mBusyMutex);

        if (mIsBusy) {
            ALOGE("%s: Already processing a buffer!", __FUNCTION__);
            return INVALID_OPERATION;
        }

        mIsBusy = true;
        mSynchronous = false;
        mBuffers = buffers;
        mListener = listener;
    }

    status_t res;
    res = run("EmulatedFakeCamera2::JpegCompressor");
    if (res != OK) {
        ALOGE("%s: Unable to start up compression thread: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        // FIX: the original deleted mBuffers but left mIsBusy set and
        // mBuffers dangling, permanently wedging the compressor (all later
        // start() calls fail with INVALID_OPERATION) and risking a double
        // delete. Release ownership and reset the busy state so the
        // compressor can be used again, and wake any waitForDone() callers.
        delete mBuffers;
        Mutex::Autolock busyLock(mBusyMutex);
        mBuffers = NULL;
        mListener = NULL;
        mIsBusy = false;
        mDone.signal();
    }
    return res;
}
|
||||
|
||||
/**
 * Compress `buffers` on the calling thread, blocking until complete.
 * Unlike start(), ownership of `buffers` stays with the caller
 * (cleanUp() skips the delete when mSynchronous is set).
 *
 * Returns INVALID_OPERATION if a compression is already in flight,
 * otherwise the result of compress().
 */
status_t JpegCompressor::compressSynchronous(Buffers *buffers) {
    status_t res;

    Mutex::Autolock lock(mMutex);
    {
        Mutex::Autolock busyLock(mBusyMutex);

        if (mIsBusy) {
            ALOGE("%s: Already processing a buffer!", __FUNCTION__);
            return INVALID_OPERATION;
        }

        // Mark busy and record state; mSynchronous changes ownership and
        // callback behavior in cleanUp().
        mIsBusy = true;
        mSynchronous = true;
        mBuffers = buffers;
    }

    res = compress();

    cleanUp();

    return res;
}
|
||||
|
||||
// Request the worker thread to exit and block until it has finished.
// compress() polls exitPending() between scanline chunks, so cancellation
// takes effect mid-compression.
status_t JpegCompressor::cancel() {
    requestExitAndWait();
    return OK;
}
|
||||
|
||||
// Thread hook called once before threadLoop(); no per-run setup is needed.
status_t JpegCompressor::readyToRun() {
    return OK;
}
|
||||
|
||||
// Worker-thread body for the asynchronous path: run one compression,
// notify the listener with the result, release state, then return false
// so the Thread framework does not loop again.
bool JpegCompressor::threadLoop() {
    status_t res;
    ALOGV("%s: Starting compression thread", __FUNCTION__);

    res = compress();

    // Deliver the (possibly failed) JPEG buffer to the listener.
    mListener->onJpegDone(mJpegBuffer, res == OK);

    cleanUp();

    // One-shot thread: false stops the loop.
    return false;
}
|
||||
|
||||
/**
 * Core compression routine: locates the source (auxiliary RGB) and
 * destination (BLOB) buffers in mBuffers, then runs libjpeg over the
 * source rows, writing compressed output directly into the destination
 * stream buffer via the custom jpeg_destination_mgr callbacks.
 *
 * Returns BAD_VALUE if the expected buffers are missing, NO_INIT on any
 * libjpeg error, TIMED_OUT if cancelled mid-compression, OK on success.
 */
status_t JpegCompressor::compress() {
    // Find source and target buffers. Assumes only one buffer matches
    // each condition!
    //
    // FIX: the original declared `bool foundJpeg = false, mFoundAux = false;`
    // which created an unused local `foundJpeg` and a local `mFoundAux`
    // shadowing the member of the same name — the member flags tested later
    // here and in cleanUp() were never reset. Reset the members directly.
    mFoundJpeg = false;
    mFoundAux = false;
    for (size_t i = 0; i < mBuffers->size(); i++) {
        const StreamBuffer &b = (*mBuffers)[i];
        if (b.format == HAL_PIXEL_FORMAT_BLOB) {
            mJpegBuffer = b;
            mFoundJpeg = true;
        } else if (b.streamId <= 0) {
            // streamId <= 0 marks input-reprocess (<0) or auxiliary (0) buffers.
            mAuxBuffer = b;
            mFoundAux = true;
        }
        if (mFoundJpeg && mFoundAux) break;
    }
    if (!mFoundJpeg || !mFoundAux) {
        ALOGE("%s: Unable to find buffers for JPEG source/destination",
                __FUNCTION__);
        return BAD_VALUE;
    }

    // Set up error management: libjpeg errors are routed to
    // jpegErrorHandler(), which records them in mJpegErrorInfo for
    // checkError() to report.
    mJpegErrorInfo = NULL;
    JpegError error;
    error.parent = this;

    mCInfo.err = jpeg_std_error(&error);
    mCInfo.err->error_exit = jpegErrorHandler;

    jpeg_create_compress(&mCInfo);
    if (checkError("Error initializing compression")) return NO_INIT;

    // Route compressed data straight to the output stream buffer. The
    // destination manager lives on this stack frame; that is safe because
    // compression completes before this function returns.
    JpegDestination jpegDestMgr;
    jpegDestMgr.parent = this;
    jpegDestMgr.init_destination = jpegInitDestination;
    jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer;
    jpegDestMgr.term_destination = jpegTermDestination;

    mCInfo.dest = &jpegDestMgr;

    // Set up compression parameters: source is packed 3-byte RGB.
    mCInfo.image_width = mAuxBuffer.width;
    mCInfo.image_height = mAuxBuffer.height;
    mCInfo.input_components = 3;
    mCInfo.in_color_space = JCS_RGB;

    jpeg_set_defaults(&mCInfo);
    if (checkError("Error configuring defaults")) return NO_INIT;

    // Do compression in chunks of up to kChunkSize scanlines, checking for
    // errors and cancellation between chunks.
    jpeg_start_compress(&mCInfo, TRUE);
    if (checkError("Error starting compression")) return NO_INIT;

    size_t rowStride = mAuxBuffer.stride * 3;
    const size_t kChunkSize = 32;
    while (mCInfo.next_scanline < mCInfo.image_height) {
        JSAMPROW chunk[kChunkSize];
        // FIX: always submitting kChunkSize rows read past the end of the
        // source image whenever image_height is not a multiple of
        // kChunkSize; clamp the chunk to the rows actually remaining.
        size_t rowsLeft = mCInfo.image_height - mCInfo.next_scanline;
        size_t rows = (rowsLeft < kChunkSize) ? rowsLeft : kChunkSize;
        for (size_t i = 0 ; i < rows; i++) {
            chunk[i] = (JSAMPROW)
                    (mAuxBuffer.img + (i + mCInfo.next_scanline) * rowStride);
        }
        jpeg_write_scanlines(&mCInfo, chunk, rows);
        if (checkError("Error while compressing")) return NO_INIT;
        if (exitPending()) {
            ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
            return TIMED_OUT;
        }
    }

    jpeg_finish_compress(&mCInfo);
    if (checkError("Error while finishing compression")) return NO_INIT;

    // All done
    return OK;
}
|
||||
|
||||
// Returns whether a compression is currently in flight (thread-safe).
bool JpegCompressor::isBusy() {
    Mutex::Autolock busyLock(mBusyMutex);
    return mIsBusy;
}
|
||||
|
||||
// Returns true if any buffer of the in-flight compression (if one is
// active) belongs to stream `id`. Used to block stream teardown while the
// compressor still references its buffers.
bool JpegCompressor::isStreamInUse(uint32_t id) {
    Mutex::Autolock lock(mBusyMutex);

    if (mBuffers && mIsBusy) {
        for (size_t i = 0; i < mBuffers->size(); i++) {
            if ( (*mBuffers)[i].streamId == (int)id ) return true;
        }
    }
    return false;
}
|
||||
|
||||
// Block until the in-flight compression (if any) completes, or `timeout`
// (relative, nanoseconds) elapses. Returns true if the compressor is idle,
// false on timeout. mDone is signaled by cleanUp().
bool JpegCompressor::waitForDone(nsecs_t timeout) {
    Mutex::Autolock lock(mBusyMutex);
    status_t res = OK;
    if (mIsBusy) {
        res = mDone.waitRelative(mBusyMutex, timeout);
    }
    return (res == OK);
}
|
||||
|
||||
// Check whether the libjpeg error handler has recorded an error since the
// last check. If so, log it prefixed with `msg`, clear the recorded state,
// and return true; otherwise return false.
bool JpegCompressor::checkError(const char *msg) {
    if (mJpegErrorInfo) {
        char errBuffer[JMSG_LENGTH_MAX];
        mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer);
        ALOGE("%s: %s: %s",
                __FUNCTION__, msg, errBuffer);
        // One-shot: clear so the next checkError() call starts fresh.
        mJpegErrorInfo = NULL;
        return true;
    }
    return false;
}
|
||||
|
||||
// Release per-compression state after compress() finishes (either path):
// tear down the libjpeg context, release or return the aux buffer, free
// the buffer list on the asynchronous path, clear the busy flag, and wake
// waitForDone() callers.
void JpegCompressor::cleanUp() {
    status_t res;
    jpeg_destroy_compress(&mCInfo);
    Mutex::Autolock lock(mBusyMutex);

    if (mFoundAux) {
        if (mAuxBuffer.streamId == 0) {
            // streamId 0 marks a locally-allocated auxiliary buffer; the
            // compressor owns its image data.
            delete[] mAuxBuffer.img;
        } else if (!mSynchronous) {
            // Framework-owned input buffer: hand it back via the listener.
            mListener->onJpegInputDone(mAuxBuffer);
        }
    }
    if (!mSynchronous) {
        // start() transferred ownership of the buffer vector to us;
        // compressSynchronous() did not.
        delete mBuffers;
    }

    mBuffers = NULL;

    mIsBusy = false;
    mDone.signal();
}
|
||||
|
||||
// libjpeg error_exit callback: record the failing context on the owning
// JpegCompressor (recovered through the JpegError wrapper) so checkError()
// can report it, instead of libjpeg's default exit().
void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) {
    JpegError *error = static_cast<JpegError*>(cinfo->err);
    error->parent->mJpegErrorInfo = cinfo;
}
|
||||
|
||||
// libjpeg init_destination callback: point the output at the start of the
// JPEG stream buffer, with capacity kMaxJpegSize.
void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) {
    JpegDestination *dest= static_cast<JpegDestination*>(cinfo->dest);
    ALOGV("%s: Setting destination to %p, size %zu",
            __FUNCTION__, dest->parent->mJpegBuffer.img, kMaxJpegSize);
    dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer.img);
    dest->free_in_buffer = kMaxJpegSize;
}
|
||||
|
||||
// libjpeg empty_output_buffer callback: reached only if the compressed
// output exceeds kMaxJpegSize. Log the overflow; returning true tells
// libjpeg the buffer was "emptied", so further output is effectively
// dropped rather than aborting.
boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr cinfo) {
    ALOGE("%s: JPEG destination buffer overflow!",
            __FUNCTION__);
    return true;
}
|
||||
|
||||
// libjpeg term_destination callback: nothing to flush since output goes
// straight into the stream buffer; just log the remaining capacity.
void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) {
    ALOGV("%s: Done writing JPEG data. %zu bytes left in buffer",
            __FUNCTION__, cinfo->dest->free_in_buffer);
}
|
||||
|
||||
// Out-of-line definition anchors the listener interface's vtable here.
JpegCompressor::JpegListener::~JpegListener() {
}
|
||||
|
||||
} // namespace android
|
||||
120
android/camera/fake-pipeline2/JpegCompressor.h
Normal file
120
android/camera/fake-pipeline2/JpegCompressor.h
Normal file
|
|
@ -0,0 +1,120 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* This class simulates a hardware JPEG compressor. It receives image buffers
|
||||
* in RGBA_8888 format, processes them in a worker thread, and then pushes them
|
||||
* out to their destination stream.
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA2_JPEG_H
|
||||
#define HW_EMULATOR_CAMERA2_JPEG_H
|
||||
|
||||
#include "utils/Thread.h"
|
||||
#include "utils/Mutex.h"
|
||||
#include "utils/Timers.h"
|
||||
|
||||
#include "Base.h"
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
extern "C" {
|
||||
#include <jpeglib.h>
|
||||
}
|
||||
|
||||
namespace android {
|
||||
|
||||
/**
 * Simulated hardware JPEG compressor. Accepts a vector of stream buffers
 * containing an RGB source and a BLOB destination, compresses on a worker
 * thread (or synchronously), and reports completion through JpegListener.
 */
class JpegCompressor: private Thread, public virtual RefBase {
  public:

    JpegCompressor();
    ~JpegCompressor();

    // Callback interface for compression results.
    struct JpegListener {
        // Called when JPEG compression has finished, or encountered an error
        virtual void onJpegDone(const StreamBuffer &jpegBuffer,
                bool success) = 0;
        // Called when the input buffer for JPEG is not needed any more,
        // if the buffer came from the framework.
        virtual void onJpegInputDone(const StreamBuffer &inputBuffer) = 0;
        virtual ~JpegListener();
    };

    // Start compressing COMPRESSED format buffers; JpegCompressor takes
    // ownership of the Buffers vector.
    status_t start(Buffers *buffers, JpegListener *listener);

    // Compress and block until buffer is complete. Caller retains
    // ownership of the Buffers vector.
    status_t compressSynchronous(Buffers *buffers);

    // Stop the worker thread; safe to call mid-compression.
    status_t cancel();

    // True while a compression is in flight.
    bool isBusy();
    // True if the in-flight compression references stream `id`.
    bool isStreamInUse(uint32_t id);

    // Block until idle or `timeout` ns elapse; true when idle.
    bool waitForDone(nsecs_t timeout);

    // Maximum compressed output size, in bytes.
    // TODO: Measure this
    static const size_t kMaxJpegSize = 300000;

  private:
    // Guards the busy flag, buffer list and done condition.
    Mutex mBusyMutex;
    bool mIsBusy;
    Condition mDone;
    // True for compressSynchronous() runs (changes ownership/callbacks).
    bool mSynchronous;

    // Serializes start()/compressSynchronous() entry points.
    Mutex mMutex;

    Buffers *mBuffers;
    JpegListener *mListener;

    // Destination (BLOB) and source (aux RGB) buffers for the current run.
    StreamBuffer mJpegBuffer, mAuxBuffer;
    bool mFoundJpeg, mFoundAux;

    // libjpeg compression context for the current run.
    jpeg_compress_struct mCInfo;

    // jpeg_error_mgr extended with a back-pointer to the owning compressor.
    struct JpegError : public jpeg_error_mgr {
        JpegCompressor *parent;
    };
    // Context recorded by jpegErrorHandler(); NULL when no pending error.
    j_common_ptr mJpegErrorInfo;

    // jpeg_destination_mgr extended with a back-pointer to the owner.
    struct JpegDestination : public jpeg_destination_mgr {
        JpegCompressor *parent;
    };

    static void jpegErrorHandler(j_common_ptr cinfo);

    static void jpegInitDestination(j_compress_ptr cinfo);
    static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo);
    static void jpegTermDestination(j_compress_ptr cinfo);

    bool checkError(const char *msg);
    status_t compress();

    void cleanUp();

    /**
     * Inherited Thread virtual overrides
     */
  private:
    virtual status_t readyToRun();
    virtual bool threadLoop();
};
|
||||
|
||||
} // namespace android
|
||||
|
||||
#endif
|
||||
478
android/camera/fake-pipeline2/Scene.cpp
Normal file
478
android/camera/fake-pipeline2/Scene.cpp
Normal file
|
|
@ -0,0 +1,478 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
//#define LOG_NDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera_Scene"
|
||||
#include <utils/Log.h>
|
||||
#include <stdlib.h>
|
||||
#include <cmath>
|
||||
#include "Scene.h"
|
||||
|
||||
// TODO: This should probably be done host-side in OpenGL for speed and better
|
||||
// quality
|
||||
|
||||
namespace android {
|
||||
|
||||
// Define single-letter shortcuts for scene definition, for directly indexing
// mCurrentColors. Each letter expands to a material's base offset into the
// per-material color table (NUM_CHANNELS entries per material).
#define G (Scene::GRASS * Scene::NUM_CHANNELS)
#define S (Scene::GRASS_SHADOW * Scene::NUM_CHANNELS)
#define H (Scene::HILL * Scene::NUM_CHANNELS)
#define W (Scene::WALL * Scene::NUM_CHANNELS)
#define R (Scene::ROOF * Scene::NUM_CHANNELS)
#define D (Scene::DOOR * Scene::NUM_CHANNELS)
#define C (Scene::CHIMNEY * Scene::NUM_CHANNELS)
#define I (Scene::WINDOW * Scene::NUM_CHANNELS)
#define U (Scene::SUN * Scene::NUM_CHANNELS)
#define K (Scene::SKY * Scene::NUM_CHANNELS)
#define M (Scene::MOON * Scene::NUM_CHANNELS)

// Scene tile map dimensions; each tile covers mMapDiv sensor pixels per side.
const int Scene::kSceneWidth = 20;
const int Scene::kSceneHeight = 20;

// 20x20 material map of the scene: sky above hills, with a house (roof,
// walls, door, windows, chimney) on grass with shadows.
const uint8_t Scene::kScene[Scene::kSceneWidth * Scene::kSceneHeight] = {
    //      5         10        15        20
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K, // 5
    K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,K,
    K,K,K,K,K,K,K,K,H,H,H,H,H,H,H,H,H,H,H,H,
    K,K,K,K,K,K,K,K,H,H,H,H,H,H,H,C,C,H,H,H,
    K,K,K,K,K,K,H,H,H,H,H,H,H,H,H,C,C,H,H,H,
    H,K,K,K,K,K,H,R,R,R,R,R,R,R,R,R,R,R,R,H, // 10
    H,K,K,K,K,H,H,R,R,R,R,R,R,R,R,R,R,R,R,H,
    H,H,H,K,K,H,H,R,R,R,R,R,R,R,R,R,R,R,R,H,
    H,H,H,K,K,H,H,H,W,W,W,W,W,W,W,W,W,W,H,H,
    S,S,S,G,G,S,S,S,W,W,W,W,W,W,W,W,W,W,S,S,
    S,G,G,G,G,S,S,S,W,I,I,W,D,D,W,I,I,W,S,S, // 15
    G,G,G,G,G,G,S,S,W,I,I,W,D,D,W,I,I,W,S,S,
    G,G,G,G,G,G,G,G,W,W,W,W,D,D,W,W,W,W,G,G,
    G,G,G,G,G,G,G,G,W,W,W,W,D,D,W,W,W,W,G,G,
    G,G,G,G,G,G,G,G,S,S,S,S,S,S,S,S,S,S,G,G,
    G,G,G,G,G,G,G,G,S,S,S,S,S,S,S,S,S,S,G,G, // 20
    //      5         10        15        20
};

// Undefine the shortcuts immediately so these single-letter names don't
// leak into the rest of the translation unit.
#undef G
#undef S
#undef H
#undef W
#undef R
#undef D
#undef C
#undef I
#undef U
#undef K
#undef M
|
||||
// Construct a scene simulation for a sensor of the given pixel dimensions
// and sensitivity. Starts at noon (hour 12) with a 33 ms exposure.
Scene::Scene(
        int sensorWidthPx,
        int sensorHeightPx,
        float sensorSensitivity):
        mSensorWidth(sensorWidthPx),
        mSensorHeight(sensorHeightPx),
        mHour(12),
        mExposureDuration(0.033f),
        mSensorSensitivity(sensorSensitivity)
{
    // Map scene to sensor pixels: choose the tile size (mMapDiv) from the
    // larger sensor dimension so the 20x20 tile map covers the sensor, and
    // center the map over the sensor via the offsets.
    if (mSensorWidth > mSensorHeight) {
        mMapDiv = (mSensorWidth / (kSceneWidth + 1) ) + 1;
    } else {
        mMapDiv = (mSensorHeight / (kSceneHeight + 1) ) + 1;
    }
    mOffsetX = (kSceneWidth * mMapDiv - mSensorWidth) / 2;
    mOffsetY = (kSceneHeight * mMapDiv - mSensorHeight) / 2;

    // Assume that sensor filters are sRGB primaries to start (these are the
    // standard XYZ -> linear sRGB conversion rows); overridable via
    // setColorFilterXYZ().
    mFilterR[0]  =  3.2406f; mFilterR[1]  = -1.5372f; mFilterR[2]  = -0.4986f;
    mFilterGr[0] = -0.9689f; mFilterGr[1] =  1.8758f; mFilterGr[2] =  0.0415f;
    mFilterGb[0] = -0.9689f; mFilterGb[1] =  1.8758f; mFilterGb[2] =  0.0415f;
    mFilterB[0]  =  0.0557f; mFilterB[1]  = -0.2040f; mFilterB[2]  =  1.0570f;


}
|
||||
|
||||
// No dynamic resources to release.
Scene::~Scene() {
}
|
||||
|
||||
// Replace the sensor's XYZ->channel color filter coefficients (rows for
// R, Gr, Gb, B). Must be called before calculateScene() to take effect.
void Scene::setColorFilterXYZ(
        float rX, float rY, float rZ,
        float grX, float grY, float grZ,
        float gbX, float gbY, float gbZ,
        float bX, float bY, float bZ) {
    mFilterR[0]  = rX;  mFilterR[1]  = rY;  mFilterR[2]  = rZ;
    mFilterGr[0] = grX; mFilterGr[1] = grY; mFilterGr[2] = grZ;
    mFilterGb[0] = gbX; mFilterGb[1] = gbY; mFilterGb[2] = gbZ;
    mFilterB[0]  = bX;  mFilterB[1]  = bY;  mFilterB[2]  = bZ;
}
|
||||
|
||||
// Set the scene's time of day, normalized to a 24-hour clock.
void Scene::setHour(int hour) {
    ALOGV("Hour set to: %d", hour);
    // FIX: plain `hour % 24` is negative for negative input in C++, and
    // calculateScene() indexes kSunlight[]/kMoonlight[] with mHour-derived
    // values — a negative hour would read out of bounds. Normalize into
    // [0, 24) for all inputs.
    mHour = ((hour % 24) + 24) % 24;
}
|
||||
|
||||
// Current hour of day, in [0, 24).
int Scene::getHour() {
    return mHour;
}
|
||||
|
||||
// Set exposure time in seconds, used by calculateScene() to convert scene
// illuminance into per-pixel electron counts.
void Scene::setExposureDuration(float seconds) {
    mExposureDuration = seconds;
}
|
||||
|
||||
// Recompute the per-material sensor response (electron counts per channel)
// for the current hour plus `time` ns offset within the hour, and recompute
// the handshake offsets. Interpolates sun/moon illumination between 2-hour
// table entries, converts chromaticities to XYZ, applies direct vs. shaded
// lighting per material, then projects through the sensor color filters.
// Resets the readout pixel to (0,0).
void Scene::calculateScene(nsecs_t time) {
    // Calculate time fractions for interpolation between the bracketing
    // kTimeStep-hour table entries.
    int timeIdx = mHour / kTimeStep;
    int nextTimeIdx = (timeIdx + 1) % (24 / kTimeStep);
    const nsecs_t kOneHourInNsec = 1e9 * 60 * 60;
    nsecs_t timeSinceIdx = (mHour - timeIdx * kTimeStep) * kOneHourInNsec + time;
    float timeFrac = timeSinceIdx / (float)(kOneHourInNsec * kTimeStep);

    // Determine overall sunlight levels (lux), linearly interpolated.
    float sunLux =
            kSunlight[timeIdx] * (1 - timeFrac) +
            kSunlight[nextTimeIdx] * timeFrac;
    ALOGV("Sun lux: %f", sunLux);

    // Shaded areas receive sunlight scaled by the daylight-shade ratio.
    float sunShadeLux = sunLux * (kDaylightShadeIllum / kDirectSunIllum);

    // Determine sun/shade illumination chromaticity: pick sunset or
    // daylight white points depending on the bracketing illumination
    // levels, then interpolate.
    float currentSunXY[2];
    float currentShadeXY[2];

    const float *prevSunXY, *nextSunXY;
    const float *prevShadeXY, *nextShadeXY;
    if (kSunlight[timeIdx] == kSunsetIllum ||
            kSunlight[timeIdx] == kTwilightIllum) {
        prevSunXY = kSunsetXY;
        prevShadeXY = kSunsetXY;
    } else {
        prevSunXY = kDirectSunlightXY;
        prevShadeXY = kDaylightXY;
    }
    if (kSunlight[nextTimeIdx] == kSunsetIllum ||
            kSunlight[nextTimeIdx] == kTwilightIllum) {
        nextSunXY = kSunsetXY;
        nextShadeXY = kSunsetXY;
    } else {
        nextSunXY = kDirectSunlightXY;
        nextShadeXY = kDaylightXY;
    }
    currentSunXY[0] = prevSunXY[0] * (1 - timeFrac) +
            nextSunXY[0] * timeFrac;
    currentSunXY[1] = prevSunXY[1] * (1 - timeFrac) +
            nextSunXY[1] * timeFrac;

    currentShadeXY[0] = prevShadeXY[0] * (1 - timeFrac) +
            nextShadeXY[0] * timeFrac;
    currentShadeXY[1] = prevShadeXY[1] * (1 - timeFrac) +
            nextShadeXY[1] * timeFrac;

    ALOGV("Sun XY: %f, %f, Shade XY: %f, %f",
            currentSunXY[0], currentSunXY[1],
            currentShadeXY[0], currentShadeXY[1]);

    // Converting for xyY to XYZ:
    //   X = Y / y * x
    //   Y = Y
    //   Z = Y / y * (1 - x - y);
    float sunXYZ[3] = {
        sunLux / currentSunXY[1] * currentSunXY[0],
        sunLux,
        sunLux / currentSunXY[1] *
        (1 - currentSunXY[0] - currentSunXY[1])
    };
    float sunShadeXYZ[3] = {
        sunShadeLux / currentShadeXY[1] * currentShadeXY[0],
        sunShadeLux,
        sunShadeLux / currentShadeXY[1] *
        (1 - currentShadeXY[0] - currentShadeXY[1])
    };
    ALOGV("Sun XYZ: %f, %f, %f",
            sunXYZ[0], sunXYZ[1], sunXYZ[2]);
    ALOGV("Sun shade XYZ: %f, %f, %f",
            sunShadeXYZ[0], sunShadeXYZ[1], sunShadeXYZ[2]);

    // Determine moonlight levels, same interpolation as the sun.
    float moonLux =
            kMoonlight[timeIdx] * (1 - timeFrac) +
            kMoonlight[nextTimeIdx] * timeFrac;
    float moonShadeLux = moonLux * (kDaylightShadeIllum / kDirectSunIllum);

    float moonXYZ[3] = {
        moonLux / kMoonlightXY[1] * kMoonlightXY[0],
        moonLux,
        moonLux / kMoonlightXY[1] *
        (1 - kMoonlightXY[0] - kMoonlightXY[1])
    };
    float moonShadeXYZ[3] = {
        moonShadeLux / kMoonlightXY[1] * kMoonlightXY[0],
        moonShadeLux,
        moonShadeLux / kMoonlightXY[1] *
        (1 - kMoonlightXY[0] - kMoonlightXY[1])
    };

    // Determine starlight level (constant clear-night floor).
    const float kClearNightXYZ[3] = {
        kClearNightIllum / kMoonlightXY[1] * kMoonlightXY[0],
        kClearNightIllum,
        kClearNightIllum / kMoonlightXY[1] *
        (1 - kMoonlightXY[0] - kMoonlightXY[1])
    };

    // Calculate direct and shaded light: direct light is the sum of all
    // sources; shade starts from starlight only, then adds sun/moon at
    // direct or shaded strength depending on whether each is "overhead".
    float directIllumXYZ[3] = {
        sunXYZ[0] + moonXYZ[0] + kClearNightXYZ[0],
        sunXYZ[1] + moonXYZ[1] + kClearNightXYZ[1],
        sunXYZ[2] + moonXYZ[2] + kClearNightXYZ[2],
    };

    float shadeIllumXYZ[3] = {
        kClearNightXYZ[0],
        kClearNightXYZ[1],
        kClearNightXYZ[2]
    };

    shadeIllumXYZ[0] += (mHour < kSunOverhead) ? sunXYZ[0] : sunShadeXYZ[0];
    shadeIllumXYZ[1] += (mHour < kSunOverhead) ? sunXYZ[1] : sunShadeXYZ[1];
    shadeIllumXYZ[2] += (mHour < kSunOverhead) ? sunXYZ[2] : sunShadeXYZ[2];

    // Moon up period covers 23->0 transition, shift for simplicity
    int adjHour = (mHour + 12) % 24;
    int adjMoonOverhead = (kMoonOverhead + 12 ) % 24;
    shadeIllumXYZ[0] += (adjHour < adjMoonOverhead) ?
            moonXYZ[0] : moonShadeXYZ[0];
    shadeIllumXYZ[1] += (adjHour < adjMoonOverhead) ?
            moonXYZ[1] : moonShadeXYZ[1];
    shadeIllumXYZ[2] += (adjHour < adjMoonOverhead) ?
            moonXYZ[2] : moonShadeXYZ[2];

    ALOGV("Direct XYZ: %f, %f, %f",
            directIllumXYZ[0],directIllumXYZ[1],directIllumXYZ[2]);
    ALOGV("Shade XYZ: %f, %f, %f",
            shadeIllumXYZ[0], shadeIllumXYZ[1], shadeIllumXYZ[2]);

    // For each material: convert its xyY reflectance/emission to XYZ,
    // apply the appropriate illumination (direct, shaded, or none for
    // self-lit), then project through the sensor's color filters and scale
    // by luminous exposure to get electron counts per channel.
    for (int i = 0; i < NUM_MATERIALS; i++) {
        // Converting for xyY to XYZ:
        //   X = Y / y * x
        //   Y = Y
        //   Z = Y / y * (1 - x - y);
        float matXYZ[3] = {
            kMaterials_xyY[i][2] / kMaterials_xyY[i][1] *
              kMaterials_xyY[i][0],
            kMaterials_xyY[i][2],
            kMaterials_xyY[i][2] / kMaterials_xyY[i][1] *
              (1 - kMaterials_xyY[i][0] - kMaterials_xyY[i][1])
        };

        if (kMaterialsFlags[i] == 0 || kMaterialsFlags[i] & kSky) {
            matXYZ[0] *= directIllumXYZ[0];
            matXYZ[1] *= directIllumXYZ[1];
            matXYZ[2] *= directIllumXYZ[2];
        } else if (kMaterialsFlags[i] & kShadowed) {
            matXYZ[0] *= shadeIllumXYZ[0];
            matXYZ[1] *= shadeIllumXYZ[1];
            matXYZ[2] *= shadeIllumXYZ[2];
        } // else if (kMaterialsFlags[i] * kSelfLit), do nothing

        ALOGV("Mat %d XYZ: %f, %f, %f", i, matXYZ[0], matXYZ[1], matXYZ[2]);
        float luxToElectrons = mSensorSensitivity * mExposureDuration /
                (kAperture * kAperture);
        mCurrentColors[i*NUM_CHANNELS + 0] =
                (mFilterR[0] * matXYZ[0] +
                 mFilterR[1] * matXYZ[1] +
                 mFilterR[2] * matXYZ[2])
                * luxToElectrons;
        mCurrentColors[i*NUM_CHANNELS + 1] =
                (mFilterGr[0] * matXYZ[0] +
                 mFilterGr[1] * matXYZ[1] +
                 mFilterGr[2] * matXYZ[2])
                * luxToElectrons;
        mCurrentColors[i*NUM_CHANNELS + 2] =
                (mFilterGb[0] * matXYZ[0] +
                 mFilterGb[1] * matXYZ[1] +
                 mFilterGb[2] * matXYZ[2])
                * luxToElectrons;
        mCurrentColors[i*NUM_CHANNELS + 3] =
                (mFilterB[0] * matXYZ[0] +
                 mFilterB[1] * matXYZ[1] +
                 mFilterB[2] * matXYZ[2])
                * luxToElectrons;

        ALOGV("Color %d RGGB: %d, %d, %d, %d", i,
                mCurrentColors[i*NUM_CHANNELS + 0],
                mCurrentColors[i*NUM_CHANNELS + 1],
                mCurrentColors[i*NUM_CHANNELS + 2],
                mCurrentColors[i*NUM_CHANNELS + 3]);
    }
    // Shake viewpoint; horizontal and vertical sinusoids at roughly
    // human handshake frequencies
    mHandshakeX =
            ( kFreq1Magnitude * std::sin(kHorizShakeFreq1 * timeSinceIdx) +
              kFreq2Magnitude * std::sin(kHorizShakeFreq2 * timeSinceIdx) ) *
            mMapDiv * kShakeFraction;

    mHandshakeY =
            ( kFreq1Magnitude * std::sin(kVertShakeFreq1 * timeSinceIdx) +
              kFreq2Magnitude * std::sin(kVertShakeFreq2 * timeSinceIdx) ) *
            mMapDiv * kShakeFraction;

    // Set starting pixel
    setReadoutPixel(0,0);
}
|
||||
|
||||
// Position the readout at sensor pixel (x, y): derive the scene tile
// coordinates (mSceneX/Y), the position within the tile (mSubX/Y) —
// including map centering offsets and the current handshake jitter — and
// cache a pointer to that tile's material colors for getPixelElectrons().
void Scene::setReadoutPixel(int x, int y) {
    mCurrentX = x;
    mCurrentY = y;
    mSubX = (x + mOffsetX + mHandshakeX) % mMapDiv;
    mSubY = (y + mOffsetY + mHandshakeY) % mMapDiv;
    mSceneX = (x + mOffsetX + mHandshakeX) / mMapDiv;
    mSceneY = (y + mOffsetY + mHandshakeY) / mMapDiv;
    mSceneIdx = mSceneY * kSceneWidth + mSceneX;
    mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
}
|
||||
|
||||
// Return the channel electron counts (indexable by ColorChannels) for the
// current readout pixel, then advance the readout one pixel in raster
// order, wrapping from the last row back to (0,0).
const uint32_t* Scene::getPixelElectrons() {
    const uint32_t *pixel = mCurrentSceneMaterial;
    mCurrentX++;
    mSubX++;
    if (mCurrentX >= mSensorWidth) {
        // End of row: wrap to the next row (or back to the top) and
        // recompute all tile state from scratch.
        mCurrentX = 0;
        mCurrentY++;
        if (mCurrentY >= mSensorHeight) mCurrentY = 0;
        setReadoutPixel(mCurrentX, mCurrentY);
    } else if (mSubX > mMapDiv) {
        // Crossed into the next scene tile on this row: step the tile
        // index/pointer incrementally instead of recomputing.
        mSceneIdx++;
        mSceneX++;
        mCurrentSceneMaterial = &(mCurrentColors[kScene[mSceneIdx]]);
        mSubX = 0;
    }
    return pixel;
}
|
||||
|
||||
// Handshake model constants.
// Frequencies measured in a nanosecond timebase
const float Scene::kHorizShakeFreq1 = 2 * M_PI * 2 / 1e9; // 2 Hz
const float Scene::kHorizShakeFreq2 = 2 * M_PI * 13 / 1e9; // 13 Hz
const float Scene::kVertShakeFreq1 = 2 * M_PI * 3 / 1e9; // 3 Hz
// NOTE(review): comment says 1 Hz but the coefficient is 11 Hz — confirm
// which was intended.
const float Scene::kVertShakeFreq2 = 2 * M_PI * 11 / 1e9; // 1 Hz
const float Scene::kFreq1Magnitude = 5;
const float Scene::kFreq2Magnitude = 1;
const float Scene::kShakeFraction = 0.03; // As a fraction of a scene tile

// RGB->YUV, Jpeg standard. Rows are Y, Cb, Cr; fourth column is the
// per-row offset.
const float Scene::kRgb2Yuv[12] = {
       0.299f,    0.587f,    0.114f,    0.f,
      -0.16874f, -0.33126f,  0.5f,   -128.f,
       0.5f,     -0.41869f, -0.08131f, -128.f,
};

// Aperture of imaging lens (f-number)
const float Scene::kAperture = 2.8;

// Sun illumination levels through the day, one entry per kTimeStep hours.
const float Scene::kSunlight[24/kTimeStep] =
{
    0, // 00:00
    0,
    0,
    kTwilightIllum, // 06:00
    kDirectSunIllum,
    kDirectSunIllum,
    kDirectSunIllum, // 12:00
    kDirectSunIllum,
    kDirectSunIllum,
    kSunsetIllum, // 18:00
    kTwilightIllum,
    0
};

// Moon illumination levels through the day, one entry per kTimeStep hours.
const float Scene::kMoonlight[24/kTimeStep] =
{
    kFullMoonIllum, // 00:00
    kFullMoonIllum,
    0,
    0, // 06:00
    0,
    0,
    0, // 12:00
    0,
    0,
    0, // 18:00
    0,
    kFullMoonIllum
};

// Hours at which the sun/moon are highest (used for direct-vs-shade choice).
const int Scene::kSunOverhead = 12;
const int Scene::kMoonOverhead = 0;

// Used for sun illumination levels (lux)
const float Scene::kDirectSunIllum = 100000;
const float Scene::kSunsetIllum = 400;
const float Scene::kTwilightIllum = 4;
// Used for moon illumination levels (lux)
const float Scene::kFullMoonIllum = 1;
// Other illumination levels (lux)
const float Scene::kDaylightShadeIllum = 20000;
const float Scene::kClearNightIllum = 2e-3;
const float Scene::kStarIllum = 2e-6;
const float Scene::kLivingRoomIllum = 50;

// Illuminant chromaticities (CIE 1931 xy).
const float Scene::kIncandescentXY[2]   = { 0.44757f, 0.40745f};
const float Scene::kDirectSunlightXY[2] = { 0.34842f, 0.35161f};
const float Scene::kDaylightXY[2]       = { 0.31271f, 0.32902f};
const float Scene::kNoonSkyXY[2]        = { 0.346f,   0.359f};
const float Scene::kMoonlightXY[2]      = { 0.34842f, 0.35161f};
const float Scene::kSunsetXY[2]         = { 0.527f,   0.413f};

// Material flag bits: self-lit (ignores illumination), shadowed (uses the
// shade illuminant), sky (uses direct illuminant).
const uint8_t Scene::kSelfLit  = 0x01;
const uint8_t Scene::kShadowed = 0x02;
const uint8_t Scene::kSky      = 0x04;

// For non-self-lit materials, the Y component is normalized with 1=full
// reflectance; for self-lit materials, it's the constant illuminance in lux.
const float Scene::kMaterials_xyY[Scene::NUM_MATERIALS][3] = {
    { 0.3688f, 0.4501f, .1329f }, // GRASS
    { 0.3688f, 0.4501f, .1329f }, // GRASS_SHADOW
    { 0.3986f, 0.5002f, .4440f }, // HILL
    { 0.3262f, 0.5040f, .2297f }, // WALL
    { 0.4336f, 0.3787f, .1029f }, // ROOF
    { 0.3316f, 0.2544f, .0639f }, // DOOR
    { 0.3425f, 0.3577f, .0887f }, // CHIMNEY
    { kIncandescentXY[0], kIncandescentXY[1], kLivingRoomIllum }, // WINDOW
    { kDirectSunlightXY[0], kDirectSunlightXY[1], kDirectSunIllum }, // SUN
    { kNoonSkyXY[0], kNoonSkyXY[1], kDaylightShadeIllum / kDirectSunIllum }, // SKY
    { kMoonlightXY[0], kMoonlightXY[1], kFullMoonIllum } // MOON
};

// Per-material flags, in the same order as the Materials enum.
const uint8_t Scene::kMaterialsFlags[Scene::NUM_MATERIALS] = {
    0,
    kShadowed,
    kShadowed,
    kShadowed,
    kShadowed,
    kShadowed,
    kShadowed,
    kSelfLit,
    kSelfLit,
    kSky,
    kSelfLit,
};
|
||||
|
||||
} // namespace android
|
||||
191
android/camera/fake-pipeline2/Scene.h
Normal file
191
android/camera/fake-pipeline2/Scene.h
Normal file
|
|
@ -0,0 +1,191 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* The Scene class implements a simple physical simulation of a scene, using the
|
||||
* CIE 1931 colorspace to represent light in physical units (lux).
|
||||
*
|
||||
* It's fairly approximate, but does provide a scene with realistic widely
|
||||
* variable illumination levels and colors over time.
|
||||
*
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA2_SCENE_H
|
||||
#define HW_EMULATOR_CAMERA2_SCENE_H
|
||||
|
||||
#include "utils/Timers.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
/*
 * Scene: procedural test scene generator for the emulated camera sensor.
 * Produces per-pixel illuminance values (in electrons) for a small tiled
 * landscape scene, varying with time of day and exposure settings.
 */
class Scene {
  public:
    Scene(int sensorWidthPx,
          int sensorHeightPx,
          float sensorSensitivity);
    ~Scene();

    // Set the filter coefficients for the red, green, and blue filters on the
    // sensor. Used as an optimization to pre-calculate various illuminance
    // values. Two different green filters can be provided, to account for
    // possible cross-talk on a Bayer sensor. Must be called before
    // calculateScene.
    void setColorFilterXYZ(
            float rX, float rY, float rZ,
            float grX, float grY, float grZ,
            float gbX, float gbY, float gbZ,
            float bX, float bY, float bZ);

    // Set time of day (24-hour clock). This controls the general light levels
    // in the scene. Must be called before calculateScene
    void setHour(int hour);
    // Get current hour
    int getHour();

    // Set the duration of exposure for determining luminous exposure.
    // Must be called before calculateScene
    void setExposureDuration(float seconds);

    // Calculate scene information for current hour and the time offset since
    // the hour. Must be called at least once before calling getLuminousExposure.
    // Resets pixel readout location to 0,0
    void calculateScene(nsecs_t time);

    // Set sensor pixel readout location.
    void setReadoutPixel(int x, int y);

    // Get sensor response in physical units (electrons) for light hitting the
    // current readout pixel, after passing through color filters. The readout
    // pixel will be auto-incremented. The returned array can be indexed with
    // ColorChannels.
    const uint32_t* getPixelElectrons();

    // Channel indices into the array returned by getPixelElectrons().
    enum ColorChannels {
        R = 0,
        Gr,
        Gb,
        B,
        Y,
        Cb,
        Cr,
        NUM_CHANNELS
    };

  private:
    // Sensor color filtering coefficients in XYZ
    float mFilterR[3];
    float mFilterGr[3];
    float mFilterGb[3];
    float mFilterB[3];

    // Scene panning offset (scene coordinates), and scene-tile-to-pixel
    // scale divisor, derived from sensor size
    int mOffsetX, mOffsetY;
    int mMapDiv;

    // Simulated camera-shake offset applied to the readout position
    int mHandshakeX, mHandshakeY;

    // Sensor dimensions and current readout state; mCurrentX/Y track the
    // pixel being read, mSubX/Y the sub-tile position, mSceneX/Y/Idx the
    // position in the scene material map
    int mSensorWidth;
    int mSensorHeight;
    int mCurrentX;
    int mCurrentY;
    int mSubX;
    int mSubY;
    int mSceneX;
    int mSceneY;
    int mSceneIdx;
    // Pointer into mCurrentColors for the material under the readout pixel
    uint32_t *mCurrentSceneMaterial;

    // Current lighting parameters (see setHour/setExposureDuration)
    int mHour;
    float mExposureDuration;
    float mSensorSensitivity;

    // Material identifiers for the scene map entries
    enum Materials {
        GRASS = 0,
        GRASS_SHADOW,
        HILL,
        WALL,
        ROOF,
        DOOR,
        CHIMNEY,
        WINDOW,
        SUN,
        SKY,
        MOON,
        NUM_MATERIALS
    };

    // Pre-calculated per-material channel responses for the current scene
    uint32_t mCurrentColors[NUM_MATERIALS*NUM_CHANNELS];

    /**
     * Constants for scene definition. These are various degrees of approximate.
     */

    // Fake handshake parameters. Two shake frequencies per axis, plus magnitude
    // as a fraction of a scene tile, and relative magnitudes for the frequencies
    static const float kHorizShakeFreq1;
    static const float kHorizShakeFreq2;
    static const float kVertShakeFreq1;
    static const float kVertShakeFreq2;
    static const float kFreq1Magnitude;
    static const float kFreq2Magnitude;

    static const float kShakeFraction;

    // RGB->YUV conversion
    static const float kRgb2Yuv[12];

    // Aperture of imaging lens
    static const float kAperture;

    // Sun, moon illuminance levels in 2-hour increments. These don't match any
    // real day anywhere.
    static const uint32_t kTimeStep = 2;
    static const float kSunlight[];
    static const float kMoonlight[];
    static const int kSunOverhead;
    static const int kMoonOverhead;

    // Illumination levels for various conditions, in lux
    static const float kDirectSunIllum;
    static const float kDaylightShadeIllum;
    static const float kSunsetIllum;
    static const float kTwilightIllum;
    static const float kFullMoonIllum;
    static const float kClearNightIllum;
    static const float kStarIllum;
    static const float kLivingRoomIllum;

    // Chromaticity of various illumination sources
    static const float kIncandescentXY[2];
    static const float kDirectSunlightXY[2];
    static const float kDaylightXY[2];
    static const float kNoonSkyXY[2];
    static const float kMoonlightXY[2];
    static const float kSunsetXY[2];

    // Material flag bits (self-illuminated, in shadow, sky region)
    static const uint8_t kSelfLit;
    static const uint8_t kShadowed;
    static const uint8_t kSky;

    // Per-material chromaticity/luminance table and flags
    static const float kMaterials_xyY[NUM_MATERIALS][3];
    static const uint8_t kMaterialsFlags[NUM_MATERIALS];

    // Scene material map dimensions and contents
    static const int kSceneWidth;
    static const int kSceneHeight;
    static const uint8_t kScene[];
};
|
||||
|
||||
}
|
||||
|
||||
#endif // HW_EMULATOR_CAMERA2_SCENE_H
|
||||
610
android/camera/fake-pipeline2/Sensor.cpp
Normal file
610
android/camera/fake-pipeline2/Sensor.cpp
Normal file
|
|
@ -0,0 +1,610 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
//#define LOG_NDEBUG 0
|
||||
//#define LOG_NNDEBUG 0
|
||||
#define LOG_TAG "EmulatedCamera2_Sensor"
|
||||
|
||||
#ifdef LOG_NNDEBUG
|
||||
#define ALOGVV(...) ALOGV(__VA_ARGS__)
|
||||
#else
|
||||
#define ALOGVV(...) ((void)0)
|
||||
#endif
|
||||
|
||||
#include <utils/Log.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <cstring>
#include "system/camera_metadata.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
// Static sensor characteristics (declared in Sensor.h).

// Sensor resolution (width, height) and active array (x, y, w, h), in pixels.
const unsigned int Sensor::kResolution[2] = {640, 480};
const unsigned int Sensor::kActiveArray[4] = {0, 0, 640, 480};

//const nsecs_t Sensor::kExposureTimeRange[2] =
//    {1000L, 30000000000L} ; // 1 us - 30 sec
//const nsecs_t Sensor::kFrameDurationRange[2] =
//    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 300000000L} ; // 1 us - 0.3 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 300000000L}; // ~1/30 s - 0.3 sec

// Minimum vertical blanking interval appended per row, in ns.
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
        ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

// Derived: electrons collected per lux-second of illumination.
const float Sensor::kElectronsPerLuxSecond =
        Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
        * Sensor::kVoltsPerLuxSecond;

// Derived: digital counts per electron at unity gain.
const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
        Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
        Sensor::kReadNoiseStddevBeforeGain *
        Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
        Sensor::kReadNoiseStddevAfterGain *
        Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
        Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];

// ISO-equivalent sensitivity range and default.
const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;
|
||||
|
||||
/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two, and
    // then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    // Reinterpret the float's bits as an integer via memcpy rather than
    // pointer casting: `*(int32_t*)(&r)` violates strict aliasing and is
    // undefined behavior; memcpy compiles to the same register move.
    int32_t r_i;
    memcpy(&r_i, &r, sizeof(r_i));
    r_i = (r_i >> 1) + modifier;

    float result;
    memcpy(&result, &r_i, sizeof(result));
    return result;
}
|
||||
|
||||
|
||||
|
||||
// Construct the simulated sensor in its idle state. Defaults: maximum-length
// exposure within the shortest frame, shortest supported frame duration, and
// base sensitivity. No capture thread is started here; see startUp().
// (Initializer order must match member declaration order in Sensor.h.)
Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{

}
|
||||
|
||||
// Stops the capture thread (blocking) before the object is destroyed.
Sensor::~Sensor() {
    shutDown();
}
|
||||
|
||||
// Start the sensor simulation: clears stale readout state and launches the
// capture thread (threadLoop) at urgent-display priority. Returns the
// Thread::run status; OK on success.
status_t Sensor::startUp() {
    ALOGV("%s: E", __FUNCTION__);

    mCapturedBuffers = NULL;

    const int result = run("EmulatedFakeCamera2::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);
    if (result != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", result);
    }
    return result;
}
|
||||
|
||||
// Stop the sensor simulation, blocking until the capture thread has exited.
// Returns the status from requestExitAndWait(); OK on success.
status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    const int result = requestExitAndWait();
    if (result != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", result);
    }
    return result;
}
|
||||
|
||||
// Expose the internal test scene so callers can adjust lighting/filters.
// The reference remains owned by the Sensor; not synchronized.
Scene &Sensor::getScene() {
    return mScene;
}
|
||||
|
||||
// Set the exposure duration, in nanoseconds, used for the next captured
// frame. Thread-safe: guarded by the control-parameter mutex.
void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock autolock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}
|
||||
|
||||
// Set the total frame duration, in nanoseconds, for the next captured
// frame. Thread-safe: guarded by the control-parameter mutex.
void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock autolock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}
|
||||
|
||||
// Set the sensor sensitivity (ISO-style gain) for the next captured frame.
// Thread-safe: guarded by the control-parameter mutex.
void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock autolock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}
|
||||
|
||||
// Hand the sensor the buffer set to fill on the next capture cycle.
// Ownership semantics follow the caller's contract in Sensor.h; the pointer
// is consumed by threadLoop and not reused. Guarded by the control mutex.
void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock autolock(mControlMutex);
    mNextBuffers = buffers;
}
|
||||
|
||||
// Record the frame number associated with the next capture, so listener
// callbacks can identify the frame. Guarded by the control mutex.
void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock autolock(mControlMutex);
    mFrameNumber = frameNumber;
}
|
||||
|
||||
// Block until the sensor signals vertical sync (start of readout of the
// latest frame) or until `reltime` nanoseconds elapse. Returns true if
// VSync was signaled, false on timeout or wait error.
bool Sensor::waitForVSync(nsecs_t reltime) {
    Mutex::Autolock autolock(mControlMutex);

    mGotVSync = false;
    const int waitStatus = mVSync.waitRelative(mControlMutex, reltime);
    if (waitStatus != OK && waitStatus != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, waitStatus);
        return false;
    }
    return mGotVSync;
}
|
||||
|
||||
// Wait up to `reltime` ns for a newly read-out frame. On success, stores the
// frame's capture start time in *captureTime, consumes the pending frame
// (clearing mCapturedBuffers so threadLoop may publish the next one), and
// returns true. Returns false on timeout or wait error.
// If a frame is already pending, returns immediately and wakes threadLoop
// via mReadoutComplete in case it is blocked waiting for this consumer.
// Fix: removed the unused local `uint8_t *ret;` (dead declaration).
bool Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mCapturedBuffers == NULL) {
        int res;
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    } else {
        // Frame already queued; unblock the capture thread if it is waiting
        // for the previous frame to be consumed.
        mReadoutComplete.signal();
    }

    *captureTime = mCaptureTime;
    mCapturedBuffers = NULL;
    return true;
}
|
||||
|
||||
// Out-of-line virtual destructor anchor for the listener interface.
Sensor::SensorListener::~SensorListener() {
}
|
||||
|
||||
// Register the listener notified of sensor events (e.g. EXPOSURE_START).
// Pass NULL to clear. The listener is not owned by the Sensor.
// Guarded by the control mutex.
void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock autolock(mControlMutex);
    mListener = listener;
}
|
||||
|
||||
// Thread hook invoked once before threadLoop begins: records the simulation
// zero-time point and clears pending-capture state. Always returns OK.
status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;
    return OK;
}
|
||||
|
||||
bool Sensor::threadLoop() {
|
||||
/**
|
||||
* Sensor capture operation main loop.
|
||||
*
|
||||
* Stages are out-of-order relative to a single frame's processing, but
|
||||
* in-order in time.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Stage 1: Read in latest control parameters
|
||||
*/
|
||||
uint64_t exposureDuration;
|
||||
uint64_t frameDuration;
|
||||
uint32_t gain;
|
||||
Buffers *nextBuffers;
|
||||
uint32_t frameNumber;
|
||||
SensorListener *listener = NULL;
|
||||
{
|
||||
Mutex::Autolock lock(mControlMutex);
|
||||
exposureDuration = mExposureTime;
|
||||
frameDuration = mFrameDuration;
|
||||
gain = mGainFactor;
|
||||
nextBuffers = mNextBuffers;
|
||||
frameNumber = mFrameNumber;
|
||||
listener = mListener;
|
||||
// Don't reuse a buffer set
|
||||
mNextBuffers = NULL;
|
||||
|
||||
// Signal VSync for start of readout
|
||||
ALOGVV("Sensor VSync");
|
||||
mGotVSync = true;
|
||||
mVSync.signal();
|
||||
}
|
||||
|
||||
/**
|
||||
* Stage 3: Read out latest captured image
|
||||
*/
|
||||
|
||||
Buffers *capturedBuffers = NULL;
|
||||
nsecs_t captureTime = 0;
|
||||
|
||||
nsecs_t startRealTime = systemTime();
|
||||
// Stagefright cares about system time for timestamps, so base simulated
|
||||
// time on that.
|
||||
nsecs_t simulatedTime = startRealTime;
|
||||
nsecs_t frameEndRealTime = startRealTime + frameDuration;
|
||||
nsecs_t frameReadoutEndRealTime = startRealTime +
|
||||
kRowReadoutTime * kResolution[1];
|
||||
|
||||
if (mNextCapturedBuffers != NULL) {
|
||||
ALOGVV("Sensor starting readout");
|
||||
// Pretend we're doing readout now; will signal once enough time has elapsed
|
||||
capturedBuffers = mNextCapturedBuffers;
|
||||
captureTime = mNextCaptureTime;
|
||||
}
|
||||
simulatedTime += kRowReadoutTime + kMinVerticalBlank;
|
||||
|
||||
// TODO: Move this signal to another thread to simulate readout
|
||||
// time properly
|
||||
if (capturedBuffers != NULL) {
|
||||
ALOGVV("Sensor readout complete");
|
||||
Mutex::Autolock lock(mReadoutMutex);
|
||||
if (mCapturedBuffers != NULL) {
|
||||
ALOGV("Waiting for readout thread to catch up!");
|
||||
mReadoutComplete.wait(mReadoutMutex);
|
||||
}
|
||||
|
||||
mCapturedBuffers = capturedBuffers;
|
||||
mCaptureTime = captureTime;
|
||||
mReadoutAvailable.signal();
|
||||
capturedBuffers = NULL;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stage 2: Capture new image
|
||||
*/
|
||||
mNextCaptureTime = simulatedTime;
|
||||
mNextCapturedBuffers = nextBuffers;
|
||||
|
||||
if (mNextCapturedBuffers != NULL) {
|
||||
if (listener != NULL) {
|
||||
listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
|
||||
mNextCaptureTime);
|
||||
}
|
||||
ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
|
||||
(float)exposureDuration/1e6, gain);
|
||||
mScene.setExposureDuration((float)exposureDuration/1e9);
|
||||
mScene.calculateScene(mNextCaptureTime);
|
||||
|
||||
// Might be adding more buffers, so size isn't constant
|
||||
for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
|
||||
const StreamBuffer &b = (*mNextCapturedBuffers)[i];
|
||||
ALOGVV("Sensor capturing buffer %d: stream %d,"
|
||||
" %d x %d, format %x, stride %d, buf %p, img %p",
|
||||
i, b.streamId, b.width, b.height, b.format, b.stride,
|
||||
b.buffer, b.img);
|
||||
switch(b.format) {
|
||||
case HAL_PIXEL_FORMAT_RAW16:
|
||||
captureRaw(b.img, gain, b.stride);
|
||||
break;
|
||||
case HAL_PIXEL_FORMAT_RGB_888:
|
||||
captureRGB(b.img, gain, b.stride);
|
||||
break;
|
||||
case HAL_PIXEL_FORMAT_RGBA_8888:
|
||||
captureRGBA(b.img, gain, b.stride);
|
||||
break;
|
||||
case HAL_PIXEL_FORMAT_BLOB:
|
||||
if (b.dataSpace != HAL_DATASPACE_DEPTH) {
|
||||
// Add auxillary buffer of the right size
|
||||
// Assumes only one BLOB (JPEG) buffer in
|
||||
// mNextCapturedBuffers
|
||||
StreamBuffer bAux;
|
||||
bAux.streamId = 0;
|
||||
bAux.width = b.width;
|
||||
bAux.height = b.height;
|
||||
bAux.format = HAL_PIXEL_FORMAT_RGB_888;
|
||||
bAux.stride = b.width;
|
||||
bAux.buffer = NULL;
|
||||
// TODO: Reuse these
|
||||
bAux.img = new uint8_t[b.width * b.height * 3];
|
||||
mNextCapturedBuffers->push_back(bAux);
|
||||
} else {
|
||||
captureDepthCloud(b.img);
|
||||
}
|
||||
break;
|
||||
case HAL_PIXEL_FORMAT_YCrCb_420_SP:
|
||||
captureNV21(b.img, gain, b.stride);
|
||||
break;
|
||||
case HAL_PIXEL_FORMAT_YV12:
|
||||
// TODO:
|
||||
ALOGE("%s: Format %x is TODO", __FUNCTION__, b.format);
|
||||
break;
|
||||
case HAL_PIXEL_FORMAT_Y16:
|
||||
captureDepth(b.img, gain, b.stride);
|
||||
break;
|
||||
default:
|
||||
ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
|
||||
b.format);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ALOGVV("Sensor vertical blanking interval");
|
||||
nsecs_t workDoneRealTime = systemTime();
|
||||
const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
|
||||
if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
|
||||
timespec t;
|
||||
t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
|
||||
t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;
|
||||
|
||||
int ret;
|
||||
do {
|
||||
ret = nanosleep(&t, &t);
|
||||
} while (ret != 0);
|
||||
}
|
||||
nsecs_t endRealTime = systemTime();
|
||||
ALOGVV("Frame cycle took %d ms, target %d ms",
|
||||
(int)((endRealTime - startRealTime)/1000000),
|
||||
(int)(frameDuration / 1000000));
|
||||
return true;
|
||||
};
|
||||
|
||||
// Render a RAW16 Bayer (RGGB) frame into `img`, simulating photon capture,
// A/D conversion, black level, and read/photon noise.
// `gain` is sensitivity in ISO-like units (100 = unity); `stride` is the
// row pitch in 16-bit pixels — TODO confirm units against callers.
// NOTE: relies on mScene.getPixelElectrons() auto-incrementing the readout
// position, so the per-pixel loop order must not change.
void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain =  totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0,0);
    for (unsigned int y = 0; y < kResolution[1]; y++ ) {
        // Alternate between the (R, Gr) and (Gb, B) rows of the mosaic.
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}
|
||||
|
||||
// Render an RGBA8888 frame into `img` with alpha fixed at 255.
// Output is subsampled from the full sensor grid by `inc` so the result
// fits a `stride`-pixel-wide buffer (assumes stride divides kResolution[0]
// — TODO confirm against callers). Fixed-point 6-bit-fraction scaling maps
// electrons to 8bpp. Per-pixel loop order must be preserved: the scene's
// readout position auto-increments on each getPixelElectrons() call.
void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y+=inc, outY++ ) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x+=inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R]  * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B]  * scale64x;

            // Clamp to 8-bit range and drop the 6-bit fraction.
            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            // Advance past the skipped sensor pixels.
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}
|
||||
|
||||
// Render a packed RGB888 frame into `img` (3 bytes/pixel, no alpha).
// Identical pipeline to captureRGBA: subsample by `inc`, scale electrons
// to 8bpp in 6-bit-fraction fixed point, clamp to 255. Per-pixel loop
// order must be preserved (scene readout auto-increments).
void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
        mScene.setReadoutPixel(0, y);
        uint8_t *px = img + outY * stride * 3;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R]  * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B]  * scale64x;

            // Clamp to 8-bit range and drop the 6-bit fraction.
            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            // Advance past the skipped sensor pixels.
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGB sensor image captured");
}
|
||||
|
||||
// Render an NV21 (YCrCb 420 semi-planar) frame into `img`: a full-resolution
// Y plane of `outH` rows followed by an interleaved V/U plane subsampled 2x2.
// RGB->YUV uses JFIF coefficients in 6-bit-fraction fixed point; the Cb/Cr
// offset (128) is pre-scaled by 64*64 (= 524288) since it is applied after
// the coefficient multiplies. Per-pixel loop order must be preserved
// (scene readout auto-increments).
void Sensor::captureNV21(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[]  = {19, 37, 7};
    const int rgbToCb[] = {-10,-21, 32, 524288};
    const int rgbToCr[] = {32,-26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y+=inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        // VU plane starts after the Y plane; one VU row per two Y rows.
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0,y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R]  * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B]  * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                    rgbToY[1] * gCount +
                    rgbToY[2] * bCount) / scaleOutSq;
            // Chroma written only for even rows/columns (2x2 subsampling);
            // NV21 interleaves V (Cr) before U (Cb).
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                        rgbToCr[1] * gCount +
                        rgbToCr[2] * bCount +
                        rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                        rgbToCb[1] * gCount +
                        rgbToCb[2] * bCount +
                        rgbToCb[3]) / scaleOutSq;
            }
            // Advance past the skipped sensor pixels.
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
    ALOGVV("NV21 sensor image captured");
}
|
||||
|
||||
// Render a Y16 depth frame into `img`, 13 bits of millimeter-scaled range
// per pixel. There is no real depth model: the scene's green (Gr) channel
// stands in for depth. Values at or above the 13-bit ceiling map to 0
// (out-of-range per the Y16 depth convention — TODO confirm). Per-pixel
// loop order must be preserved (scene readout auto-increments).
void Sensor::captureDepth(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate scaling factor to 13bpp millimeters
    int scale64x = 64 * totalGain * 8191 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
        mScene.setReadoutPixel(0, y);
        uint16_t *px = ((uint16_t*)img) + outY * stride;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t depthCount;
            // TODO: Make up real depth scene instead of using green channel
            // as depth
            const uint32_t *pixel = mScene.getPixelElectrons();
            depthCount = pixel[Scene::Gr] * scale64x;

            *px++ = depthCount < 8191*64 ? depthCount / 64 : 0;
            // Advance past the skipped sensor pixels.
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Depth sensor image captured");
}
|
||||
|
||||
// Fill `img` (interpreted as an android_depth_points struct) with a fixed
// 4x4 synthetic point cloud: a grid in a plane ~3m from the camera, with
// uniform jitter (scaled to ~0.1 stddev) on each coordinate and a constant
// confidence of 0.8 per point. `img` must be large enough for 16 points —
// caller's responsibility, not checked here.
void Sensor::captureDepthCloud(uint8_t *img) {

    android_depth_points *cloud = reinterpret_cast<android_depth_points*>(img);

    cloud->num_points = 16;

    // TODO: Create point cloud values that match RGB scene
    const int FLOATS_PER_POINT = 4;
    const float JITTER_STDDEV = 0.1f;
    for (size_t y = 0, i = 0; y < 4; y++) {
        for (size_t x = 0; x < 4; x++, i++) {
            // Uniform samples in [-1.25, 1.25), scaled down to jitter range.
            float randSampleX = std::rand() * (2.5f / (1.0f + RAND_MAX)) - 1.25f;
            randSampleX *= JITTER_STDDEV;

            float randSampleY = std::rand() * (2.5f / (1.0f + RAND_MAX)) - 1.25f;
            randSampleY *= JITTER_STDDEV;

            float randSampleZ = std::rand() * (2.5f / (1.0f + RAND_MAX)) - 1.25f;
            randSampleZ *= JITTER_STDDEV;

            // Center the 4x4 grid about the optical axis (offsets -1.5..1.5).
            cloud->xyzc_points[i * FLOATS_PER_POINT + 0] = x - 1.5f + randSampleX;
            cloud->xyzc_points[i * FLOATS_PER_POINT + 1] = y - 1.5f + randSampleY;
            cloud->xyzc_points[i * FLOATS_PER_POINT + 2] = 3.f + randSampleZ;
            cloud->xyzc_points[i * FLOATS_PER_POINT + 3] = 0.8f;
        }
    }

    ALOGVV("Depth point cloud captured");

}
|
||||
|
||||
} // namespace android
|
||||
245
android/camera/fake-pipeline2/Sensor.h
Normal file
245
android/camera/fake-pipeline2/Sensor.h
Normal file
|
|
@ -0,0 +1,245 @@
|
|||
/*
|
||||
* Copyright (C) 2012 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This class is a simple simulation of a typical CMOS cellphone imager chip,
|
||||
* which outputs 12-bit Bayer-mosaic raw images.
|
||||
*
|
||||
* Unlike most real image sensors, this one's native color space is linear sRGB.
|
||||
*
|
||||
* The sensor is abstracted as operating as a pipeline 3 stages deep;
|
||||
* conceptually, each frame to be captured goes through these three stages. The
|
||||
* processing step for the sensor is marked off by vertical sync signals, which
|
||||
* indicate the start of readout of the oldest frame. The interval between
|
||||
* processing steps depends on the frame duration of the frame currently being
|
||||
* captured. The stages are 1) configure, 2) capture, and 3) readout. During
|
||||
* configuration, the sensor's registers for settings such as exposure time,
|
||||
* frame duration, and gain are set for the next frame to be captured. In stage
|
||||
* 2, the image data for the frame is actually captured by the sensor. Finally,
|
||||
* in stage 3, the just-captured data is read out and sent to the rest of the
|
||||
* system.
|
||||
*
|
||||
* The sensor is assumed to be rolling-shutter, so low-numbered rows of the
|
||||
* sensor are exposed earlier in time than larger-numbered rows, with the time
|
||||
* offset between each row being equal to the row readout time.
|
||||
*
|
||||
* The characteristics of this sensor don't correspond to any actual sensor,
|
||||
* but are not far off typical sensors.
|
||||
*
|
||||
* Example timing diagram, with three frames:
|
||||
* Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
|
||||
* Frame 2: Frame duration 75 ms, exposure time 65 ms.
|
||||
* Legend:
|
||||
* C = update sensor registers for frame
|
||||
* v = row in reset (vertical blanking interval)
|
||||
* E = row capturing image data
|
||||
* R = row being read out
|
||||
* | = vertical sync signal
|
||||
*time(ms)| 0 55 105 155 230 270
|
||||
* Frame 0| :configure : capture : readout : : :
|
||||
* Row # | ..|CCCC______|_________|_________| : :
|
||||
* 0 | :\ \vvvvvEEEER \ : :
|
||||
* 500 | : \ \vvvvvEEEER \ : :
|
||||
* 1000 | : \ \vvvvvEEEER \ : :
|
||||
* 1500 | : \ \vvvvvEEEER \ : :
|
||||
* 2000 | : \__________\vvvvvEEEER_________\ : :
|
||||
* Frame 1| : configure capture readout : :
|
||||
* Row # | : |CCCC_____|_________|______________| :
|
||||
* 0 | : :\ \vvvvvEEEER \ :
|
||||
* 500 | : : \ \vvvvvEEEER \ :
|
||||
* 1000 | : : \ \vvvvvEEEER \ :
|
||||
* 1500 | : : \ \vvvvvEEEER \ :
|
||||
* 2000 | : : \_________\vvvvvEEEER______________\ :
|
||||
* Frame 2| : : configure capture readout:
|
||||
* Row # | : : |CCCC_____|______________|_______|...
|
||||
* 0 | : : :\ \vEEEEEEEEEEEEER \
|
||||
* 500 | : : : \ \vEEEEEEEEEEEEER \
|
||||
* 1000 | : : : \ \vEEEEEEEEEEEEER \
|
||||
* 1500 | : : : \ \vEEEEEEEEEEEEER \
|
||||
* 2000 | : : : \_________\vEEEEEEEEEEEEER_______\
|
||||
*/
|
||||
|
||||
#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
|
||||
#define HW_EMULATOR_CAMERA2_SENSOR_H
|
||||
|
||||
#include "utils/Thread.h"
|
||||
#include "utils/Mutex.h"
|
||||
#include "utils/Timers.h"
|
||||
|
||||
#include "Scene.h"
|
||||
#include "Base.h"
|
||||
|
||||
namespace android {
|
||||
|
||||
class EmulatedFakeCamera2;
|
||||
|
||||
class Sensor: private Thread, public virtual RefBase {
|
||||
public:
|
||||
|
||||
Sensor();
|
||||
~Sensor();
|
||||
|
||||
/*
|
||||
* Power control
|
||||
*/
|
||||
|
||||
status_t startUp();
|
||||
status_t shutDown();
|
||||
|
||||
/*
|
||||
* Access to scene
|
||||
*/
|
||||
Scene &getScene();
|
||||
|
||||
/*
|
||||
* Controls that can be updated every frame
|
||||
*/
|
||||
|
||||
void setExposureTime(uint64_t ns);
|
||||
void setFrameDuration(uint64_t ns);
|
||||
void setSensitivity(uint32_t gain);
|
||||
// Buffer must be at least stride*height*2 bytes in size
|
||||
void setDestinationBuffers(Buffers *buffers);
|
||||
// To simplify tracking sensor's current frame
|
||||
void setFrameNumber(uint32_t frameNumber);
|
||||
|
||||
/*
|
||||
* Controls that cause reconfiguration delay
|
||||
*/
|
||||
|
||||
void setBinning(int horizontalFactor, int verticalFactor);
|
||||
|
||||
/*
|
||||
* Synchronizing with sensor operation (vertical sync)
|
||||
*/
|
||||
|
||||
// Wait until the sensor outputs its next vertical sync signal, meaning it
|
||||
// is starting readout of its latest frame of data. Returns true if vertical
|
||||
// sync is signaled, false if the wait timed out.
|
||||
bool waitForVSync(nsecs_t reltime);
|
||||
|
||||
// Wait until a new frame has been read out, and then return the time
|
||||
// capture started. May return immediately if a new frame has been pushed
|
||||
// since the last wait for a new frame. Returns true if new frame is
|
||||
// returned, false if timed out.
|
||||
bool waitForNewFrame(nsecs_t reltime,
|
||||
nsecs_t *captureTime);
|
||||
|
||||
/*
|
||||
* Interrupt event servicing from the sensor. Only triggers for sensor
|
||||
* cycles that have valid buffers to write to.
|
||||
*/
|
||||
struct SensorListener {
|
||||
enum Event {
|
||||
EXPOSURE_START, // Start of exposure
|
||||
};
|
||||
|
||||
virtual void onSensorEvent(uint32_t frameNumber, Event e,
|
||||
nsecs_t timestamp) = 0;
|
||||
virtual ~SensorListener();
|
||||
};
|
||||
|
||||
void setSensorListener(SensorListener *listener);
|
||||
|
||||
/**
|
||||
* Static sensor characteristics
|
||||
*/
|
||||
static const unsigned int kResolution[2];
|
||||
static const unsigned int kActiveArray[4];
|
||||
|
||||
static const nsecs_t kExposureTimeRange[2];
|
||||
static const nsecs_t kFrameDurationRange[2];
|
||||
static const nsecs_t kMinVerticalBlank;
|
||||
|
||||
static const uint8_t kColorFilterArrangement;
|
||||
|
||||
// Output image data characteristics
|
||||
static const uint32_t kMaxRawValue;
|
||||
static const uint32_t kBlackLevel;
|
||||
// Sensor sensitivity, approximate
|
||||
|
||||
static const float kSaturationVoltage;
|
||||
static const uint32_t kSaturationElectrons;
|
||||
static const float kVoltsPerLuxSecond;
|
||||
static const float kElectronsPerLuxSecond;
|
||||
|
||||
static const float kBaseGainFactor;
|
||||
|
||||
static const float kReadNoiseStddevBeforeGain; // In electrons
|
||||
static const float kReadNoiseStddevAfterGain; // In raw digital units
|
||||
static const float kReadNoiseVarBeforeGain;
|
||||
static const float kReadNoiseVarAfterGain;
|
||||
|
||||
// While each row has to read out, reset, and then expose, the (reset +
|
||||
// expose) sequence can be overlapped by other row readouts, so the final
|
||||
// minimum frame duration is purely a function of row readout time, at least
|
||||
// if there's a reasonable number of rows.
|
||||
static const nsecs_t kRowReadoutTime;
|
||||
|
||||
static const int32_t kSensitivityRange[2];
|
||||
static const uint32_t kDefaultSensitivity;
|
||||
|
||||
private:
|
||||
Mutex mControlMutex; // Lock before accessing control parameters
|
||||
// Start of control parameters
|
||||
Condition mVSync;
|
||||
bool mGotVSync;
|
||||
uint64_t mExposureTime;
|
||||
uint64_t mFrameDuration;
|
||||
uint32_t mGainFactor;
|
||||
Buffers *mNextBuffers;
|
||||
uint32_t mFrameNumber;
|
||||
|
||||
// End of control parameters
|
||||
|
||||
Mutex mReadoutMutex; // Lock before accessing readout variables
|
||||
// Start of readout variables
|
||||
Condition mReadoutAvailable;
|
||||
Condition mReadoutComplete;
|
||||
Buffers *mCapturedBuffers;
|
||||
nsecs_t mCaptureTime;
|
||||
SensorListener *mListener;
|
||||
// End of readout variables
|
||||
|
||||
// Time of sensor startup, used for simulation zero-time point
|
||||
nsecs_t mStartupTime;
|
||||
|
||||
/**
|
||||
* Inherited Thread virtual overrides, and members only used by the
|
||||
* processing thread
|
||||
*/
|
||||
private:
|
||||
virtual status_t readyToRun();
|
||||
|
||||
virtual bool threadLoop();
|
||||
|
||||
nsecs_t mNextCaptureTime;
|
||||
Buffers *mNextCapturedBuffers;
|
||||
|
||||
Scene mScene;
|
||||
|
||||
void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
|
||||
void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
|
||||
void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
|
||||
void captureNV21(uint8_t *img, uint32_t gain, uint32_t stride);
|
||||
void captureDepth(uint8_t *img, uint32_t gain, uint32_t stride);
|
||||
void captureDepthCloud(uint8_t *img);
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // HW_EMULATOR_CAMERA2_SENSOR_H
|
||||
84
android/camera/media_codecs.xml
Normal file
84
android/camera/media_codecs.xml
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
<?xml version="1.0" encoding="utf-8" ?>
|
||||
<!-- Copyright (C) 2012 The Android Open Source Project
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<!--
|
||||
<!DOCTYPE MediaCodecs [
|
||||
<!ELEMENT Include EMPTY>
|
||||
<!ATTLIST Include href CDATA #REQUIRED>
|
||||
<!ELEMENT MediaCodecs (Decoders|Encoders|Include)*>
|
||||
<!ELEMENT Decoders (MediaCodec|Include)*>
|
||||
<!ELEMENT Encoders (MediaCodec|Include)*>
|
||||
<!ELEMENT MediaCodec (Type|Quirk|Include)*>
|
||||
<!ATTLIST MediaCodec name CDATA #REQUIRED>
|
||||
<!ATTLIST MediaCodec type CDATA>
|
||||
<!ELEMENT Type EMPTY>
|
||||
<!ATTLIST Type name CDATA #REQUIRED>
|
||||
<!ELEMENT Quirk EMPTY>
|
||||
<!ATTLIST Quirk name CDATA #REQUIRED>
|
||||
]>
|
||||
|
||||
There's a simple and a complex syntax to declare the availability of a
|
||||
media codec:
|
||||
|
||||
A codec that properly follows the OpenMax spec and therefore doesn't have any
|
||||
quirks and that only supports a single content type can be declared like so:
|
||||
|
||||
<MediaCodec name="OMX.foo.bar" type="something/interesting" />
|
||||
|
||||
If a codec has quirks OR supports multiple content types, the following syntax
|
||||
can be used:
|
||||
|
||||
<MediaCodec name="OMX.foo.bar" >
|
||||
<Type name="something/interesting" />
|
||||
<Type name="something/else" />
|
||||
...
|
||||
<Quirk name="requires-allocate-on-input-ports" />
|
||||
<Quirk name="requires-allocate-on-output-ports" />
|
||||
<Quirk name="output-buffers-are-unreadable" />
|
||||
</MediaCodec>
|
||||
|
||||
Only the three quirks included above are recognized at this point:
|
||||
|
||||
"requires-allocate-on-input-ports"
|
||||
must be advertised if the component does not properly support specification
|
||||
of input buffers using the OMX_UseBuffer(...) API but instead requires
|
||||
OMX_AllocateBuffer to be used.
|
||||
|
||||
"requires-allocate-on-output-ports"
|
||||
must be advertised if the component does not properly support specification
|
||||
of output buffers using the OMX_UseBuffer(...) API but instead requires
|
||||
OMX_AllocateBuffer to be used.
|
||||
|
||||
"output-buffers-are-unreadable"
|
||||
must be advertised if the emitted output buffers of a decoder component
|
||||
are not readable, i.e. use a custom format even though abusing one of
|
||||
the official OMX colorspace constants.
|
||||
Clients of such decoders will not be able to access the decoded data,
|
||||
naturally making the component much less useful. The only use for
|
||||
a component with this quirk is to render the output to the screen.
|
||||
Audio decoders MUST NOT advertise this quirk.
|
||||
Video decoders that advertise this quirk must be accompanied by a
|
||||
corresponding color space converter for thumbnail extraction,
|
||||
matching surfaceflinger support that can render the custom format to
|
||||
a texture and possibly other code, so just DON'T USE THIS QUIRK.
|
||||
|
||||
-->
|
||||
|
||||
<MediaCodecs>
|
||||
<Include href="media_codecs_google_audio.xml" />
|
||||
<Include href="media_codecs_google_telephony.xml" />
|
||||
<Include href="media_codecs_google_video.xml" />
|
||||
</MediaCodecs>
|
||||
368
android/camera/media_profiles.xml
Normal file
368
android/camera/media_profiles.xml
Normal file
|
|
@ -0,0 +1,368 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Copyright (C) 2010 The Android Open Source Project
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
<!DOCTYPE MediaSettings [
|
||||
<!ELEMENT MediaSettings (CamcorderProfiles,
|
||||
EncoderOutputFileFormat+,
|
||||
VideoEncoderCap+,
|
||||
AudioEncoderCap+,
|
||||
VideoDecoderCap,
|
||||
AudioDecoderCap)>
|
||||
<!ELEMENT CamcorderProfiles (EncoderProfile+, ImageEncoding+, ImageDecoding, Camera)>
|
||||
<!ELEMENT EncoderProfile (Video, Audio)>
|
||||
<!ATTLIST EncoderProfile quality (high|low) #REQUIRED>
|
||||
<!ATTLIST EncoderProfile fileFormat (mp4|3gp) #REQUIRED>
|
||||
<!ATTLIST EncoderProfile duration (30|60) #REQUIRED>
|
||||
<!ATTLIST EncoderProfile cameraId (0|1) #REQUIRED>
|
||||
<!ELEMENT Video EMPTY>
|
||||
<!ATTLIST Video codec (h264|h263|m4v) #REQUIRED>
|
||||
<!ATTLIST Video bitRate CDATA #REQUIRED>
|
||||
<!ATTLIST Video width CDATA #REQUIRED>
|
||||
<!ATTLIST Video height CDATA #REQUIRED>
|
||||
<!ATTLIST Video frameRate CDATA #REQUIRED>
|
||||
<!ELEMENT Audio EMPTY>
|
||||
<!ATTLIST Audio codec (amrnb|amrwb|aac) #REQUIRED>
|
||||
<!ATTLIST Audio bitRate CDATA #REQUIRED>
|
||||
<!ATTLIST Audio sampleRate CDATA #REQUIRED>
|
||||
<!ATTLIST Audio channels (1|2) #REQUIRED>
|
||||
<!ELEMENT ImageEncoding EMPTY>
|
||||
<!ATTLIST ImageEncoding quality (90|80|70|60|50|40) #REQUIRED>
|
||||
<!ELEMENT ImageDecoding EMPTY>
|
||||
<!ATTLIST ImageDecoding memCap CDATA #REQUIRED>
|
||||
<!ELEMENT Camera EMPTY>
|
||||
<!ELEMENT EncoderOutputFileFormat EMPTY>
|
||||
<!ATTLIST EncoderOutputFileFormat name (mp4|3gp) #REQUIRED>
|
||||
<!ELEMENT VideoEncoderCap EMPTY>
|
||||
<!ATTLIST VideoEncoderCap name (h264|h263|m4v|wmv) #REQUIRED>
|
||||
<!ATTLIST VideoEncoderCap enabled (true|false) #REQUIRED>
|
||||
<!ATTLIST VideoEncoderCap minBitRate CDATA #REQUIRED>
|
||||
<!ATTLIST VideoEncoderCap maxBitRate CDATA #REQUIRED>
|
||||
<!ATTLIST VideoEncoderCap minFrameWidth CDATA #REQUIRED>
|
||||
<!ATTLIST VideoEncoderCap maxFrameWidth CDATA #REQUIRED>
|
||||
<!ATTLIST VideoEncoderCap minFrameHeight CDATA #REQUIRED>
|
||||
<!ATTLIST VideoEncoderCap maxFrameHeight CDATA #REQUIRED>
|
||||
<!ATTLIST VideoEncoderCap minFrameRate CDATA #REQUIRED>
|
||||
<!ATTLIST VideoEncoderCap maxFrameRate CDATA #REQUIRED>
|
||||
<!ELEMENT AudioEncoderCap EMPTY>
|
||||
<!ATTLIST AudioEncoderCap name (amrnb|amrwb|aac|wma) #REQUIRED>
|
||||
<!ATTLIST AudioEncoderCap enabled (true|false) #REQUIRED>
|
||||
<!ATTLIST AudioEncoderCap minBitRate CDATA #REQUIRED>
|
||||
<!ATTLIST AudioEncoderCap maxBitRate CDATA #REQUIRED>
|
||||
<!ATTLIST AudioEncoderCap minSampleRate CDATA #REQUIRED>
|
||||
<!ATTLIST AudioEncoderCap maxSampleRate CDATA #REQUIRED>
|
||||
<!ATTLIST AudioEncoderCap minChannels (1|2) #REQUIRED>
|
||||
<!ATTLIST AudioEncoderCap maxChannels (1|2) #REQUIRED>
|
||||
<!ELEMENT VideoDecoderCap EMPTY>
|
||||
<!ATTLIST VideoDecoderCap name (wmv) #REQUIRED>
|
||||
<!ATTLIST VideoDecoderCap enabled (true|false) #REQUIRED>
|
||||
<!ELEMENT AudioDecoderCap EMPTY>
|
||||
<!ATTLIST AudioDecoderCap name (wma) #REQUIRED>
|
||||
<!ATTLIST AudioDecoderCap enabled (true|false) #REQUIRED>
|
||||
]>
|
||||
<!--
|
||||
This file is used to declare the multimedia profiles and capabilities
|
||||
on an android-powered device.
|
||||
-->
|
||||
<MediaSettings>
|
||||
<!-- Each camcorder profile defines a set of predefined configuration parameters -->
|
||||
<CamcorderProfiles cameraId="0">
|
||||
|
||||
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
|
||||
<Video codec="m4v"
|
||||
bitRate="128000"
|
||||
width="320"
|
||||
height="240"
|
||||
frameRate="15" />
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
|
||||
<Video codec="h264"
|
||||
bitRate="192000"
|
||||
width="176"
|
||||
height="144"
|
||||
frameRate="30" />
|
||||
<!-- audio setting is ignored -->
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<ImageEncoding quality="95" />
|
||||
<ImageEncoding quality="80" />
|
||||
<ImageEncoding quality="70" />
|
||||
<ImageDecoding memCap="20000000" />
|
||||
|
||||
</CamcorderProfiles>
|
||||
|
||||
<CamcorderProfiles cameraId="1">
|
||||
|
||||
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
|
||||
<Video codec="m4v"
|
||||
bitRate="128000"
|
||||
width="320"
|
||||
height="240"
|
||||
frameRate="15" />
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
|
||||
<Video codec="h264"
|
||||
bitRate="192000"
|
||||
width="176"
|
||||
height="144"
|
||||
frameRate="30" />
|
||||
<!-- audio setting is ignored -->
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<ImageEncoding quality="95" />
|
||||
<ImageEncoding quality="80" />
|
||||
<ImageEncoding quality="70" />
|
||||
<ImageDecoding memCap="20000000" />
|
||||
|
||||
</CamcorderProfiles>
|
||||
|
||||
<CamcorderProfiles cameraId="2">
|
||||
|
||||
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
|
||||
<Video codec="m4v"
|
||||
bitRate="128000"
|
||||
width="320"
|
||||
height="240"
|
||||
frameRate="15" />
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
|
||||
<Video codec="h264"
|
||||
bitRate="192000"
|
||||
width="176"
|
||||
height="144"
|
||||
frameRate="30" />
|
||||
<!-- audio setting is ignored -->
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<ImageEncoding quality="95" />
|
||||
<ImageEncoding quality="80" />
|
||||
<ImageEncoding quality="70" />
|
||||
<ImageDecoding memCap="20000000" />
|
||||
|
||||
</CamcorderProfiles>
|
||||
|
||||
<CamcorderProfiles cameraId="3">
|
||||
|
||||
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
|
||||
<Video codec="m4v"
|
||||
bitRate="128000"
|
||||
width="320"
|
||||
height="240"
|
||||
frameRate="15" />
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
|
||||
<Video codec="h264"
|
||||
bitRate="192000"
|
||||
width="176"
|
||||
height="144"
|
||||
frameRate="30" />
|
||||
<!-- audio setting is ignored -->
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<ImageEncoding quality="95" />
|
||||
<ImageEncoding quality="80" />
|
||||
<ImageEncoding quality="70" />
|
||||
<ImageDecoding memCap="20000000" />
|
||||
|
||||
</CamcorderProfiles>
|
||||
|
||||
<CamcorderProfiles cameraId="4">
|
||||
|
||||
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
|
||||
<Video codec="m4v"
|
||||
bitRate="128000"
|
||||
width="320"
|
||||
height="240"
|
||||
frameRate="15" />
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
|
||||
<Video codec="h264"
|
||||
bitRate="192000"
|
||||
width="176"
|
||||
height="144"
|
||||
frameRate="30" />
|
||||
<!-- audio setting is ignored -->
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<ImageEncoding quality="95" />
|
||||
<ImageEncoding quality="80" />
|
||||
<ImageEncoding quality="70" />
|
||||
<ImageDecoding memCap="20000000" />
|
||||
|
||||
</CamcorderProfiles>
|
||||
|
||||
<CamcorderProfiles cameraId="5">
|
||||
|
||||
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
|
||||
<Video codec="m4v"
|
||||
bitRate="128000"
|
||||
width="320"
|
||||
height="240"
|
||||
frameRate="15" />
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
|
||||
<Video codec="h264"
|
||||
bitRate="192000"
|
||||
width="176"
|
||||
height="144"
|
||||
frameRate="30" />
|
||||
<!-- audio setting is ignored -->
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<ImageEncoding quality="95" />
|
||||
<ImageEncoding quality="80" />
|
||||
<ImageEncoding quality="70" />
|
||||
<ImageDecoding memCap="20000000" />
|
||||
|
||||
</CamcorderProfiles>
|
||||
|
||||
<CamcorderProfiles cameraId="6">
|
||||
|
||||
<EncoderProfile quality="qvga" fileFormat="mp4" duration="60">
|
||||
<Video codec="m4v"
|
||||
bitRate="128000"
|
||||
width="320"
|
||||
height="240"
|
||||
frameRate="15" />
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<EncoderProfile quality="timelapseqcif" fileFormat="mp4" duration="30">
|
||||
<Video codec="h264"
|
||||
bitRate="192000"
|
||||
width="176"
|
||||
height="144"
|
||||
frameRate="30" />
|
||||
<!-- audio setting is ignored -->
|
||||
<Audio codec="amrnb"
|
||||
bitRate="12200"
|
||||
sampleRate="8000"
|
||||
channels="1" />
|
||||
</EncoderProfile>
|
||||
|
||||
<ImageEncoding quality="95" />
|
||||
<ImageEncoding quality="80" />
|
||||
<ImageEncoding quality="70" />
|
||||
<ImageDecoding memCap="20000000" />
|
||||
|
||||
</CamcorderProfiles>
|
||||
|
||||
<EncoderOutputFileFormat name="3gp" />
|
||||
<EncoderOutputFileFormat name="mp4" />
|
||||
|
||||
<!--
|
||||
If a codec is not enabled, it is invisible to the applications
|
||||
In other words, the applications won't be able to use the codec
|
||||
or query the capabilities of the codec at all if it is disabled
|
||||
-->
|
||||
<VideoEncoderCap name="h264" enabled="true"
|
||||
minBitRate="64000" maxBitRate="192000"
|
||||
minFrameWidth="176" maxFrameWidth="320"
|
||||
minFrameHeight="144" maxFrameHeight="240"
|
||||
minFrameRate="15" maxFrameRate="30" />
|
||||
|
||||
<VideoEncoderCap name="h263" enabled="true"
|
||||
minBitRate="64000" maxBitRate="192000"
|
||||
minFrameWidth="176" maxFrameWidth="320"
|
||||
minFrameHeight="144" maxFrameHeight="240"
|
||||
minFrameRate="15" maxFrameRate="30" />
|
||||
|
||||
<VideoEncoderCap name="m4v" enabled="true"
|
||||
minBitRate="64000" maxBitRate="192000"
|
||||
minFrameWidth="176" maxFrameWidth="320"
|
||||
minFrameHeight="144" maxFrameHeight="240"
|
||||
minFrameRate="15" maxFrameRate="30" />
|
||||
|
||||
<AudioEncoderCap name="aac" enabled="true"
|
||||
minBitRate="8000" maxBitRate="96000"
|
||||
minSampleRate="8000" maxSampleRate="48000"
|
||||
minChannels="1" maxChannels="1" />
|
||||
|
||||
<AudioEncoderCap name="amrwb" enabled="true"
|
||||
minBitRate="6600" maxBitRate="23050"
|
||||
minSampleRate="16000" maxSampleRate="16000"
|
||||
minChannels="1" maxChannels="1" />
|
||||
|
||||
<AudioEncoderCap name="amrnb" enabled="true"
|
||||
minBitRate="5525" maxBitRate="12200"
|
||||
minSampleRate="8000" maxSampleRate="8000"
|
||||
minChannels="1" maxChannels="1" />
|
||||
|
||||
<!--
|
||||
FIXME:
|
||||
We do not check decoder capabilities at present
|
||||
At present, we only check whether windows media is visible
|
||||
for TEST applications. For other applications, we do
|
||||
not perform any checks at all.
|
||||
-->
|
||||
<VideoDecoderCap name="wmv" enabled="false"/>
|
||||
<AudioDecoderCap name="wma" enabled="false"/>
|
||||
</MediaSettings>
|
||||
265
android/data/etc/apns-conf.xml
Normal file
265
android/data/etc/apns-conf.xml
Normal file
|
|
@ -0,0 +1,265 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!--
|
||||
/*
|
||||
** Copyright 2006, Google Inc.
|
||||
**
|
||||
** Licensed under the Apache License, Version 2.0 (the "License");
|
||||
** you may not use this file except in compliance with the License.
|
||||
** You may obtain a copy of the License at
|
||||
**
|
||||
** http://www.apache.org/licenses/LICENSE-2.0
|
||||
**
|
||||
** Unless required by applicable law or agreed to in writing, software
|
||||
** distributed under the License is distributed on an "AS IS" BASIS,
|
||||
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
** See the License for the specific language governing permissions and
|
||||
** limitations under the License.
|
||||
*/
|
||||
-->
|
||||
|
||||
<!-- use empty string to specify no proxy or port -->
|
||||
<!-- This version must agree with that in apps/common/res/apns.xml -->
|
||||
<apns version="8">
|
||||
<apn carrier="T-Mobile US"
|
||||
mcc="310"
|
||||
mnc="260"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 250"
|
||||
mcc="310"
|
||||
mnc="250"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 660"
|
||||
mcc="310"
|
||||
mnc="660"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 230"
|
||||
mcc="310"
|
||||
mnc="230"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 310"
|
||||
mcc="310"
|
||||
mnc="310"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 580"
|
||||
mcc="310"
|
||||
mnc="580"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 240"
|
||||
mcc="310"
|
||||
mnc="240"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 800"
|
||||
mcc="310"
|
||||
mnc="800"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 210"
|
||||
mcc="310"
|
||||
mnc="210"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 160"
|
||||
mcc="310"
|
||||
mnc="160"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 270"
|
||||
mcc="310"
|
||||
mnc="270"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 200"
|
||||
mcc="310"
|
||||
mnc="200"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 220"
|
||||
mcc="310"
|
||||
mnc="220"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile US 490"
|
||||
mcc="310"
|
||||
mnc="490"
|
||||
apn="epc.tmobile.com"
|
||||
user="none"
|
||||
server="*"
|
||||
password="none"
|
||||
mmsc="http://mms.msg.eng.t-mobile.com/mms/wapenc"
|
||||
/>
|
||||
|
||||
<!-- T-Mobile Europe -->
|
||||
<apn carrier="T-Mobile UK"
|
||||
mcc="234"
|
||||
mnc="30"
|
||||
apn="general.t-mobile.uk"
|
||||
user="t-mobile"
|
||||
password="tm"
|
||||
server="*"
|
||||
mmsproxy="149.254.201.135"
|
||||
mmsport="8080"
|
||||
mmsc="http://mmsc.t-mobile.co.uk:8002"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile D"
|
||||
mcc="262"
|
||||
mnc="01"
|
||||
apn="internet.t-mobile"
|
||||
user="t-mobile"
|
||||
password="tm"
|
||||
server="*"
|
||||
mmsproxy="172.028.023.131"
|
||||
mmsport="8008"
|
||||
mmsc="http://mms.t-mobile.de/servlets/mms"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile A"
|
||||
mcc="232"
|
||||
mnc="03"
|
||||
apn="gprsinternet"
|
||||
user="t-mobile"
|
||||
password="tm"
|
||||
server="*"
|
||||
mmsproxy="010.012.000.020"
|
||||
mmsport="80"
|
||||
mmsc="http://mmsc.t-mobile.at/servlets/mms"
|
||||
type="default,supl"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile A MMS"
|
||||
mcc="232"
|
||||
mnc="03"
|
||||
apn="gprsmms"
|
||||
user="t-mobile"
|
||||
password="tm"
|
||||
server="*"
|
||||
mmsproxy="010.012.000.020"
|
||||
mmsport="80"
|
||||
mmsc="http://mmsc.t-mobile.at/servlets/mms"
|
||||
type="mms"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile CZ"
|
||||
mcc="230"
|
||||
mnc="01"
|
||||
apn="internet.t-mobile.cz"
|
||||
user="wap"
|
||||
password="wap"
|
||||
server="*"
|
||||
mmsproxy="010.000.000.010"
|
||||
mmsport="80"
|
||||
mmsc="http://mms"
|
||||
type="default,supl"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile CZ MMS"
|
||||
mcc="230"
|
||||
mnc="01"
|
||||
apn="mms.t-mobile.cz"
|
||||
user="mms"
|
||||
password="mms"
|
||||
server="*"
|
||||
mmsproxy="010.000.000.010"
|
||||
mmsport="80"
|
||||
mmsc="http://mms"
|
||||
type="mms"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile NL"
|
||||
mcc="204"
|
||||
mnc="16"
|
||||
apn="internet"
|
||||
user="*"
|
||||
password="*"
|
||||
server="*"
|
||||
mmsproxy="010.010.010.011"
|
||||
mmsport="8080"
|
||||
mmsc="http://t-mobilemms"
|
||||
type="default,supl"
|
||||
/>
|
||||
|
||||
<apn carrier="T-Mobile NL MMS"
|
||||
mcc="204"
|
||||
mnc="16"
|
||||
apn="mms"
|
||||
user="tmobilemms"
|
||||
password="tmobilemms"
|
||||
server="*"
|
||||
mmsproxy="010.010.010.011"
|
||||
mmsport="8080"
|
||||
mmsc="http://t-mobilemms"
|
||||
type="mms"
|
||||
/>
|
||||
</apns>
|
||||
33
android/fingerprint/Android.mk
Normal file
33
android/fingerprint/Android.mk
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
# Copyright (C) 2013 The Android Open Source Project
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
include $(CLEAR_VARS)
|
||||
|
||||
LOCAL_MODULE := fingerprint.goldfish
|
||||
LOCAL_MODULE_RELATIVE_PATH := hw
|
||||
LOCAL_SRC_FILES := fingerprint.c
|
||||
LOCAL_SHARED_LIBRARIES := liblog
|
||||
|
||||
include $(BUILD_SHARED_LIBRARY)
|
||||
|
||||
include $(CLEAR_VARS)
|
||||
|
||||
LOCAL_MODULE := fingerprint.ranchu
|
||||
LOCAL_MODULE_RELATIVE_PATH := hw
|
||||
LOCAL_SRC_FILES := fingerprint.c
|
||||
LOCAL_SHARED_LIBRARIES := liblog
|
||||
|
||||
include $(BUILD_SHARED_LIBRARY)
|
||||
857
android/fingerprint/fingerprint.c
Normal file
857
android/fingerprint/fingerprint.c
Normal file
|
|
@ -0,0 +1,857 @@
|
|||
/*
|
||||
* Copyright (C) 2015 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This is a very basic implementation of fingerprint to allow testing on the emulator. It
|
||||
* is *not* meant to be the final implementation on real devices. For example, it does *not*
|
||||
* implement all of the required features, such as secure template storage and recognition
|
||||
* inside a Trusted Execution Environment (TEE). However, this file is a reasonable starting
|
||||
* point as developers add fingerprint support to their platform. See inline comments and
|
||||
* recommendations for details.
|
||||
*
|
||||
* Please see the Android Compatibility Definition Document (CDD) for a full list of requirements
|
||||
* and suggestions.
|
||||
*/
|
||||
#define LOG_TAG "FingerprintHal"
|
||||
|
||||
#include <errno.h>
|
||||
#include <endian.h>
|
||||
#include <inttypes.h>
|
||||
#include <malloc.h>
|
||||
#include <string.h>
|
||||
#include <cutils/log.h>
|
||||
#include <hardware/hardware.h>
|
||||
#include <hardware/fingerprint.h>
|
||||
#include <hardware/qemud.h>
|
||||
|
||||
#include <poll.h>
|
||||
|
||||
#define FINGERPRINT_LISTEN_SERVICE_NAME "fingerprintlisten"
|
||||
#define FINGERPRINT_FILENAME "emufp.bin"
|
||||
#define AUTHENTICATOR_ID_FILENAME "emuauthid.bin"
|
||||
#define MAX_COMM_CHARS 128
|
||||
#define MAX_COMM_ERRORS 8
|
||||
// Typical devices will allow up to 5 fingerprints per user to maintain performance of
|
||||
// t < 500ms for recognition. This is the total number of fingerprints we'll store.
|
||||
#define MAX_NUM_FINGERS 20
|
||||
#define MAX_FID_VALUE 0x7FFFFFFF // Arbitrary limit
|
||||
|
||||
/**
|
||||
* Most devices will have an internal state machine resembling this. There are 3 basic states, as
|
||||
* shown below. When device is not authenticating or enrolling, it is expected to be in
|
||||
* the idle state.
|
||||
*
|
||||
* Note that this is completely independent of device wake state. If the hardware device was in
|
||||
* the "scan" state when the device drops into power collapse, it should resume scanning when power
|
||||
* is restored. This is to facilitate rapid touch-to-unlock from keyguard.
|
||||
*/
|
||||
typedef enum worker_state_t {
|
||||
STATE_IDLE = 0,
|
||||
STATE_ENROLL,
|
||||
STATE_SCAN,
|
||||
STATE_EXIT
|
||||
} worker_state_t;
|
||||
|
||||
typedef struct worker_thread_t {
|
||||
pthread_t thread;
|
||||
worker_state_t state;
|
||||
uint64_t secureid[MAX_NUM_FINGERS];
|
||||
uint64_t fingerid[MAX_NUM_FINGERS];
|
||||
char fp_filename[PATH_MAX];
|
||||
char authid_filename[PATH_MAX];
|
||||
} worker_thread_t;
|
||||
|
||||
typedef struct qemu_fingerprint_device_t {
|
||||
fingerprint_device_t device; // "inheritance"
|
||||
worker_thread_t listener;
|
||||
uint64_t op_id;
|
||||
uint64_t challenge;
|
||||
uint64_t user_id;
|
||||
uint64_t group_id;
|
||||
uint64_t secure_user_id;
|
||||
uint64_t authenticator_id;
|
||||
int qchanfd;
|
||||
pthread_mutex_t lock;
|
||||
} qemu_fingerprint_device_t;
|
||||
|
||||
/******************************************************************************/
|
||||
|
||||
static FILE* openForWrite(const char* filename);
|
||||
|
||||
/* Persist one fingerprint slot (secure id + finger id) to the storage file.
 * File layout: MAX_NUM_FINGERS secure ids, then MAX_NUM_FINGERS finger ids
 * (all uint64_t); a trailing slot is reserved for the authenticator id.
 * Failures are logged but not propagated. */
static void saveFingerprint(worker_thread_t* listener, int idx) {
    ALOGD("----------------> %s -----------------> idx %d", __FUNCTION__, idx);

    // Save fingerprints to file
    FILE* fp = openForWrite(listener->fp_filename);
    if (fp == NULL) {
        ALOGE("Could not open fingerprints storage at %s; "
              "fingerprints won't be saved",
              listener->fp_filename);
        perror("Failed to open file");
        return;
    }

    ALOGD("Write fingerprint[%d] (0x%" PRIx64 ",0x%" PRIx64 ")", idx,
          listener->secureid[idx], listener->fingerid[idx]);

    // Secure id lives in the first table.
    if (fseek(fp, (idx) * sizeof(uint64_t), SEEK_SET) < 0) {
        ALOGE("Failed while seeking for fingerprint[%d] in emulator storage",
              idx);
        fclose(fp);
        return;
    }
    int ns = fwrite(&listener->secureid[idx], sizeof(uint64_t), 1, fp);

    // Finger id lives in the second table, MAX_NUM_FINGERS entries later.
    if (fseek(fp, (MAX_NUM_FINGERS + idx) * sizeof(uint64_t), SEEK_SET) < 0) {
        ALOGE("Failed while seeking for fingerprint[%d] in emulator storage",
              idx);
        fclose(fp);
        return;
    }
    int nf = fwrite(&listener->fingerid[idx], sizeof(uint64_t), 1, fp);
    // BUG FIX: the original condition was "ns != 1 || ns != 1", which never
    // examined the finger-id write result; check both writes.
    if (ns != 1 || nf != 1)
        ALOGW("Corrupt emulator fingerprints storage; could not save "
              "fingerprints");

    fclose(fp);

    return;
}
|
||||
|
||||
static FILE* openForWrite(const char* filename) {
|
||||
|
||||
if (!filename) return NULL;
|
||||
|
||||
FILE* fp = fopen(filename, "r+"); // write but don't truncate
|
||||
if (fp == NULL) {
|
||||
fp = fopen(filename, "w");
|
||||
if (fp) {
|
||||
uint64_t zero = 0;
|
||||
int i = 0;
|
||||
for (i = 0; i < 2*MAX_NUM_FINGERS; ++i) {
|
||||
fwrite(&zero, sizeof(uint64_t), 1, fp);
|
||||
}
|
||||
|
||||
//the last one is for authenticator id
|
||||
fwrite(&zero, sizeof(uint64_t), 1, fp);
|
||||
}
|
||||
}
|
||||
return fp;
|
||||
}
|
||||
|
||||
/* Persist |authenid| at the start of its storage file. Failures are logged
 * but not propagated. */
static void saveAuthenticatorId(const char* filename, uint64_t authenid) {
    ALOGD("----------------> %s ----------------->", __FUNCTION__);
    FILE* fp = openForWrite(filename);
    if (fp == NULL) {
        ALOGE("Failed to open emulator storage file to save authenticator id");
        return;
    }

    rewind(fp);

    size_t written = fwrite(&authenid, sizeof(authenid), 1, fp);
    if (written != 1) {
        ALOGE("Failed while writing authenticator id in emulator storage");
    }

    ALOGD("Save authenticator id (0x%" PRIx64 ")", authenid);

    fclose(fp);
}
|
||||
|
||||
/* Read the authenticator id from the start of |authid_filename| into
 * |*pauthenid|. A missing file is logged and leaves *pauthenid untouched. */
static void loadAuthenticatorId(const char* authid_filename, uint64_t* pauthenid) {
    ALOGD("----------------> %s ----------------->", __FUNCTION__);
    FILE* fp = fopen(authid_filename, "r");
    if (!fp) {
        ALOGE("Could not load authenticator id from storage at %s; "
              "it has not yet been created.",
              authid_filename);
        perror("Failed to open/create file");
        return;
    }

    rewind(fp);

    size_t nread = fread(pauthenid, sizeof(*pauthenid), 1, fp);
    if (nread != 1) {
        ALOGW("Corrupt emulator authenticator id storage (read %d)", (int)nread);
    }

    ALOGD("Read authenticator id (0x%" PRIx64 ")", *pauthenid);

    fclose(fp);
}
|
||||
|
||||
/* Load the full secure-id and finger-id tables from the storage file into
 * the listener's in-memory copies. A missing file is not fatal (first run). */
static void loadFingerprints(worker_thread_t* listener) {
    ALOGD("----------------> %s ----------------->", __FUNCTION__);
    FILE* fp = fopen(listener->fp_filename, "r");
    if (!fp) {
        ALOGE("Could not load fingerprints from storage at %s; "
              "it has not yet been created.",
              listener->fp_filename);
        perror("Failed to open/create file");
        return;
    }

    size_t ns = fread(listener->secureid, MAX_NUM_FINGERS * sizeof(uint64_t),
                      1, fp);
    size_t nf = fread(listener->fingerid, MAX_NUM_FINGERS * sizeof(uint64_t),
                      1, fp);
    if (ns != 1 || nf != 1) {
        ALOGW("Corrupt emulator fingerprints storage (read %d+%db)",
              (int)ns, (int)nf);
    }

    for (int i = 0; i < MAX_NUM_FINGERS; i++) {
        ALOGD("Read fingerprint %d (0x%" PRIx64 ",0x%" PRIx64 ")", i,
              listener->secureid[i], listener->fingerid[i]);
    }

    fclose(fp);
}
|
||||
|
||||
/******************************************************************************/
|
||||
|
||||
/* Produce a non-zero pseudo-random 64-bit value.
 * NOTE: rand() is not cryptographically secure; on a real device this value
 * would be generated inside the TEE with a proper CSPRNG (e.g. arc4random). */
static uint64_t get_64bit_rand() {
    ALOGD("----------------> %s ----------------->", __FUNCTION__);
    uint64_t hi = (uint64_t)rand();
    uint64_t lo = (uint64_t)rand();
    uint64_t r = (hi << 32) | lo;
    return (r == 0) ? 1 : r;
}
|
||||
|
||||
static uint64_t fingerprint_get_auth_id(struct fingerprint_device* device) {
|
||||
// This should return the authentication_id generated when the fingerprint template database
|
||||
// was created. Though this isn't expected to be secret, it is reasonable to expect it to be
|
||||
// cryptographically generated to avoid replay attacks.
|
||||
qemu_fingerprint_device_t* qdev = (qemu_fingerprint_device_t*)device;
|
||||
ALOGD("----------------> %s ----------------->", __FUNCTION__);
|
||||
uint64_t authenticator_id = 0;
|
||||
pthread_mutex_lock(&qdev->lock);
|
||||
authenticator_id = qdev->authenticator_id;
|
||||
pthread_mutex_unlock(&qdev->lock);
|
||||
|
||||
ALOGD("----------------> %s auth id %" PRIx64 "----------------->", __FUNCTION__, authenticator_id);
|
||||
return authenticator_id;
|
||||
}
|
||||
|
||||
/* Select the active fingerprint group and (re)load the template table and
 * the authenticator id from files under |path|. On first use (authenticator
 * id still zero) a fresh random id is generated and persisted.
 * Always returns 0. */
static int fingerprint_set_active_group(struct fingerprint_device *device, uint32_t gid,
                                        const char *path) {
    ALOGD("----------------> %s -----------------> path %s", __FUNCTION__, path);
    qemu_fingerprint_device_t* qdev = (qemu_fingerprint_device_t*)device;
    pthread_mutex_lock(&qdev->lock);
    qdev->group_id = gid;
    // NOTE(review): snprintf truncation is not checked; a very long |path|
    // would silently produce clipped filenames.
    snprintf(qdev->listener.fp_filename, sizeof(qdev->listener.fp_filename),
             "%s/%s", path, FINGERPRINT_FILENAME);
    snprintf(qdev->listener.authid_filename, sizeof(qdev->listener.authid_filename),
             "%s/%s", path, AUTHENTICATOR_ID_FILENAME);
    uint64_t authenticator_id = 0;
    loadFingerprints(&qdev->listener);
    loadAuthenticatorId(qdev->listener.authid_filename, &authenticator_id);
    if (authenticator_id == 0) {
        // first time: create an authenticator id
        authenticator_id = get_64bit_rand();
        // save it to disk
        saveAuthenticatorId(qdev->listener.authid_filename, authenticator_id);
    }

    qdev->authenticator_id = authenticator_id;
    pthread_mutex_unlock(&qdev->lock);

    return 0;
}
|
||||
|
||||
/**
|
||||
* If fingerprints are enrolled, then this function is expected to put the sensor into a
|
||||
* "scanning" state where it's actively scanning and recognizing fingerprint features.
|
||||
* Actual authentication must happen in TEE and should be monitored in a separate thread
|
||||
* since this function is expected to return immediately.
|
||||
*/
|
||||
static int fingerprint_authenticate(struct fingerprint_device *device,
|
||||
uint64_t operation_id, __unused uint32_t gid)
|
||||
{
|
||||
qemu_fingerprint_device_t* qdev = (qemu_fingerprint_device_t*)device;
|
||||
|
||||
pthread_mutex_lock(&qdev->lock);
|
||||
qdev->op_id = operation_id;
|
||||
qdev->listener.state = STATE_SCAN;
|
||||
pthread_mutex_unlock(&qdev->lock);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
 * This is expected to put the sensor into an "enroll" state where it's
 * actively scanning and working towards a finished fingerprint database
 * entry. Actual capture happens on the listener thread, since this function
 * is expected to return immediately.
 *
 * Note: This method should always generate a new random authenticator_id.
 *
 * Note: As with fingerprint_authenticate(), this would run in TEE on a real
 * device.
 *
 * Returns 0 on success, -EPROTONOSUPPORT for a missing token or version
 * mismatch, -EPERM for an unacceptable token.
 */
static int fingerprint_enroll(struct fingerprint_device *device,
                              const hw_auth_token_t *hat,
                              uint32_t __unused gid,
                              uint32_t __unused timeout_sec) {
    ALOGD("fingerprint_enroll");
    qemu_fingerprint_device_t* dev = (qemu_fingerprint_device_t*)device;
    if (!hat) {
        ALOGW("%s: null auth token", __func__);
        return -EPROTONOSUPPORT;
    }
    if (hat->challenge == dev->challenge) {
        // The secure_user_id retrieved from the auth token should be stored
        // with the enrolled fingerprint template and returned in the auth result
        // for a successful authentication with that finger.
        dev->secure_user_id = hat->user_id;
    } else {
        ALOGW("%s: invalid auth token", __func__);
    }

    // NOTE(review): secure_user_id is updated above *before* the version and
    // permission checks below, so a token that fails those checks has already
    // modified device state. Confirm this ordering is intentional.
    if (hat->version != HW_AUTH_TOKEN_VERSION) {
        return -EPROTONOSUPPORT;
    }
    if (hat->challenge != dev->challenge && !(hat->authenticator_type & HW_AUTH_FINGERPRINT)) {
        return -EPERM;
    }

    dev->user_id = hat->user_id;

    // Flip the listener into the enroll state; it reacts to the next
    // simulated touch event by creating the template entry.
    pthread_mutex_lock(&dev->lock);
    dev->listener.state = STATE_ENROLL;
    pthread_mutex_unlock(&dev->lock);

    // fingerprint id, authenticator id, and secure_user_id
    // will be stored by the worker thread

    return 0;

}
|
||||
|
||||
/**
|
||||
* The pre-enrollment step is simply to get an authentication token that can be wrapped and
|
||||
* verified at a later step. The primary purpose is to return a token that protects against
|
||||
* spoofing and replay attacks. It is passed to password authentication where it is wrapped and
|
||||
* propagated to the enroll step.
|
||||
*/
|
||||
static uint64_t fingerprint_pre_enroll(struct fingerprint_device *device) {
|
||||
ALOGD("----------------> %s ----------------->", __FUNCTION__);
|
||||
uint64_t challenge = 0;
|
||||
qemu_fingerprint_device_t* qdev = (qemu_fingerprint_device_t*)device;
|
||||
|
||||
// The challenge will typically be a cryptographically-secure key
|
||||
// coming from the TEE so it can be verified at a later step. For now we just generate a
|
||||
// random value.
|
||||
challenge = get_64bit_rand();
|
||||
|
||||
pthread_mutex_lock(&qdev->lock);
|
||||
qdev->challenge = challenge;
|
||||
pthread_mutex_unlock(&qdev->lock);
|
||||
|
||||
return challenge;
|
||||
}
|
||||
|
||||
static int fingerprint_post_enroll(struct fingerprint_device* device) {
|
||||
ALOGD("----------------> %s ----------------->", __FUNCTION__);
|
||||
qemu_fingerprint_device_t* qdev = (qemu_fingerprint_device_t*)device;
|
||||
|
||||
pthread_mutex_lock(&qdev->lock);
|
||||
qdev->challenge = 0;
|
||||
pthread_mutex_unlock(&qdev->lock);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cancel is called by the framework to cancel an outstanding event. This should *not* be called
|
||||
* by the driver since it will cause the framework to stop listening for fingerprints.
|
||||
*/
|
||||
static int fingerprint_cancel(struct fingerprint_device *device) {
|
||||
ALOGD("----------------> %s ----------------->", __FUNCTION__);
|
||||
qemu_fingerprint_device_t* qdev = (qemu_fingerprint_device_t*)device;
|
||||
|
||||
pthread_mutex_lock(&qdev->lock);
|
||||
qdev->listener.state = STATE_IDLE;
|
||||
pthread_mutex_unlock(&qdev->lock);
|
||||
|
||||
fingerprint_msg_t msg = {0, {0}};
|
||||
msg.type = FINGERPRINT_ERROR;
|
||||
msg.data.error = FINGERPRINT_ERROR_CANCELED;
|
||||
qdev->device.notify(&msg);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Enumerate all enrolled templates, sending one
 * FINGERPRINT_TEMPLATE_ENUMERATING notification per occupied slot with a
 * decreasing remaining_templates count. Returns 0 on success, -1 when
 * |device| is NULL.
 * NOTE(review): the listener tables are read here without taking qdev->lock;
 * confirm concurrent modification by the worker thread is acceptable. */
static int fingerprint_enumerate(struct fingerprint_device *device) {
    ALOGD("----------------> %s ----------------->", __FUNCTION__);
    if (device == NULL) {
        ALOGE("Cannot enumerate saved fingerprints with uninitialized params");
        return -1;
    }

    qemu_fingerprint_device_t* qdev = (qemu_fingerprint_device_t*)device;
    // First pass: count occupied slots so remaining_templates can count down.
    int template_count = 0;
    for (int i = 0; i < MAX_NUM_FINGERS; i++) {
        if (qdev->listener.secureid[i] != 0 ||
            qdev->listener.fingerid[i] != 0) {
            ALOGD("ENUM: Fingerprint [%d] = 0x%" PRIx64 ",%" PRIx64, i,
                  qdev->listener.secureid[i], qdev->listener.fingerid[i]);
            template_count++;
        }
    }
    fingerprint_msg_t message = {0, {0}};
    message.type = FINGERPRINT_TEMPLATE_ENUMERATING;
    message.data.enumerated.finger.gid = qdev->group_id;
    // Second pass: emit one notification per occupied slot.
    for (int i = 0; i < MAX_NUM_FINGERS; i++) {
        if (qdev->listener.secureid[i] != 0 ||
            qdev->listener.fingerid[i] != 0) {
            template_count--;
            message.data.enumerated.remaining_templates = template_count;
            message.data.enumerated.finger.fid = qdev->listener.fingerid[i];
            qdev->device.notify(&message);
        }
    }

    return 0;
}
|
||||
|
||||
/* Remove enrolled template(s).
 * fid == 0 means "remove all": slots are cleared one at a time, releasing the
 * lock around each notification, followed by a final fid=0 removal notice.
 * Otherwise removes the single matching template.
 * Returns 0 on success, -1 for a NULL device, and FINGERPRINT_ERROR when
 * |fid| is not enrolled (NOTE(review): a positive enum value, inconsistent
 * with the -1/0 convention used elsewhere — confirm callers accept it). */
static int fingerprint_remove(struct fingerprint_device *device,
                              uint32_t __unused gid, uint32_t fid) {
    int idx = 0;
    fingerprint_msg_t msg = {0, {0}};
    ALOGD("----------------> %s -----------------> fid %d", __FUNCTION__, fid);
    if (device == NULL) {
        ALOGE("Can't remove fingerprint (gid=%d, fid=%d); "
              "device not initialized properly",
              gid, fid);
        return -1;
    }

    qemu_fingerprint_device_t* qdev = (qemu_fingerprint_device_t*)device;

    if (fid == 0) {
        // Delete all fingerprints
        // Done one at a time so the mutex is not held during the
        // notification callback.
        bool listIsEmpty;
        do {
            pthread_mutex_lock(&qdev->lock);
            listIsEmpty = true;  // Haven't seen a valid entry yet
            for (idx = 0; idx < MAX_NUM_FINGERS; idx++) {
                uint32_t theFid = qdev->listener.fingerid[idx];
                if (theFid != 0) {
                    // Delete this entry
                    qdev->listener.secureid[idx] = 0;
                    qdev->listener.fingerid[idx] = 0;
                    saveFingerprint(&qdev->listener, idx);

                    // Send a notification that we deleted this one
                    pthread_mutex_unlock(&qdev->lock);
                    msg.type = FINGERPRINT_TEMPLATE_REMOVED;
                    msg.data.removed.finger.fid = theFid;
                    device->notify(&msg);

                    // Because we released the mutex, the list
                    // may have changed. Restart the 'for' loop
                    // after reacquiring the mutex.
                    listIsEmpty = false;
                    break;
                }
            } // end for (idx < MAX_NUM_FINGERS)
        } while (!listIsEmpty);
        // The final pass found no entries, so the mutex is still held here.
        msg.type = FINGERPRINT_TEMPLATE_REMOVED;
        msg.data.removed.finger.fid = 0;
        device->notify(&msg);
        qdev->listener.state = STATE_IDLE;
        pthread_mutex_unlock(&qdev->lock);
    } else {
        // Delete one fingerprint
        // Look for this finger ID in our table.
        pthread_mutex_lock(&qdev->lock);
        for (idx = 0; idx < MAX_NUM_FINGERS; idx++) {
            if (qdev->listener.fingerid[idx] == fid &&
                qdev->listener.secureid[idx] != 0) {
                // Found it!
                break;
            }
        }
        if (idx >= MAX_NUM_FINGERS) {
            qdev->listener.state = STATE_IDLE;
            pthread_mutex_unlock(&qdev->lock);
            ALOGE("Fingerprint ID %d not found", fid);
            return FINGERPRINT_ERROR;
        }

        // Clear the slot and persist before notifying (outside the lock).
        qdev->listener.secureid[idx] = 0;
        qdev->listener.fingerid[idx] = 0;
        saveFingerprint(&qdev->listener, idx);

        qdev->listener.state = STATE_IDLE;
        pthread_mutex_unlock(&qdev->lock);

        msg.type = FINGERPRINT_TEMPLATE_REMOVED;
        msg.data.removed.finger.fid = fid;
        device->notify(&msg);
    }

    return 0;
}
|
||||
|
||||
static int set_notify_callback(struct fingerprint_device *device,
|
||||
fingerprint_notify_t notify) {
|
||||
ALOGD("----------------> %s ----------------->", __FUNCTION__);
|
||||
if (device == NULL || notify == NULL) {
|
||||
ALOGE("Failed to set notify callback @ %p for fingerprint device %p",
|
||||
device, notify);
|
||||
return -1;
|
||||
}
|
||||
|
||||
qemu_fingerprint_device_t* qdev = (qemu_fingerprint_device_t*)device;
|
||||
pthread_mutex_lock(&qdev->lock);
|
||||
qdev->listener.state = STATE_IDLE;
|
||||
device->notify = notify;
|
||||
pthread_mutex_unlock(&qdev->lock);
|
||||
ALOGD("fingerprint callback notification set");
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Return true iff |fid| is non-zero and present in the enrolled-finger table. */
static bool is_valid_fid(qemu_fingerprint_device_t* qdev, uint64_t fid) {
    if (fid == 0) {
        return false;
    }
    for (int idx = 0; idx < MAX_NUM_FINGERS; idx++) {
        if (qdev->listener.fingerid[idx] == fid) {
            return true;
        }
    }
    return false;
}
|
||||
|
||||
/* Forward one simulated scan event to the framework: an "acquired good"
 * message followed by an authentication result. The result carries the fid
 * only if |fid| is currently enrolled; otherwise fid 0 (no match). */
static void send_scan_notice(qemu_fingerprint_device_t* qdev, int fid) {
    ALOGD("----------------> %s ----------------->", __FUNCTION__);

    // acquired message
    fingerprint_msg_t acqu_msg = {0, {0}};
    acqu_msg.type = FINGERPRINT_ACQUIRED;
    acqu_msg.data.acquired.acquired_info = FINGERPRINT_ACQUIRED_GOOD;

    // authenticated message: build the hw_auth_token for the framework
    fingerprint_msg_t auth_msg = {0, {0}};
    auth_msg.type = FINGERPRINT_AUTHENTICATED;
    auth_msg.data.authenticated.finger.fid = is_valid_fid(qdev, fid) ? fid : 0;
    auth_msg.data.authenticated.finger.gid = 0;  // unused
    auth_msg.data.authenticated.hat.version = HW_AUTH_TOKEN_VERSION;
    auth_msg.data.authenticated.hat.authenticator_type =
            htobe32(HW_AUTH_FINGERPRINT);
    auth_msg.data.authenticated.hat.challenge = qdev->op_id;
    auth_msg.data.authenticated.hat.authenticator_id = qdev->authenticator_id;
    auth_msg.data.authenticated.hat.user_id = qdev->secure_user_id;
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    // Token timestamp: milliseconds since boot, big-endian on the wire.
    auth_msg.data.authenticated.hat.timestamp =
            htobe64((uint64_t)ts.tv_sec * 1000 + ts.tv_nsec / 1000000);

    // NOTE(review): qdev fields are read and notify() invoked without taking
    // qdev->lock — the locking below is deliberately commented out. Confirm
    // this is safe with respect to concurrent HAL entry points.
    // pthread_mutex_lock(&qdev->lock);
    qdev->device.notify(&acqu_msg);
    qdev->device.notify(&auth_msg);
    // pthread_mutex_unlock(&qdev->lock);

    return;
}
|
||||
|
||||
/* Record a newly enrolled fingerprint: find a free slot, bind the current
 * secure_user_id to |fid|, persist the slot, and notify the framework that
 * enrollment finished (samples_remaining == 0). The listener always returns
 * to STATE_SCAN afterwards, even when the table is full or input is invalid
 * only after the lock section. */
static void send_enroll_notice(qemu_fingerprint_device_t* qdev, int fid) {
    ALOGD("----------------> %s -----------------> fid %d", __FUNCTION__, fid);

    if (fid == 0) {
        ALOGD("Fingerprint ID is zero (invalid)");
        return;
    }
    if (qdev->secure_user_id == 0) {
        ALOGD("Secure user ID is zero (invalid)");
        return;
    }

    // Find an available entry in the table
    pthread_mutex_lock(&qdev->lock);
    int idx = 0;
    for (idx = 0; idx < MAX_NUM_FINGERS; idx++) {
        if (qdev->listener.secureid[idx] == 0 ||
            qdev->listener.fingerid[idx] == 0) {
            // This entry is available
            break;
        }
    }
    if (idx >= MAX_NUM_FINGERS) {
        qdev->listener.state = STATE_SCAN;
        pthread_mutex_unlock(&qdev->lock);
        ALOGD("Fingerprint ID table is full");
        return;
    }

    qdev->listener.secureid[idx] = qdev->secure_user_id;
    qdev->listener.fingerid[idx] = fid;
    saveFingerprint(&qdev->listener, idx);

    qdev->listener.state = STATE_SCAN;
    pthread_mutex_unlock(&qdev->lock);

    // The notification is deliberately sent after releasing the mutex
    // (the original "LOCKED notification?" question).
    fingerprint_msg_t msg = {0, {0}};
    msg.type = FINGERPRINT_TEMPLATE_ENROLLING;
    msg.data.enroll.finger.fid = fid;
    msg.data.enroll.samples_remaining = 0;
    qdev->device.notify(&msg);

    return;
}
|
||||
|
||||
/* Thread-safe read of the listener thread's current state. */
static worker_state_t getListenerState(qemu_fingerprint_device_t* dev) {
    ALOGV("----------------> %s ----------------->", __FUNCTION__);

    pthread_mutex_lock(&dev->lock);
    worker_state_t state = dev->listener.state;
    pthread_mutex_unlock(&dev->lock);

    return state;
}
|
||||
|
||||
/**
|
||||
* This a very simple event loop for the fingerprint sensor. For a given state (enroll, scan),
|
||||
* this would receive events from the sensor and forward them to fingerprintd using the
|
||||
* notify() method.
|
||||
*
|
||||
* In this simple example, we open a qemu channel (a pipe) where the developer can inject events to
|
||||
* exercise the API and test application code.
|
||||
*
|
||||
* The scanner should remain in the scanning state until either an error occurs or the operation
|
||||
* completes.
|
||||
*
|
||||
* Recoverable errors such as EINTR should be handled locally; they should not
|
||||
* be propagated unless there's something the user can do about it (e.g. "clean sensor"). Such
|
||||
* messages should go through the onAcquired() interface.
|
||||
*
|
||||
* If an unrecoverable error occurs, an acquired message (e.g. ACQUIRED_PARTIAL) should be sent,
|
||||
* followed by an error message (e.g. FINGERPRINT_ERROR_UNABLE_TO_PROCESS).
|
||||
*
|
||||
* Note that this event loop would typically run in TEE since it must interact with the sensor
|
||||
* hardware and handle raw fingerprint data and encrypted templates. It is expected that
|
||||
* this code monitors the TEE for resulting events, such as enrollment and authentication status.
|
||||
* Here we just have a very simple event loop that monitors a qemu channel for pseudo events.
|
||||
*/
|
||||
static void* listenerFunction(void* data) {
|
||||
ALOGD("----------------> %s ----------------->", __FUNCTION__);
|
||||
qemu_fingerprint_device_t* qdev = (qemu_fingerprint_device_t*)data;
|
||||
|
||||
pthread_mutex_lock(&qdev->lock);
|
||||
qdev->qchanfd = qemud_channel_open(FINGERPRINT_LISTEN_SERVICE_NAME);
|
||||
if (qdev->qchanfd < 0) {
|
||||
ALOGE("listener cannot open fingerprint listener service exit");
|
||||
pthread_mutex_unlock(&qdev->lock);
|
||||
return NULL;
|
||||
}
|
||||
qdev->listener.state = STATE_IDLE;
|
||||
pthread_mutex_unlock(&qdev->lock);
|
||||
|
||||
const char* cmd = "listen";
|
||||
if (qemud_channel_send(qdev->qchanfd, cmd, strlen(cmd)) < 0) {
|
||||
ALOGE("cannot write fingerprint 'listen' to host");
|
||||
goto done_quiet;
|
||||
}
|
||||
|
||||
int comm_errors = 0;
|
||||
struct pollfd pfd = {
|
||||
.fd = qdev->qchanfd,
|
||||
.events = POLLIN,
|
||||
};
|
||||
while (1) {
|
||||
int size = 0;
|
||||
int fid = 0;
|
||||
char buffer[MAX_COMM_CHARS] = {0};
|
||||
bool disconnected = false;
|
||||
while (1) {
|
||||
if (getListenerState(qdev) == STATE_EXIT) {
|
||||
ALOGD("Received request to exit listener thread");
|
||||
goto done;
|
||||
}
|
||||
|
||||
// Reset revents before poll() (just to be safe)
|
||||
pfd.revents = 0;
|
||||
|
||||
// Poll qemud channel for 5 seconds
|
||||
// TODO: Eliminate the timeout so that polling can be interrupted
|
||||
// instantly. One possible solution is to follow the example of
|
||||
// android::Looper ($AOSP/system/core/include/utils/Looper.h and
|
||||
// $AOSP/system/core/libutils/Looper.cpp), which makes use of an
|
||||
// additional file descriptor ("wake event fd").
|
||||
int nfds = poll(&pfd, 1, 5000);
|
||||
if (nfds < 0) {
|
||||
ALOGE("Could not poll qemud channel: %s", strerror(errno));
|
||||
goto done;
|
||||
}
|
||||
|
||||
if (!nfds) {
|
||||
// poll() timed out - try again
|
||||
continue;
|
||||
}
|
||||
|
||||
// assert(nfds == 1)
|
||||
if (pfd.revents & POLLIN) {
|
||||
// Input data being available doesn't rule out a disconnection
|
||||
disconnected = pfd.revents & (POLLERR | POLLHUP);
|
||||
break; // Exit inner while loop
|
||||
} else {
|
||||
// Some event(s) other than "input data available" occurred,
|
||||
// i.e. POLLERR or POLLHUP, indicating a disconnection
|
||||
ALOGW("Lost connection to qemud channel");
|
||||
goto done;
|
||||
}
|
||||
}
|
||||
|
||||
// Shouldn't block since we were just notified of a POLLIN event
|
||||
if ((size = qemud_channel_recv(qdev->qchanfd, buffer,
|
||||
sizeof(buffer) - 1)) > 0) {
|
||||
buffer[size] = '\0';
|
||||
if (sscanf(buffer, "on:%d", &fid) == 1) {
|
||||
if (fid > 0 && fid <= MAX_FID_VALUE) {
|
||||
switch (qdev->listener.state) {
|
||||
case STATE_ENROLL:
|
||||
send_enroll_notice(qdev, fid);
|
||||
break;
|
||||
case STATE_SCAN:
|
||||
send_scan_notice(qdev, fid);
|
||||
break;
|
||||
default:
|
||||
ALOGE("fingerprint event listener at unexpected "
|
||||
"state 0%x",
|
||||
qdev->listener.state);
|
||||
}
|
||||
} else {
|
||||
ALOGE("fingerprintid %d not in valid range [%d, %d] and "
|
||||
"will be "
|
||||
"ignored",
|
||||
fid, 1, MAX_FID_VALUE);
|
||||
continue;
|
||||
}
|
||||
} else if (strncmp("off", buffer, 3) == 0) {
|
||||
// TODO: Nothing to do here ? Looks valid
|
||||
ALOGD("fingerprint ID %d off", fid);
|
||||
} else {
|
||||
ALOGE("Invalid command '%s' to fingerprint listener", buffer);
|
||||
}
|
||||
|
||||
if (disconnected) {
|
||||
ALOGW("Connection to qemud channel has been lost");
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
ALOGE("fingerprint listener receive failure");
|
||||
if (comm_errors > MAX_COMM_ERRORS)
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
done:
|
||||
ALOGD("Listener exit with %d receive errors", comm_errors);
|
||||
done_quiet:
|
||||
close(qdev->qchanfd);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* hw_device_t close hook: ask the listener thread to exit, join it, then
 * tear down the mutex and free the device. Returns -1 on a NULL device. */
static int fingerprint_close(hw_device_t* device) {
    ALOGD("----------------> %s ----------------->", __FUNCTION__);
    if (device == NULL) {
        ALOGE("fingerprint hw device is NULL");
        return -1;
    }

    qemu_fingerprint_device_t* dev = (qemu_fingerprint_device_t*)device;

    // Ask listener thread to exit
    pthread_mutex_lock(&dev->lock);
    dev->listener.state = STATE_EXIT;
    pthread_mutex_unlock(&dev->lock);

    pthread_join(dev->listener.thread, NULL);
    pthread_mutex_destroy(&dev->lock);
    free(dev);

    return 0;
}
|
||||
|
||||
/* HAL open() hook: allocate the device, wire up the fingerprint_device_t
 * function table, and start the qemud listener thread.
 * Returns 0 on success, a negative value on failure. */
static int fingerprint_open(const hw_module_t* module, const char __unused *id,
                            hw_device_t** device)
{

    ALOGD("----------------> %s ----------------->", __FUNCTION__);
    if (device == NULL) {
        ALOGE("NULL device on open");
        return -EINVAL;
    }

    qemu_fingerprint_device_t* qdev = (qemu_fingerprint_device_t*)calloc(
            1, sizeof(qemu_fingerprint_device_t));
    if (qdev == NULL) {
        ALOGE("Insufficient memory for virtual fingerprint device");
        return -ENOMEM;
    }

    qdev->device.common.tag = HARDWARE_DEVICE_TAG;
    qdev->device.common.version = HARDWARE_MODULE_API_VERSION(2, 1);
    qdev->device.common.module = (struct hw_module_t*)module;
    qdev->device.common.close = fingerprint_close;

    qdev->device.pre_enroll = fingerprint_pre_enroll;
    qdev->device.enroll = fingerprint_enroll;
    qdev->device.post_enroll = fingerprint_post_enroll;
    qdev->device.get_authenticator_id = fingerprint_get_auth_id;
    qdev->device.set_active_group = fingerprint_set_active_group;
    qdev->device.authenticate = fingerprint_authenticate;
    qdev->device.cancel = fingerprint_cancel;
    qdev->device.enumerate = fingerprint_enumerate;
    qdev->device.remove = fingerprint_remove;
    qdev->device.set_notify = set_notify_callback;
    qdev->device.notify = NULL;

    // init and create listener thread
    pthread_mutex_init(&qdev->lock, NULL);
    if (pthread_create(&qdev->listener.thread, NULL, listenerFunction, qdev) !=
        0) {
        // BUG FIX: the original returned here without releasing |qdev|,
        // leaking the allocation and the initialized mutex.
        pthread_mutex_destroy(&qdev->lock);
        free(qdev);
        return -1;
    }

    // "Inheritance" / casting
    *device = &qdev->device.common;

    return 0;
}
|
||||
|
||||
/* HAL module entry points: only open() is required. */
static struct hw_module_methods_t fingerprint_module_methods = {
    .open = fingerprint_open,
};
|
||||
|
||||
/* Module descriptor exported to the HAL loader (looked up by symbol name). */
fingerprint_module_t HAL_MODULE_INFO_SYM = {
    .common = {
        .tag = HARDWARE_MODULE_TAG,
        .module_api_version = FINGERPRINT_MODULE_API_VERSION_2_1,
        .hal_api_version = HARDWARE_HAL_API_VERSION,
        .id = FINGERPRINT_HARDWARE_MODULE_ID,
        .name = "Emulator Fingerprint HAL",
        .author = "The Android Open Source Project",
        .methods = &fingerprint_module_methods,
    },
};
|
||||
1
android/fstab.goldfish
Normal file
1
android/fstab.goldfish
Normal file
|
|
@ -0,0 +1 @@
|
|||
# Android fstab file.
|
||||
47
android/gps/Android.mk
Normal file
47
android/gps/Android.mk
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
# Copyright (C) 2010 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


# We're moving the emulator-specific platform libs to
# development.git/tools/emulator/. The following test is to ensure
# smooth builds even if the tree contains both versions.
#

LOCAL_PATH := $(call my-dir)

# HAL module implementation stored in
# hw/<GPS_HARDWARE_MODULE_ID>.<ro.hardware>.so
include $(CLEAR_VARS)

LOCAL_MODULE_RELATIVE_PATH := hw
LOCAL_CFLAGS += -DQEMU_HARDWARE
LOCAL_SHARED_LIBRARIES := liblog libcutils libhardware
LOCAL_SRC_FILES := gps_qemu.c
ifeq ($(TARGET_PRODUCT),vbox_x86)
LOCAL_MODULE := gps.vbox_x86
else
LOCAL_MODULE := gps.goldfish
endif
include $(BUILD_SHARED_LIBRARY)

# Second build of the same sources for the "ranchu" board variant.
include $(CLEAR_VARS)

LOCAL_MODULE_RELATIVE_PATH := hw
LOCAL_CFLAGS += -DQEMU_HARDWARE
LOCAL_SHARED_LIBRARIES := liblog libcutils libhardware
LOCAL_SRC_FILES := gps_qemu.c
LOCAL_MODULE := gps.ranchu

include $(BUILD_SHARED_LIBRARY)
|
||||
953
android/gps/gps_qemu.c
Normal file
953
android/gps/gps_qemu.c
Normal file
|
|
@ -0,0 +1,953 @@
|
|||
/*
|
||||
* Copyright (C) 2010 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/* this implements a GPS hardware library for the Android emulator.
|
||||
* the following code should be built as a shared library that will be
|
||||
* placed into /system/lib/hw/gps.goldfish.so
|
||||
*
|
||||
* it will be loaded by the code in hardware/libhardware/hardware.c
|
||||
* which is itself called from android_location_GpsLocationProvider.cpp
|
||||
*/
|
||||
|
||||
|
||||
#include <errno.h>
|
||||
#include <pthread.h>
|
||||
#include <fcntl.h>
|
||||
#include <sys/epoll.h>
|
||||
#include <math.h>
|
||||
#include <time.h>
|
||||
|
||||
#define LOG_TAG "gps_qemu"
|
||||
#include <cutils/log.h>
|
||||
#include <cutils/sockets.h>
|
||||
#include <hardware/gps.h>
|
||||
#include <hardware/qemud.h>
|
||||
|
||||
/* the name of the qemud-controlled socket */
|
||||
#define QEMU_CHANNEL_NAME "gps"
|
||||
|
||||
#define GPS_DEBUG 0
|
||||
|
||||
#if GPS_DEBUG
|
||||
# define D(...) ALOGD(__VA_ARGS__)
|
||||
#else
|
||||
# define D(...) ((void)0)
|
||||
#endif
|
||||
|
||||
/*****************************************************************/
|
||||
/*****************************************************************/
|
||||
/***** *****/
|
||||
/***** N M E A T O K E N I Z E R *****/
|
||||
/***** *****/
|
||||
/*****************************************************************/
|
||||
/*****************************************************************/
|
||||
|
||||
/* A token is a [p, end) slice of the original NMEA sentence; it does not
 * own the memory and is not NUL-terminated. */
typedef struct {
    const char* p;
    const char* end;
} Token;
|
||||
|
||||
/* Maximum number of comma-separated fields kept per NMEA sentence. */
#define MAX_NMEA_TOKENS 16

/* Result of splitting one NMEA sentence into at most MAX_NMEA_TOKENS slices. */
typedef struct {
    int count;                        /* number of valid entries in tokens[] */
    Token tokens[ MAX_NMEA_TOKENS ];
} NmeaTokenizer;
|
||||
|
||||
static int
|
||||
nmea_tokenizer_init( NmeaTokenizer* t, const char* p, const char* end )
|
||||
{
|
||||
int count = 0;
|
||||
char* q;
|
||||
|
||||
// the initial '$' is optional
|
||||
if (p < end && p[0] == '$')
|
||||
p += 1;
|
||||
|
||||
// remove trailing newline
|
||||
if (end > p && end[-1] == '\n') {
|
||||
end -= 1;
|
||||
if (end > p && end[-1] == '\r')
|
||||
end -= 1;
|
||||
}
|
||||
|
||||
// get rid of checksum at the end of the sentecne
|
||||
if (end >= p+3 && end[-3] == '*') {
|
||||
end -= 3;
|
||||
}
|
||||
|
||||
while (p < end) {
|
||||
const char* q = p;
|
||||
|
||||
q = memchr(p, ',', end-p);
|
||||
if (q == NULL)
|
||||
q = end;
|
||||
|
||||
if (count < MAX_NMEA_TOKENS) {
|
||||
t->tokens[count].p = p;
|
||||
t->tokens[count].end = q;
|
||||
count += 1;
|
||||
}
|
||||
if (q < end)
|
||||
q += 1;
|
||||
|
||||
p = q;
|
||||
}
|
||||
|
||||
t->count = count;
|
||||
return count;
|
||||
}
|
||||
|
||||
static Token
|
||||
nmea_tokenizer_get( NmeaTokenizer* t, int index )
|
||||
{
|
||||
Token tok;
|
||||
static const char* dummy = "";
|
||||
|
||||
if (index < 0 || index >= t->count) {
|
||||
tok.p = tok.end = dummy;
|
||||
} else
|
||||
tok = t->tokens[index];
|
||||
|
||||
return tok;
|
||||
}
|
||||
|
||||
|
||||
/* Parse the decimal digits in [p, end).
 * Returns the non-negative value, or -1 when the range is empty or
 * contains a non-digit character. (The old code silently returned 0
 * for an empty range.)
 */
static int
str2int( const char*  p, const char*  end )
{
    int  result = 0;
    int  len    = end - p;

    if (len <= 0)
        return -1;

    for ( ; len > 0; len--, p++ )
    {
        // unsigned trick: any non-digit maps outside [0,9]
        int  c = *p - '0';
        if ((unsigned)c >= 10)
            return -1;

        result = result*10 + c;
    }
    return result;
}
|
||||
|
||||
/* Parse the floating-point number in [p, end) with strtod().
 * Returns 0. when the range is empty, inverted, or too long to buffer
 * (>= 16 chars) — callers treat 0. as "no data".
 */
static double
str2float( const char*  p, const char*  end )
{
    int   len = end - p;
    char  temp[16];

    // guard against inverted ranges as well as oversized tokens
    if (len <= 0 || len >= (int)sizeof(temp))
        return 0.;

    memcpy( temp, p, len );
    temp[len] = 0;
    return strtod( temp, NULL );
}
|
||||
|
||||
/*****************************************************************/
|
||||
/*****************************************************************/
|
||||
/***** *****/
|
||||
/***** N M E A P A R S E R *****/
|
||||
/***** *****/
|
||||
/*****************************************************************/
|
||||
/*****************************************************************/
|
||||
|
||||
/* maximum length of a standard NMEA sentence (82 chars) plus a NUL */
#define  NMEA_MAX_SIZE  83

/* Incremental NMEA parser state: bytes accumulate in 'in' until a full
 * sentence is seen, which updates 'fix'; the fix is then reported through
 * 'callback' (or kept pending while callback is NULL). */
typedef struct {
    int     pos;          /* number of bytes accumulated in 'in' */
    int     overflow;     /* non-zero while discarding an oversized sentence */
    int     utc_year;     /* date from the last RMC sentence, or -1 if none */
    int     utc_mon;
    int     utc_day;
    int     utc_diff;     /* (UTC - local time) in seconds, for mktime() correction */
    GpsLocation  fix;     /* pending fix; fix.flags == 0 means nothing to send */
    gps_location_callback  callback;  /* destination for fixes, may be NULL */
    char    in[ NMEA_MAX_SIZE+1 ];    /* sentence accumulation buffer */
} NmeaReader;
|
||||
|
||||
|
||||
/* Compute the offset (in seconds) between UTC and local time and store
 * it in r->utc_diff, so mktime() results can be corrected back to UTC.
 * NOTE(review): the flattening below uses 365-day years and ignores
 * leap years; since both operands use the same approximation the
 * difference is correct except right around a year boundary — presumably
 * acceptable for the emulator, but worth confirming. */
static void
nmea_reader_update_utc_diff( NmeaReader*  r )
{
    time_t     now = time(NULL);
    struct tm  tm_local;
    struct tm  tm_utc;
    long       time_local, time_utc;

    gmtime_r( &now, &tm_utc );
    localtime_r( &now, &tm_local );

    /* flatten each struct tm into approximate seconds since "year 0" */
    time_local = tm_local.tm_sec +
                 60*(tm_local.tm_min +
                 60*(tm_local.tm_hour +
                 24*(tm_local.tm_yday +
                 365*tm_local.tm_year)));

    time_utc = tm_utc.tm_sec +
               60*(tm_utc.tm_min +
               60*(tm_utc.tm_hour +
               24*(tm_utc.tm_yday +
               365*tm_utc.tm_year)));

    r->utc_diff = time_utc - time_local;
}
|
||||
|
||||
|
||||
static void
|
||||
nmea_reader_init( NmeaReader* r )
|
||||
{
|
||||
memset( r, 0, sizeof(*r) );
|
||||
|
||||
r->pos = 0;
|
||||
r->overflow = 0;
|
||||
r->utc_year = -1;
|
||||
r->utc_mon = -1;
|
||||
r->utc_day = -1;
|
||||
r->callback = NULL;
|
||||
r->fix.size = sizeof(r->fix);
|
||||
|
||||
nmea_reader_update_utc_diff( r );
|
||||
}
|
||||
|
||||
|
||||
/* Install (or clear, with cb == NULL) the location callback.
 * If a fix is already pending (fix.flags != 0) it is delivered to the
 * new callback immediately, then the pending flags are cleared. */
static void
nmea_reader_set_callback( NmeaReader*  r, gps_location_callback  cb )
{
    r->callback = cb;
    if (cb != NULL && r->fix.flags != 0) {
        D("%s: sending latest fix to new callback", __FUNCTION__);
        r->callback( &r->fix );
        r->fix.flags = 0;
    }
}
|
||||
|
||||
|
||||
static int
|
||||
nmea_reader_update_time( NmeaReader* r, Token tok )
|
||||
{
|
||||
int hour, minute;
|
||||
double seconds;
|
||||
struct tm tm;
|
||||
time_t fix_time;
|
||||
|
||||
if (tok.p + 6 > tok.end)
|
||||
return -1;
|
||||
|
||||
if (r->utc_year < 0) {
|
||||
// no date yet, get current one
|
||||
time_t now = time(NULL);
|
||||
gmtime_r( &now, &tm );
|
||||
r->utc_year = tm.tm_year + 1900;
|
||||
r->utc_mon = tm.tm_mon + 1;
|
||||
r->utc_day = tm.tm_mday;
|
||||
}
|
||||
|
||||
hour = str2int(tok.p, tok.p+2);
|
||||
minute = str2int(tok.p+2, tok.p+4);
|
||||
seconds = str2float(tok.p+4, tok.end);
|
||||
|
||||
tm.tm_hour = hour;
|
||||
tm.tm_min = minute;
|
||||
tm.tm_sec = (int) seconds;
|
||||
tm.tm_year = r->utc_year - 1900;
|
||||
tm.tm_mon = r->utc_mon - 1;
|
||||
tm.tm_mday = r->utc_day;
|
||||
tm.tm_isdst = -1;
|
||||
|
||||
fix_time = mktime( &tm ) + r->utc_diff;
|
||||
r->fix.timestamp = (long long)fix_time * 1000;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
nmea_reader_update_date( NmeaReader* r, Token date, Token time )
|
||||
{
|
||||
Token tok = date;
|
||||
int day, mon, year;
|
||||
|
||||
if (tok.p + 6 != tok.end) {
|
||||
D("date not properly formatted: '%.*s'", tok.end-tok.p, tok.p);
|
||||
return -1;
|
||||
}
|
||||
day = str2int(tok.p, tok.p+2);
|
||||
mon = str2int(tok.p+2, tok.p+4);
|
||||
year = str2int(tok.p+4, tok.p+6) + 2000;
|
||||
|
||||
if ((day|mon|year) < 0) {
|
||||
D("date not properly formatted: '%.*s'", tok.end-tok.p, tok.p);
|
||||
return -1;
|
||||
}
|
||||
|
||||
r->utc_year = year;
|
||||
r->utc_mon = mon;
|
||||
r->utc_day = day;
|
||||
|
||||
return nmea_reader_update_time( r, time );
|
||||
}
|
||||
|
||||
|
||||
static double
|
||||
convert_from_hhmm( Token tok )
|
||||
{
|
||||
double val = str2float(tok.p, tok.end);
|
||||
int degrees = (int)(floor(val) / 100);
|
||||
double minutes = val - degrees*100.;
|
||||
double dcoord = degrees + minutes / 60.0;
|
||||
return dcoord;
|
||||
}
|
||||
|
||||
|
||||
static int
|
||||
nmea_reader_update_latlong( NmeaReader* r,
|
||||
Token latitude,
|
||||
char latitudeHemi,
|
||||
Token longitude,
|
||||
char longitudeHemi )
|
||||
{
|
||||
double lat, lon;
|
||||
Token tok;
|
||||
|
||||
tok = latitude;
|
||||
if (tok.p + 6 > tok.end) {
|
||||
D("latitude is too short: '%.*s'", tok.end-tok.p, tok.p);
|
||||
return -1;
|
||||
}
|
||||
lat = convert_from_hhmm(tok);
|
||||
if (latitudeHemi == 'S')
|
||||
lat = -lat;
|
||||
|
||||
tok = longitude;
|
||||
if (tok.p + 6 > tok.end) {
|
||||
D("longitude is too short: '%.*s'", tok.end-tok.p, tok.p);
|
||||
return -1;
|
||||
}
|
||||
lon = convert_from_hhmm(tok);
|
||||
if (longitudeHemi == 'W')
|
||||
lon = -lon;
|
||||
|
||||
r->fix.flags |= GPS_LOCATION_HAS_LAT_LONG;
|
||||
r->fix.latitude = lat;
|
||||
r->fix.longitude = lon;
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static int
|
||||
nmea_reader_update_altitude( NmeaReader* r,
|
||||
Token altitude,
|
||||
Token units )
|
||||
{
|
||||
double alt;
|
||||
Token tok = altitude;
|
||||
|
||||
if (tok.p >= tok.end)
|
||||
return -1;
|
||||
|
||||
r->fix.flags |= GPS_LOCATION_HAS_ALTITUDE;
|
||||
r->fix.altitude = str2float(tok.p, tok.end);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static int
|
||||
nmea_reader_update_bearing( NmeaReader* r,
|
||||
Token bearing )
|
||||
{
|
||||
double alt;
|
||||
Token tok = bearing;
|
||||
|
||||
if (tok.p >= tok.end)
|
||||
return -1;
|
||||
|
||||
r->fix.flags |= GPS_LOCATION_HAS_BEARING;
|
||||
r->fix.bearing = str2float(tok.p, tok.end);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static int
|
||||
nmea_reader_update_speed( NmeaReader* r,
|
||||
Token speed )
|
||||
{
|
||||
double alt;
|
||||
Token tok = speed;
|
||||
|
||||
if (tok.p >= tok.end)
|
||||
return -1;
|
||||
|
||||
r->fix.flags |= GPS_LOCATION_HAS_SPEED;
|
||||
r->fix.speed = str2float(tok.p, tok.end);
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Stamp a fixed 20 m accuracy onto the pending fix. Always returns 0. */
static int
nmea_reader_update_accuracy( NmeaReader*  r )
{
    // Always return 20m accuracy.
    // Possibly parse it from the NMEA sentence in the future.
    r->fix.flags   |= GPS_LOCATION_HAS_ACCURACY;
    r->fix.accuracy = 20;
    return 0;
}
|
||||
|
||||
|
||||
/* Parse the complete NMEA sentence accumulated in r->in (r->pos bytes)
 * and fold its fields into the pending fix r->fix. Only GGA and RMC
 * sentences carry data we use. The fix is delivered to r->callback when
 * one is installed; otherwise it stays pending until a callback arrives
 * (see nmea_reader_set_callback). */
static void
nmea_reader_parse( NmeaReader*  r )
{
   /* we received a complete sentence, now parse it to generate
    * a new GPS fix...
    */
    NmeaTokenizer  tzer[1];
    Token          tok;

    D("Received: '%.*s'", r->pos, r->in);
    if (r->pos < 9) {
        /* shorter than the minimal useful sentence */
        D("Too short. discarded.");
        return;
    }

    nmea_tokenizer_init(tzer, r->in, r->in + r->pos);
#if GPS_DEBUG
    {
        int  n;
        D("Found %d tokens", tzer->count);
        for (n = 0; n < tzer->count; n++) {
            Token  tok = nmea_tokenizer_get(tzer,n);
            D("%2d: '%.*s'", n, tok.end-tok.p, tok.p);
        }
    }
#endif

    /* token 0 is the sentence id, e.g. "GPGGA" */
    tok = nmea_tokenizer_get(tzer, 0);
    if (tok.p + 5 > tok.end) {
        D("sentence id '%.*s' too short, ignored.", tok.end-tok.p, tok.p);
        return;
    }

    // ignore first two characters (the talker id, e.g. "GP") —
    // the length check above guarantees 3 bytes remain for memcmp
    tok.p += 2;
    if ( !memcmp(tok.p, "GGA", 3) ) {
        // GPS fix: time + position + altitude
        Token  tok_time          = nmea_tokenizer_get(tzer,1);
        Token  tok_latitude      = nmea_tokenizer_get(tzer,2);
        Token  tok_latitudeHemi  = nmea_tokenizer_get(tzer,3);
        Token  tok_longitude     = nmea_tokenizer_get(tzer,4);
        Token  tok_longitudeHemi = nmea_tokenizer_get(tzer,5);
        Token  tok_altitude      = nmea_tokenizer_get(tzer,9);
        Token  tok_altitudeUnits = nmea_tokenizer_get(tzer,10);

        nmea_reader_update_time(r, tok_time);
        nmea_reader_update_latlong(r, tok_latitude,
                                      tok_latitudeHemi.p[0],
                                      tok_longitude,
                                      tok_longitudeHemi.p[0]);
        nmea_reader_update_altitude(r, tok_altitude, tok_altitudeUnits);

    } else if ( !memcmp(tok.p, "GSA", 3) ) {
        // satellite status: currently ignored
        // do something ?
    } else if ( !memcmp(tok.p, "RMC", 3) ) {
        /* recommended minimum: date + time + position + speed + bearing */
        Token  tok_time          = nmea_tokenizer_get(tzer,1);
        Token  tok_fixStatus     = nmea_tokenizer_get(tzer,2);
        Token  tok_latitude      = nmea_tokenizer_get(tzer,3);
        Token  tok_latitudeHemi  = nmea_tokenizer_get(tzer,4);
        Token  tok_longitude     = nmea_tokenizer_get(tzer,5);
        Token  tok_longitudeHemi = nmea_tokenizer_get(tzer,6);
        Token  tok_speed         = nmea_tokenizer_get(tzer,7);
        Token  tok_bearing       = nmea_tokenizer_get(tzer,8);
        Token  tok_date          = nmea_tokenizer_get(tzer,9);

        D("in RMC, fixStatus=%c", tok_fixStatus.p[0]);
        /* 'A' means the receiver reports a valid (Active) fix */
        if (tok_fixStatus.p[0] == 'A')
        {
            nmea_reader_update_date( r, tok_date, tok_time );

            nmea_reader_update_latlong( r, tok_latitude,
                                           tok_latitudeHemi.p[0],
                                           tok_longitude,
                                           tok_longitudeHemi.p[0] );

            nmea_reader_update_bearing( r, tok_bearing );
            nmea_reader_update_speed  ( r, tok_speed );
        }
    } else {
        tok.p -= 2;   // restore the talker id for the log message
        D("unknown sentence '%.*s", tok.end-tok.p, tok.p);
    }

    // Always update accuracy
    nmea_reader_update_accuracy( r );

    if (r->fix.flags != 0) {
#if GPS_DEBUG
        char   temp[256];
        char*  p   = temp;
        char*  end = p + sizeof(temp);
        struct tm   utc;

        p += snprintf( p, end-p, "sending fix" );
        if (r->fix.flags & GPS_LOCATION_HAS_LAT_LONG) {
            p += snprintf(p, end-p, " lat=%g lon=%g", r->fix.latitude, r->fix.longitude);
        }
        if (r->fix.flags & GPS_LOCATION_HAS_ALTITUDE) {
            p += snprintf(p, end-p, " altitude=%g", r->fix.altitude);
        }
        if (r->fix.flags & GPS_LOCATION_HAS_SPEED) {
            p += snprintf(p, end-p, " speed=%g", r->fix.speed);
        }
        if (r->fix.flags & GPS_LOCATION_HAS_BEARING) {
            p += snprintf(p, end-p, " bearing=%g", r->fix.bearing);
        }
        if (r->fix.flags & GPS_LOCATION_HAS_ACCURACY) {
            p += snprintf(p,end-p, " accuracy=%g", r->fix.accuracy);
        }
        /* NOTE(review): timestamp is int64 milliseconds, but this casts
         * its address to time_t* (seconds) — debug-only, yet the printed
         * time is wrong; confirm before relying on this log */
        gmtime_r( (time_t*) &r->fix.timestamp, &utc );
        p += snprintf(p, end-p, " time=%s", asctime( &utc ) );
        D(temp);
#endif
        if (r->callback) {
            r->callback( &r->fix );
            r->fix.flags = 0;   // fix consumed
        }
        else {
            D("no callback, keeping data until needed !");
        }
    }
}
|
||||
|
||||
|
||||
static void
|
||||
nmea_reader_addc( NmeaReader* r, int c )
|
||||
{
|
||||
if (r->overflow) {
|
||||
r->overflow = (c != '\n');
|
||||
return;
|
||||
}
|
||||
|
||||
if (r->pos >= (int) sizeof(r->in)-1 ) {
|
||||
r->overflow = 1;
|
||||
r->pos = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
r->in[r->pos] = (char)c;
|
||||
r->pos += 1;
|
||||
|
||||
if (c == '\n') {
|
||||
nmea_reader_parse( r );
|
||||
r->pos = 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/*****************************************************************/
|
||||
/*****************************************************************/
|
||||
/***** *****/
|
||||
/***** C O N N E C T I O N S T A T E *****/
|
||||
/***** *****/
|
||||
/*****************************************************************/
|
||||
/*****************************************************************/
|
||||
|
||||
/* commands sent to the gps thread */
enum {
    CMD_QUIT  = 0,
    CMD_START = 1,
    CMD_STOP  = 2
};


/* this is the state of our connection to the qemu_gpsd daemon */
typedef struct {
    int                     init;        /* non-zero once gps_state_init() has run */
    int                     fd;          /* qemud "gps" channel, -1 when unavailable */
    GpsCallbacks            callbacks;   /* framework callbacks, copied at init */
    pthread_t               thread;      /* worker created via create_thread_cb */
    int                     control[2];  /* socketpair: [0]=API side, [1]=thread side */
} GpsState;

/* the single global GPS state — this HAL is a singleton */
static GpsState  _gps_state[1];
|
||||
|
||||
|
||||
static void
|
||||
gps_state_done( GpsState* s )
|
||||
{
|
||||
// tell the thread to quit, and wait for it
|
||||
char cmd = CMD_QUIT;
|
||||
void* dummy;
|
||||
write( s->control[0], &cmd, 1 );
|
||||
pthread_join(s->thread, &dummy);
|
||||
|
||||
// close the control socket pair
|
||||
close( s->control[0] ); s->control[0] = -1;
|
||||
close( s->control[1] ); s->control[1] = -1;
|
||||
|
||||
// close connection to the QEMU GPS daemon
|
||||
close( s->fd ); s->fd = -1;
|
||||
s->init = 0;
|
||||
}
|
||||
|
||||
|
||||
static void
|
||||
gps_state_start( GpsState* s )
|
||||
{
|
||||
char cmd = CMD_START;
|
||||
int ret;
|
||||
|
||||
do { ret=write( s->control[0], &cmd, 1 ); }
|
||||
while (ret < 0 && errno == EINTR);
|
||||
|
||||
if (ret != 1)
|
||||
D("%s: could not send CMD_START command: ret=%d: %s",
|
||||
__FUNCTION__, ret, strerror(errno));
|
||||
}
|
||||
|
||||
|
||||
static void
|
||||
gps_state_stop( GpsState* s )
|
||||
{
|
||||
char cmd = CMD_STOP;
|
||||
int ret;
|
||||
|
||||
do { ret=write( s->control[0], &cmd, 1 ); }
|
||||
while (ret < 0 && errno == EINTR);
|
||||
|
||||
if (ret != 1)
|
||||
D("%s: could not send CMD_STOP command: ret=%d: %s",
|
||||
__FUNCTION__, ret, strerror(errno));
|
||||
}
|
||||
|
||||
|
||||
/* Make 'fd' non-blocking and register it for EPOLLIN on 'epoll_fd'.
 * Returns 0 on success, -1 on error (fcntl results are now checked). */
static int
epoll_register( int  epoll_fd, int  fd )
{
    struct epoll_event  ev;
    int                 ret, flags;

    /* important: make the fd non-blocking */
    flags = fcntl(fd, F_GETFL);
    if (flags < 0)
        return -1;
    if (fcntl(fd, F_SETFL, flags | O_NONBLOCK) < 0)
        return -1;

    ev.events  = EPOLLIN;
    ev.data.fd = fd;
    do {
        ret = epoll_ctl( epoll_fd, EPOLL_CTL_ADD, fd, &ev );
    } while (ret < 0 && errno == EINTR);
    return ret;
}
|
||||
|
||||
|
||||
/* Remove 'fd' from the epoll set, retrying on EINTR.
 * Returns epoll_ctl's result (0 on success, -1 on error). */
static int
epoll_deregister( int  epoll_fd, int  fd )
{
    int  ret;
    for (;;) {
        ret = epoll_ctl( epoll_fd, EPOLL_CTL_DEL, fd, NULL );
        if (ret >= 0 || errno != EINTR)
            return ret;
    }
}
|
||||
|
||||
/* this is the main thread, it waits for commands from gps_state_start/stop and,
 * when started, messages from the QEMU GPS daemon. these are simple NMEA sentences
 * that must be parsed to be converted into GPS fixes sent to the framework
 */
static void
gps_state_thread( void*  arg )
{
    GpsState*   state = (GpsState*) arg;
    NmeaReader  reader[1];                       /* NMEA parser state */
    int         epoll_fd   = epoll_create(2);
    int         started    = 0;                  /* non-zero between CMD_START and CMD_STOP */
    int         gps_fd     = state->fd;          /* qemud "gps" channel */
    int         control_fd = state->control[1];  /* thread side of the control socketpair */

    nmea_reader_init( reader );

    // register control file descriptors for polling
    epoll_register( epoll_fd, control_fd );
    epoll_register( epoll_fd, gps_fd );

    D("gps thread running");

    // now loop
    for (;;) {
        struct epoll_event   events[2];
        int                  ne, nevents;

        nevents = epoll_wait( epoll_fd, events, 2, -1 );
        if (nevents < 0) {
            if (errno != EINTR)
                ALOGE("epoll_wait() unexpected error: %s", strerror(errno));
            continue;
        }
        D("gps thread received %d events", nevents);
        for (ne = 0; ne < nevents; ne++) {
            if ((events[ne].events & (EPOLLERR|EPOLLHUP)) != 0) {
                ALOGE("EPOLLERR or EPOLLHUP after epoll_wait() !?");
                return;
            }
            if ((events[ne].events & EPOLLIN) != 0) {
                int  fd = events[ne].data.fd;

                if (fd == control_fd)
                {
                    /* one-byte command from gps_state_start/stop/done */
                    char  cmd = 255;
                    int   ret;
                    D("gps control fd event");
                    do {
                        ret = read( fd, &cmd, 1 );
                    } while (ret < 0 && errno == EINTR);

                    if (cmd == CMD_QUIT) {
                        D("gps thread quitting on demand");
                        return;
                    }
                    else if (cmd == CMD_START) {
                        if (!started) {
                            D("gps thread starting location_cb=%p", state->callbacks.location_cb);
                            started = 1;
                            /* installing the callback may immediately flush a pending fix */
                            nmea_reader_set_callback( reader, state->callbacks.location_cb );
                        }
                    }
                    else if (cmd == CMD_STOP) {
                        if (started) {
                            D("gps thread stopping");
                            started = 0;
                            /* clearing the callback keeps parsing but stops reporting */
                            nmea_reader_set_callback( reader, NULL );
                        }
                    }
                }
                else if (fd == gps_fd)
                {
                    /* drain the non-blocking qemud channel and feed the parser
                     * one byte at a time until EWOULDBLOCK */
                    char  buff[32];
                    D("gps fd event");
                    for (;;) {
                        int  nn, ret;

                        ret = read( fd, buff, sizeof(buff) );
                        if (ret < 0) {
                            if (errno == EINTR)
                                continue;
                            if (errno != EWOULDBLOCK)
                                ALOGE("error while reading from gps daemon socket: %s:", strerror(errno));
                            break;
                        }
                        D("received %d bytes: %.*s", ret, ret, buff);
                        for (nn = 0; nn < ret; nn++)
                            nmea_reader_addc( reader, buff[nn] );
                    }
                    D("gps fd event end");
                }
                else
                {
                    ALOGE("epoll_wait() returned unkown fd %d ?", fd);
                }
            }
        }
    }
}
|
||||
|
||||
|
||||
static void
|
||||
gps_state_init( GpsState* state, GpsCallbacks* callbacks )
|
||||
{
|
||||
state->init = 1;
|
||||
state->control[0] = -1;
|
||||
state->control[1] = -1;
|
||||
state->fd = -1;
|
||||
|
||||
state->fd = qemud_channel_open(QEMU_CHANNEL_NAME);
|
||||
|
||||
if (state->fd < 0) {
|
||||
D("no gps emulation detected");
|
||||
return;
|
||||
}
|
||||
|
||||
D("gps emulation will read from '%s' qemud channel", QEMU_CHANNEL_NAME );
|
||||
|
||||
if ( socketpair( AF_LOCAL, SOCK_STREAM, 0, state->control ) < 0 ) {
|
||||
ALOGE("could not create thread control socket pair: %s", strerror(errno));
|
||||
goto Fail;
|
||||
}
|
||||
|
||||
state->thread = callbacks->create_thread_cb( "gps_state_thread", gps_state_thread, state );
|
||||
|
||||
if ( !state->thread ) {
|
||||
ALOGE("could not create gps thread: %s", strerror(errno));
|
||||
goto Fail;
|
||||
}
|
||||
|
||||
state->callbacks = *callbacks;
|
||||
|
||||
D("gps state initialized");
|
||||
return;
|
||||
|
||||
Fail:
|
||||
gps_state_done( state );
|
||||
}
|
||||
|
||||
|
||||
/*****************************************************************/
|
||||
/*****************************************************************/
|
||||
/***** *****/
|
||||
/***** I N T E R F A C E *****/
|
||||
/***** *****/
|
||||
/*****************************************************************/
|
||||
/*****************************************************************/
|
||||
|
||||
|
||||
/* GpsInterface.init: lazily open the qemud channel and spawn the worker.
 * Returns 0 on success, -1 when no GPS emulation is available. */
static int
qemu_gps_init(GpsCallbacks* callbacks)
{
    GpsState*  s = _gps_state;

    if (!s->init)
        gps_state_init(s, callbacks);

    /* s->init may be set even when the channel failed to open,
     * so the fd is the real success indicator */
    if (s->fd < 0)
        return -1;

    return 0;
}
|
||||
|
||||
/* GpsInterface.cleanup: stop the worker thread and release all state. */
static void
qemu_gps_cleanup(void)
{
    GpsState*  s = _gps_state;

    if (s->init)
        gps_state_done(s);
}
|
||||
|
||||
|
||||
static int
|
||||
qemu_gps_start()
|
||||
{
|
||||
GpsState* s = _gps_state;
|
||||
|
||||
if (!s->init) {
|
||||
D("%s: called with uninitialized state !!", __FUNCTION__);
|
||||
return -1;
|
||||
}
|
||||
|
||||
D("%s: called", __FUNCTION__);
|
||||
gps_state_start(s);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static int
|
||||
qemu_gps_stop()
|
||||
{
|
||||
GpsState* s = _gps_state;
|
||||
|
||||
if (!s->init) {
|
||||
D("%s: called with uninitialized state !!", __FUNCTION__);
|
||||
return -1;
|
||||
}
|
||||
|
||||
D("%s: called", __FUNCTION__);
|
||||
gps_state_stop(s);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static int
|
||||
qemu_gps_inject_time(GpsUtcTime time, int64_t timeReference, int uncertainty)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* GpsInterface.inject_location: reference positions are ignored by the
 * emulator. Always succeeds. */
static int
qemu_gps_inject_location(double latitude, double longitude, float accuracy)
{
    (void)latitude;        // silence unused-parameter warnings
    (void)longitude;
    (void)accuracy;
    return 0;
}
|
||||
|
||||
static void
|
||||
qemu_gps_delete_aiding_data(GpsAidingData flags)
|
||||
{
|
||||
}
|
||||
|
||||
/* GpsInterface.set_position_mode: accepted but not applied — the
 * emulator streams fixes at its own rate. Always succeeds. */
static int qemu_gps_set_position_mode(GpsPositionMode mode, int fix_frequency)
{
    (void)mode;            // silence unused-parameter warnings
    (void)fix_frequency;
    // FIXME - support fix_frequency
    return 0;
}
|
||||
|
||||
/* GpsInterface.get_extension: this HAL implements no extension
 * interfaces (XTRA, AGPS, ...). Always returns NULL. */
static const void*
qemu_gps_get_extension(const char* name)
{
    (void)name;   // silence unused-parameter warning
    // no extensions supported
    return NULL;
}
|
||||
|
||||
/* vtable handed to the framework; entry order must match the
 * GpsInterface declaration in hardware/gps.h */
static const GpsInterface  qemuGpsInterface = {
    sizeof(GpsInterface),
    qemu_gps_init,
    qemu_gps_start,
    qemu_gps_stop,
    qemu_gps_cleanup,
    qemu_gps_inject_time,
    qemu_gps_inject_location,
    qemu_gps_delete_aiding_data,
    qemu_gps_set_position_mode,
    qemu_gps_get_extension,
};
|
||||
|
||||
/* gps_device_t.get_gps_interface hook: hand out the singleton vtable. */
const GpsInterface* gps__get_gps_interface(struct gps_device_t* dev)
{
    return &qemuGpsInterface;
}
|
||||
|
||||
static int open_gps(const struct hw_module_t* module, char const* name,
|
||||
struct hw_device_t** device)
|
||||
{
|
||||
struct gps_device_t *dev = malloc(sizeof(struct gps_device_t));
|
||||
memset(dev, 0, sizeof(*dev));
|
||||
|
||||
dev->common.tag = HARDWARE_DEVICE_TAG;
|
||||
dev->common.version = 0;
|
||||
dev->common.module = (struct hw_module_t*)module;
|
||||
// dev->common.close = (int (*)(struct hw_device_t*))close_lights;
|
||||
dev->get_gps_interface = gps__get_gps_interface;
|
||||
|
||||
*device = (struct hw_device_t*)dev;
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/* module entry points consumed by hardware/libhardware/hardware.c */
static struct hw_module_methods_t gps_module_methods = {
    .open = open_gps
};
|
||||
|
||||
/* exported HAL module descriptor; hardware.c dlopen()s this library and
 * looks the symbol up by name, so it must be HAL_MODULE_INFO_SYM */
struct hw_module_t HAL_MODULE_INFO_SYM = {
    .tag = HARDWARE_MODULE_TAG,
    .version_major = 1,
    .version_minor = 0,
    .id = GPS_HARDWARE_MODULE_ID,
    .name = "Goldfish GPS Module",
    .author = "The Android Open Source Project",
    .methods = &gps_module_methods,
};
|
||||
99
android/init.goldfish.rc
Normal file
99
android/init.goldfish.rc
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
|
||||
on early-init
|
||||
mount debugfs debugfs /sys/kernel/debug
|
||||
|
||||
on init
|
||||
|
||||
on boot
|
||||
setprop ARGH ARGH
|
||||
setprop net.eth0.gw 10.0.2.2
|
||||
setprop net.eth0.dns1 10.0.2.3
|
||||
setprop net.dns1 10.0.2.3
|
||||
setprop net.gprs.local-ip 10.0.2.15
|
||||
setprop ro.radio.use-ppp no
|
||||
setprop ro.build.product generic
|
||||
setprop ro.product.device generic
|
||||
|
||||
# fake some battery state
|
||||
setprop status.battery.state Slow
|
||||
setprop status.battery.level 5
|
||||
setprop status.battery.level_raw 50
|
||||
setprop status.battery.level_scale 9
|
||||
|
||||
# set up the GPU caching
|
||||
setprop ro.hwui.texture_cache_size 72
|
||||
setprop ro.hwui.layer_cache_size 48
|
||||
setprop ro.hwui.r_buffer_cache_size 8
|
||||
setprop ro.hwui.path_cache_size 32
|
||||
setprop ro.hwui.gradient_cache_size 1
|
||||
setprop ro.hwui.drop_shadow_cache_size 6
|
||||
setprop ro.hwui.texture_cache_flushrate 0.4
|
||||
setprop ro.hwui.text_small_cache_width 1024
|
||||
setprop ro.hwui.text_small_cache_height 1024
|
||||
setprop ro.hwui.text_large_cache_width 2048
|
||||
setprop ro.hwui.text_large_cache_height 1024
|
||||
|
||||
# disable some daemons the emulator doesn't want
|
||||
stop dund
|
||||
stop akmd
|
||||
|
||||
# start essential services
|
||||
start qemud
|
||||
start goldfish-logcat
|
||||
start goldfish-setup
|
||||
|
||||
setprop ro.setupwizard.mode EMULATOR
|
||||
|
||||
# enable Google-specific location features,
|
||||
# like NetworkLocationProvider and LocationCollector
|
||||
setprop ro.com.google.locationfeatures 1
|
||||
|
||||
# For the emulator, which bypasses Setup Wizard, you can specify
|
||||
# account info for the device via these two properties. Google
|
||||
# Login Service will insert these accounts into the database when
|
||||
# it is created (ie, after a data wipe).
|
||||
#
|
||||
# setprop ro.config.hosted_account username@hosteddomain.org:password
|
||||
# setprop ro.config.google_account username@gmail.com:password
|
||||
#
|
||||
# You MUST have a Google account on the device, and you MAY
|
||||
# additionally have a hosted account. No other configuration is
|
||||
# supported, and arbitrary breakage may result if you specify
|
||||
# something else.
|
||||
|
||||
on fs
|
||||
mount_all /fstab.goldfish
|
||||
|
||||
service goldfish-setup /system/etc/init.goldfish.sh
|
||||
user root
|
||||
group root
|
||||
oneshot
|
||||
|
||||
# The qemu-props program is used to set various system
|
||||
# properties on boot. It must be run early during the boot
|
||||
# process to avoid race conditions with other daemons that
|
||||
# might read them (e.g. surface flinger), so define it in
|
||||
# class 'core'
|
||||
#
|
||||
service qemu-props /system/bin/qemu-props
|
||||
class core
|
||||
user root
|
||||
group root
|
||||
oneshot
|
||||
|
||||
service qemud /system/bin/qemud
|
||||
socket qemud stream 666
|
||||
oneshot
|
||||
|
||||
# -Q is a special logcat option that forces the
|
||||
# program to check whether it runs on the emulator
|
||||
# if it does, it redirects its output to the device
|
||||
# named by the androidboot.console kernel option
|
||||
# if not, it simply exits immediately
|
||||
|
||||
service goldfish-logcat /system/bin/logcat -Q
|
||||
oneshot
|
||||
|
||||
service fingerprintd /system/bin/fingerprintd
|
||||
class late_start
|
||||
user system
|
||||
68
android/init.goldfish.sh
Executable file
68
android/init.goldfish.sh
Executable file
|
|
@ -0,0 +1,68 @@
|
|||
#!/system/bin/sh
|
||||
|
||||
# Setup networking when boot starts
|
||||
ifconfig eth0 10.0.2.15 netmask 255.255.255.0 up
|
||||
route add default gw 10.0.2.2 dev eth0
|
||||
|
||||
# ro.kernel.android.qemud is normally set when we
|
||||
# want the RIL (radio interface layer) to talk to
|
||||
# the emulated modem through qemud.
|
||||
#
|
||||
# However, this will be undefined in two cases:
|
||||
#
|
||||
# - When we want the RIL to talk directly to a guest
|
||||
# serial device that is connected to a host serial
|
||||
# device by the emulator.
|
||||
#
|
||||
# - We don't want to use the RIL but the VM-based
|
||||
# modem emulation that runs inside the guest system
|
||||
# instead.
|
||||
#
|
||||
# The following detects the latter case and sets up the
|
||||
# system for it.
|
||||
#
|
||||
qemud=`getprop ro.kernel.android.qemud`
|
||||
case "$qemud" in
|
||||
"")
|
||||
radio_ril=`getprop ro.kernel.android.ril`
|
||||
case "$radio_ril" in
|
||||
"")
|
||||
# no need for the radio interface daemon
|
||||
# telephony is entirely emulated in Java
|
||||
setprop ro.radio.noril yes
|
||||
stop ril-daemon
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
# Setup additional DNS servers if needed
|
||||
num_dns=`getprop ro.kernel.ndns`
|
||||
case "$num_dns" in
|
||||
2) setprop net.eth0.dns2 10.0.2.4
|
||||
;;
|
||||
3) setprop net.eth0.dns2 10.0.2.4
|
||||
setprop net.eth0.dns3 10.0.2.5
|
||||
;;
|
||||
4) setprop net.eth0.dns2 10.0.2.4
|
||||
setprop net.eth0.dns3 10.0.2.5
|
||||
setprop net.eth0.dns4 10.0.2.6
|
||||
;;
|
||||
esac
|
||||
|
||||
# disable boot animation for a faster boot sequence when needed
|
||||
boot_anim=`getprop ro.kernel.android.bootanim`
|
||||
case "$boot_anim" in
|
||||
0) setprop debug.sf.nobootanimation 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# set up the second interface (for inter-emulator connections)
|
||||
# if required
|
||||
my_ip=`getprop net.shared_net_ip`
|
||||
case "$my_ip" in
|
||||
"")
|
||||
;;
|
||||
*) ifconfig eth1 "$my_ip" netmask 255.255.255.0 up
|
||||
;;
|
||||
esac
|
||||
25
android/lights/Android.mk
Normal file
25
android/lights/Android.mk
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
# Copyright (C) 2011 The Android Open Source Project.
|
||||
#
|
||||
# Original code licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this software except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
# HAL module implementation, not prelinked and stored in
|
||||
# hw/<LIGHTS_HARDWARE_MODULE_ID>.<ro.hardware>.so
|
||||
include $(CLEAR_VARS)
|
||||
LOCAL_MODULE_RELATIVE_PATH := hw
|
||||
LOCAL_SHARED_LIBRARIES := liblog libcutils
|
||||
LOCAL_SRC_FILES := lights_qemu.c
|
||||
LOCAL_MODULE := lights.goldfish
|
||||
LOCAL_CFLAGS += -DLIGHT_BACKLIGHT
|
||||
include $(BUILD_SHARED_LIBRARY)
|
||||
213
android/lights/lights_qemu.c
Normal file
213
android/lights/lights_qemu.c
Normal file
|
|
@ -0,0 +1,213 @@
|
|||
/* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Original code licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this software except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*
|
||||
* This implements a lights hardware library for the Android emulator.
|
||||
* the following code should be built as a shared library that will be
|
||||
* placed into /system/lib/hw/lights.goldfish.so
|
||||
*
|
||||
* It will be loaded by the code in hardware/libhardware/hardware.c
|
||||
* which is itself called from
|
||||
* ./frameworks/base/services/jni/com_android_server_HardwareService.cpp
|
||||
*/
|
||||
|
||||
#ifdef LOG_TAG
|
||||
#undef LOG_TAG
|
||||
#define LOG_TAG "Lights"
|
||||
#endif
|
||||
|
||||
/* we connect with the emulator through the "hw-control" qemud service */
|
||||
#define LIGHTS_SERVICE_NAME "hw-control"
|
||||
|
||||
#include <cutils/log.h>
|
||||
#include <stdint.h>
|
||||
#include <string.h>
|
||||
#include <unistd.h>
|
||||
#include <errno.h>
|
||||
#include <fcntl.h>
|
||||
#include <pthread.h>
|
||||
#include <sys/ioctl.h>
|
||||
#include <sys/types.h>
|
||||
#include <hardware/lights.h>
|
||||
#include <hardware/qemud.h>
|
||||
|
||||
/* Set to 1 to enable debug messages to the log */
|
||||
#define DEBUG 0
|
||||
#if DEBUG
|
||||
# define D(...) ALOGD(__VA_ARGS__)
|
||||
#else
|
||||
# define D(...) do{}while(0)
|
||||
#endif
|
||||
|
||||
#define E(...) ALOGE(__VA_ARGS__)
|
||||
|
||||
/* Get brightness(0~255) from state. */
|
||||
static int
|
||||
rgb_to_brightness( struct light_state_t const* state )
|
||||
{
|
||||
int color = state->color & 0x00ffffff;
|
||||
return ((77 * ((color >> 16) & 0x00ff))
|
||||
+ (150 * ((color >> 8) & 0x00ff)) + (29 * (color & 0x00ff))) >> 8;
|
||||
}
|
||||
|
||||
/* set backlight brightness by LIGHTS_SERVICE_NAME service. */
|
||||
static int
|
||||
set_light_backlight( struct light_device_t* dev, struct light_state_t const* state )
|
||||
{
|
||||
/* Get Lights service. */
|
||||
int fd = qemud_channel_open( LIGHTS_SERVICE_NAME );
|
||||
|
||||
if (fd < 0) {
|
||||
E( "%s: no qemud connection", __FUNCTION__ );
|
||||
return -1;
|
||||
}
|
||||
|
||||
D( "%s: On/Off %d/%d flashMode %d brightnessMode %d"
|
||||
" RGB = 0x%08x", __func__,
|
||||
state->flashOnMS,
|
||||
state->flashOffMS,
|
||||
state->flashMode,
|
||||
state->brightnessMode,
|
||||
state->color );
|
||||
|
||||
int brightness = rgb_to_brightness( state );
|
||||
|
||||
char buffer[64];
|
||||
snprintf( buffer, sizeof(buffer), "power:light:brightness:lcd_backlight:%d", brightness );
|
||||
D( "%s: lcd_backlight command: %s", __FUNCTION__, buffer );
|
||||
|
||||
/* send backlight command to perform the backlight setting. */
|
||||
if (qemud_channel_send( fd, buffer, -1 ) < 0) {
|
||||
E( "%s: could not query lcd_backlight: %s", __FUNCTION__, strerror(errno) );
|
||||
close( fd );
|
||||
return -1;
|
||||
}
|
||||
|
||||
close( fd );
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
set_light_buttons( struct light_device_t* dev, struct light_state_t const* state )
|
||||
{
|
||||
/* @Waiting for later implementation. */
|
||||
D( "%s: Not implemented.", __FUNCTION__ );
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
set_light_battery( struct light_device_t* dev, struct light_state_t const* state )
|
||||
{
|
||||
/* @Waiting for later implementation. */
|
||||
D( "%s: Not implemented.", __FUNCTION__ );
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
set_light_keyboard( struct light_device_t* dev, struct light_state_t const* state )
|
||||
{
|
||||
/* @Waiting for later implementation. */
|
||||
D( "%s: Not implemented.", __FUNCTION__ );
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
set_light_notifications( struct light_device_t* dev, struct light_state_t const* state )
|
||||
{
|
||||
/* @Waiting for later implementation. */
|
||||
D( "%s: Not implemented.", __FUNCTION__ );
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
set_light_attention( struct light_device_t* dev, struct light_state_t const* state )
|
||||
{
|
||||
/* @Waiting for later implementation. */
|
||||
D( "%s: Not implemented.", __FUNCTION__ );
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/** Close the lights device */
|
||||
static int
|
||||
close_lights( struct light_device_t *dev )
|
||||
{
|
||||
free( dev );
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* module methods
|
||||
*/
|
||||
|
||||
/** Open a new instance of a lights device using name */
|
||||
static int
|
||||
open_lights( const struct hw_module_t* module, char const *name,
|
||||
struct hw_device_t **device )
|
||||
{
|
||||
void* set_light;
|
||||
|
||||
if (0 == strcmp( LIGHT_ID_BACKLIGHT, name )) {
|
||||
set_light = set_light_backlight;
|
||||
} else if (0 == strcmp( LIGHT_ID_KEYBOARD, name )) {
|
||||
set_light = set_light_keyboard;
|
||||
} else if (0 == strcmp( LIGHT_ID_BUTTONS, name )) {
|
||||
set_light = set_light_buttons;
|
||||
} else if (0 == strcmp( LIGHT_ID_BATTERY, name )) {
|
||||
set_light = set_light_battery;
|
||||
} else if (0 == strcmp( LIGHT_ID_NOTIFICATIONS, name )) {
|
||||
set_light = set_light_notifications;
|
||||
} else if (0 == strcmp( LIGHT_ID_ATTENTION, name )) {
|
||||
set_light = set_light_attention;
|
||||
} else {
|
||||
D( "%s: %s light isn't supported yet.", __FUNCTION__, name );
|
||||
return -EINVAL;
|
||||
}
|
||||
|
||||
struct light_device_t *dev = malloc( sizeof(struct light_device_t) );
|
||||
if (dev == NULL) {
|
||||
return -EINVAL;
|
||||
}
|
||||
memset( dev, 0, sizeof(*dev) );
|
||||
|
||||
dev->common.tag = HARDWARE_DEVICE_TAG;
|
||||
dev->common.version = 0;
|
||||
dev->common.module = (struct hw_module_t*)module;
|
||||
dev->common.close = (int (*)(struct hw_device_t*))close_lights;
|
||||
dev->set_light = set_light;
|
||||
|
||||
*device = (struct hw_device_t*)dev;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static struct hw_module_methods_t lights_module_methods = {
|
||||
.open = open_lights,
|
||||
};
|
||||
|
||||
/*
|
||||
* The emulator lights Module
|
||||
*/
|
||||
struct hw_module_t HAL_MODULE_INFO_SYM = {
|
||||
.tag = HARDWARE_MODULE_TAG,
|
||||
.version_major = 1,
|
||||
.version_minor = 0,
|
||||
.id = LIGHTS_HARDWARE_MODULE_ID,
|
||||
.name = "Goldfish lights Module",
|
||||
.author = "The Android Open Source Project",
|
||||
.methods = &lights_module_methods,
|
||||
};
|
||||
65
android/opengl/Android.mk
Normal file
65
android/opengl/Android.mk
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
# This is the top-level build file for the Android HW OpenGL ES emulation
|
||||
# in Android.
|
||||
#
|
||||
# You must define BUILD_EMULATOR_OPENGL to 'true' in your environment to
|
||||
# build the following files.
|
||||
#
|
||||
# Also define BUILD_EMULATOR_OPENGL_DRIVER to 'true' to build the gralloc
|
||||
# stuff as well.
|
||||
#
|
||||
ifeq (true,$(BUILD_EMULATOR_OPENGL))
|
||||
|
||||
# Top-level for all modules
|
||||
EMUGL_PATH := $(call my-dir)
|
||||
|
||||
# Directory containing common headers used by several modules
|
||||
# This is always set to a module's LOCAL_C_INCLUDES
|
||||
# See the definition of emugl-begin-module in common.mk
|
||||
#
|
||||
EMUGL_COMMON_INCLUDES := $(EMUGL_PATH)/host/include/libOpenglRender
|
||||
|
||||
# common cflags used by several modules
|
||||
# This is always set to a module's LOCAL_CFLAGS
|
||||
# See the definition of emugl-begin-module in common.mk
|
||||
#
|
||||
EMUGL_COMMON_CFLAGS := -DWITH_GLES2
|
||||
|
||||
# Uncomment the following line if you want to enable debug traces
|
||||
# in the GLES emulation libraries.
|
||||
# EMUGL_COMMON_CFLAGS += -DEMUGL_DEBUG=1
|
||||
|
||||
# Include common definitions used by all the modules included later
|
||||
# in this build file. This contains the definition of all useful
|
||||
# emugl-xxxx functions.
|
||||
#
|
||||
include $(EMUGL_PATH)/common.mk
|
||||
|
||||
# IMPORTANT: ORDER IS CRUCIAL HERE
|
||||
#
|
||||
# For the import/export feature to work properly, you must include
|
||||
# modules below in correct order. That is, if module B depends on
|
||||
# module A, then it must be included after module A below.
|
||||
#
|
||||
# This ensures that anything exported by module A will be correctly
|
||||
# be imported by module B when it is declared.
|
||||
#
|
||||
# Note that the build system will complain if you try to import a
|
||||
# module that hasn't been declared yet anyway.
|
||||
#
|
||||
|
||||
include $(EMUGL_PATH)/shared/OpenglCodecCommon/Android.mk
|
||||
|
||||
# System static libraries
|
||||
include $(EMUGL_PATH)/system/GLESv1_enc/Android.mk
|
||||
include $(EMUGL_PATH)/system/GLESv2_enc/Android.mk
|
||||
include $(EMUGL_PATH)/system/renderControl_enc/Android.mk
|
||||
include $(EMUGL_PATH)/system/OpenglSystemCommon/Android.mk
|
||||
|
||||
# System shared libraries
|
||||
include $(EMUGL_PATH)/system/GLESv1/Android.mk
|
||||
include $(EMUGL_PATH)/system/GLESv2/Android.mk
|
||||
|
||||
include $(EMUGL_PATH)/system/gralloc/Android.mk
|
||||
include $(EMUGL_PATH)/system/egl/Android.mk
|
||||
|
||||
endif # BUILD_EMULATOR_OPENGL == true
|
||||
19
android/opengl/README
Normal file
19
android/opengl/README
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
This directory contains Android-side modules related to hardware OpenGL ES
|
||||
emulation. The host-side modules and documentation are in
|
||||
$ANDROID_BUILD_TOP/sdk/emulator/opengl.
|
||||
|
||||
Note that this directory contains encoder sources that are auto-generated
|
||||
with the 'emugen' host tool (see sdk/emulator/opengl/host/tools/emugen).
|
||||
|
||||
To regenerate them, run external/qemu/distrib/update-emugl-sources.sh,
|
||||
after building the emulator from sources, this will populate the content
|
||||
here with the appropriate updated source files.
|
||||
|
||||
You should do this whenever you update one of the *.types, *.in and *.attrib
|
||||
files located under one of:
|
||||
|
||||
$AOSP/sdk/emulator/opengl/libs/GLESv1_dec/
|
||||
$AOSP/sdk/emulator/opengl/libs/GLESv2_dec/
|
||||
$AOSP/sdk/emulator/opengl/libs/renderControl_dec/
|
||||
|
||||
or when the 'emugen' tool itself is modified.
|
||||
237
android/opengl/common.mk
Normal file
237
android/opengl/common.mk
Normal file
|
|
@ -0,0 +1,237 @@
|
|||
# This top-level build file is included by all modules that implement
|
||||
# the hardware OpenGL ES emulation for Android.
|
||||
#
|
||||
# We use it to ensure that all sub-Makefiles are included in the right
|
||||
# order for various variable definitions and usage to happen in the correct
|
||||
# order.
|
||||
#
|
||||
|
||||
# The following macros are used to start a new GLES emulation module.
|
||||
#
|
||||
# This will define LOCAL_MODULE as $1, plus a few other variables
|
||||
# needed by the build system (e.g. LOCAL_MODULE_TAGS, LOCAL_MODULE_CLASS...)
|
||||
#
|
||||
# NOTE: You still need to define LOCAL_PATH before this
|
||||
#
|
||||
# Usage example:
|
||||
#
|
||||
# $(call emugl-begin-static-library,<name>)
|
||||
# LOCAL_SRC_FILES := ....
|
||||
# LOCAL_C_INCLUDES += ....
|
||||
# $(call emugl-end-module)
|
||||
#
|
||||
emugl-begin-static-library = $(call emugl-begin-module,$1,STATIC_LIBRARY)
|
||||
emugl-begin-shared-library = $(call emugl-begin-module,$1,SHARED_LIBRARY)
|
||||
|
||||
# Internal list of all declared modules (used for sanity checking)
|
||||
_emugl_modules :=
|
||||
_emugl_HOST_modules :=
|
||||
|
||||
# do not use directly, see functions above instead
|
||||
emugl-begin-module = \
|
||||
$(eval include $(CLEAR_VARS)) \
|
||||
$(eval LOCAL_MODULE := $1) \
|
||||
$(eval LOCAL_MODULE_CLASS := $(patsubst HOST_%,%,$(patsubst %EXECUTABLE,%EXECUTABLES,$(patsubst %LIBRARY,%LIBRARIES,$2)))) \
|
||||
$(eval LOCAL_IS_HOST_MODULE := $(if $3,true,))\
|
||||
$(eval LOCAL_C_INCLUDES := $(EMUGL_COMMON_INCLUDES)) \
|
||||
$(eval LOCAL_CFLAGS := $(EMUGL_COMMON_CFLAGS)) \
|
||||
$(eval _EMUGL_INCLUDE_TYPE := $(BUILD_$2)) \
|
||||
$(call _emugl-init-module,$1,$2,$3)
|
||||
|
||||
# Used to end a module definition, see function definitions above
|
||||
emugl-end-module = \
|
||||
$(eval include $(_EMUGL_INCLUDE_TYPE))\
|
||||
$(eval _EMUGL_INCLUDE_TYPE :=) \
|
||||
$(eval _emugl_$(_emugl_HOST)modules += $(_emugl_MODULE))\
|
||||
$(if $(EMUGL_DEBUG),$(call emugl-dump-module))
|
||||
|
||||
# Managing module exports and imports.
|
||||
#
|
||||
# A module can 'import' another module, by calling emugl-import. This will
|
||||
# make the current LOCAL_MODULE inherit various definitions exported from
|
||||
# the imported module.
|
||||
#
|
||||
# Module exports are defined by calling emugl-export. Here is an example:
|
||||
#
|
||||
# $(call emugl-begin-static-library,foo)
|
||||
# LOCAL_SRC_FILES := foo.c
|
||||
# $(call emugl-export,C_INCLUDES,$(LOCAL_PATH))
|
||||
# $(call emugl-export,SHARED_LIBRARIES,libcutils)
|
||||
# $(call emugl-end-module)
|
||||
#
|
||||
# $(call emugl-begin-shared-library,bar)
|
||||
# LOCAL_SRC_FILES := bar.cpp
|
||||
# $(call emugl-import,foo)
|
||||
# $(call emugl-end-module)
|
||||
#
|
||||
# Here, we define a static library named 'foo' which exports an include
|
||||
# path and a shared library requirement, and a shared library 'bar' which
|
||||
# imports it.
|
||||
#
|
||||
# What this means is that:
|
||||
#
|
||||
# - 'bar' will automatically inherit foo's LOCAL_PATH in its LOCAL_C_INCLUDES
|
||||
# - 'bar' will automatically inherit libcutils in its own LOCAL_SHARED_LIBRARIES
|
||||
#
|
||||
# Note that order of declaration matters. If 'foo' is defined after 'bar' in
|
||||
# the example above, nothing will work correctly because dependencies are
|
||||
# computed at import time.
|
||||
#
|
||||
#
|
||||
# IMPORTANT: Imports are transitive, i.e. when module A imports B,
|
||||
# it automatically imports anything imported by B too.
|
||||
|
||||
# This is the list of recognized export types we support for now.
|
||||
EMUGL_EXPORT_TYPES := \
|
||||
CFLAGS \
|
||||
LDLIBS \
|
||||
LDFLAGS \
|
||||
C_INCLUDES \
|
||||
SHARED_LIBRARIES \
|
||||
STATIC_LIBRARIES \
|
||||
ADDITIONAL_DEPENDENCIES
|
||||
|
||||
# Initialize a module in our database
|
||||
# $1: Module name
|
||||
# $2: Module type
|
||||
# $3: "HOST" for a host module, empty for a target one.
|
||||
_emugl-init-module = \
|
||||
$(eval _emugl_HOST := $(if $3,HOST_,))\
|
||||
$(eval _emugl_MODULE := $(_emugl_HOST)$1)\
|
||||
$(if $(filter $(_emugl_$(_emugl_HOST)modules),$(_emugl_MODULE)),\
|
||||
$(error There is already a $(if $3,host,) module named $1!)\
|
||||
)\
|
||||
$(eval _mod = $(_emugl_MODULE)) \
|
||||
$(eval _emugl.$(_mod).type := $(patsubst HOST_%,%,$2))\
|
||||
$(eval _emugl.$(_mod).imports :=) \
|
||||
$(eval _emugl,$(_mod).moved :=) \
|
||||
$(foreach _type,$(EMUGL_EXPORT_TYPES),\
|
||||
$(eval _emugl.$(_mod).export.$(_type) :=)\
|
||||
)
|
||||
|
||||
# Called to indicate that a module exports a given local variable for its
|
||||
# users. This also adds this to LOCAL_$1
|
||||
# $1: Local variable type (e.g. CFLAGS, LDLIBS, etc...)
|
||||
# $2: Value(s) to append to the export
|
||||
emugl-export = \
|
||||
$(eval _emugl.$(_emugl_MODULE).export.$1 += $2)\
|
||||
$(eval LOCAL_$1 := $2 $(LOCAL_$1))
|
||||
|
||||
emugl-export-outer = \
|
||||
$(eval _emugl.$(_emugl_MODULE).export.$1 += $2)
|
||||
|
||||
# Called to indicate that a module imports the exports of another module
|
||||
# $1: list of modules to import
|
||||
#
|
||||
emugl-import = \
|
||||
$(foreach _imod,$1,\
|
||||
$(call _emugl-module-import,$(_emugl_HOST)$(_imod))\
|
||||
)
|
||||
|
||||
_emugl-module-import = \
|
||||
$(eval _mod := $(_emugl_MODULE))\
|
||||
$(if $(filter-out $(_emugl_$(_emugl_HOST)modules),$1),\
|
||||
$(info Unknown imported emugles module: $1)\
|
||||
$(if $(_emugl_HOST),\
|
||||
$(eval _names := $(patsubst HOST_%,%,$(_emugl_HOST_modules))),\
|
||||
$(eval _names := $(_emugl_modules))\
|
||||
)\
|
||||
$(info Please one of the following names: $(_names))\
|
||||
$(error Aborting)\
|
||||
)\
|
||||
$(if $(filter-out $(_emugl.$(_mod).imports),$1),\
|
||||
$(eval _emugl.$(_mod).imports += $1)\
|
||||
$(foreach _sub,$(_emugl.$1.imports),\
|
||||
$(call _emugl-module-import,$(_sub))\
|
||||
)\
|
||||
$(foreach _type,$(EMUGL_EXPORT_TYPES),\
|
||||
$(eval LOCAL_$(_type) := $(_emugl.$1.export.$(_type)) $(LOCAL_$(_type)))\
|
||||
)\
|
||||
$(if $(filter EXECUTABLE SHARED_LIBRARY,$(_emugl.$(_emugl_MODULE).type)),\
|
||||
$(if $(filter STATIC_LIBRARY,$(_emugl.$1.type)),\
|
||||
$(eval LOCAL_STATIC_LIBRARIES := $(1:HOST_%=%) $(LOCAL_STATIC_LIBRARIES))\
|
||||
)\
|
||||
$(if $(filter SHARED_LIBRARY,$(_emugl.$1.type)),\
|
||||
$(if $(_emugl.$1.moved),,\
|
||||
$(eval LOCAL_SHARED_LIBRARIES := $(1:HOST_%=%) $(LOCAL_SHARED_LIBRARIES))\
|
||||
)\
|
||||
)\
|
||||
)\
|
||||
)
|
||||
|
||||
_emugl-dump-list = \
|
||||
$(foreach _list_item,$(strip $1),$(info . $(_list_item)))
|
||||
|
||||
emugl-dump-module = \
|
||||
$(info MODULE=$(_emugl_MODULE))\
|
||||
$(info . HOST=$(_emugl_HOST))\
|
||||
$(info . TYPE=$(_emugl.$(_emugl_MODULE).type))\
|
||||
$(info . IMPORTS=$(_emugl.$(_emugl_MODULE).imports))\
|
||||
$(foreach _type,$(EMUGL_EXPORT_TYPES),\
|
||||
$(if $(filter C_INCLUDES ADDITIONAL_DEPENDENCIES,$(_type)),\
|
||||
$(info . EXPORT.$(_type) :=)\
|
||||
$(call _emugl-dump-list,$(_emugl.$(_emugl_MODULE).export.$(_type)))\
|
||||
$(info . LOCAL_$(_type) :=)\
|
||||
$(call _emugl-dump-list,$(LOCAL_$(_type)))\
|
||||
,\
|
||||
$(info . EXPORT.$(_type) := $(strip $(_emugl.$(_emugl_MODULE).export.$(_type))))\
|
||||
$(info . LOCAL_$(_type) := $(strip $(LOCAL_$(_type))))\
|
||||
)\
|
||||
)\
|
||||
$(info . LOCAL_SRC_FILES := $(LOCAL_SRC_FILES))\
|
||||
|
||||
# This function can be called to generate the wrapper source files.
|
||||
# LOCAL_MODULE and LOCAL_MODULE_CLASS must be defined or the build will abort.
|
||||
# Source files will be stored in the local intermediates directory that will
|
||||
# be automatically added to your LOCAL_C_INCLUDES.
|
||||
# Usage:
|
||||
# $(call emugl-gen-wrapper,<input-dir>,<basename>)
|
||||
#
|
||||
emugl-gen-wrapper = \
|
||||
$(eval _emugl_out := $(call local-intermediates-dir)) \
|
||||
$(call emugl-gen-wrapper-generic,$(_emugl_out),$1,$2) \
|
||||
$(call emugl-export,C_INCLUDES,$(_emugl_out))
|
||||
|
||||
# DO NOT CALL DIRECTLY, USE emugl-gen-wrapper instead.
|
||||
#
|
||||
# The following function can be called to generate GL library wrapper
|
||||
# Usage is:
|
||||
#
|
||||
# $(call emugl-gen-wrapper-generic,<dst-dir>,<src-dir>,<basename>)
|
||||
#
|
||||
# <dst-dir> is the destination directory where the generated sources are stored
|
||||
# <src-dir> is the source directory where to find <basename>.attrib, etc..
|
||||
# <basename> is the emugen basename (see host/tools/emugen/README)
|
||||
#
|
||||
emugl-gen-wrapper-generic = $(eval $(emugl-gen-wrapper-generic-ev))
|
||||
|
||||
define emugl-gen-wrapper-generic-ev
|
||||
_emugl_wrap := $$1/$$3
|
||||
_emugl_src := $$2/$$3
|
||||
GEN := $$(_emugl_wrap)_wrapper_entry.cpp \
|
||||
$$(_emugl_wrap)_wrapper_context.cpp \
|
||||
$$(_emugl_wrap)_wrapper_context.h \
|
||||
$$(_emugl_wrap)_wrapper_proc.h
|
||||
|
||||
$$(GEN): PRIVATE_PATH := $$(LOCAL_PATH)
|
||||
$$(GEN): PRIVATE_CUSTOM_TOOL := $$(EMUGL_EMUGEN) -W $$1 -i $$2 $$3
|
||||
$$(GEN): $$(EMUGL_EMUGEN) $$(_emugl_src).attrib $$(_emugl_src).in $$(_emugl_src).types
|
||||
$$(transform-generated-source)
|
||||
|
||||
$$(call emugl-export,ADDITIONAL_DEPENDENCIES,$$(GEN))
|
||||
LOCAL_GENERATED_SOURCES += $$(GEN)
|
||||
LOCAL_C_INCLUDES += $$1
|
||||
|
||||
#ifneq ($$(HOST_OS),windows)
|
||||
$$(call emugl-export,LDFLAGS,-ldl)
|
||||
#endif
|
||||
|
||||
endef
|
||||
|
||||
# Call this function when your shared library must be placed in a non-standard
|
||||
# library path (i.e. not under /system/lib
|
||||
# $1: library sub-path,relative to /system/lib
|
||||
# For example: $(call emugl-set-shared-library-subpath,egl)
|
||||
emugl-set-shared-library-subpath = \
|
||||
$(eval LOCAL_MODULE_RELATIVE_PATH := $1)\
|
||||
$(eval _emugl.$(LOCAL_MODULE).moved := true)
|
||||
102
android/opengl/host/include/libOpenglRender/IOStream.h
Normal file
102
android/opengl/host/include/libOpenglRender/IOStream.h
Normal file
|
|
@ -0,0 +1,102 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef __IO_STREAM_H__
|
||||
#define __IO_STREAM_H__
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
|
||||
#include "ErrorLog.h"
|
||||
|
||||
class IOStream {
|
||||
public:
|
||||
|
||||
IOStream(size_t bufSize) {
|
||||
m_buf = NULL;
|
||||
m_bufsize = bufSize;
|
||||
m_free = 0;
|
||||
}
|
||||
|
||||
virtual void *allocBuffer(size_t minSize) = 0;
|
||||
virtual int commitBuffer(size_t size) = 0;
|
||||
virtual const unsigned char *readFully( void *buf, size_t len) = 0;
|
||||
virtual const unsigned char *read( void *buf, size_t *inout_len) = 0;
|
||||
virtual int writeFully(const void* buf, size_t len) = 0;
|
||||
|
||||
virtual ~IOStream() {
|
||||
|
||||
// NOTE: m_buf is 'owned' by the child class thus we expect it to be released by it
|
||||
}
|
||||
|
||||
unsigned char *alloc(size_t len) {
|
||||
|
||||
if (m_buf && len > m_free) {
|
||||
if (flush() < 0) {
|
||||
ERR("Failed to flush in alloc\n");
|
||||
return NULL; // we failed to flush so something is wrong
|
||||
}
|
||||
}
|
||||
|
||||
if (!m_buf || len > m_bufsize) {
|
||||
int allocLen = m_bufsize < len ? len : m_bufsize;
|
||||
m_buf = (unsigned char *)allocBuffer(allocLen);
|
||||
if (!m_buf) {
|
||||
ERR("Alloc (%u bytes) failed\n", allocLen);
|
||||
return NULL;
|
||||
}
|
||||
m_bufsize = m_free = allocLen;
|
||||
}
|
||||
|
||||
unsigned char *ptr;
|
||||
|
||||
ptr = m_buf + (m_bufsize - m_free);
|
||||
m_free -= len;
|
||||
|
||||
return ptr;
|
||||
}
|
||||
|
||||
int flush() {
|
||||
|
||||
if (!m_buf || m_free == m_bufsize) return 0;
|
||||
|
||||
int stat = commitBuffer(m_bufsize - m_free);
|
||||
m_buf = NULL;
|
||||
m_free = 0;
|
||||
return stat;
|
||||
}
|
||||
|
||||
const unsigned char *readback(void *buf, size_t len) {
|
||||
flush();
|
||||
return readFully(buf, len);
|
||||
}
|
||||
|
||||
|
||||
private:
|
||||
unsigned char *m_buf;
|
||||
size_t m_bufsize;
|
||||
size_t m_free;
|
||||
};
|
||||
|
||||
//
|
||||
// When a client opens a connection to the renderer, it should
|
||||
// send unsigned int value indicating the "clientFlags".
|
||||
// The following are the bitmask of the clientFlags.
|
||||
// currently only one bit is used which flags the server
|
||||
// it should exit.
|
||||
//
|
||||
#define IOSTREAM_CLIENT_EXIT_SERVER 1
|
||||
|
||||
#endif
|
||||
23
android/opengl/shared/OpenglCodecCommon/Android.mk
Normal file
23
android/opengl/shared/OpenglCodecCommon/Android.mk
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
# This build script corresponds to a library containing many definitions
|
||||
# common to both the guest and the host. They relate to
|
||||
#
|
||||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
commonSources := \
|
||||
GLClientState.cpp \
|
||||
ChecksumCalculator.cpp \
|
||||
GLSharedGroup.cpp \
|
||||
glUtils.cpp \
|
||||
SocketStream.cpp \
|
||||
TcpStream.cpp \
|
||||
|
||||
### CodecCommon guest ##############################################
|
||||
$(call emugl-begin-static-library,libOpenglCodecCommon)
|
||||
|
||||
LOCAL_SRC_FILES := $(commonSources)
|
||||
|
||||
LOCAL_CFLAGS += -DLOG_TAG=\"eglCodecCommon\"
|
||||
|
||||
$(call emugl-export,SHARED_LIBRARIES,libcutils libutils liblog)
|
||||
$(call emugl-export,C_INCLUDES,$(LOCAL_PATH))
|
||||
$(call emugl-end-module)
|
||||
154
android/opengl/shared/OpenglCodecCommon/ChecksumCalculator.cpp
Normal file
154
android/opengl/shared/OpenglCodecCommon/ChecksumCalculator.cpp
Normal file
|
|
@ -0,0 +1,154 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#include "ChecksumCalculator.h"
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <string.h>
|
||||
|
||||
// Checklist when implementing new protocol:
|
||||
// 1. update CHECKSUMHELPER_MAX_VERSION
|
||||
// 2. update maxChecksumSize()
|
||||
// 3. update checksumByteSize()
|
||||
// 4. update addBuffer, writeChecksum, resetChecksum, validate
|
||||
|
||||
// change CHECKSUMHELPER_MAX_VERSION when you want to update the protocol version
|
||||
#define CHECKSUMHELPER_MAX_VERSION 1
|
||||
|
||||
// checksum buffer size
|
||||
// Please add a new checksum buffer size when implementing a new protocol,
|
||||
// as well as modifying the maxChecksumSize function.
|
||||
static const size_t kV1ChecksumSize = 8;
|
||||
|
||||
static constexpr size_t maxChecksumSize() {
|
||||
return 0 > kV1ChecksumSize ? 0 : kV1ChecksumSize;
|
||||
}
|
||||
|
||||
static const size_t kMaxChecksumSize = maxChecksumSize();
|
||||
|
||||
// utility macros to create checksum string at compilation time
|
||||
#define CHECKSUMHELPER_VERSION_STR_PREFIX "ANDROID_EMU_CHECKSUM_HELPER_v"
|
||||
#define CHECKSUMHELPER_MACRO_TO_STR(x) #x
|
||||
#define CHECKSUMHELPER_MACRO_VAL_TO_STR(x) CHECKSUMHELPER_MACRO_TO_STR(x)
|
||||
|
||||
static const uint32_t kMaxVersion = CHECKSUMHELPER_MAX_VERSION;
|
||||
static const char* kMaxVersionStrPrefix = CHECKSUMHELPER_VERSION_STR_PREFIX;
|
||||
static const char* kMaxVersionStr = CHECKSUMHELPER_VERSION_STR_PREFIX CHECKSUMHELPER_MACRO_VAL_TO_STR(CHECKSUMHELPER_MAX_VERSION);
|
||||
|
||||
#undef CHECKSUMHELPER_MAX_VERSION
|
||||
#undef CHECKSUMHELPER_VERSION_STR_PREFIX
|
||||
#undef CHECKSUMHELPER_MACRO_TO_STR
|
||||
#undef CHECKSUMHELPER_MACRO_VAL_TO_STR
|
||||
|
||||
uint32_t ChecksumCalculator::getMaxVersion() {return kMaxVersion;}
|
||||
const char* ChecksumCalculator::getMaxVersionStr() {return kMaxVersionStr;}
|
||||
const char* ChecksumCalculator::getMaxVersionStrPrefix() {return kMaxVersionStrPrefix;}
|
||||
|
||||
bool ChecksumCalculator::setVersion(uint32_t version) {
|
||||
if (version > kMaxVersion) { // unsupported version
|
||||
LOG_CHECKSUMHELPER("%s: ChecksumCalculator Set Unsupported version Version %d\n",
|
||||
__FUNCTION__, m_version);
|
||||
return false;
|
||||
}
|
||||
if (m_isEncodingChecksum) { // setVersion is called in the middle of encoding checksums
|
||||
LOG_CHECKSUMHELPER("%s: called between addBuffer and writeChecksum\n",
|
||||
__FUNCTION__);
|
||||
return false;
|
||||
}
|
||||
m_version = version;
|
||||
LOG_CHECKSUMHELPER("%s: ChecksumCalculator Set Version %d\n", __FUNCTION__,
|
||||
m_version);
|
||||
return true;
|
||||
}
|
||||
|
||||
size_t ChecksumCalculator::checksumByteSize() const {
|
||||
switch (m_version) {
|
||||
case 0:
|
||||
return 0;
|
||||
case 1:
|
||||
return sizeof(uint32_t) + sizeof(m_numWrite);
|
||||
default:
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
void ChecksumCalculator::addBuffer(const void* buf, size_t packetLen) {
|
||||
m_isEncodingChecksum = true;
|
||||
switch (m_version) {
|
||||
case 1:
|
||||
m_v1BufferTotalLength += packetLen;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
bool ChecksumCalculator::writeChecksum(void* outputChecksum, size_t outputChecksumLen) {
|
||||
if (outputChecksumLen < checksumByteSize()) return false;
|
||||
char *checksumPtr = (char *)outputChecksum;
|
||||
switch (m_version) {
|
||||
case 1: { // protocol v1 is to reverse the packetLen and write it at the end
|
||||
uint32_t val = computeV1Checksum();
|
||||
memcpy(checksumPtr, &val, sizeof(val));
|
||||
memcpy(checksumPtr+sizeof(val), &m_numWrite, sizeof(m_numWrite));
|
||||
break;
|
||||
}
|
||||
}
|
||||
resetChecksum();
|
||||
m_numWrite++;
|
||||
return true;
|
||||
}
|
||||
|
||||
void ChecksumCalculator::resetChecksum() {
|
||||
switch (m_version) {
|
||||
case 1:
|
||||
m_v1BufferTotalLength = 0;
|
||||
break;
|
||||
}
|
||||
m_isEncodingChecksum = false;
|
||||
}
|
||||
|
||||
bool ChecksumCalculator::validate(const void* expectedChecksum, size_t expectedChecksumLen) {
|
||||
size_t checksumSize = checksumByteSize();
|
||||
if (expectedChecksumLen != checksumSize) {
|
||||
m_numRead++;
|
||||
resetChecksum();
|
||||
return false;
|
||||
}
|
||||
// buffers for computing the checksum
|
||||
unsigned char sChecksumBuffer[kMaxChecksumSize];
|
||||
switch (m_version) {
|
||||
case 1: {
|
||||
uint32_t val = computeV1Checksum();
|
||||
memcpy(sChecksumBuffer, &val, sizeof(val));
|
||||
memcpy(sChecksumBuffer+sizeof(val), &m_numRead, sizeof(m_numRead));
|
||||
break;
|
||||
}
|
||||
}
|
||||
bool isValid = !memcmp(sChecksumBuffer, expectedChecksum, checksumSize);
|
||||
m_numRead++;
|
||||
resetChecksum();
|
||||
return isValid;
|
||||
}
|
||||
|
||||
uint32_t ChecksumCalculator::computeV1Checksum() {
|
||||
uint32_t revLen = m_v1BufferTotalLength;
|
||||
revLen = (revLen & 0xffff0000) >> 16 | (revLen & 0x0000ffff) << 16;
|
||||
revLen = (revLen & 0xff00ff00) >> 8 | (revLen & 0x00ff00ff) << 8;
|
||||
revLen = (revLen & 0xf0f0f0f0) >> 4 | (revLen & 0x0f0f0f0f) << 4;
|
||||
revLen = (revLen & 0xcccccccc) >> 2 | (revLen & 0x33333333) << 2;
|
||||
revLen = (revLen & 0xaaaaaaaa) >> 1 | (revLen & 0x55555555) << 1;
|
||||
return revLen;
|
||||
}
|
||||
181
android/opengl/shared/OpenglCodecCommon/ChecksumCalculator.h
Normal file
181
android/opengl/shared/OpenglCodecCommon/ChecksumCalculator.h
Normal file
|
|
@ -0,0 +1,181 @@
|
|||
/*
|
||||
* Copyright (C) 2016 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
// Set TRACE_CHECKSUMHELPER to 1 to debug creation/destruction of GLprotocol
|
||||
// instances.
|
||||
#define TRACE_CHECKSUMHELPER 0
|
||||
|
||||
#if TRACE_CHECKSUMHELPER
|
||||
#define LOG_CHECKSUMHELPER(x...) fprintf(stderr, x)
|
||||
#else
|
||||
#define LOG_CHECKSUMHELPER(x...)
|
||||
#endif
|
||||
|
||||
// ChecksumCalculator adds checksum as an array of bytes to GL pipe communication, which
|
||||
// size depends on the protocol version. Each pipe should use one ChecksumCalculator.
|
||||
// It can:
|
||||
// (1) take a list of buffers one by one and compute their checksum string,
|
||||
// in this case the checksum should be as the data in those buffers are
|
||||
// concatenated;
|
||||
// (2) compute the checksum of the buffer list, then either write them into
|
||||
// a buffer provided by user, or compare it against a checksum provided
|
||||
// by user
|
||||
// (3) support different checksum version in future.
|
||||
//
|
||||
// For backward compatibility, checksum version 0 behaves the same as there is
|
||||
// no checksum (i.e., checksumByteSize returns 0, validate always returns true,
|
||||
// addBuffer and writeCheckSum does nothing).
|
||||
//
|
||||
// Notice that to detect package lost, ChecksumCalculator also keeps track of how
|
||||
// many times it generates/validates checksums, and might use it as part of the
|
||||
// checksum.
|
||||
//
|
||||
// To evaluate checksums from a list of data buffers buf1, buf2... Please call
|
||||
// addBuffer(buf1, buf1len), addBuffer(buf2, buf2len) ... in order.
|
||||
// Then if the checksum needs to be encoded into a buffer, one needs to allocate
|
||||
// a checksum buffer with size checksumByteSize(), and call
|
||||
// writeChecksum(checksumBuffer) to write the checksum to the buffer.
|
||||
// If the checksum needs to be validated against an existing one, one needs to
|
||||
// call validate(existChecksum, existChecksumLen).
|
||||
//
|
||||
// The checksum generator and validator must be set to the same version, and
|
||||
// the validator must check ALL checksums in the order they are generated,
|
||||
// otherwise the validation function will return false.
|
||||
//
|
||||
// It is allowed to change the checksum version between calculating two
|
||||
// checksums. This is designed for backward compatibility reason.
|
||||
//
|
||||
// Example 1, encoding and decoding:
|
||||
//
|
||||
// bool testChecksum(void* buf, size_t bufLen) {
|
||||
// // encoding message
|
||||
// ChecksumCalculator encoder;
|
||||
// encoder.setVersion(1);
|
||||
// encoder.addBuffer(buf, bufLen);
|
||||
// std::vector<unsigned char> message(bufLen + encoder.checksumByteSize());
|
||||
// memcpy(&message[0], buf, bufLen);
|
||||
// encoder.writeChecksum(&message[0] + bufLen, encoder.checksumByteSize());
|
||||
//
|
||||
// // decoding message
|
||||
// ChecksumCalculator decoder;
|
||||
// decoder.setVersion(1);
|
||||
// decoder.addBuffer(&message[0], bufLen);
|
||||
// return decoder.validate(&message[0] + bufLen, decoder.checksumByteSize());
|
||||
// }
|
||||
// The return value is true.
|
||||
//
|
||||
// Example 2, decoding will fail if the order of messages is wrong:
|
||||
//
|
||||
// bool testChecksumOrder(void* buf1, size_t bufLen1,
|
||||
// void* buf2, size_t bufLen2) {
|
||||
// // encoding messages
|
||||
// ChecksumCalculator encoder;
|
||||
// encoder.setVersion(1);
|
||||
//
|
||||
// std::vector<unsigned char> message1(bufLen1 + encoder.checksumByteSize());
|
||||
// std::vector<unsigned char> message2(bufLen2 + encoder.checksumByteSize());
|
||||
//
|
||||
//        encoder.addBuffer(buf1, bufLen1);
//        memcpy(&message1[0], buf1, bufLen1);
|
||||
// encoder.writeChecksum(&message1[0] + bufLen1, encoder.checksumByteSize());
|
||||
//
|
||||
//        encoder.addBuffer(buf2, bufLen2);
//        memcpy(&message2[0], buf2, bufLen2);
|
||||
// encoder.writeChecksum(&message2[0] + bufLen2, encoder.checksumByteSize());
|
||||
//
|
||||
// // decoding messages
|
||||
// ChecksumCalculator decoder;
|
||||
// decoder.setVersion(1);
|
||||
// decoder.addBuffer(&message2[0], bufLen2);
|
||||
// // returns false because the decoding order is not consistent with
|
||||
// // encoding order
|
||||
// if (!decoder.validate(&message2[0]+bufLen2, decoder.checksumByteSize())) {
|
||||
// return false;
|
||||
// }
|
||||
//
|
||||
// decoder.addBuffer(&message1[0], bufLen1);
|
||||
// if (!decoder.validate(&message1[0]+bufLen1, decoder.checksumByteSize())) {
|
||||
// return false;
|
||||
// }
|
||||
//
|
||||
// return false;
|
||||
// }
|
||||
|
||||
class ChecksumCalculator {
|
||||
public:
|
||||
// Get and set current checksum version
|
||||
uint32_t getVersion() const { return m_version; }
|
||||
// Call setVersion to set a checksum version. It should be called before
|
||||
// addBuffer(), writeChecksum() and validate(). And it should be called
|
||||
// exact once per rendering thread if both host and guest support checksum.
|
||||
// It won't be called if either host or guest does not support checksum.
|
||||
bool setVersion(uint32_t version);
|
||||
|
||||
// Maximum supported checksum version
|
||||
static uint32_t getMaxVersion();
|
||||
// A version string that looks like "ANDROID_EMU_CHECKSUM_HELPER_v1"
|
||||
// Used multiple times when the guest queries the maximum supported version
|
||||
// from the host.
|
||||
// The library owns the returned pointer. The returned pointer will be
|
||||
// deconstructed when unloading library.
|
||||
static const char* getMaxVersionStr();
|
||||
static const char* getMaxVersionStrPrefix();
|
||||
|
||||
// Size of checksum in the current version
|
||||
size_t checksumByteSize() const;
|
||||
|
||||
// Update the current checksum value from the data
|
||||
// at |buf| of |bufLen| bytes. Once all buffers
|
||||
// have been added, call writeChecksum() to store
|
||||
// the final checksum value and reset its state.
|
||||
void addBuffer(const void* buf, size_t bufLen);
|
||||
// Write the checksum from the list of buffers to outputChecksum
|
||||
// Will reset the list of buffers by calling resetChecksum.
|
||||
// Return false if the buffer is not long enough
|
||||
// Please query buffer size from checksumByteSize()
|
||||
bool writeChecksum(void* outputChecksum, size_t outputChecksumLen);
|
||||
// Restore the states for computing checksums.
|
||||
// Automatically called at the end of writeChecksum and validate.
|
||||
// Can also be used to abandon the current checksum being calculated.
|
||||
// Notes: it doesn't update the internal read / write counter
|
||||
void resetChecksum();
|
||||
|
||||
// Calculate the checksum from the list of buffers and
|
||||
// compare it with the checksum encoded in expectedChecksum
|
||||
// Will reset the list of buffers by calling resetChecksum.
|
||||
bool validate(const void* expectedChecksum, size_t expectedChecksumLen);
|
||||
protected:
|
||||
uint32_t m_version = 0;
|
||||
// A temporary state used to compute the total length of a list of buffers,
|
||||
// if addBuffer is called.
|
||||
uint32_t m_numRead = 0;
|
||||
uint32_t m_numWrite = 0;
|
||||
// m_isEncodingChecksum is true when between addBuffer and writeChecksum
|
||||
bool m_isEncodingChecksum = false;
|
||||
private:
|
||||
// Compute a 32bit checksum
|
||||
// Used in protocol v1
|
||||
uint32_t computeV1Checksum();
|
||||
// The buffer used in protocol version 1 to compute checksum.
|
||||
uint32_t m_v1BufferTotalLength = 0;
|
||||
};
|
||||
37
android/opengl/shared/OpenglCodecCommon/ErrorLog.h
Normal file
37
android/opengl/shared/OpenglCodecCommon/ErrorLog.h
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef _ERROR_LOG_H_
#define _ERROR_LOG_H_

// Logging macros for the OpenGL codec sources.
// ERR() always logs; DBG() logs only when EMUGL_DEBUG is defined and
// expands to a no-op statement otherwise.
#if defined(__ANDROID__)
// On Android, route messages through the system log.
#  include <cutils/log.h>
#  define ERR(...)    ALOGE(__VA_ARGS__)
#  ifdef EMUGL_DEBUG
#    define DBG(...)  ALOGD(__VA_ARGS__)
#  else
#    define DBG(...)  ((void)0)
#  endif
#else
// Off-device builds fall back to stderr.
#  include <stdio.h>
#  define ERR(...)    fprintf(stderr, __VA_ARGS__)
#  ifdef EMUGL_DEBUG
#    define DBG(...)  fprintf(stderr, __VA_ARGS__)
#  else
#    define DBG(...)  ((void)0)
#  endif
#endif

#endif  // _ERROR_LOG_H_
|
||||
53
android/opengl/shared/OpenglCodecCommon/FixedBuffer.h
Normal file
53
android/opengl/shared/OpenglCodecCommon/FixedBuffer.h
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef _FIXED_BUFFER_H
|
||||
#define _FIXED_BUFFER_H
|
||||
|
||||
// FixedBuffer: a simple grow-only byte buffer.
// alloc() reuses the current storage when it is already large enough and
// reallocates (discarding old contents) otherwise; the buffer never shrinks.
// The FixedBuffer owns its storage for its whole lifetime.
class FixedBuffer {
public:
    FixedBuffer(size_t initialSize = 0) {
        m_buffer = NULL;
        // m_bufferLen must start at 0 so the alloc() below actually
        // allocates. (The previous code set m_bufferLen = initialSize
        // first, which made alloc(initialSize) take the "already big
        // enough" early-out and return NULL storage while len() still
        // reported initialSize.)
        m_bufferLen = 0;
        alloc(initialSize);
    }

    ~FixedBuffer() {
        delete [] m_buffer;
        m_bufferLen = 0;
    }

    // Ensure capacity of at least |size| bytes and return the storage
    // pointer (NULL on allocation failure). Old contents are NOT preserved
    // when the buffer grows.
    void * alloc(size_t size) {
        if (m_bufferLen >= size)
            return (void *)(m_buffer);

        delete[] m_buffer;  // delete[] on NULL is a no-op

        m_bufferLen = size;
        m_buffer = new unsigned char[m_bufferLen];
        // NOTE(review): plain new only returns NULL in -fno-exceptions
        // builds (as used for this HAL); otherwise it throws.
        if (m_buffer == NULL)
            m_bufferLen = 0;

        return m_buffer;
    }
    void *ptr() { return m_buffer; }      // current storage (may be NULL)
    size_t len() { return m_bufferLen; }  // current capacity in bytes
private:
    unsigned char *m_buffer;
    size_t m_bufferLen;
};
|
||||
|
||||
#endif
|
||||
422
android/opengl/shared/OpenglCodecCommon/GLClientState.cpp
Normal file
422
android/opengl/shared/OpenglCodecCommon/GLClientState.cpp
Normal file
|
|
@ -0,0 +1,422 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#include "GLClientState.h"
|
||||
#include "ErrorLog.h"
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include "glUtils.h"
|
||||
#include <cutils/log.h>
|
||||
|
||||
#ifndef MAX
|
||||
#define MAX(a, b) ((a) < (b) ? (b) : (a))
|
||||
#endif
|
||||
|
||||
// Construct client-side GL state tracking for |nLocations| vertex attribute
// slots (at least the LAST_LOCATION fixed-function GLES1 slots).
GLClientState::GLClientState(int nLocations)
{
    // Always reserve room for the fixed-function locations.
    if (nLocations < LAST_LOCATION) {
        nLocations = LAST_LOCATION;
    }
    m_nLocations = nLocations;
    m_states = new VertexAttribState[m_nLocations];
    for (int i = 0; i < m_nLocations; i++) {
        m_states[i].enabled = 0;
        m_states[i].enableDirty = false;
        m_states[i].data = 0;
    }
    m_currentArrayVbo = 0;
    m_currentIndexVbo = 0;
    // init gl constants: map each fixed slot to its client-array enum.
    // All TEXCOORD* slots share GL_TEXTURE_COORD_ARRAY; the active texture
    // unit selects which slot is meant (see getLocation()).
    m_states[VERTEX_LOCATION].glConst = GL_VERTEX_ARRAY;
    m_states[NORMAL_LOCATION].glConst = GL_NORMAL_ARRAY;
    m_states[COLOR_LOCATION].glConst = GL_COLOR_ARRAY;
    m_states[POINTSIZE_LOCATION].glConst = GL_POINT_SIZE_ARRAY_OES;
    m_states[TEXCOORD0_LOCATION].glConst = GL_TEXTURE_COORD_ARRAY;
    m_states[TEXCOORD1_LOCATION].glConst = GL_TEXTURE_COORD_ARRAY;
    m_states[TEXCOORD2_LOCATION].glConst = GL_TEXTURE_COORD_ARRAY;
    m_states[TEXCOORD3_LOCATION].glConst = GL_TEXTURE_COORD_ARRAY;
    m_states[TEXCOORD4_LOCATION].glConst = GL_TEXTURE_COORD_ARRAY;
    m_states[TEXCOORD5_LOCATION].glConst = GL_TEXTURE_COORD_ARRAY;
    m_states[TEXCOORD6_LOCATION].glConst = GL_TEXTURE_COORD_ARRAY;
    m_states[TEXCOORD7_LOCATION].glConst = GL_TEXTURE_COORD_ARRAY;
    m_states[MATRIXINDEX_LOCATION].glConst = GL_MATRIX_INDEX_ARRAY_OES;
    m_states[WEIGHT_LOCATION].glConst = GL_WEIGHT_ARRAY_OES;
    m_activeTexture = 0;
    m_currentProgram = 0;

    // GL default pixel-store alignments.
    m_pixelStore.unpack_alignment = 4;
    m_pixelStore.pack_alignment = 4;

    // Texture state: zero all units (no enables, nothing bound), start with
    // unit 0 active, and an empty, lazily-grown texture record array.
    memset(m_tex.unit, 0, sizeof(m_tex.unit));
    m_tex.activeUnit = &m_tex.unit[0];
    m_tex.textures = NULL;
    m_tex.numTextures = 0;
    m_tex.allocTextures = 0;

    // m_maxVertexAttribs is filled in lazily (see setMaxVertexAttribs).
    m_maxVertexAttribsDirty = true;
}
|
||||
|
||||
GLClientState::~GLClientState()
{
    // m_states was allocated with new[] in the constructor, so it must be
    // released with delete[] — plain `delete m_states` (as before) is
    // undefined behavior for array allocations.
    delete [] m_states;
    // NOTE(review): m_tex.textures (realloc'd in addTextureRec) is not
    // freed here in the original code either — presumably freed elsewhere
    // or leaked for the process lifetime; confirm before adding free().
}
|
||||
|
||||
void GLClientState::enable(int location, int state)
|
||||
{
|
||||
if (!validLocation(location)) {
|
||||
return;
|
||||
}
|
||||
|
||||
m_states[location].enableDirty |= (state != m_states[location].enabled);
|
||||
m_states[location].enabled = state;
|
||||
}
|
||||
|
||||
void GLClientState::setState(int location, int size, GLenum type, GLboolean normalized, GLsizei stride, const void *data)
|
||||
{
|
||||
if (!validLocation(location)) {
|
||||
return;
|
||||
}
|
||||
m_states[location].size = size;
|
||||
m_states[location].type = type;
|
||||
m_states[location].stride = stride;
|
||||
m_states[location].data = (void*)data;
|
||||
m_states[location].bufferObject = m_currentArrayVbo;
|
||||
m_states[location].elementSize = size ? (glSizeof(type) * size) : 0;
|
||||
m_states[location].normalized = normalized;
|
||||
}
|
||||
|
||||
void GLClientState::setBufferObject(int location, GLuint id)
|
||||
{
|
||||
if (!validLocation(location)) {
|
||||
return;
|
||||
}
|
||||
|
||||
m_states[location].bufferObject = id;
|
||||
}
|
||||
|
||||
// Returns a pointer into m_states for |location|, or NULL when the
// location is out of range.
const GLClientState::VertexAttribState * GLClientState::getState(int location)
{
    return validLocation(location) ? &m_states[location] : NULL;
}
|
||||
|
||||
// Like getState(), but also reports — and clears — the enable-dirty flag
// through |enableChanged| (which may be NULL).
const GLClientState::VertexAttribState * GLClientState::getStateAndEnableDirty(int location, bool *enableChanged)
{
    if (!validLocation(location)) {
        return NULL;
    }

    VertexAttribState* attrib = &m_states[location];
    if (enableChanged) {
        *enableChanged = attrib->enableDirty;
    }
    attrib->enableDirty = false;
    return attrib;
}
|
||||
|
||||
// Map a GLES1 client-array enum onto the internal attribute slot index.
// GL_TEXTURE_COORD_ARRAY resolves against the active texture unit.
// Unrecognized values pass through unchanged (assumed to already be a
// location index).
int GLClientState::getLocation(GLenum loc)
{
    switch (loc) {
    case GL_VERTEX_ARRAY:           return int(VERTEX_LOCATION);
    case GL_NORMAL_ARRAY:           return int(NORMAL_LOCATION);
    case GL_COLOR_ARRAY:            return int(COLOR_LOCATION);
    case GL_POINT_SIZE_ARRAY_OES:   return int(POINTSIZE_LOCATION);
    case GL_TEXTURE_COORD_ARRAY:    return int(TEXCOORD0_LOCATION + m_activeTexture);
    case GL_MATRIX_INDEX_ARRAY_OES: return int(MATRIXINDEX_LOCATION);
    case GL_WEIGHT_ARRAY_OES:       return int(WEIGHT_LOCATION);
    default:                        return loc;
    }
}
|
||||
|
||||
// Answer a gl*ArrayPointer query: translate |pname| to the matching
// attribute slot and write its client-side data pointer to |params|.
// Unknown pnames leave |params| untouched.
void GLClientState::getClientStatePointer(GLenum pname, GLvoid** params)
{
    const VertexAttribState *state = NULL;
    switch (pname) {
    case GL_VERTEX_ARRAY_POINTER:
        state = getState(VERTEX_LOCATION);
        break;
    case GL_NORMAL_ARRAY_POINTER:
        state = getState(NORMAL_LOCATION);
        break;
    case GL_COLOR_ARRAY_POINTER:
        state = getState(COLOR_LOCATION);
        break;
    case GL_TEXTURE_COORD_ARRAY_POINTER:
        // Texcoord pointers live per texture unit.
        state = getState(getActiveTexture() + TEXCOORD0_LOCATION);
        break;
    case GL_POINT_SIZE_ARRAY_POINTER_OES:
        state = getState(POINTSIZE_LOCATION);
        break;
    case GL_MATRIX_INDEX_ARRAY_POINTER_OES:
        state = getState(MATRIXINDEX_LOCATION);
        break;
    case GL_WEIGHT_ARRAY_POINTER_OES:
        state = getState(WEIGHT_LOCATION);
        break;
    }
    if (state && params)
        *params = state->data;
}
|
||||
|
||||
// Track glPixelStorei state. Only pack/unpack alignment is supported;
// legal alignments are 1, 2, 4 and 8 (per the GLES spec).
// Returns 0 on success, or the GL error code to raise.
int GLClientState::setPixelStore(GLenum param, GLint value)
{
    const bool alignmentOk =
        (value == 1 || value == 2 || value == 4 || value == 8);

    switch (param) {
    case GL_UNPACK_ALIGNMENT:
        if (!alignmentOk) return GL_INVALID_VALUE;
        m_pixelStore.unpack_alignment = value;
        return 0;
    case GL_PACK_ALIGNMENT:
        if (!alignmentOk) return GL_INVALID_VALUE;
        m_pixelStore.pack_alignment = value;
        return 0;
    default:
        return GL_INVALID_ENUM;
    }
}
|
||||
|
||||
|
||||
|
||||
|
||||
// Total byte size of a width x height pixel rectangle, honoring the pack
// (readback) or unpack (upload) row alignment. Returns 0 for empty
// rectangles; logs (but still returns 0-sized rows) for unknown formats.
size_t GLClientState::pixelDataSize(GLsizei width, GLsizei height, GLenum format, GLenum type, int pack) const
{
    if (width <= 0 || height <= 0) return 0;

    const int pixelsize = glUtilsPixelBitSize(format, type) >> 3;
    const int alignment = pack ? m_pixelStore.pack_alignment
                               : m_pixelStore.unpack_alignment;

    if (pixelsize == 0 ) {
        ERR("unknown pixel size: width: %d height: %d format: %d type: %d pack: %d align: %d\n",
            width, height, format, type, pack, alignment);
    }

    // Round each row up to the next multiple of the alignment.
    size_t linesize = pixelsize * width;
    size_t aligned_linesize = int(linesize / alignment) * alignment;
    if (aligned_linesize < linesize) {
        aligned_linesize += alignment;
    }
    return aligned_linesize * height;
}
|
||||
|
||||
// glActiveTexture(GL_TEXTURE0 + i): select which unit's state subsequent
// texture calls operate on. Returns GL_INVALID_ENUM when |texture| is out
// of the tracked range (unsigned wrap also catches values below
// GL_TEXTURE0).
GLenum GLClientState::setActiveTextureUnit(GLenum texture)
{
    const GLuint unitIdx = texture - GL_TEXTURE0;
    if (unitIdx >= MAX_TEXTURE_UNITS) {
        return GL_INVALID_ENUM;
    }
    m_tex.activeUnit = &m_tex.unit[unitIdx];
    return GL_NO_ERROR;
}
|
||||
|
||||
// Recover the GL_TEXTUREi enum from the active-unit pointer's offset into
// the unit array.
GLenum GLClientState::getActiveTextureUnit() const
{
    return GL_TEXTURE0 + (m_tex.activeUnit - m_tex.unit);
}
|
||||
|
||||
// glEnable for a texture target: set the matching bit on the active unit.
// Targets other than 2D / EXTERNAL_OES are ignored.
void GLClientState::enableTextureTarget(GLenum target)
{
    if (target == GL_TEXTURE_2D) {
        m_tex.activeUnit->enables |= (1u << TEXTURE_2D);
    } else if (target == GL_TEXTURE_EXTERNAL_OES) {
        m_tex.activeUnit->enables |= (1u << TEXTURE_EXTERNAL);
    }
}
|
||||
|
||||
// glDisable for a texture target: clear the matching bit on the active
// unit. Targets other than 2D / EXTERNAL_OES are ignored.
void GLClientState::disableTextureTarget(GLenum target)
{
    if (target == GL_TEXTURE_2D) {
        m_tex.activeUnit->enables &= ~(1u << TEXTURE_2D);
    } else if (target == GL_TEXTURE_EXTERNAL_OES) {
        m_tex.activeUnit->enables &= ~(1u << TEXTURE_EXTERNAL);
    }
}
|
||||
|
||||
// Target-priority rule: EXTERNAL_OES wins over 2D; when neither target is
// enabled on the active unit, return the caller-supplied |allDisabled|.
GLenum GLClientState::getPriorityEnabledTarget(GLenum allDisabled) const
{
    const unsigned int enables = m_tex.activeUnit->enables;
    if (enables & (1u << TEXTURE_EXTERNAL)) return GL_TEXTURE_EXTERNAL_OES;
    if (enables & (1u << TEXTURE_2D))       return GL_TEXTURE_2D;
    return allDisabled;
}
|
||||
|
||||
int GLClientState::compareTexId(const void* pid, const void* prec)
|
||||
{
|
||||
const GLuint* id = (const GLuint*)pid;
|
||||
const TextureRec* rec = (const TextureRec*)prec;
|
||||
return (GLint)(*id) - (GLint)rec->id;
|
||||
}
|
||||
|
||||
// glBindTexture(target, texture): set the target binding of the active
// texture unit. Returns GL_NO_ERROR on success, GL_INVALID_OPERATION if
// |texture| was previously bound to a different target, or
// GL_OUT_OF_MEMORY if a record for it could not be allocated.
// If |firstUse| is non-NULL it is set to whether this is the first bind of
// this texture name. Texture 0 (the default texture) is always accepted
// and never tracked in the record array.
GLenum GLClientState::bindTexture(GLenum target, GLuint texture,
        GLboolean* firstUse)
{
    GLboolean first = GL_FALSE;
    TextureRec* texrec = NULL;
    if (texture != 0) {
        // Look up the name in the sorted record array (kept sorted by
        // addTextureRec so bsearch is valid).
        if (m_tex.textures) {
            texrec = (TextureRec*)bsearch(&texture, m_tex.textures,
                    m_tex.numTextures, sizeof(TextureRec), compareTexId);
        }
        if (!texrec) {
            // First use of this name: record it with the current target.
            if (!(texrec = addTextureRec(texture, target))) {
                return GL_OUT_OF_MEMORY;
            }
            first = GL_TRUE;
        }
        // Binding an existing texture to a different target is an error.
        // NOTE(review): a record created just above passes this check by
        // construction, but it stays recorded even if a later rebind to a
        // different target fails here.
        if (target != texrec->target) {
            return GL_INVALID_OPERATION;
        }
    }

    // Update the active unit's binding for the targets we track; other
    // targets fall through without state changes.
    switch (target) {
    case GL_TEXTURE_2D:
        m_tex.activeUnit->texture[TEXTURE_2D] = texture;
        break;
    case GL_TEXTURE_EXTERNAL_OES:
        m_tex.activeUnit->texture[TEXTURE_EXTERNAL] = texture;
        break;
    }

    if (firstUse) {
        *firstUse = first;
    }

    return GL_NO_ERROR;
}
|
||||
|
||||
// Insert a new (id, target) record into m_tex.textures, keeping the array
// sorted by id so bindTexture()/deleteTextures() can bsearch it.
// Returns the new record, or NULL on allocation failure / capacity cap.
GLClientState::TextureRec* GLClientState::addTextureRec(GLuint id,
        GLenum target)
{
    // Grow the array when full: double (min 4), with an overflow-safe
    // clamp at MAX_TEXTURES entries.
    if (m_tex.numTextures == m_tex.allocTextures) {
        const GLuint MAX_TEXTURES = 0xFFFFFFFFu;

        GLuint newAlloc;
        if (MAX_TEXTURES - m_tex.allocTextures >= m_tex.allocTextures) {
            // Doubling cannot overflow: 2 * alloc <= MAX_TEXTURES.
            newAlloc = MAX(4, 2 * m_tex.allocTextures);
        } else {
            // Doubling would overflow; saturate at the cap (fail if
            // already there).
            if (m_tex.allocTextures == MAX_TEXTURES) {
                return NULL;
            }
            newAlloc = MAX_TEXTURES;
        }

        TextureRec* newTextures = (TextureRec*)realloc(m_tex.textures,
                newAlloc * sizeof(TextureRec));
        if (!newTextures) {
            return NULL;
        }

        m_tex.textures = newTextures;
        m_tex.allocTextures = newAlloc;
    }

    // Insertion sort step: shift larger ids up one slot, then place the
    // new record in its sorted position.
    TextureRec* tex = m_tex.textures + m_tex.numTextures;
    TextureRec* prev = tex - 1;
    while (tex != m_tex.textures && id < prev->id) {
        *tex-- = *prev--;
    }
    tex->id = id;
    tex->target = target;
    m_tex.numTextures++;

    return tex;
}
|
||||
|
||||
// Texture name currently bound to |target| on the active unit; 0 for
// untracked targets (or when nothing is bound).
GLuint GLClientState::getBoundTexture(GLenum target) const
{
    if (target == GL_TEXTURE_2D)
        return m_tex.activeUnit->texture[TEXTURE_2D];
    if (target == GL_TEXTURE_EXTERNAL_OES)
        return m_tex.activeUnit->texture[TEXTURE_EXTERNAL];
    return 0;
}
|
||||
|
||||
// glDeleteTextures: drop the records for the given texture names and clear
// any unit bindings that referenced them. Names with no record are ignored.
void GLClientState::deleteTextures(GLsizei n, const GLuint* textures)
{
    // Updating the textures array could be made more efficient when deleting
    // several textures:
    // - compacting the array could be done in a single pass once the deleted
    //   textures are marked, or
    // - could swap deleted textures to the end and re-sort.
    TextureRec* texrec;
    for (const GLuint* texture = textures; texture != textures + n; texture++) {
        texrec = (TextureRec*)bsearch(texture, m_tex.textures,
                m_tex.numTextures, sizeof(TextureRec), compareTexId);
        if (texrec) {
            // Compact the sorted array over the deleted slot (memmove
            // because source and destination overlap).
            const TextureRec* end = m_tex.textures + m_tex.numTextures;
            memmove(texrec, texrec + 1,
                    (end - texrec - 1) * sizeof(TextureRec));
            m_tex.numTextures--;

            // Unbind the deleted texture from every unit that had it bound.
            for (TextureUnit* unit = m_tex.unit;
                 unit != m_tex.unit + MAX_TEXTURE_UNITS;
                 unit++)
            {
                if (unit->texture[TEXTURE_2D] == *texture) {
                    unit->texture[TEXTURE_2D] = 0;
                } else if (unit->texture[TEXTURE_EXTERNAL] == *texture) {
                    unit->texture[TEXTURE_EXTERNAL] = 0;
                }
            }
        }
    }
}
|
||||
462
android/opengl/shared/OpenglCodecCommon/GLClientState.h
Normal file
462
android/opengl/shared/OpenglCodecCommon/GLClientState.h
Normal file
|
|
@ -0,0 +1,462 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef _GL_CLIENT_STATE_H_
|
||||
#define _GL_CLIENT_STATE_H_
|
||||
|
||||
#define GL_API
|
||||
#ifndef ANDROID
|
||||
#define GL_APIENTRY
|
||||
#define GL_APIENTRYP
|
||||
#endif
|
||||
|
||||
#include <GLES/gl.h>
|
||||
#include <GLES/glext.h>
|
||||
#include <GLES2/gl2.h>
|
||||
#include <GLES2/gl2ext.h>
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include "ErrorLog.h"
|
||||
#include "codec_defs.h"
|
||||
|
||||
class GLClientState {
|
||||
public:
|
||||
typedef enum {
|
||||
VERTEX_LOCATION = 0,
|
||||
NORMAL_LOCATION = 1,
|
||||
COLOR_LOCATION = 2,
|
||||
POINTSIZE_LOCATION = 3,
|
||||
TEXCOORD0_LOCATION = 4,
|
||||
TEXCOORD1_LOCATION = 5,
|
||||
TEXCOORD2_LOCATION = 6,
|
||||
TEXCOORD3_LOCATION = 7,
|
||||
TEXCOORD4_LOCATION = 8,
|
||||
TEXCOORD5_LOCATION = 9,
|
||||
TEXCOORD6_LOCATION = 10,
|
||||
TEXCOORD7_LOCATION = 11,
|
||||
MATRIXINDEX_LOCATION = 12,
|
||||
WEIGHT_LOCATION = 13,
|
||||
LAST_LOCATION = 14
|
||||
} StateLocation;
|
||||
|
||||
typedef struct {
|
||||
GLint enabled;
|
||||
GLint size;
|
||||
GLenum type;
|
||||
GLsizei stride;
|
||||
void *data;
|
||||
GLuint bufferObject;
|
||||
GLenum glConst;
|
||||
unsigned int elementSize;
|
||||
bool enableDirty; // true if any enable state has changed since last draw
|
||||
bool normalized;
|
||||
} VertexAttribState;
|
||||
|
||||
typedef struct {
|
||||
int unpack_alignment;
|
||||
int pack_alignment;
|
||||
} PixelStoreState;
|
||||
|
||||
enum {
|
||||
MAX_TEXTURE_UNITS = 32,
|
||||
};
|
||||
|
||||
public:
|
||||
GLClientState(int nLocations = CODEC_MAX_VERTEX_ATTRIBUTES);
|
||||
~GLClientState();
|
||||
int nLocations() { return m_nLocations; }
|
||||
const PixelStoreState *pixelStoreState() { return &m_pixelStore; }
|
||||
int setPixelStore(GLenum param, GLint value);
|
||||
GLuint currentArrayVbo() { return m_currentArrayVbo; }
|
||||
GLuint currentIndexVbo() { return m_currentIndexVbo; }
|
||||
void enable(int location, int state);
|
||||
void setState(int location, int size, GLenum type, GLboolean normalized, GLsizei stride, const void *data);
|
||||
void setBufferObject(int location, GLuint id);
|
||||
const VertexAttribState *getState(int location);
|
||||
const VertexAttribState *getStateAndEnableDirty(int location, bool *enableChanged);
|
||||
int getLocation(GLenum loc);
|
||||
void setActiveTexture(int texUnit) {m_activeTexture = texUnit; };
|
||||
int getActiveTexture() const { return m_activeTexture; }
|
||||
void setMaxVertexAttribs(int val) {
|
||||
m_maxVertexAttribs = val;
|
||||
m_maxVertexAttribsDirty = false;
|
||||
}
|
||||
|
||||
void unBindBuffer(GLuint id)
|
||||
{
|
||||
if (m_currentArrayVbo == id) m_currentArrayVbo = 0;
|
||||
else if (m_currentIndexVbo == id) m_currentIndexVbo = 0;
|
||||
}
|
||||
|
||||
int bindBuffer(GLenum target, GLuint id)
|
||||
{
|
||||
int err = 0;
|
||||
switch(target) {
|
||||
case GL_ARRAY_BUFFER:
|
||||
m_currentArrayVbo = id;
|
||||
break;
|
||||
case GL_ELEMENT_ARRAY_BUFFER:
|
||||
m_currentIndexVbo = id;
|
||||
break;
|
||||
default:
|
||||
err = -1;
|
||||
}
|
||||
return err;
|
||||
}
|
||||
|
||||
int getBuffer(GLenum target)
|
||||
{
|
||||
int ret=0;
|
||||
switch (target) {
|
||||
case GL_ARRAY_BUFFER:
|
||||
ret = m_currentArrayVbo;
|
||||
break;
|
||||
case GL_ELEMENT_ARRAY_BUFFER:
|
||||
ret = m_currentIndexVbo;
|
||||
break;
|
||||
default:
|
||||
ret = -1;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
size_t pixelDataSize(GLsizei width, GLsizei height, GLenum format, GLenum type, int pack) const;
|
||||
|
||||
void setCurrentProgram(GLint program) { m_currentProgram = program; }
|
||||
GLint currentProgram() const { return m_currentProgram; }
|
||||
|
||||
/* OES_EGL_image_external
|
||||
*
|
||||
* These functions manipulate GL state which interacts with the
|
||||
* OES_EGL_image_external extension, to support client-side emulation on
|
||||
* top of host implementations that don't have it.
|
||||
*
|
||||
* Most of these calls should only be used with TEXTURE_2D or
|
||||
* TEXTURE_EXTERNAL_OES texture targets; TEXTURE_CUBE_MAP or other extension
|
||||
* targets should bypass this. An exception is bindTexture(), which should
|
||||
* see all glBindTexture() calls for any target.
|
||||
*/
|
||||
|
||||
// glActiveTexture(GL_TEXTURE0 + i)
|
||||
// Sets the active texture unit. Up to MAX_TEXTURE_UNITS are supported.
|
||||
GLenum setActiveTextureUnit(GLenum texture);
|
||||
GLenum getActiveTextureUnit() const;
|
||||
|
||||
// glEnable(GL_TEXTURE_(2D|EXTERNAL_OES))
|
||||
void enableTextureTarget(GLenum target);
|
||||
|
||||
// glDisable(GL_TEXTURE_(2D|EXTERNAL_OES))
|
||||
void disableTextureTarget(GLenum target);
|
||||
|
||||
// Implements the target priority logic:
|
||||
// * Return GL_TEXTURE_EXTERNAL_OES if enabled, else
|
||||
// * Return GL_TEXTURE_2D if enabled, else
|
||||
// * Return the allDisabled value.
|
||||
// For some cases passing GL_TEXTURE_2D for allDisabled makes callee code
|
||||
// simpler; for other cases passing a recognizable enum like GL_ZERO or
|
||||
// GL_INVALID_ENUM is appropriate.
|
||||
GLenum getPriorityEnabledTarget(GLenum allDisabled) const;
|
||||
|
||||
// glBindTexture(GL_TEXTURE_*, ...)
|
||||
// Set the target binding of the active texture unit to texture. Returns
|
||||
// GL_NO_ERROR on success or GL_INVALID_OPERATION if the texture has
|
||||
// previously been bound to a different target. If firstUse is not NULL,
|
||||
// it is set to indicate whether this is the first use of the texture.
|
||||
// For accurate error detection, bindTexture should be called for *all*
|
||||
// targets, not just 2D and EXTERNAL_OES.
|
||||
GLenum bindTexture(GLenum target, GLuint texture, GLboolean* firstUse);
|
||||
|
||||
// Return the texture currently bound to GL_TEXTURE_(2D|EXTERNAL_OES).
|
||||
GLuint getBoundTexture(GLenum target) const;
|
||||
|
||||
// glDeleteTextures(...)
|
||||
// Remove references to the to-be-deleted textures.
|
||||
void deleteTextures(GLsizei n, const GLuint* textures);
|
||||
|
||||
private:
|
||||
PixelStoreState m_pixelStore;
|
||||
VertexAttribState *m_states;
|
||||
int m_maxVertexAttribs;
|
||||
bool m_maxVertexAttribsDirty;
|
||||
int m_nLocations;
|
||||
GLuint m_currentArrayVbo;
|
||||
GLuint m_currentIndexVbo;
|
||||
int m_activeTexture;
|
||||
GLint m_currentProgram;
|
||||
|
||||
bool validLocation(int location) { return (location >= 0 && location < m_nLocations); }
|
||||
|
||||
enum TextureTarget {
|
||||
TEXTURE_2D = 0,
|
||||
TEXTURE_EXTERNAL = 1,
|
||||
TEXTURE_TARGET_COUNT
|
||||
};
|
||||
struct TextureUnit {
|
||||
unsigned int enables;
|
||||
GLuint texture[TEXTURE_TARGET_COUNT];
|
||||
};
|
||||
struct TextureRec {
|
||||
GLuint id;
|
||||
GLenum target;
|
||||
};
|
||||
struct TextureState {
|
||||
TextureUnit unit[MAX_TEXTURE_UNITS];
|
||||
TextureUnit* activeUnit;
|
||||
TextureRec* textures;
|
||||
GLuint numTextures;
|
||||
GLuint allocTextures;
|
||||
};
|
||||
TextureState m_tex;
|
||||
|
||||
static int compareTexId(const void* pid, const void* prec);
|
||||
TextureRec* addTextureRec(GLuint id, GLenum target);
|
||||
|
||||
public:
|
||||
void getClientStatePointer(GLenum pname, GLvoid** params);
|
||||
|
||||
template <class T>
|
||||
int getVertexAttribParameter(GLuint index, GLenum param, T *ptr)
|
||||
{
|
||||
bool handled = true;
|
||||
const VertexAttribState *vertexAttrib = getState(index);
|
||||
if (vertexAttrib == NULL) {
|
||||
ERR("getVeterxAttriParameter for non existant index %d\n", index);
|
||||
// set gl error;
|
||||
return handled;
|
||||
}
|
||||
|
||||
switch(param) {
|
||||
case GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING:
|
||||
*ptr = (T)(vertexAttrib->bufferObject);
|
||||
break;
|
||||
case GL_VERTEX_ATTRIB_ARRAY_ENABLED:
|
||||
*ptr = (T)(vertexAttrib->enabled);
|
||||
break;
|
||||
case GL_VERTEX_ATTRIB_ARRAY_SIZE:
|
||||
*ptr = (T)(vertexAttrib->size);
|
||||
break;
|
||||
case GL_VERTEX_ATTRIB_ARRAY_STRIDE:
|
||||
*ptr = (T)(vertexAttrib->stride);
|
||||
break;
|
||||
case GL_VERTEX_ATTRIB_ARRAY_TYPE:
|
||||
*ptr = (T)(vertexAttrib->type);
|
||||
break;
|
||||
case GL_VERTEX_ATTRIB_ARRAY_NORMALIZED:
|
||||
*ptr = (T)(vertexAttrib->normalized);
|
||||
break;
|
||||
case GL_CURRENT_VERTEX_ATTRIB:
|
||||
handled = false;
|
||||
break;
|
||||
default:
|
||||
handled = false;
|
||||
ERR("unknown vertex-attrib parameter param %d\n", param);
|
||||
}
|
||||
return handled;
|
||||
}
|
||||
|
||||
template <class T>
|
||||
bool getClientStateParameter(GLenum param, T* ptr)
|
||||
{
|
||||
bool isClientStateParam = false;
|
||||
switch (param) {
|
||||
case GL_CLIENT_ACTIVE_TEXTURE: {
|
||||
GLint tex = getActiveTexture() + GL_TEXTURE0;
|
||||
*ptr = tex;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_VERTEX_ARRAY_SIZE: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::VERTEX_LOCATION);
|
||||
*ptr = state->size;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_VERTEX_ARRAY_TYPE: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::VERTEX_LOCATION);
|
||||
*ptr = state->type;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_VERTEX_ARRAY_STRIDE: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::VERTEX_LOCATION);
|
||||
*ptr = state->stride;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_COLOR_ARRAY_SIZE: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::COLOR_LOCATION);
|
||||
*ptr = state->size;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_COLOR_ARRAY_TYPE: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::COLOR_LOCATION);
|
||||
*ptr = state->type;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_COLOR_ARRAY_STRIDE: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::COLOR_LOCATION);
|
||||
*ptr = state->stride;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_NORMAL_ARRAY_TYPE: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::NORMAL_LOCATION);
|
||||
*ptr = state->type;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_NORMAL_ARRAY_STRIDE: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::NORMAL_LOCATION);
|
||||
*ptr = state->stride;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_TEXTURE_COORD_ARRAY_SIZE: {
|
||||
const GLClientState::VertexAttribState *state = getState(getActiveTexture() + GLClientState::TEXCOORD0_LOCATION);
|
||||
*ptr = state->size;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_TEXTURE_COORD_ARRAY_TYPE: {
|
||||
const GLClientState::VertexAttribState *state = getState(getActiveTexture() + GLClientState::TEXCOORD0_LOCATION);
|
||||
*ptr = state->type;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_TEXTURE_COORD_ARRAY_STRIDE: {
|
||||
const GLClientState::VertexAttribState *state = getState(getActiveTexture() + GLClientState::TEXCOORD0_LOCATION);
|
||||
*ptr = state->stride;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_POINT_SIZE_ARRAY_TYPE_OES: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::POINTSIZE_LOCATION);
|
||||
*ptr = state->type;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_POINT_SIZE_ARRAY_STRIDE_OES: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::POINTSIZE_LOCATION);
|
||||
*ptr = state->stride;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_MATRIX_INDEX_ARRAY_SIZE_OES: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::MATRIXINDEX_LOCATION);
|
||||
*ptr = state->size;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_MATRIX_INDEX_ARRAY_TYPE_OES: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::MATRIXINDEX_LOCATION);
|
||||
*ptr = state->type;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_MATRIX_INDEX_ARRAY_STRIDE_OES: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::MATRIXINDEX_LOCATION);
|
||||
*ptr = state->stride;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_WEIGHT_ARRAY_SIZE_OES: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::WEIGHT_LOCATION);
|
||||
*ptr = state->size;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_WEIGHT_ARRAY_TYPE_OES: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::WEIGHT_LOCATION);
|
||||
*ptr = state->type;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_WEIGHT_ARRAY_STRIDE_OES: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::WEIGHT_LOCATION);
|
||||
*ptr = state->stride;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_VERTEX_ARRAY_BUFFER_BINDING: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::VERTEX_LOCATION);
|
||||
*ptr = state->bufferObject;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_NORMAL_ARRAY_BUFFER_BINDING: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::NORMAL_LOCATION);
|
||||
*ptr = state->bufferObject;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_COLOR_ARRAY_BUFFER_BINDING: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::COLOR_LOCATION);
|
||||
*ptr = state->bufferObject;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_TEXTURE_COORD_ARRAY_BUFFER_BINDING: {
|
||||
const GLClientState::VertexAttribState *state = getState(getActiveTexture()+GLClientState::TEXCOORD0_LOCATION);
|
||||
*ptr = state->bufferObject;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_POINT_SIZE_ARRAY_BUFFER_BINDING_OES: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::POINTSIZE_LOCATION);
|
||||
*ptr = state->bufferObject;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_MATRIX_INDEX_ARRAY_BUFFER_BINDING_OES: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::MATRIXINDEX_LOCATION);
|
||||
*ptr = state->bufferObject;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_WEIGHT_ARRAY_BUFFER_BINDING_OES: {
|
||||
const GLClientState::VertexAttribState *state = getState(GLClientState::WEIGHT_LOCATION);
|
||||
*ptr = state->bufferObject;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_ARRAY_BUFFER_BINDING: {
|
||||
int buffer = getBuffer(GL_ARRAY_BUFFER);
|
||||
*ptr = buffer;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_ELEMENT_ARRAY_BUFFER_BINDING: {
|
||||
int buffer = getBuffer(GL_ELEMENT_ARRAY_BUFFER);
|
||||
*ptr = buffer;
|
||||
isClientStateParam = true;
|
||||
break;
|
||||
}
|
||||
case GL_MAX_VERTEX_ATTRIBS: {
|
||||
if (m_maxVertexAttribsDirty) {
|
||||
isClientStateParam = false;
|
||||
} else {
|
||||
*ptr = m_maxVertexAttribs;
|
||||
isClientStateParam = true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
return isClientStateParam;
|
||||
}
|
||||
|
||||
};
|
||||
#endif
|
||||
497
android/opengl/shared/OpenglCodecCommon/GLSharedGroup.cpp
Executable file
497
android/opengl/shared/OpenglCodecCommon/GLSharedGroup.cpp
Executable file
|
|
@ -0,0 +1,497 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#include "GLSharedGroup.h"
|
||||
|
||||
/**** KeyedVector utilities ****/
|
||||
|
||||
template <typename T>
|
||||
static void clearObjectMap(android::DefaultKeyedVector<GLuint, T>& v) {
|
||||
for (size_t i = 0; i < v.size(); i++)
|
||||
delete v.valueAt(i);
|
||||
v.clear();
|
||||
}
|
||||
|
||||
/**** BufferData ****/
|
||||
|
||||
/**** BufferData ****/

BufferData::BufferData() : m_size(0) {}

// Allocates `size` bytes of backing store and, when `data` is non-NULL,
// copies the caller's bytes into it.
BufferData::BufferData(GLsizeiptr size, void * data) : m_size(size)
{
    void * buffer = NULL;
    if (size>0) buffer = m_fixedBuffer.alloc(size);
    // Guard on `buffer` too: the original copied whenever data != NULL, so a
    // zero-size buffer with non-NULL data reached memcpy(NULL, data, 0),
    // which is undefined behavior.
    if (buffer && data) memcpy(buffer, data, size);
}
|
||||
|
||||
/**** ProgramData ****/
|
||||
// ProgramData tracks the uniform layout of a linked GLES2 program so that
// uniform locations can be translated between host and app views and
// sampler bindings remembered. Starts empty; initProgramData() must run
// (after a successful link) before the index table is usable.
ProgramData::ProgramData() : m_numIndexes(0),
                             m_initialized(false),
                             m_locShiftWAR(false)
{
    m_Indexes = NULL;
}

// (Re)allocate the per-uniform index table for `numIndexes` entries, mark
// the program initialized, and reset the location-shift workaround flag
// (it is re-derived later by setupLocationShiftWAR()).
void ProgramData::initProgramData(GLuint numIndexes)
{
    m_initialized = true;
    m_numIndexes = numIndexes;
    delete[] m_Indexes;        // safe to delete[] NULL on first call
    m_Indexes = new IndexInfo[numIndexes];
    m_locShiftWAR = false;
}

// Whether initProgramData() has been called for this program.
bool ProgramData::isInitialized()
{
    return m_initialized;
}

ProgramData::~ProgramData()
{
    delete[] m_Indexes;
    m_Indexes = NULL;
}
|
||||
|
||||
// Record the host-reported info for uniform `index`: its base host
// location, array size, and GL type. App-visible base locations (appBase)
// are assigned densely — each uniform starts immediately after the previous
// one — so app locations are contiguous regardless of how the host spaced
// its locations. NOTE(review): this assumes indexes are filled in ascending
// order, since appBase is derived from entry index-1.
void ProgramData::setIndexInfo(GLuint index, GLint base, GLint size, GLenum type)
{
    if (index>=m_numIndexes)
        return;
    m_Indexes[index].base = base;
    m_Indexes[index].size = size;
    m_Indexes[index].type = type;
    if (index > 0) {
        m_Indexes[index].appBase = m_Indexes[index-1].appBase +
                                   m_Indexes[index-1].size;
    }
    else {
        m_Indexes[index].appBase = 0;
    }
    // Stride between array elements in host locations; refined later by
    // locationWARHostToApp() once a real array element is observed.
    m_Indexes[index].hostLocsPerElement = 1;
    m_Indexes[index].flags = 0;
    m_Indexes[index].samplerValue = 0;
}

// OR `flags` (INDEX_FLAG_* bits) into the uniform's flag word.
void ProgramData::setIndexFlags(GLuint index, GLuint flags)
{
    if (index >= m_numIndexes)
        return;
    m_Indexes[index].flags |= flags;
}
|
||||
|
||||
GLuint ProgramData::getIndexForLocation(GLint location)
|
||||
{
|
||||
GLuint index = m_numIndexes;
|
||||
GLint minDist = -1;
|
||||
for (GLuint i=0;i<m_numIndexes;++i)
|
||||
{
|
||||
GLint dist = location - m_Indexes[i].base;
|
||||
if (dist >= 0 &&
|
||||
(minDist < 0 || dist < minDist)) {
|
||||
index = i;
|
||||
minDist = dist;
|
||||
}
|
||||
}
|
||||
return index;
|
||||
}
|
||||
|
||||
GLenum ProgramData::getTypeForLocation(GLint location)
|
||||
{
|
||||
GLuint index = getIndexForLocation(location);
|
||||
if (index<m_numIndexes) {
|
||||
return m_Indexes[index].type;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Decide whether uniform-location translation is needed for this program.
// The WAR is enabled only when every host base location has a zero low
// 16-bit half (presumably the host encodes the uniform index in the upper
// 16 bits — TODO confirm against the host-side encoder) and there is more
// than one uniform.
void ProgramData::setupLocationShiftWAR()
{
    m_locShiftWAR = false;
    for (GLuint i=0; i<m_numIndexes; i++) {
        if (0 != (m_Indexes[i].base & 0xffff)) {
            return;     // a "normal" location exists; no translation needed
        }
    }
    // if we have one uniform at location 0, we do not need the WAR.
    if (m_numIndexes > 1) {
        m_locShiftWAR = true;
    }
}

// Translate a host uniform location (plus array element index) into the
// densely-packed app-visible location. Also learns the host's per-element
// location stride from the first array element seen. Returns -1 when the
// host location does not map to any known uniform.
GLint ProgramData::locationWARHostToApp(GLint hostLoc, GLint arrIndex)
{
    if (!m_locShiftWAR) return hostLoc;

    GLuint index = getIndexForLocation(hostLoc);
    if (index<m_numIndexes) {
        if (arrIndex > 0) {
            // Infer how many host locations one array element occupies.
            m_Indexes[index].hostLocsPerElement =
                (hostLoc - m_Indexes[index].base) / arrIndex;
        }
        return m_Indexes[index].appBase + arrIndex;
    }
    return -1;
}

// Translate an app-visible uniform location back to the host location,
// using the recorded per-element stride. Returns -1 when the app location
// does not fall inside any uniform's [appBase, appBase+size) range.
GLint ProgramData::locationWARAppToHost(GLint appLoc)
{
    if (!m_locShiftWAR) return appLoc;

    for(GLuint i=0; i<m_numIndexes; i++) {
        GLint elemIndex = appLoc - m_Indexes[i].appBase;
        if (elemIndex >= 0 && elemIndex < m_Indexes[i].size) {
            return m_Indexes[i].base +
                   elemIndex * m_Indexes[i].hostLocsPerElement;
        }
    }
    return -1;
}
|
||||
|
||||
// Find the next sampler uniform after table entry `index` (pass -1 to start
// from the beginning). On success returns the entry's index and optionally
// reports its bound texture-unit value (*val) and texture target (*target:
// external OES vs plain 2D). Returns -1 when there are no more samplers.
GLint ProgramData::getNextSamplerUniform(GLint index, GLint* val, GLenum* target)
{
    for (GLint i = index + 1; i >= 0 && i < (GLint)m_numIndexes; i++) {
        if (m_Indexes[i].type == GL_SAMPLER_2D) {
            if (val) *val = m_Indexes[i].samplerValue;
            if (target) {
                if (m_Indexes[i].flags & INDEX_FLAG_SAMPLER_EXTERNAL) {
                    *target = GL_TEXTURE_EXTERNAL_OES;
                } else {
                    *target = GL_TEXTURE_2D;
                }
            }
            return i;
        }
    }
    return -1;
}
|
||||
|
||||
// Record the texture-unit value bound to the sampler uniform at app
// location `appLoc`, optionally reporting its texture target through
// *target. Returns false when appLoc does not name a sampler uniform.
bool ProgramData::setSamplerUniform(GLint appLoc, GLint val, GLenum* target)
{
    for (GLuint i = 0; i < m_numIndexes; i++) {
        GLint elemIndex = appLoc - m_Indexes[i].appBase;
        if (elemIndex >= 0 && elemIndex < m_Indexes[i].size) {
            // Compare against the uniform type GL_SAMPLER_2D (what
            // glGetActiveUniform reports and what getNextSamplerUniform()
            // above tests), not the texture target GL_TEXTURE_2D the
            // original checked — that value never appears in `type`, so
            // sampler values were never recorded.
            if (m_Indexes[i].type == GL_SAMPLER_2D) {
                m_Indexes[i].samplerValue = val;
                if (target) {
                    if (m_Indexes[i].flags & INDEX_FLAG_SAMPLER_EXTERNAL) {
                        *target = GL_TEXTURE_EXTERNAL_OES;
                    } else {
                        *target = GL_TEXTURE_2D;
                    }
                }
                return true;
            }
        }
    }
    return false;
}
|
||||
|
||||
// Append `shader` to this program's attached-shader list.
// Returns false (leaving the list untouched) when it is already attached.
bool ProgramData::attachShader(GLuint shader)
{
    const size_t count = m_shaders.size();
    for (size_t idx = 0; idx < count; idx++) {
        if (m_shaders[idx] == shader) {
            return false;
        }
    }
    // AKA m_shaders.push_back(), but that has an ambiguous call to insertAt()
    // due to the default parameters. This is the desired insertAt() overload.
    m_shaders.insertAt(shader, m_shaders.size(), 1);
    return true;
}

// Remove `shader` from the attached-shader list.
// Returns true when it was present.
bool ProgramData::detachShader(GLuint shader)
{
    const size_t count = m_shaders.size();
    for (size_t idx = 0; idx < count; idx++) {
        if (m_shaders[idx] == shader) {
            m_shaders.removeAt(idx);
            return true;
        }
    }
    return false;
}
|
||||
|
||||
/***** GLSharedGroup ****/
|
||||
|
||||
/***** GLSharedGroup ****/

// All three maps are constructed with a NULL default value, so valueFor()
// on an unknown key returns NULL — the lookup helpers below rely on this.
GLSharedGroup::GLSharedGroup() :
    m_buffers(android::DefaultKeyedVector<GLuint, BufferData*>(NULL)),
    m_programs(android::DefaultKeyedVector<GLuint, ProgramData*>(NULL)),
    m_shaders(android::DefaultKeyedVector<GLuint, ShaderData*>(NULL))
{
}
|
||||
|
||||
// Free every buffer/program/shader record owned by the share group.
// clearObjectMap() both deletes the values and empties the vector. The
// original additionally called m_buffers.clear() and m_programs.clear()
// FIRST, which emptied those maps before their values could be deleted and
// therefore leaked every BufferData and ProgramData.
GLSharedGroup::~GLSharedGroup()
{
    clearObjectMap(m_buffers);
    clearObjectMap(m_programs);
    clearObjectMap(m_shaders);
}
|
||||
|
||||
bool GLSharedGroup::isObject(GLuint obj)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
return ((m_shaders.valueFor(obj)!=NULL) || (m_programs.valueFor(obj)!=NULL));
|
||||
}
|
||||
|
||||
BufferData * GLSharedGroup::getBufferData(GLuint bufferId)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
return m_buffers.valueFor(bufferId);
|
||||
}
|
||||
|
||||
void GLSharedGroup::addBufferData(GLuint bufferId, GLsizeiptr size, void * data)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
m_buffers.add(bufferId, new BufferData(size, data));
|
||||
}
|
||||
|
||||
// Replace (or create) the cached contents of `bufferId` with a new copy of
// `data`, freeing the previous copy first.
void GLSharedGroup::updateBufferData(GLuint bufferId, GLsizeiptr size, void * data)
{
    android::AutoMutex _lock(m_lock);
    ssize_t idx = m_buffers.indexOfKey(bufferId);
    if (idx >= 0) {
        delete m_buffers.valueAt(idx);
        m_buffers.editValueAt(idx) = new BufferData(size, data);
    } else {
        m_buffers.add(bufferId, new BufferData(size, data));
    }
}

// Patch the byte range [offset, offset+size) of the cached copy of
// `bufferId`. Returns GL_INVALID_VALUE when the buffer is unknown or the
// range does not fit inside the cached store; GL_NO_ERROR otherwise.
GLenum GLSharedGroup::subUpdateBufferData(GLuint bufferId, GLintptr offset, GLsizeiptr size, void * data)
{
    android::AutoMutex _lock(m_lock);
    BufferData * buf = m_buffers.valueFor(bufferId);
    if ((!buf) || (buf->m_size < offset+size) || (offset < 0) || (size<0)) return GL_INVALID_VALUE;

    //it's safe to update now
    memcpy((char*)buf->m_fixedBuffer.ptr() + offset, data, size);
    return GL_NO_ERROR;
}
|
||||
|
||||
void GLSharedGroup::deleteBufferData(GLuint bufferId)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ssize_t idx = m_buffers.indexOfKey(bufferId);
|
||||
if (idx >= 0) {
|
||||
delete m_buffers.valueAt(idx);
|
||||
m_buffers.removeItemsAt(idx);
|
||||
}
|
||||
}
|
||||
|
||||
void GLSharedGroup::addProgramData(GLuint program)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData *pData = m_programs.valueFor(program);
|
||||
if (pData)
|
||||
{
|
||||
m_programs.removeItem(program);
|
||||
delete pData;
|
||||
}
|
||||
|
||||
m_programs.add(program,new ProgramData());
|
||||
}
|
||||
|
||||
void GLSharedGroup::initProgramData(GLuint program, GLuint numIndexes)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData *pData = m_programs.valueFor(program);
|
||||
if (pData)
|
||||
{
|
||||
pData->initProgramData(numIndexes);
|
||||
}
|
||||
}
|
||||
|
||||
bool GLSharedGroup::isProgramInitialized(GLuint program)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData* pData = m_programs.valueFor(program);
|
||||
if (pData)
|
||||
{
|
||||
return pData->isInitialized();
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void GLSharedGroup::deleteProgramData(GLuint program)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData *pData = m_programs.valueFor(program);
|
||||
if (pData)
|
||||
delete pData;
|
||||
m_programs.removeItem(program);
|
||||
}
|
||||
|
||||
void GLSharedGroup::attachShader(GLuint program, GLuint shader)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData* programData = m_programs.valueFor(program);
|
||||
ssize_t idx = m_shaders.indexOfKey(shader);
|
||||
if (programData && idx >= 0) {
|
||||
if (programData->attachShader(shader)) {
|
||||
refShaderDataLocked(idx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void GLSharedGroup::detachShader(GLuint program, GLuint shader)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData* programData = m_programs.valueFor(program);
|
||||
ssize_t idx = m_shaders.indexOfKey(shader);
|
||||
if (programData && idx >= 0) {
|
||||
if (programData->detachShader(shader)) {
|
||||
unrefShaderDataLocked(idx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Record uniform `index` of `program` (base host location, size, type). For
// sampler uniforms, scan the program's attached shaders: if any shader
// declared a samplerExternalOES with this uniform's `name`, tag the entry
// INDEX_FLAG_SAMPLER_EXTERNAL so later queries report the external target.
void GLSharedGroup::setProgramIndexInfo(GLuint program, GLuint index, GLint base, GLint size, GLenum type, const char* name)
{
    android::AutoMutex _lock(m_lock);
    ProgramData* pData = m_programs.valueFor(program);
    if (pData)
    {
        pData->setIndexInfo(index,base,size,type);

        if (type == GL_SAMPLER_2D) {
            size_t n = pData->getNumShaders();
            for (size_t i = 0; i < n; i++) {
                GLuint shaderId = pData->getShader(i);
                ShaderData* shader = m_shaders.valueFor(shaderId);
                if (!shader) continue;
                ShaderData::StringList::iterator nameIter = shader->samplerExternalNames.begin();
                ShaderData::StringList::iterator nameEnd = shader->samplerExternalNames.end();
                while (nameIter != nameEnd) {
                    if (*nameIter == name) {
                        pData->setIndexFlags(index, ProgramData::INDEX_FLAG_SAMPLER_EXTERNAL);
                        break;
                    }
                    ++nameIter;
                }
            }
        }
    }
}
|
||||
|
||||
GLenum GLSharedGroup::getProgramUniformType(GLuint program, GLint location)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData* pData = m_programs.valueFor(program);
|
||||
GLenum type=0;
|
||||
if (pData)
|
||||
{
|
||||
type = pData->getTypeForLocation(location);
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
||||
bool GLSharedGroup::isProgram(GLuint program)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData* pData = m_programs.valueFor(program);
|
||||
return (pData!=NULL);
|
||||
}
|
||||
|
||||
void GLSharedGroup::setupLocationShiftWAR(GLuint program)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData* pData = m_programs.valueFor(program);
|
||||
if (pData) pData->setupLocationShiftWAR();
|
||||
}
|
||||
|
||||
GLint GLSharedGroup::locationWARHostToApp(GLuint program, GLint hostLoc, GLint arrIndex)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData* pData = m_programs.valueFor(program);
|
||||
if (pData) return pData->locationWARHostToApp(hostLoc, arrIndex);
|
||||
else return hostLoc;
|
||||
}
|
||||
|
||||
GLint GLSharedGroup::locationWARAppToHost(GLuint program, GLint appLoc)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData* pData = m_programs.valueFor(program);
|
||||
if (pData) return pData->locationWARAppToHost(appLoc);
|
||||
else return appLoc;
|
||||
}
|
||||
|
||||
bool GLSharedGroup::needUniformLocationWAR(GLuint program)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData* pData = m_programs.valueFor(program);
|
||||
if (pData) return pData->needUniformLocationWAR();
|
||||
return false;
|
||||
}
|
||||
|
||||
GLint GLSharedGroup::getNextSamplerUniform(GLuint program, GLint index, GLint* val, GLenum* target) const
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData* pData = m_programs.valueFor(program);
|
||||
return pData ? pData->getNextSamplerUniform(index, val, target) : -1;
|
||||
}
|
||||
|
||||
bool GLSharedGroup::setSamplerUniform(GLuint program, GLint appLoc, GLint val, GLenum* target)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ProgramData* pData = m_programs.valueFor(program);
|
||||
return pData ? pData->setSamplerUniform(appLoc, val, target) : false;
|
||||
}
|
||||
|
||||
// Create and register a ShaderData record for `shader` with refcount 1.
// Returns false on allocation or insertion failure.
bool GLSharedGroup::addShaderData(GLuint shader)
{
    android::AutoMutex _lock(m_lock);
    ShaderData* data = new ShaderData;
    if (data) {
        if (m_shaders.add(shader, data) < 0) {
            delete data;
            data = NULL;
        } else {
            // Only touch the record when insertion succeeded. The original
            // set data->refcount unconditionally AFTER nulling `data` on the
            // failure path — a NULL-pointer dereference.
            data->refcount = 1;
        }
    }
    return data != NULL;
}
|
||||
|
||||
ShaderData* GLSharedGroup::getShaderData(GLuint shader)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
return m_shaders.valueFor(shader);
|
||||
}
|
||||
|
||||
void GLSharedGroup::unrefShaderData(GLuint shader)
|
||||
{
|
||||
android::AutoMutex _lock(m_lock);
|
||||
ssize_t idx = m_shaders.indexOfKey(shader);
|
||||
if (idx >= 0) {
|
||||
unrefShaderDataLocked(idx);
|
||||
}
|
||||
}
|
||||
|
||||
void GLSharedGroup::refShaderDataLocked(ssize_t shaderIdx)
|
||||
{
|
||||
assert(shaderIdx >= 0 && shaderIdx <= m_shaders.size());
|
||||
ShaderData* data = m_shaders.valueAt(shaderIdx);
|
||||
data->refcount++;
|
||||
}
|
||||
|
||||
void GLSharedGroup::unrefShaderDataLocked(ssize_t shaderIdx)
|
||||
{
|
||||
assert(shaderIdx >= 0 && shaderIdx <= m_shaders.size());
|
||||
ShaderData* data = m_shaders.valueAt(shaderIdx);
|
||||
if (--data->refcount == 0) {
|
||||
delete data;
|
||||
m_shaders.removeItemsAt(shaderIdx);
|
||||
}
|
||||
}
|
||||
144
android/opengl/shared/OpenglCodecCommon/GLSharedGroup.h
Executable file
144
android/opengl/shared/OpenglCodecCommon/GLSharedGroup.h
Executable file
|
|
@ -0,0 +1,144 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef _GL_SHARED_GROUP_H_
|
||||
#define _GL_SHARED_GROUP_H_
|
||||
|
||||
#define GL_API
|
||||
#ifndef ANDROID
|
||||
#define GL_APIENTRY
|
||||
#define GL_APIENTRYP
|
||||
#endif
|
||||
|
||||
#include <GLES/gl.h>
|
||||
#include <GLES/glext.h>
|
||||
#include <GLES2/gl2.h>
|
||||
#include <GLES2/gl2ext.h>
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include "ErrorLog.h"
|
||||
#include <utils/KeyedVector.h>
|
||||
#include <utils/List.h>
|
||||
#include <utils/String8.h>
|
||||
#include <utils/threads.h>
|
||||
#include "FixedBuffer.h"
|
||||
#include "SmartPtr.h"
|
||||
|
||||
// Guest-side cache of a GL buffer object's contents (used to service
// glGetBufferSubData-style reads locally and to back index arrays).
struct BufferData {
    BufferData();
    // Allocates `size` bytes and copies from `data` when non-NULL.
    BufferData(GLsizeiptr size, void * data);
    GLsizeiptr m_size;          // byte size of the cached store
    FixedBuffer m_fixedBuffer;  // owns the backing memory
};
|
||||
|
||||
// Per-program uniform bookkeeping: host/app uniform-location translation
// (the "location shift WAR") and sampler-binding tracking for a linked
// GLES2 program. See GLSharedGroup.cpp for the semantics of each method.
class ProgramData {
private:
    // One record per active uniform, as reported by the host after link.
    typedef struct _IndexInfo {
        GLint base;                 // host-side base location
        GLint size;                 // array size (1 for non-arrays)
        GLenum type;                // GL uniform type, e.g. GL_SAMPLER_2D
        GLint appBase;              // densely packed app-visible base location
        GLint hostLocsPerElement;   // host location stride between array elements
        GLuint flags;               // INDEX_FLAG_* bits
        GLint samplerValue; // only set for sampler uniforms
    } IndexInfo;

    GLuint m_numIndexes;    // entries in m_Indexes
    IndexInfo* m_Indexes;   // owned; allocated by initProgramData()
    bool m_initialized;     // initProgramData() has run
    bool m_locShiftWAR;     // host<->app location translation is active

    android::Vector<GLuint> m_shaders;  // currently attached shader names

public:
    enum {
        // Uniform is a samplerExternalOES (reports GL_TEXTURE_EXTERNAL_OES).
        INDEX_FLAG_SAMPLER_EXTERNAL = 0x00000001,
    };

    ProgramData();
    // Allocate the uniform table; call after a successful link.
    void initProgramData(GLuint numIndexes);
    bool isInitialized();
    virtual ~ProgramData();
    void setIndexInfo(GLuint index, GLint base, GLint size, GLenum type);
    void setIndexFlags(GLuint index, GLuint flags);
    // Index of the uniform containing `location`; m_numIndexes when none.
    GLuint getIndexForLocation(GLint location);
    GLenum getTypeForLocation(GLint location);

    bool needUniformLocationWAR() const { return m_locShiftWAR; }
    void setupLocationShiftWAR();
    GLint locationWARHostToApp(GLint hostLoc, GLint arrIndex);
    GLint locationWARAppToHost(GLint appLoc);

    // Sampler iteration/recording; -1 / false when not a sampler uniform.
    GLint getNextSamplerUniform(GLint index, GLint* val, GLenum* target);
    bool setSamplerUniform(GLint appLoc, GLint val, GLenum* target);

    // Attached-shader list; attach/detach return false on duplicates/misses.
    bool attachShader(GLuint shader);
    bool detachShader(GLuint shader);
    size_t getNumShaders() const { return m_shaders.size(); }
    GLuint getShader(size_t i) const { return m_shaders[i]; }
};
|
||||
|
||||
// Per-shader record, reference-counted by GLSharedGroup: one ref for the
// shader object itself plus one per program it is attached to.
struct ShaderData {
    typedef android::List<android::String8> StringList;
    // Names of samplerExternalOES uniforms declared by this shader's source.
    StringList samplerExternalNames;
    int refcount;
};
|
||||
|
||||
// Shared state for a GL share group: buffer-content cache plus program and
// shader records. All public methods lock m_lock, so the object may be used
// from multiple guest threads sharing one context group.
class GLSharedGroup {
private:
    android::DefaultKeyedVector<GLuint, BufferData*> m_buffers;    // owned values
    android::DefaultKeyedVector<GLuint, ProgramData*> m_programs;  // owned values
    android::DefaultKeyedVector<GLuint, ShaderData*> m_shaders;    // refcounted values
    mutable android::Mutex m_lock;  // guards all three maps

    // Refcount helpers; caller must already hold m_lock.
    void refShaderDataLocked(ssize_t shaderIdx);
    void unrefShaderDataLocked(ssize_t shaderIdx);

public:
    GLSharedGroup();
    ~GLSharedGroup();
    // True when `obj` is a known shader or program name.
    bool isObject(GLuint obj);
    BufferData * getBufferData(GLuint bufferId);
    void addBufferData(GLuint bufferId, GLsizeiptr size, void * data);
    void updateBufferData(GLuint bufferId, GLsizeiptr size, void * data);
    // Returns GL_INVALID_VALUE when the range does not fit the cached store.
    GLenum subUpdateBufferData(GLuint bufferId, GLintptr offset, GLsizeiptr size, void * data);
    void deleteBufferData(GLuint);

    bool isProgram(GLuint program);
    bool isProgramInitialized(GLuint program);
    void addProgramData(GLuint program);
    void initProgramData(GLuint program, GLuint numIndexes);
    void attachShader(GLuint program, GLuint shader);
    void detachShader(GLuint program, GLuint shader);
    void deleteProgramData(GLuint program);
    void setProgramIndexInfo(GLuint program, GLuint index, GLint base, GLint size, GLenum type, const char* name);
    GLenum getProgramUniformType(GLuint program, GLint location);
    // Uniform-location translation workaround (see ProgramData).
    void setupLocationShiftWAR(GLuint program);
    GLint locationWARHostToApp(GLuint program, GLint hostLoc, GLint arrIndex);
    GLint locationWARAppToHost(GLuint program, GLint appLoc);
    bool needUniformLocationWAR(GLuint program);
    GLint getNextSamplerUniform(GLuint program, GLint index, GLint* val, GLenum* target) const;
    bool setSamplerUniform(GLuint program, GLint appLoc, GLint val, GLenum* target);

    bool addShaderData(GLuint shader);
    // caller must hold a reference to the shader as long as it holds the pointer
    ShaderData* getShaderData(GLuint shader);
    void unrefShaderData(GLuint shader);
};
|
||||
|
||||
typedef SmartPtr<GLSharedGroup> GLSharedGroupPtr;
|
||||
|
||||
#endif //_GL_SHARED_GROUP_H_
|
||||
13
android/opengl/shared/OpenglCodecCommon/Makefile
Normal file
13
android/opengl/shared/OpenglCodecCommon/Makefile
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
|
||||
# Build of the OpenGL codec common static library — presumably the host-side
# (non-Android) build, since it pulls rules from $(ROOT)/make rather than the
# Android build system; confirm against the surrounding tree.

ROOT=../..

include $(ROOT)/make/commondefs

# Sources compiled into the library; include paths for GLES headers.
CXXFILES = TcpStream.cpp GLClientState.cpp glUtils.cpp
CXXINCS += -I$(ROOT)/libs/GLESv1 -I$(ROOT)/include

LIBRARY_NAME = libcodecCommon.a

include $(COMMONRULES)
|
||||
|
||||
|
||||
167
android/opengl/shared/OpenglCodecCommon/SmartPtr.h
Normal file
167
android/opengl/shared/OpenglCodecCommon/SmartPtr.h
Normal file
|
|
@ -0,0 +1,167 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef __SMART_PTR_H
|
||||
#define __SMART_PTR_H
|
||||
|
||||
#include <cutils/threads.h>
|
||||
#include <cutils/atomic.h>
|
||||
|
||||
// Intrusively-counted smart pointer. The reference count is heap-allocated
// and shared between copies; android_atomic_inc/dec make count updates
// atomic. When threadSafe is true, a per-instance mutex additionally guards
// the (m_ptr, m_pRefCount) pair during copy/assign.
// NOTE(review): the copy ctor and operator= taking SmartPtr<T,false> read
// the source's fields without locking it — safe only if the source is not
// mutated concurrently; confirm usage.
template <class T, bool threadSafe = false>
class SmartPtr
{
public:
    // Takes ownership of `ptr` (deleted when the last reference drops).
    explicit SmartPtr(T* ptr = (T*)NULL) {
        if (threadSafe) {
            m_lock = new mutex_t;
            mutex_init(m_lock);
        }
        else m_lock = NULL;

        m_ptr = ptr;
        if (ptr)
            m_pRefCount = new int32_t(1);
        else
            m_pRefCount = NULL;
    }

    // Copy from a non-thread-safe pointer: share the count and bump it.
    SmartPtr<T,threadSafe>(const SmartPtr<T,false>& rhs) {
        if (threadSafe) {
            m_lock = new mutex_t;
            mutex_init(m_lock);
        }
        else m_lock = NULL;

        m_pRefCount = rhs.m_pRefCount;
        m_ptr = rhs.m_ptr;
        use();
    }

    // Copy from a thread-safe pointer: snapshot its state under its lock.
    SmartPtr<T,threadSafe>(SmartPtr<T,true>& rhs) {
        if (threadSafe) {
            m_lock = new mutex_t;
            mutex_init(m_lock);
        }
        else m_lock = NULL;

        if (rhs.m_lock) mutex_lock(rhs.m_lock);
        m_pRefCount = rhs.m_pRefCount;
        m_ptr = rhs.m_ptr;
        use();
        if (rhs.m_lock) mutex_unlock(rhs.m_lock);
    }

    // Drops one reference (deleting the pointee at zero), then tears down
    // the instance mutex if one was created.
    ~SmartPtr() {
        if (m_lock) mutex_lock(m_lock);
        release();
        if (m_lock)
        {
            mutex_unlock(m_lock);
            mutex_destroy(m_lock);
            delete m_lock;
        }
    }

    // Raw pointer access; does not affect the reference count.
    T* Ptr() const {
        return m_ptr;
    }

    const T* constPtr() const
    {
        return m_ptr;
    }

    T* operator->() const {
        return m_ptr;
    }

    T& operator*() const {
        return *m_ptr;
    }

    // Implicit conversion so the pointer can be tested for NULL-ness.
    operator void*() const {
        return (void *)m_ptr;
    }

    // This gives STL lists something to compare.
    bool operator <(const SmartPtr<T>& t1) const {
        return m_ptr < t1.m_ptr;
    }

    // Assign from a non-thread-safe pointer: release ours, share theirs.
    SmartPtr<T,threadSafe>& operator=(const SmartPtr<T,false>& rhs)
    {
        if (m_ptr == rhs.m_ptr)
            return *this;

        if (m_lock) mutex_lock(m_lock);
        release();
        m_pRefCount = rhs.m_pRefCount;
        m_ptr = rhs.m_ptr;
        use();
        if (m_lock) mutex_unlock(m_lock);

        return *this;
    }

    // Assign from a thread-safe pointer: lock ours, then theirs.
    // NOTE(review): locking two instance mutexes in this fixed order can
    // deadlock if two threads assign a<->b concurrently — confirm callers
    // never do that.
    SmartPtr<T,threadSafe>& operator=(SmartPtr<T,true>& rhs)
    {
        if (m_ptr == rhs.m_ptr)
            return *this;

        if (m_lock) mutex_lock(m_lock);
        release();
        if (rhs.m_lock) mutex_lock(rhs.m_lock);
        m_pRefCount = rhs.m_pRefCount;
        m_ptr = rhs.m_ptr;
        use();
        if (rhs.m_lock) mutex_unlock(rhs.m_lock);
        if (m_lock) mutex_unlock(m_lock);

        return *this;
    }

private:
    int32_t *m_pRefCount;   // shared count; NULL when m_ptr is NULL
    mutex_t *m_lock;        // per-instance lock; NULL unless threadSafe
    T* m_ptr;               // owned pointee

    // Increment the reference count on this pointer by 1.
    int use() {
        if (!m_pRefCount) return 0;
        // android_atomic_inc returns the PREVIOUS value, hence the +1.
        return android_atomic_inc(m_pRefCount) + 1;
    }

    // Decrement the reference count on the pointer by 1.
    // If the reference count goes to (or below) 0, the pointer is deleted.
    int release() {
        if (!m_pRefCount) return 0;

        // android_atomic_dec returns the PREVIOUS value: >1 means other
        // references remain; 1 means this was the last one.
        int iVal = android_atomic_dec(m_pRefCount);
        if (iVal > 1)
            return iVal - 1;

        delete m_pRefCount;
        m_pRefCount = NULL;

        if (m_ptr) {
            delete m_ptr;
            m_ptr = NULL;
        }
        return 0;
    }

};
|
||||
|
||||
#endif // of __SMART_PTR_H
|
||||
168
android/opengl/shared/OpenglCodecCommon/SocketStream.cpp
Normal file
168
android/opengl/shared/OpenglCodecCommon/SocketStream.cpp
Normal file
|
|
@ -0,0 +1,168 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#include "SocketStream.h"
|
||||
#include <cutils/sockets.h>
|
||||
#include <errno.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <unistd.h>
|
||||
#include <string.h>
|
||||
|
||||
#ifndef _WIN32
|
||||
#include <netinet/in.h>
|
||||
#include <netinet/tcp.h>
|
||||
#include <sys/un.h>
|
||||
#else
|
||||
#include <ws2tcpip.h>
|
||||
#endif
|
||||
|
||||
// Construct an unconnected stream. The descriptor is attached later by a
// subclass via listen()/connect(); the staging buffer is allocated lazily.
SocketStream::SocketStream(size_t bufSize)
    : IOStream(bufSize), m_sock(-1), m_bufsize(bufSize), m_buf(NULL) {}
|
||||
|
||||
// Wrap an already-open socket descriptor; ownership transfers to this
// stream (closed in the destructor).
SocketStream::SocketStream(int sock, size_t bufSize)
    : IOStream(bufSize), m_sock(sock), m_bufsize(bufSize), m_buf(NULL) {}
|
||||
|
||||
// Close the owned socket (platform-specific call) and release the staging
// buffer.
SocketStream::~SocketStream()
{
    if (valid()) {
#ifdef _WIN32
        closesocket(m_sock);
#else
        ::close(m_sock);
#endif
    }
    free(m_buf);   // free(NULL) is a no-op
    m_buf = NULL;
}
|
||||
|
||||
|
||||
void *SocketStream::allocBuffer(size_t minSize)
|
||||
{
|
||||
size_t allocSize = (m_bufsize < minSize ? minSize : m_bufsize);
|
||||
if (!m_buf) {
|
||||
m_buf = (unsigned char *)malloc(allocSize);
|
||||
}
|
||||
else if (m_bufsize < allocSize) {
|
||||
unsigned char *p = (unsigned char *)realloc(m_buf, allocSize);
|
||||
if (p != NULL) {
|
||||
m_buf = p;
|
||||
m_bufsize = allocSize;
|
||||
} else {
|
||||
ERR("%s: realloc (%zu) failed\n", __FUNCTION__, allocSize);
|
||||
free(m_buf);
|
||||
m_buf = NULL;
|
||||
m_bufsize = 0;
|
||||
}
|
||||
}
|
||||
|
||||
return m_buf;
|
||||
};
|
||||
|
||||
// Flush |size| bytes of the staging buffer to the socket.
// Returns 0 on success, negative on error (see writeFully()).
int SocketStream::commitBuffer(size_t size) {
    return writeFully(m_buf, size);
}
|
||||
|
||||
int SocketStream::writeFully(const void* buffer, size_t size)
|
||||
{
|
||||
if (!valid()) return -1;
|
||||
|
||||
size_t res = size;
|
||||
int retval = 0;
|
||||
|
||||
while (res > 0) {
|
||||
ssize_t stat = ::send(m_sock, (const char *)buffer + (size - res), res, 0);
|
||||
if (stat < 0) {
|
||||
if (errno != EINTR) {
|
||||
retval = stat;
|
||||
ERR("%s: failed: %s\n", __FUNCTION__, strerror(errno));
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
res -= stat;
|
||||
}
|
||||
}
|
||||
return retval;
|
||||
}
|
||||
|
||||
const unsigned char *SocketStream::readFully(void *buf, size_t len)
|
||||
{
|
||||
const unsigned char* ret = NULL;
|
||||
if (!valid()) return NULL;
|
||||
if (!buf) {
|
||||
return NULL; // do not allow NULL buf in that implementation
|
||||
}
|
||||
size_t res = len;
|
||||
while (res > 0) {
|
||||
ssize_t stat = ::recv(m_sock, (char *)(buf) + len - res, res, 0);
|
||||
if (stat > 0) {
|
||||
res -= stat;
|
||||
continue;
|
||||
}
|
||||
if (stat == 0 || errno != EINTR) { // client shutdown or error
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
return (const unsigned char *)buf;
|
||||
}
|
||||
|
||||
const unsigned char *SocketStream::read( void *buf, size_t *inout_len)
|
||||
{
|
||||
if (!valid()) return NULL;
|
||||
if (!buf) {
|
||||
return NULL; // do not allow NULL buf in that implementation
|
||||
}
|
||||
|
||||
int n;
|
||||
do {
|
||||
n = recv(buf, *inout_len);
|
||||
} while( n < 0 && errno == EINTR );
|
||||
|
||||
if (n > 0) {
|
||||
*inout_len = n;
|
||||
return (const unsigned char *)buf;
|
||||
}
|
||||
|
||||
return NULL;
|
||||
}
|
||||
|
||||
int SocketStream::recv(void *buf, size_t len)
|
||||
{
|
||||
if (!valid()) return int(ERR_INVALID_SOCKET);
|
||||
int res = 0;
|
||||
while(true) {
|
||||
res = ::recv(m_sock, (char *)buf, len, 0);
|
||||
if (res < 0) {
|
||||
if (errno == EINTR) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
50
android/opengl/shared/OpenglCodecCommon/SocketStream.h
Normal file
50
android/opengl/shared/OpenglCodecCommon/SocketStream.h
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef __SOCKET_STREAM_H
|
||||
#define __SOCKET_STREAM_H
|
||||
|
||||
#include <stdlib.h>
|
||||
#include "IOStream.h"
|
||||
|
||||
// Abstract socket-backed IOStream. Owns a socket descriptor and a lazily
// allocated staging buffer used by the allocBuffer()/commitBuffer() protocol.
// Concrete transports (e.g. TcpStream) implement listen/accept/connect.
class SocketStream : public IOStream {
public:
    // Sentinel returned by recv()/listen() when no valid descriptor is attached.
    typedef enum { ERR_INVALID_SOCKET = -1000 } SocketStreamError;

    explicit SocketStream(size_t bufsize = 10000);
    virtual ~SocketStream();

    // Transport setup, implemented by subclasses.
    virtual int listen(unsigned short port) = 0;
    virtual SocketStream *accept() = 0;
    virtual int connect(unsigned short port) = 0;

    // IOStream buffer protocol: stage data in an internal buffer, then flush.
    virtual void *allocBuffer(size_t minSize);
    virtual int commitBuffer(size_t size);
    virtual const unsigned char *readFully(void *buf, size_t len);
    virtual const unsigned char *read(void *buf, size_t *inout_len);

    // True when a socket descriptor is attached.
    bool valid() { return m_sock >= 0; }
    virtual int recv(void *buf, size_t len);
    virtual int writeFully(const void *buf, size_t len);

protected:
    int m_sock;            // socket descriptor; -1 when unset
    size_t m_bufsize;      // capacity of m_buf
    unsigned char *m_buf;  // staging buffer; NULL until first allocBuffer()

    // Adopt an already-open descriptor (used by accept() in subclasses).
    SocketStream(int sock, size_t bufSize);
};
|
||||
|
||||
#endif /* __SOCKET_STREAM_H */
|
||||
91
android/opengl/shared/OpenglCodecCommon/TcpStream.cpp
Normal file
91
android/opengl/shared/OpenglCodecCommon/TcpStream.cpp
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#include "TcpStream.h"
|
||||
#include <cutils/sockets.h>
|
||||
#include <errno.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <unistd.h>
|
||||
#include <string.h>
|
||||
|
||||
#ifndef _WIN32
|
||||
#include <netinet/in.h>
|
||||
#include <netinet/tcp.h>
|
||||
#else
|
||||
#include <ws2tcpip.h>
|
||||
#endif
|
||||
|
||||
// Construct an unconnected TCP stream; call listen() or connect() next.
TcpStream::TcpStream(size_t bufSize) : SocketStream(bufSize) {}
||||
|
||||
// Wrap an already-connected TCP socket (used by accept()).
TcpStream::TcpStream(int sock, size_t bufSize) :
    SocketStream(sock, bufSize)
{
    // Disable Nagle's algorithm to improve bandwidth of small packets,
    // which are quite common in our implementation. Best-effort: a
    // setsockopt failure is deliberately ignored.
#ifdef _WIN32
    DWORD flag = 1;
#else
    int flag = 1;
#endif
    setsockopt( sock, IPPROTO_TCP, TCP_NODELAY, (const char*)&flag, sizeof(flag) );
}
|
||||
|
||||
int TcpStream::listen(unsigned short port)
|
||||
{
|
||||
m_sock = socket_loopback_server(port, SOCK_STREAM);
|
||||
if (!valid()) return int(ERR_INVALID_SOCKET);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Accept one incoming connection (retrying on EINTR) and wrap it in a new
// TcpStream sharing this stream's buffer size. Returns NULL on failure;
// the caller owns the returned stream.
SocketStream * TcpStream::accept()
{
    int clientSock;
    struct sockaddr_in addr;
    socklen_t len;

    do {
        len = sizeof(addr);
        clientSock = ::accept(m_sock, (sockaddr *)&addr, &len);
    } while (clientSock < 0 && errno == EINTR);

    if (clientSock < 0) {
        return NULL;
    }
    return new TcpStream(clientSock, m_bufsize);
}
|
||||
|
||||
int TcpStream::connect(unsigned short port)
|
||||
{
|
||||
return connect("127.0.0.1",port);
|
||||
}
|
||||
|
||||
int TcpStream::connect(const char* hostname, unsigned short port)
|
||||
{
|
||||
m_sock = socket_network_client(hostname, port, SOCK_STREAM);
|
||||
if (!valid()) return -1;
|
||||
return 0;
|
||||
}
|
||||
32
android/opengl/shared/OpenglCodecCommon/TcpStream.h
Normal file
32
android/opengl/shared/OpenglCodecCommon/TcpStream.h
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef __TCP_STREAM_H
|
||||
#define __TCP_STREAM_H
|
||||
|
||||
#include "SocketStream.h"
|
||||
|
||||
// TCP implementation of SocketStream (loopback server, loopback or remote
// client connections).
class TcpStream : public SocketStream {
public:
    explicit TcpStream(size_t bufsize = 10000);
    // Bind a loopback server socket on |port|.
    virtual int listen(unsigned short port);
    // Accept one connection; returns a caller-owned TcpStream or NULL.
    virtual SocketStream *accept();
    // Connect to 127.0.0.1:|port|.
    virtual int connect(unsigned short port);
    // Connect to an arbitrary host.
    int connect(const char* hostname, unsigned short port);
private:
    // Wrap an already-connected descriptor (used by accept()); also
    // disables Nagle's algorithm on it.
    TcpStream(int sock, size_t bufSize);
};
|
||||
|
||||
#endif
|
||||
23
android/opengl/shared/OpenglCodecCommon/codec_defs.h
Normal file
23
android/opengl/shared/OpenglCodecCommon/codec_defs.h
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef _CODEC_DEFS_H
#define _CODEC_DEFS_H

// TCP port on which the host-side GL codec server listens.
#define CODEC_SERVER_PORT 22468

// Upper bound on vertex attributes tracked by the encoder/decoder.
#define CODEC_MAX_VERTEX_ATTRIBUTES 64

#endif
|
||||
475
android/opengl/shared/OpenglCodecCommon/glUtils.cpp
Normal file
475
android/opengl/shared/OpenglCodecCommon/glUtils.cpp
Normal file
|
|
@ -0,0 +1,475 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#include "glUtils.h"
|
||||
#include <string.h>
|
||||
#include "ErrorLog.h"
|
||||
#include <IOStream.h>
|
||||
|
||||
// Return the size in bytes of one value of GL data type |type|.
// Unknown types are logged via ERR() and reported as size 0.
size_t glSizeof(GLenum type)
{
    switch (type) {
    case GL_BYTE:
    case GL_UNSIGNED_BYTE:
        return 1;
    case GL_SHORT:
    case GL_UNSIGNED_SHORT:
    case GL_HALF_FLOAT_OES:
        return 2;
    case GL_UNSIGNED_INT:
    case GL_INT:
    case GL_FLOAT:
    case GL_FIXED:
    case GL_BOOL:
        return 4;
#ifdef GL_DOUBLE
    case GL_DOUBLE:
        return 8;
#endif
    case GL_FLOAT_VEC2:
    case GL_INT_VEC2:
    case GL_BOOL_VEC2:
        return 8;
    case GL_INT_VEC3:
    case GL_BOOL_VEC3:
    case GL_FLOAT_VEC3:
        return 12;
    case GL_FLOAT_VEC4:
    case GL_BOOL_VEC4:
    case GL_INT_VEC4:
    case GL_FLOAT_MAT2:
        return 16;
    case GL_FLOAT_MAT3:
        return 36;
    case GL_FLOAT_MAT4:
        return 64;
    case GL_SAMPLER_2D:
    case GL_SAMPLER_CUBE:
        return 4;   // samplers are transferred as 32-bit handles
    default:
        ERR("**** ERROR unknown type 0x%x (%s,%d)\n", type, __FUNCTION__,__LINE__);
        return 0;
    }
}
|
||||
|
||||
// Return the number of values (not bytes) associated with the glGet*/
// glTexParameter/glLight/etc. parameter |param|. Unknown parameters are
// logged and assumed to be scalar (1).
size_t glUtilsParamSize(GLenum param)
{
    size_t s = 0;

    switch(param)
    {
    // --- scalar parameters (1 value) ---
    case GL_DEPTH_TEST:
    case GL_DEPTH_FUNC:
    case GL_DEPTH_BITS:
    case GL_MAX_CLIP_PLANES:
    case GL_GREEN_BITS:
    case GL_MAX_MODELVIEW_STACK_DEPTH:
    case GL_MAX_PROJECTION_STACK_DEPTH:
    case GL_MAX_TEXTURE_STACK_DEPTH:
    case GL_IMPLEMENTATION_COLOR_READ_FORMAT_OES:
    case GL_IMPLEMENTATION_COLOR_READ_TYPE_OES:
    case GL_NUM_COMPRESSED_TEXTURE_FORMATS:
    case GL_MAX_TEXTURE_SIZE:
    case GL_TEXTURE_GEN_MODE_OES:
    case GL_TEXTURE_ENV_MODE:
    case GL_FOG_MODE:
    case GL_FOG_DENSITY:
    case GL_FOG_START:
    case GL_FOG_END:
    case GL_SPOT_EXPONENT:
    case GL_CONSTANT_ATTENUATION:
    case GL_LINEAR_ATTENUATION:
    case GL_QUADRATIC_ATTENUATION:
    case GL_SHININESS:
    case GL_LIGHT_MODEL_TWO_SIDE:
    case GL_POINT_SIZE:
    case GL_POINT_SIZE_MIN:
    case GL_POINT_SIZE_MAX:
    case GL_POINT_FADE_THRESHOLD_SIZE:
    case GL_CULL_FACE:
    case GL_CULL_FACE_MODE:
    case GL_FRONT_FACE:
    case GL_SHADE_MODEL:
    case GL_DEPTH_WRITEMASK:
    case GL_DEPTH_CLEAR_VALUE:
    case GL_STENCIL_FAIL:
    case GL_STENCIL_PASS_DEPTH_FAIL:
    case GL_STENCIL_PASS_DEPTH_PASS:
    case GL_STENCIL_REF:
    case GL_STENCIL_WRITEMASK:
    case GL_MATRIX_MODE:
    case GL_MODELVIEW_STACK_DEPTH:
    case GL_PROJECTION_STACK_DEPTH:
    case GL_TEXTURE_STACK_DEPTH:
    case GL_ALPHA_TEST_FUNC:
    case GL_ALPHA_TEST_REF:
    case GL_ALPHA_TEST:
    case GL_DITHER:
    case GL_BLEND_DST:
    case GL_BLEND_SRC:
    case GL_BLEND:
    case GL_LOGIC_OP_MODE:
    case GL_SCISSOR_TEST:
    case GL_MAX_TEXTURE_UNITS:
    case GL_ACTIVE_TEXTURE:
    case GL_ALPHA_BITS:
    case GL_ARRAY_BUFFER_BINDING:
    case GL_BLUE_BITS:
    case GL_CLIENT_ACTIVE_TEXTURE:
    case GL_CLIP_PLANE0:
    case GL_CLIP_PLANE1:
    case GL_CLIP_PLANE2:
    case GL_CLIP_PLANE3:
    case GL_CLIP_PLANE4:
    case GL_CLIP_PLANE5:
    case GL_COLOR_ARRAY:
    case GL_COLOR_ARRAY_BUFFER_BINDING:
    case GL_COLOR_ARRAY_SIZE:
    case GL_COLOR_ARRAY_STRIDE:
    case GL_COLOR_ARRAY_TYPE:
    case GL_COLOR_LOGIC_OP:
    case GL_COLOR_MATERIAL:
    case GL_PACK_ALIGNMENT:
    case GL_PERSPECTIVE_CORRECTION_HINT:
    case GL_POINT_SIZE_ARRAY_BUFFER_BINDING_OES:
    case GL_POINT_SIZE_ARRAY_STRIDE_OES:
    case GL_POINT_SIZE_ARRAY_TYPE_OES:
    case GL_POINT_SMOOTH:
    case GL_POINT_SMOOTH_HINT:
    case GL_POINT_SPRITE_OES:
    case GL_COORD_REPLACE_OES:
    case GL_COMBINE_ALPHA:
    case GL_SRC0_RGB:
    case GL_SRC1_RGB:
    case GL_SRC2_RGB:
    case GL_OPERAND0_RGB:
    case GL_OPERAND1_RGB:
    case GL_OPERAND2_RGB:
    case GL_SRC0_ALPHA:
    case GL_SRC1_ALPHA:
    case GL_SRC2_ALPHA:
    case GL_OPERAND0_ALPHA:
    case GL_OPERAND1_ALPHA:
    case GL_OPERAND2_ALPHA:
    case GL_RGB_SCALE:
    case GL_ALPHA_SCALE:
    case GL_COMBINE_RGB:
    case GL_POLYGON_OFFSET_FACTOR:
    case GL_POLYGON_OFFSET_FILL:
    case GL_POLYGON_OFFSET_UNITS:
    case GL_RED_BITS:
    case GL_RESCALE_NORMAL:
    case GL_SAMPLE_ALPHA_TO_COVERAGE:
    case GL_SAMPLE_ALPHA_TO_ONE:
    case GL_SAMPLE_BUFFERS:
    case GL_SAMPLE_COVERAGE:
    case GL_SAMPLE_COVERAGE_INVERT:
    case GL_SAMPLE_COVERAGE_VALUE:
    case GL_SAMPLES:
    case GL_MAX_SAMPLES_EXT:
    case GL_STENCIL_BITS:
    case GL_STENCIL_CLEAR_VALUE:
    case GL_STENCIL_FUNC:
    case GL_STENCIL_TEST:
    case GL_STENCIL_VALUE_MASK:
    case GL_STENCIL_BACK_FUNC:
    case GL_STENCIL_BACK_VALUE_MASK:
    case GL_STENCIL_BACK_REF:
    case GL_STENCIL_BACK_FAIL:
    case GL_STENCIL_BACK_PASS_DEPTH_FAIL:
    case GL_STENCIL_BACK_PASS_DEPTH_PASS:
    case GL_STENCIL_BACK_WRITEMASK:
    case GL_TEXTURE_2D:
    case GL_TEXTURE_BINDING_2D:
    case GL_TEXTURE_BINDING_CUBE_MAP:
    case GL_TEXTURE_BINDING_EXTERNAL_OES:
    case GL_TEXTURE_COORD_ARRAY:
    case GL_TEXTURE_COORD_ARRAY_BUFFER_BINDING:
    case GL_TEXTURE_COORD_ARRAY_SIZE:
    case GL_TEXTURE_COORD_ARRAY_STRIDE:
    case GL_TEXTURE_COORD_ARRAY_TYPE:
    case GL_UNPACK_ALIGNMENT:
    case GL_VERTEX_ARRAY:
    case GL_VERTEX_ARRAY_BUFFER_BINDING:
    case GL_VERTEX_ARRAY_SIZE:
    case GL_VERTEX_ARRAY_STRIDE:
    case GL_VERTEX_ARRAY_TYPE:
    case GL_SPOT_CUTOFF:
    case GL_TEXTURE_MIN_FILTER:
    case GL_TEXTURE_MAG_FILTER:
    case GL_TEXTURE_WRAP_S:
    case GL_TEXTURE_WRAP_T:
    case GL_GENERATE_MIPMAP:
    case GL_GENERATE_MIPMAP_HINT:
    case GL_RENDERBUFFER_WIDTH_OES:
    case GL_RENDERBUFFER_HEIGHT_OES:
    case GL_RENDERBUFFER_INTERNAL_FORMAT_OES:
    case GL_RENDERBUFFER_RED_SIZE_OES:
    case GL_RENDERBUFFER_GREEN_SIZE_OES:
    case GL_RENDERBUFFER_BLUE_SIZE_OES:
    case GL_RENDERBUFFER_ALPHA_SIZE_OES:
    case GL_RENDERBUFFER_DEPTH_SIZE_OES:
    case GL_RENDERBUFFER_STENCIL_SIZE_OES:
    case GL_RENDERBUFFER_BINDING:
    case GL_FRAMEBUFFER_BINDING:
    case GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_OES:
    case GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_OES:
    case GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_OES:
    case GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_OES:
    case GL_FENCE_STATUS_NV:
    case GL_FENCE_CONDITION_NV:
    case GL_TEXTURE_WIDTH_QCOM:
    case GL_TEXTURE_HEIGHT_QCOM:
    case GL_TEXTURE_DEPTH_QCOM:
    case GL_TEXTURE_INTERNAL_FORMAT_QCOM:
    case GL_TEXTURE_FORMAT_QCOM:
    case GL_TEXTURE_TYPE_QCOM:
    case GL_TEXTURE_IMAGE_VALID_QCOM:
    case GL_TEXTURE_NUM_LEVELS_QCOM:
    case GL_TEXTURE_TARGET_QCOM:
    case GL_TEXTURE_OBJECT_VALID_QCOM:
    case GL_BLEND_EQUATION_RGB_OES:
    case GL_BLEND_EQUATION_ALPHA_OES:
    case GL_BLEND_DST_RGB_OES:
    case GL_BLEND_SRC_RGB_OES:
    case GL_BLEND_DST_ALPHA_OES:
    case GL_BLEND_SRC_ALPHA_OES:
    case GL_MAX_LIGHTS:
    case GL_SHADER_TYPE:
    case GL_DELETE_STATUS:
    case GL_COMPILE_STATUS:
    case GL_INFO_LOG_LENGTH:
    case GL_SHADER_SOURCE_LENGTH:
    case GL_CURRENT_PROGRAM:
    case GL_LINK_STATUS:
    case GL_VALIDATE_STATUS:
    case GL_ATTACHED_SHADERS:
    case GL_ACTIVE_UNIFORMS:
    case GL_ACTIVE_ATTRIBUTES:
    case GL_SUBPIXEL_BITS:
    case GL_MAX_CUBE_MAP_TEXTURE_SIZE:
    case GL_NUM_SHADER_BINARY_FORMATS:
    case GL_SHADER_COMPILER:
    case GL_MAX_VERTEX_ATTRIBS:
    case GL_MAX_VERTEX_UNIFORM_VECTORS:
    case GL_MAX_VARYING_VECTORS:
    case GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS:
    case GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS:
    case GL_MAX_FRAGMENT_UNIFORM_VECTORS:
    case GL_MAX_RENDERBUFFER_SIZE:
    case GL_MAX_TEXTURE_IMAGE_UNITS:
    case GL_REQUIRED_TEXTURE_IMAGE_UNITS_OES:
    case GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES:
    case GL_LINE_WIDTH:
        s = 1;
        break;
    // --- pairs (ranges, 2D dims) ---
    case GL_ALIASED_LINE_WIDTH_RANGE:
    case GL_ALIASED_POINT_SIZE_RANGE:
    case GL_DEPTH_RANGE:
    case GL_MAX_VIEWPORT_DIMS:
    case GL_SMOOTH_POINT_SIZE_RANGE:
    case GL_SMOOTH_LINE_WIDTH_RANGE:
        s= 2;
        break;
    // --- 3-component vectors ---
    case GL_SPOT_DIRECTION:
    case GL_POINT_DISTANCE_ATTENUATION:
    case GL_CURRENT_NORMAL:
        s = 3;
        break;
    // --- 4-component vectors (colors, rects, viewport) ---
    case GL_CURRENT_VERTEX_ATTRIB:
    case GL_CURRENT_TEXTURE_COORDS:
    case GL_CURRENT_COLOR:
    case GL_FOG_COLOR:
    case GL_AMBIENT:
    case GL_DIFFUSE:
    case GL_SPECULAR:
    case GL_EMISSION:
    case GL_POSITION:
    case GL_LIGHT_MODEL_AMBIENT:
    case GL_TEXTURE_ENV_COLOR:
    case GL_SCISSOR_BOX:
    case GL_VIEWPORT:
    case GL_TEXTURE_CROP_RECT_OES:
    case GL_COLOR_CLEAR_VALUE:
    case GL_COLOR_WRITEMASK:
    case GL_AMBIENT_AND_DIFFUSE:
    case GL_BLEND_COLOR:
        s = 4;
        break;
    // --- 4x4 matrices ---
    case GL_MODELVIEW_MATRIX:
    case GL_PROJECTION_MATRIX:
    case GL_TEXTURE_MATRIX:
        s = 16;
        break;
    default:
        ERR("glUtilsParamSize: unknow param 0x%08x\n", param);
        s = 1; // assume 1
    }
    return s;
}
|
||||
|
||||
void glUtilsPackPointerData(unsigned char *dst, unsigned char *src,
|
||||
int size, GLenum type, unsigned int stride,
|
||||
unsigned int datalen)
|
||||
{
|
||||
unsigned int vsize = size * glSizeof(type);
|
||||
if (stride == 0) stride = vsize;
|
||||
|
||||
if (stride == vsize) {
|
||||
memcpy(dst, src, datalen);
|
||||
} else {
|
||||
for (unsigned int i = 0; i < datalen; i += vsize) {
|
||||
memcpy(dst, src, vsize);
|
||||
dst += vsize;
|
||||
src += stride;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void glUtilsWritePackPointerData(void* _stream, unsigned char *src,
|
||||
int size, GLenum type, unsigned int stride,
|
||||
unsigned int datalen)
|
||||
{
|
||||
IOStream* stream = reinterpret_cast<IOStream*>(_stream);
|
||||
|
||||
unsigned int vsize = size * glSizeof(type);
|
||||
if (stride == 0) stride = vsize;
|
||||
|
||||
if (stride == vsize) {
|
||||
stream->writeFully(src, datalen);
|
||||
} else {
|
||||
for (unsigned int i = 0; i < datalen; i += vsize) {
|
||||
stream->writeFully(src, (size_t)vsize);
|
||||
src += stride;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Return the size of one pixel, in bits, for the given |format|/|type|
// combination. Packed types (5_6_5, 4_4_4_4, 5_5_5_1, 24_8, int/float)
// determine the pixel size directly; byte types multiply component size
// by the component count of |format|. Unknown values are logged and
// yield 0.
// NOTE(review): for GL_SHORT/GL_INT/GL_FLOAT with multi-component formats
// this returns 16/32 bits regardless of |format| — presumably matching the
// host-side protocol; confirm before changing.
int glUtilsPixelBitSize(GLenum format, GLenum type)
{
    int components = 0;
    int componentsize = 0;
    int pixelsize = 0;
    switch(type) {
    case GL_BYTE:
    case GL_UNSIGNED_BYTE:
        componentsize = 8;
        break;
    case GL_SHORT:
    case GL_UNSIGNED_SHORT:
    case GL_UNSIGNED_SHORT_5_6_5:
    case GL_UNSIGNED_SHORT_4_4_4_4:
    case GL_UNSIGNED_SHORT_5_5_5_1:
    case GL_RGB565_OES:
    case GL_RGB5_A1_OES:
    case GL_RGBA4_OES:
        pixelsize = 16;
        break;
    case GL_INT:
    case GL_UNSIGNED_INT:
    case GL_FLOAT:
    case GL_FIXED:
    case GL_UNSIGNED_INT_24_8_OES:
        pixelsize = 32;
        break;
    default:
        ERR("glUtilsPixelBitSize: unknown pixel type - assuming pixel data 0\n");
        componentsize = 0;
    }

    // Byte-sized (non-packed) types: derive from the format's component count.
    if (pixelsize == 0) {
        switch(format) {
#if 0
        case GL_RED:
        case GL_GREEN:
        case GL_BLUE:
#endif
        case GL_ALPHA:
        case GL_LUMINANCE:
        case GL_DEPTH_COMPONENT:
        case GL_DEPTH_STENCIL_OES:
            components = 1;
            break;
        case GL_LUMINANCE_ALPHA:
            components = 2;
            break;
        case GL_RGB:
#if 0
        case GL_BGR:
#endif
            components = 3;
            break;
        case GL_RGBA:
        case GL_BGRA_EXT:
            components = 4;
            break;
        default:
            ERR("glUtilsPixelBitSize: unknown pixel format...\n");
            components = 0;
        }
        pixelsize = components * componentsize;
    }

    return pixelsize;
}
|
||||
|
||||
// pack a list of strings into one.
|
||||
void glUtilsPackStrings(char *ptr, char **strings, GLint *length, GLsizei count)
|
||||
{
|
||||
char *p = ptr;
|
||||
*p = '\0';
|
||||
for (int i = 0; i < count; i++) {
|
||||
int l=0;
|
||||
if (strings[i]!=NULL) {
|
||||
if (length == NULL || length[i] < 0) {
|
||||
l = strlen(strings[i]);
|
||||
strcat(p, strings[i]);
|
||||
} else {
|
||||
l = length[i];
|
||||
strncat(p, strings[i], l);
|
||||
}
|
||||
}
|
||||
p += l;
|
||||
}
|
||||
}
|
||||
|
||||
// claculate the length of a list of strings
|
||||
int glUtilsCalcShaderSourceLen( char **strings, GLint *length, GLsizei count)
|
||||
{
|
||||
int len = 0;
|
||||
for (int i = 0; i < count; i++) {
|
||||
int l;
|
||||
if (length == NULL || length[i] < 0) {
|
||||
l = strings[i]!=NULL ? strlen(strings[i]) : 0;
|
||||
} else {
|
||||
l = length[i];
|
||||
}
|
||||
len += l;
|
||||
}
|
||||
return len;
|
||||
|
||||
}
|
||||
95
android/opengl/shared/OpenglCodecCommon/glUtils.h
Normal file
95
android/opengl/shared/OpenglCodecCommon/glUtils.h
Normal file
|
|
@ -0,0 +1,95 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef __GL_UTILS_H__
|
||||
#define __GL_UTILS_H__
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#ifdef GL_API
|
||||
#undef GL_API
|
||||
#endif
|
||||
#define GL_API
|
||||
|
||||
#ifdef GL_APIENTRY
|
||||
#undef GL_APIENTRY
|
||||
#endif
|
||||
|
||||
#ifdef GL_APIENTRYP
|
||||
#undef GL_APIENTRYP
|
||||
#endif
|
||||
#define GL_APIENTRYP
|
||||
|
||||
#ifndef ANDROID
|
||||
#define GL_APIENTRY
|
||||
#endif
|
||||
|
||||
#include <GLES/gl.h>
|
||||
#include <GLES/glext.h>
|
||||
#include <GLES2/gl2.h>
|
||||
#include <GLES2/gl2ext.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
size_t glSizeof(GLenum type);
|
||||
size_t glUtilsParamSize(GLenum param);
|
||||
void glUtilsPackPointerData(unsigned char *dst, unsigned char *str,
|
||||
int size, GLenum type, unsigned int stride,
|
||||
unsigned int datalen);
|
||||
void glUtilsWritePackPointerData(void* stream, unsigned char *src,
|
||||
int size, GLenum type, unsigned int stride,
|
||||
unsigned int datalen);
|
||||
int glUtilsPixelBitSize(GLenum format, GLenum type);
|
||||
void glUtilsPackStrings(char *ptr, char **strings, GLint *length, GLsizei count);
|
||||
int glUtilsCalcShaderSourceLen(char **strings, GLint *length, GLsizei count);
|
||||
#ifdef __cplusplus
|
||||
};
|
||||
#endif
|
||||
|
||||
namespace GLUtils {
|
||||
|
||||
// Find the smallest and largest value in |indices|.
// -1 is used as the "unset" sentinel, so with count == 0 both outputs are
// -1; indices are assumed non-negative (element indices).
template <class T> void minmax(T *indices, int count, int *min, int *max) {
    *min = -1;
    *max = -1;
    for (int i = 0; i < count; i++) {
        const T v = indices[i];
        if (*min == -1 || v < *min) *min = v;
        if (*max == -1 || v > *max) *max = v;
    }
}
|
||||
|
||||
// Add |offset| to every element of |indices| in place.
template <class T> void shiftIndices(T *indices, int count, int offset) {
    for (int i = 0; i < count; i++) {
        indices[i] += offset;
    }
}
|
||||
|
||||
|
||||
// Copy |count| elements from |src| to |dst|, adding |offset| to each.
// |src| is left unmodified; the ranges must not overlap.
template <class T> void shiftIndices(T *src, T *dst, int count, int offset)
{
    while (count-- > 0) {
        *dst++ = *src++ + offset;
    }
}
|
||||
}; // namespace GLUtils
|
||||
#endif
|
||||
63
android/opengl/shared/OpenglCodecCommon/gl_base_types.h
Normal file
63
android/opengl/shared/OpenglCodecCommon/gl_base_types.h
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef __GL_BASE_TYPES__H
#define __GL_BASE_TYPES__H

#include <KHR/khrplatform.h>

// Calling-convention macros; default to the Khronos platform definition.
#ifndef gl_APIENTRY
#define gl_APIENTRY KHRONOS_APIENTRY
#endif

#ifndef gl2_APIENTRY
#define gl2_APIENTRY KHRONOS_APIENTRY
#endif

// Base GL scalar types, expressed via khrplatform fixed-width types where
// the GL spec requires an exact size.
typedef void GLvoid;
typedef unsigned int GLenum;
typedef unsigned char GLboolean;
typedef unsigned int GLbitfield;
typedef char GLchar;
typedef khronos_int8_t GLbyte;
typedef short GLshort;
typedef int GLint;
typedef int GLsizei;
typedef khronos_uint8_t GLubyte;
typedef unsigned short GLushort;
typedef unsigned int GLuint;
typedef khronos_float_t GLfloat;
typedef khronos_float_t GLclampf;
typedef khronos_int32_t GLfixed;   // 16.16 fixed point
typedef khronos_int32_t GLclampx;
typedef khronos_intptr_t GLintptr;
typedef khronos_ssize_t GLsizeiptr;
typedef char *GLstr;
/* JR XXX Treating this as an in handle - is this correct? */
typedef void * GLeglImageOES;

/* ErrorCode */
// Only defined when the real GL headers have not already provided them.
#ifndef GL_INVALID_ENUM
#define GL_NO_ERROR 0
#define GL_INVALID_ENUM 0x0500
#define GL_INVALID_VALUE 0x0501
#define GL_INVALID_OPERATION 0x0502
#define GL_STACK_OVERFLOW 0x0503
#define GL_STACK_UNDERFLOW 0x0504
#define GL_OUT_OF_MEMORY 0x0505
#define GL_INVALID_FRAMEBUFFER_OPERATION 0x0506
#endif

#endif
|
||||
12
android/opengl/system/GLESv1/Android.mk
Normal file
12
android/opengl/system/GLESv1/Android.mk
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
### GLESv1 implementation ###########################################
|
||||
$(call emugl-begin-shared-library,libGLESv1_CM_emulation)
|
||||
$(call emugl-import,libOpenglSystemCommon libGLESv1_enc lib_renderControl_enc)
|
||||
|
||||
LOCAL_CFLAGS += -DLOG_TAG=\"GLES_emulation\" -DGL_GLEXT_PROTOTYPES
|
||||
|
||||
LOCAL_SRC_FILES := gl.cpp
|
||||
LOCAL_MODULE_RELATIVE_PATH := egl
|
||||
|
||||
$(call emugl-end-module)
|
||||
165
android/opengl/system/GLESv1/gl.cpp
Normal file
165
android/opengl/system/GLESv1/gl.cpp
Normal file
|
|
@ -0,0 +1,165 @@
|
|||
/*
|
||||
* Copyright 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#include "EGLClientIface.h"
|
||||
#include "HostConnection.h"
|
||||
#include "GLEncoder.h"
|
||||
#include "GLES/gl.h"
|
||||
#include "GLES/glext.h"
|
||||
#include "ErrorLog.h"
|
||||
#include "gralloc_cb.h"
|
||||
#include "ThreadInfo.h"
|
||||
#include "EGLImage.h"
|
||||
|
||||
|
||||
//XXX: fix this macro to get the context from fast tls path
|
||||
#define GET_CONTEXT GLEncoder * ctx = getEGLThreadInfo()->hostConn->glEncoder();
|
||||
|
||||
#include "gl_entry.cpp"
|
||||
|
||||
//The functions table
|
||||
#include "gl_ftable.h"
|
||||
|
||||
static EGLClient_eglInterface * s_egl = NULL;
|
||||
static EGLClient_glesInterface * s_gl = NULL;
|
||||
|
||||
#define DEFINE_AND_VALIDATE_HOST_CONNECTION(ret) \
|
||||
HostConnection *hostCon = HostConnection::get(); \
|
||||
if (!hostCon) { \
|
||||
ALOGE("egl: Failed to get host connection\n"); \
|
||||
return ret; \
|
||||
} \
|
||||
renderControl_encoder_context_t *rcEnc = hostCon->rcEncoder(); \
|
||||
if (!rcEnc) { \
|
||||
ALOGE("egl: Failed to get renderControl encoder context\n"); \
|
||||
return ret; \
|
||||
}
|
||||
|
||||
//GL extensions
|
||||
void glEGLImageTargetTexture2DOES(void * self, GLenum target, GLeglImageOES img)
|
||||
{
|
||||
(void)self;
|
||||
|
||||
DBG("glEGLImageTargetTexture2DOES v1 target=%#x img=%p", target, img);
|
||||
|
||||
EGLImage_t *image = (EGLImage_t*)img;
|
||||
|
||||
if (image->target == EGL_NATIVE_BUFFER_ANDROID) {
|
||||
//TODO: check error - we don't have a way to set gl error
|
||||
android_native_buffer_t* native_buffer = image->native_buffer;
|
||||
|
||||
if (native_buffer->common.magic != ANDROID_NATIVE_BUFFER_MAGIC) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (native_buffer->common.version != sizeof(android_native_buffer_t)) {
|
||||
return;
|
||||
}
|
||||
|
||||
GET_CONTEXT;
|
||||
DEFINE_AND_VALIDATE_HOST_CONNECTION();
|
||||
|
||||
ctx->override2DTextureTarget(target);
|
||||
rcEnc->rcBindTexture(rcEnc,
|
||||
((cb_handle_t *)(native_buffer->handle))->hostHandle);
|
||||
ctx->restore2DTextureTarget();
|
||||
}
|
||||
else if (image->target == EGL_GL_TEXTURE_2D_KHR) {
|
||||
GET_CONTEXT;
|
||||
ctx->override2DTextureTarget(target);
|
||||
GLeglImageOES hostImage = reinterpret_cast<GLeglImageOES>((intptr_t)image->host_egl_image);
|
||||
ctx->m_glEGLImageTargetTexture2DOES_enc(self, target, hostImage);
|
||||
ctx->restore2DTextureTarget();
|
||||
}
|
||||
}
|
||||
|
||||
void glEGLImageTargetRenderbufferStorageOES(void *self, GLenum target, GLeglImageOES image)
|
||||
{
|
||||
(void)self;
|
||||
(void)target;
|
||||
|
||||
DBG("glEGLImageTargetRenderbufferStorageOES v1 target=%#x image=%p",
|
||||
target, image);
|
||||
//TODO: check error - we don't have a way to set gl error
|
||||
android_native_buffer_t* native_buffer = (android_native_buffer_t*)image;
|
||||
|
||||
if (native_buffer->common.magic != ANDROID_NATIVE_BUFFER_MAGIC) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (native_buffer->common.version != sizeof(android_native_buffer_t)) {
|
||||
return;
|
||||
}
|
||||
|
||||
DEFINE_AND_VALIDATE_HOST_CONNECTION();
|
||||
rcEnc->rcBindRenderbuffer(rcEnc,
|
||||
((cb_handle_t *)(native_buffer->handle))->hostHandle);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
void * getProcAddress(const char * procname)
|
||||
{
|
||||
// search in GL function table
|
||||
for (int i=0; i<gl_num_funcs; i++) {
|
||||
if (!strcmp(gl_funcs_by_name[i].name, procname)) {
|
||||
return gl_funcs_by_name[i].proc;
|
||||
}
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void finish()
|
||||
{
|
||||
glFinish();
|
||||
}
|
||||
|
||||
const GLubyte *my_glGetString (void *self, GLenum name)
|
||||
{
|
||||
(void)self;
|
||||
|
||||
if (s_egl) {
|
||||
return (const GLubyte*)s_egl->getGLString(name);
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
void init()
|
||||
{
|
||||
GET_CONTEXT;
|
||||
ctx->m_glEGLImageTargetTexture2DOES_enc = ctx->glEGLImageTargetTexture2DOES;
|
||||
ctx->glEGLImageTargetTexture2DOES = &glEGLImageTargetTexture2DOES;
|
||||
ctx->glEGLImageTargetRenderbufferStorageOES = &glEGLImageTargetRenderbufferStorageOES;
|
||||
ctx->glGetString = &my_glGetString;
|
||||
}
|
||||
|
||||
extern "C" {
|
||||
EGLClient_glesInterface * init_emul_gles(EGLClient_eglInterface *eglIface)
|
||||
{
|
||||
s_egl = eglIface;
|
||||
|
||||
if (!s_gl) {
|
||||
s_gl = new EGLClient_glesInterface();
|
||||
s_gl->getProcAddress = getProcAddress;
|
||||
s_gl->finish = finish;
|
||||
s_gl->init = init;
|
||||
}
|
||||
|
||||
return s_gl;
|
||||
}
|
||||
} //extern
|
||||
|
||||
|
||||
19
android/opengl/system/GLESv1_enc/Android.mk
Normal file
19
android/opengl/system/GLESv1_enc/Android.mk
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
### GLESv1_enc Encoder ###########################################
|
||||
$(call emugl-begin-shared-library,libGLESv1_enc)
|
||||
|
||||
LOCAL_CFLAGS += -DLOG_TAG=\"emuglGLESv1_enc\"
|
||||
|
||||
LOCAL_SRC_FILES := \
|
||||
GLEncoder.cpp \
|
||||
GLEncoderUtils.cpp \
|
||||
gl_client_context.cpp \
|
||||
gl_enc.cpp \
|
||||
gl_entry.cpp
|
||||
|
||||
$(call emugl-import,libOpenglCodecCommon)
|
||||
$(call emugl-export,C_INCLUDES,$(LOCAL_PATH))
|
||||
$(call emugl-export,C_INCLUDES,$(intermediates))
|
||||
|
||||
$(call emugl-end-module)
|
||||
1029
android/opengl/system/GLESv1_enc/GLEncoder.cpp
Normal file
1029
android/opengl/system/GLESv1_enc/GLEncoder.cpp
Normal file
File diff suppressed because it is too large
Load diff
154
android/opengl/system/GLESv1_enc/GLEncoder.h
Normal file
154
android/opengl/system/GLESv1_enc/GLEncoder.h
Normal file
|
|
@ -0,0 +1,154 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef _GL_ENCODER_H_
|
||||
#define _GL_ENCODER_H_
|
||||
|
||||
#include "gl_enc.h"
|
||||
#include "GLClientState.h"
|
||||
#include "GLSharedGroup.h"
|
||||
#include "FixedBuffer.h"
|
||||
#include "ChecksumCalculator.h"
|
||||
|
||||
class GLEncoder : public gl_encoder_context_t {
|
||||
|
||||
public:
|
||||
GLEncoder(IOStream *stream, ChecksumCalculator* protocol);
|
||||
virtual ~GLEncoder();
|
||||
void setClientState(GLClientState *state) {
|
||||
m_state = state;
|
||||
}
|
||||
void setSharedGroup(GLSharedGroupPtr shared) { m_shared = shared; }
|
||||
void flush() { m_stream->flush(); }
|
||||
size_t pixelDataSize(GLsizei width, GLsizei height, GLenum format, GLenum type, int pack);
|
||||
|
||||
void setInitialized(){ m_initialized = true; };
|
||||
bool isInitialized(){ return m_initialized; };
|
||||
|
||||
virtual void setError(GLenum error){ m_error = error; };
|
||||
virtual GLenum getError() { return m_error; };
|
||||
|
||||
void override2DTextureTarget(GLenum target);
|
||||
void restore2DTextureTarget();
|
||||
|
||||
private:
|
||||
|
||||
bool m_initialized;
|
||||
GLClientState *m_state;
|
||||
GLSharedGroupPtr m_shared;
|
||||
GLenum m_error;
|
||||
FixedBuffer m_fixedBuffer;
|
||||
GLint *m_compressedTextureFormats;
|
||||
GLint m_num_compressedTextureFormats;
|
||||
|
||||
GLint *getCompressedTextureFormats();
|
||||
// original functions;
|
||||
glGetError_client_proc_t m_glGetError_enc;
|
||||
glGetIntegerv_client_proc_t m_glGetIntegerv_enc;
|
||||
glGetFloatv_client_proc_t m_glGetFloatv_enc;
|
||||
glGetFixedv_client_proc_t m_glGetFixedv_enc;
|
||||
glGetBooleanv_client_proc_t m_glGetBooleanv_enc;
|
||||
glGetPointerv_client_proc_t m_glGetPointerv_enc;
|
||||
|
||||
glPixelStorei_client_proc_t m_glPixelStorei_enc;
|
||||
glVertexPointer_client_proc_t m_glVertexPointer_enc;
|
||||
glNormalPointer_client_proc_t m_glNormalPointer_enc;
|
||||
glColorPointer_client_proc_t m_glColorPointer_enc;
|
||||
glPointSizePointerOES_client_proc_t m_glPointSizePointerOES_enc;
|
||||
glTexCoordPointer_client_proc_t m_glTexCoordPointer_enc;
|
||||
glClientActiveTexture_client_proc_t m_glClientActiveTexture_enc;
|
||||
glMatrixIndexPointerOES_client_proc_t m_glMatrixIndexPointerOES_enc;
|
||||
glWeightPointerOES_client_proc_t m_glWeightPointerOES_enc;
|
||||
|
||||
glBindBuffer_client_proc_t m_glBindBuffer_enc;
|
||||
glBufferData_client_proc_t m_glBufferData_enc;
|
||||
glBufferSubData_client_proc_t m_glBufferSubData_enc;
|
||||
glDeleteBuffers_client_proc_t m_glDeleteBuffers_enc;
|
||||
|
||||
glEnableClientState_client_proc_t m_glEnableClientState_enc;
|
||||
glDisableClientState_client_proc_t m_glDisableClientState_enc;
|
||||
glIsEnabled_client_proc_t m_glIsEnabled_enc;
|
||||
glDrawArrays_client_proc_t m_glDrawArrays_enc;
|
||||
glDrawElements_client_proc_t m_glDrawElements_enc;
|
||||
glFlush_client_proc_t m_glFlush_enc;
|
||||
|
||||
glActiveTexture_client_proc_t m_glActiveTexture_enc;
|
||||
glBindTexture_client_proc_t m_glBindTexture_enc;
|
||||
glDeleteTextures_client_proc_t m_glDeleteTextures_enc;
|
||||
glDisable_client_proc_t m_glDisable_enc;
|
||||
glEnable_client_proc_t m_glEnable_enc;
|
||||
glGetTexParameterfv_client_proc_t m_glGetTexParameterfv_enc;
|
||||
glGetTexParameteriv_client_proc_t m_glGetTexParameteriv_enc;
|
||||
glGetTexParameterxv_client_proc_t m_glGetTexParameterxv_enc;
|
||||
glTexParameterf_client_proc_t m_glTexParameterf_enc;
|
||||
glTexParameterfv_client_proc_t m_glTexParameterfv_enc;
|
||||
glTexParameteri_client_proc_t m_glTexParameteri_enc;
|
||||
glTexParameterx_client_proc_t m_glTexParameterx_enc;
|
||||
glTexParameteriv_client_proc_t m_glTexParameteriv_enc;
|
||||
glTexParameterxv_client_proc_t m_glTexParameterxv_enc;
|
||||
|
||||
// statics
|
||||
static GLenum s_glGetError(void * self);
|
||||
static void s_glGetIntegerv(void *self, GLenum pname, GLint *ptr);
|
||||
static void s_glGetBooleanv(void *self, GLenum pname, GLboolean *ptr);
|
||||
static void s_glGetFloatv(void *self, GLenum pname, GLfloat *ptr);
|
||||
static void s_glGetFixedv(void *self, GLenum pname, GLfixed *ptr);
|
||||
static void s_glGetPointerv(void *self, GLenum pname, GLvoid **params);
|
||||
|
||||
static void s_glFlush(void * self);
|
||||
static const GLubyte * s_glGetString(void *self, GLenum name);
|
||||
static void s_glVertexPointer(void *self, int size, GLenum type, GLsizei stride, const void *data);
|
||||
static void s_glNormalPointer(void *self, GLenum type, GLsizei stride, const void *data);
|
||||
static void s_glColorPointer(void *self, int size, GLenum type, GLsizei stride, const void *data);
|
||||
static void s_glPointSizePointerOES(void *self, GLenum type, GLsizei stride, const void *data);
|
||||
static void s_glClientActiveTexture(void *self, GLenum texture);
|
||||
static void s_glTexCoordPointer(void *self, int size, GLenum type, GLsizei stride, const void *data);
|
||||
static void s_glMatrixIndexPointerOES(void *self, int size, GLenum type, GLsizei stride, const void * data);
|
||||
static void s_glWeightPointerOES(void *self, int size, GLenum type, GLsizei stride, const void * data);
|
||||
static void s_glDisableClientState(void *self, GLenum state);
|
||||
static void s_glEnableClientState(void *self, GLenum state);
|
||||
static GLboolean s_glIsEnabled(void *self, GLenum cap);
|
||||
static void s_glBindBuffer(void *self, GLenum target, GLuint id);
|
||||
static void s_glBufferData(void *self, GLenum target, GLsizeiptr size, const GLvoid * data, GLenum usage);
|
||||
static void s_glBufferSubData(void *self, GLenum target, GLintptr offset, GLsizeiptr size, const GLvoid * data);
|
||||
static void s_glDeleteBuffers(void *self, GLsizei n, const GLuint * buffers);
|
||||
|
||||
static void s_glDrawArrays(void *self, GLenum mode, GLint first, GLsizei count);
|
||||
static void s_glDrawElements(void *self, GLenum mode, GLsizei count, GLenum type, const void *indices);
|
||||
static void s_glPixelStorei(void *self, GLenum param, GLint value);
|
||||
|
||||
static void s_glFinish(void *self);
|
||||
void sendVertexData(unsigned first, unsigned count);
|
||||
|
||||
static void s_glActiveTexture(void* self, GLenum texture);
|
||||
static void s_glBindTexture(void* self, GLenum target, GLuint texture);
|
||||
static void s_glDeleteTextures(void* self, GLsizei n, const GLuint* textures);
|
||||
static void s_glDisable(void* self, GLenum cap);
|
||||
static void s_glEnable(void* self, GLenum cap);
|
||||
static void s_glGetTexParameterfv(void* self, GLenum target, GLenum pname, GLfloat* params);
|
||||
static void s_glGetTexParameteriv(void* self, GLenum target, GLenum pname, GLint* params);
|
||||
static void s_glGetTexParameterxv(void* self, GLenum target, GLenum pname, GLfixed* params);
|
||||
static void s_glTexParameterf(void* self, GLenum target, GLenum pname, GLfloat param);
|
||||
static void s_glTexParameterfv(void* self, GLenum target, GLenum pname, const GLfloat* params);
|
||||
static void s_glTexParameteri(void* self, GLenum target, GLenum pname, GLint param);
|
||||
static void s_glTexParameterx(void* self, GLenum target, GLenum pname, GLfixed param);
|
||||
static void s_glTexParameteriv(void* self, GLenum target, GLenum pname, const GLint* params);
|
||||
static void s_glTexParameterxv(void* self, GLenum target, GLenum pname, const GLfixed* params);
|
||||
|
||||
public:
|
||||
glEGLImageTargetTexture2DOES_client_proc_t m_glEGLImageTargetTexture2DOES_enc;
|
||||
|
||||
};
|
||||
#endif
|
||||
28
android/opengl/system/GLESv1_enc/GLEncoderUtils.cpp
Normal file
28
android/opengl/system/GLESv1_enc/GLEncoderUtils.cpp
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include "GLEncoder.h"
|
||||
|
||||
namespace glesv1_enc {
|
||||
|
||||
size_t pixelDataSize(void *self, GLsizei width, GLsizei height, GLenum format, GLenum type, int pack)
|
||||
{
|
||||
GLEncoder *ctx = (GLEncoder *)self;
|
||||
return ctx->pixelDataSize(width, height, format, type, pack);
|
||||
}
|
||||
|
||||
} // namespace glesv1_enc
|
||||
23
android/opengl/system/GLESv1_enc/GLEncoderUtils.h
Normal file
23
android/opengl/system/GLESv1_enc/GLEncoderUtils.h
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
/*
|
||||
* Copyright (C) 2011 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
#ifndef GL_ENCODER_UTILS_H
|
||||
#define GL_ENCODER_UTILS_H
|
||||
|
||||
namespace glesv1_enc {
|
||||
size_t pixelDataSize(void *self, GLsizei width, GLsizei height, GLenum format, GLenum type, int pack);
|
||||
};
|
||||
|
||||
#endif
|
||||
306
android/opengl/system/GLESv1_enc/gl_client_context.cpp
Normal file
306
android/opengl/system/GLESv1_enc/gl_client_context.cpp
Normal file
|
|
@ -0,0 +1,306 @@
|
|||
// Generated Code - DO NOT EDIT !!
|
||||
// generated by 'emugen'
|
||||
|
||||
|
||||
#include <string.h>
|
||||
#include "gl_client_context.h"
|
||||
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
int gl_client_context_t::initDispatchByName(void *(*getProc)(const char *, void *userData), void *userData)
|
||||
{
|
||||
glAlphaFunc = (glAlphaFunc_client_proc_t) getProc("glAlphaFunc", userData);
|
||||
glClearColor = (glClearColor_client_proc_t) getProc("glClearColor", userData);
|
||||
glClearDepthf = (glClearDepthf_client_proc_t) getProc("glClearDepthf", userData);
|
||||
glClipPlanef = (glClipPlanef_client_proc_t) getProc("glClipPlanef", userData);
|
||||
glColor4f = (glColor4f_client_proc_t) getProc("glColor4f", userData);
|
||||
glDepthRangef = (glDepthRangef_client_proc_t) getProc("glDepthRangef", userData);
|
||||
glFogf = (glFogf_client_proc_t) getProc("glFogf", userData);
|
||||
glFogfv = (glFogfv_client_proc_t) getProc("glFogfv", userData);
|
||||
glFrustumf = (glFrustumf_client_proc_t) getProc("glFrustumf", userData);
|
||||
glGetClipPlanef = (glGetClipPlanef_client_proc_t) getProc("glGetClipPlanef", userData);
|
||||
glGetFloatv = (glGetFloatv_client_proc_t) getProc("glGetFloatv", userData);
|
||||
glGetLightfv = (glGetLightfv_client_proc_t) getProc("glGetLightfv", userData);
|
||||
glGetMaterialfv = (glGetMaterialfv_client_proc_t) getProc("glGetMaterialfv", userData);
|
||||
glGetTexEnvfv = (glGetTexEnvfv_client_proc_t) getProc("glGetTexEnvfv", userData);
|
||||
glGetTexParameterfv = (glGetTexParameterfv_client_proc_t) getProc("glGetTexParameterfv", userData);
|
||||
glLightModelf = (glLightModelf_client_proc_t) getProc("glLightModelf", userData);
|
||||
glLightModelfv = (glLightModelfv_client_proc_t) getProc("glLightModelfv", userData);
|
||||
glLightf = (glLightf_client_proc_t) getProc("glLightf", userData);
|
||||
glLightfv = (glLightfv_client_proc_t) getProc("glLightfv", userData);
|
||||
glLineWidth = (glLineWidth_client_proc_t) getProc("glLineWidth", userData);
|
||||
glLoadMatrixf = (glLoadMatrixf_client_proc_t) getProc("glLoadMatrixf", userData);
|
||||
glMaterialf = (glMaterialf_client_proc_t) getProc("glMaterialf", userData);
|
||||
glMaterialfv = (glMaterialfv_client_proc_t) getProc("glMaterialfv", userData);
|
||||
glMultMatrixf = (glMultMatrixf_client_proc_t) getProc("glMultMatrixf", userData);
|
||||
glMultiTexCoord4f = (glMultiTexCoord4f_client_proc_t) getProc("glMultiTexCoord4f", userData);
|
||||
glNormal3f = (glNormal3f_client_proc_t) getProc("glNormal3f", userData);
|
||||
glOrthof = (glOrthof_client_proc_t) getProc("glOrthof", userData);
|
||||
glPointParameterf = (glPointParameterf_client_proc_t) getProc("glPointParameterf", userData);
|
||||
glPointParameterfv = (glPointParameterfv_client_proc_t) getProc("glPointParameterfv", userData);
|
||||
glPointSize = (glPointSize_client_proc_t) getProc("glPointSize", userData);
|
||||
glPolygonOffset = (glPolygonOffset_client_proc_t) getProc("glPolygonOffset", userData);
|
||||
glRotatef = (glRotatef_client_proc_t) getProc("glRotatef", userData);
|
||||
glScalef = (glScalef_client_proc_t) getProc("glScalef", userData);
|
||||
glTexEnvf = (glTexEnvf_client_proc_t) getProc("glTexEnvf", userData);
|
||||
glTexEnvfv = (glTexEnvfv_client_proc_t) getProc("glTexEnvfv", userData);
|
||||
glTexParameterf = (glTexParameterf_client_proc_t) getProc("glTexParameterf", userData);
|
||||
glTexParameterfv = (glTexParameterfv_client_proc_t) getProc("glTexParameterfv", userData);
|
||||
glTranslatef = (glTranslatef_client_proc_t) getProc("glTranslatef", userData);
|
||||
glActiveTexture = (glActiveTexture_client_proc_t) getProc("glActiveTexture", userData);
|
||||
glAlphaFuncx = (glAlphaFuncx_client_proc_t) getProc("glAlphaFuncx", userData);
|
||||
glBindBuffer = (glBindBuffer_client_proc_t) getProc("glBindBuffer", userData);
|
||||
glBindTexture = (glBindTexture_client_proc_t) getProc("glBindTexture", userData);
|
||||
glBlendFunc = (glBlendFunc_client_proc_t) getProc("glBlendFunc", userData);
|
||||
glBufferData = (glBufferData_client_proc_t) getProc("glBufferData", userData);
|
||||
glBufferSubData = (glBufferSubData_client_proc_t) getProc("glBufferSubData", userData);
|
||||
glClear = (glClear_client_proc_t) getProc("glClear", userData);
|
||||
glClearColorx = (glClearColorx_client_proc_t) getProc("glClearColorx", userData);
|
||||
glClearDepthx = (glClearDepthx_client_proc_t) getProc("glClearDepthx", userData);
|
||||
glClearStencil = (glClearStencil_client_proc_t) getProc("glClearStencil", userData);
|
||||
glClientActiveTexture = (glClientActiveTexture_client_proc_t) getProc("glClientActiveTexture", userData);
|
||||
glColor4ub = (glColor4ub_client_proc_t) getProc("glColor4ub", userData);
|
||||
glColor4x = (glColor4x_client_proc_t) getProc("glColor4x", userData);
|
||||
glColorMask = (glColorMask_client_proc_t) getProc("glColorMask", userData);
|
||||
glColorPointer = (glColorPointer_client_proc_t) getProc("glColorPointer", userData);
|
||||
glCompressedTexImage2D = (glCompressedTexImage2D_client_proc_t) getProc("glCompressedTexImage2D", userData);
|
||||
glCompressedTexSubImage2D = (glCompressedTexSubImage2D_client_proc_t) getProc("glCompressedTexSubImage2D", userData);
|
||||
glCopyTexImage2D = (glCopyTexImage2D_client_proc_t) getProc("glCopyTexImage2D", userData);
|
||||
glCopyTexSubImage2D = (glCopyTexSubImage2D_client_proc_t) getProc("glCopyTexSubImage2D", userData);
|
||||
glCullFace = (glCullFace_client_proc_t) getProc("glCullFace", userData);
|
||||
glDeleteBuffers = (glDeleteBuffers_client_proc_t) getProc("glDeleteBuffers", userData);
|
||||
glDeleteTextures = (glDeleteTextures_client_proc_t) getProc("glDeleteTextures", userData);
|
||||
glDepthFunc = (glDepthFunc_client_proc_t) getProc("glDepthFunc", userData);
|
||||
glDepthMask = (glDepthMask_client_proc_t) getProc("glDepthMask", userData);
|
||||
glDepthRangex = (glDepthRangex_client_proc_t) getProc("glDepthRangex", userData);
|
||||
glDisable = (glDisable_client_proc_t) getProc("glDisable", userData);
|
||||
glDisableClientState = (glDisableClientState_client_proc_t) getProc("glDisableClientState", userData);
|
||||
glDrawArrays = (glDrawArrays_client_proc_t) getProc("glDrawArrays", userData);
|
||||
glDrawElements = (glDrawElements_client_proc_t) getProc("glDrawElements", userData);
|
||||
glEnable = (glEnable_client_proc_t) getProc("glEnable", userData);
|
||||
glEnableClientState = (glEnableClientState_client_proc_t) getProc("glEnableClientState", userData);
|
||||
glFinish = (glFinish_client_proc_t) getProc("glFinish", userData);
|
||||
glFlush = (glFlush_client_proc_t) getProc("glFlush", userData);
|
||||
glFogx = (glFogx_client_proc_t) getProc("glFogx", userData);
|
||||
glFogxv = (glFogxv_client_proc_t) getProc("glFogxv", userData);
|
||||
glFrontFace = (glFrontFace_client_proc_t) getProc("glFrontFace", userData);
|
||||
glFrustumx = (glFrustumx_client_proc_t) getProc("glFrustumx", userData);
|
||||
glGetBooleanv = (glGetBooleanv_client_proc_t) getProc("glGetBooleanv", userData);
|
||||
glGetBufferParameteriv = (glGetBufferParameteriv_client_proc_t) getProc("glGetBufferParameteriv", userData);
|
||||
glClipPlanex = (glClipPlanex_client_proc_t) getProc("glClipPlanex", userData);
|
||||
glGenBuffers = (glGenBuffers_client_proc_t) getProc("glGenBuffers", userData);
|
||||
glGenTextures = (glGenTextures_client_proc_t) getProc("glGenTextures", userData);
|
||||
glGetError = (glGetError_client_proc_t) getProc("glGetError", userData);
|
||||
glGetFixedv = (glGetFixedv_client_proc_t) getProc("glGetFixedv", userData);
|
||||
glGetIntegerv = (glGetIntegerv_client_proc_t) getProc("glGetIntegerv", userData);
|
||||
glGetLightxv = (glGetLightxv_client_proc_t) getProc("glGetLightxv", userData);
|
||||
glGetMaterialxv = (glGetMaterialxv_client_proc_t) getProc("glGetMaterialxv", userData);
|
||||
glGetPointerv = (glGetPointerv_client_proc_t) getProc("glGetPointerv", userData);
|
||||
glGetString = (glGetString_client_proc_t) getProc("glGetString", userData);
|
||||
glGetTexEnviv = (glGetTexEnviv_client_proc_t) getProc("glGetTexEnviv", userData);
|
||||
glGetTexEnvxv = (glGetTexEnvxv_client_proc_t) getProc("glGetTexEnvxv", userData);
|
||||
glGetTexParameteriv = (glGetTexParameteriv_client_proc_t) getProc("glGetTexParameteriv", userData);
|
||||
glGetTexParameterxv = (glGetTexParameterxv_client_proc_t) getProc("glGetTexParameterxv", userData);
|
||||
glHint = (glHint_client_proc_t) getProc("glHint", userData);
|
||||
glIsBuffer = (glIsBuffer_client_proc_t) getProc("glIsBuffer", userData);
|
||||
glIsEnabled = (glIsEnabled_client_proc_t) getProc("glIsEnabled", userData);
|
||||
glIsTexture = (glIsTexture_client_proc_t) getProc("glIsTexture", userData);
|
||||
glLightModelx = (glLightModelx_client_proc_t) getProc("glLightModelx", userData);
|
||||
glLightModelxv = (glLightModelxv_client_proc_t) getProc("glLightModelxv", userData);
|
||||
glLightx = (glLightx_client_proc_t) getProc("glLightx", userData);
|
||||
glLightxv = (glLightxv_client_proc_t) getProc("glLightxv", userData);
|
||||
glLineWidthx = (glLineWidthx_client_proc_t) getProc("glLineWidthx", userData);
|
||||
glLoadIdentity = (glLoadIdentity_client_proc_t) getProc("glLoadIdentity", userData);
|
||||
glLoadMatrixx = (glLoadMatrixx_client_proc_t) getProc("glLoadMatrixx", userData);
|
||||
glLogicOp = (glLogicOp_client_proc_t) getProc("glLogicOp", userData);
|
||||
glMaterialx = (glMaterialx_client_proc_t) getProc("glMaterialx", userData);
|
||||
glMaterialxv = (glMaterialxv_client_proc_t) getProc("glMaterialxv", userData);
|
||||
glMatrixMode = (glMatrixMode_client_proc_t) getProc("glMatrixMode", userData);
|
||||
glMultMatrixx = (glMultMatrixx_client_proc_t) getProc("glMultMatrixx", userData);
|
||||
glMultiTexCoord4x = (glMultiTexCoord4x_client_proc_t) getProc("glMultiTexCoord4x", userData);
|
||||
glNormal3x = (glNormal3x_client_proc_t) getProc("glNormal3x", userData);
|
||||
glNormalPointer = (glNormalPointer_client_proc_t) getProc("glNormalPointer", userData);
|
||||
glOrthox = (glOrthox_client_proc_t) getProc("glOrthox", userData);
|
||||
glPixelStorei = (glPixelStorei_client_proc_t) getProc("glPixelStorei", userData);
|
||||
glPointParameterx = (glPointParameterx_client_proc_t) getProc("glPointParameterx", userData);
|
||||
glPointParameterxv = (glPointParameterxv_client_proc_t) getProc("glPointParameterxv", userData);
|
||||
glPointSizex = (glPointSizex_client_proc_t) getProc("glPointSizex", userData);
|
||||
glPolygonOffsetx = (glPolygonOffsetx_client_proc_t) getProc("glPolygonOffsetx", userData);
|
||||
glPopMatrix = (glPopMatrix_client_proc_t) getProc("glPopMatrix", userData);
|
||||
glPushMatrix = (glPushMatrix_client_proc_t) getProc("glPushMatrix", userData);
|
||||
glReadPixels = (glReadPixels_client_proc_t) getProc("glReadPixels", userData);
|
||||
glRotatex = (glRotatex_client_proc_t) getProc("glRotatex", userData);
|
||||
glSampleCoverage = (glSampleCoverage_client_proc_t) getProc("glSampleCoverage", userData);
|
||||
glSampleCoveragex = (glSampleCoveragex_client_proc_t) getProc("glSampleCoveragex", userData);
|
||||
glScalex = (glScalex_client_proc_t) getProc("glScalex", userData);
|
||||
glScissor = (glScissor_client_proc_t) getProc("glScissor", userData);
|
||||
glShadeModel = (glShadeModel_client_proc_t) getProc("glShadeModel", userData);
|
||||
glStencilFunc = (glStencilFunc_client_proc_t) getProc("glStencilFunc", userData);
|
||||
glStencilMask = (glStencilMask_client_proc_t) getProc("glStencilMask", userData);
|
||||
glStencilOp = (glStencilOp_client_proc_t) getProc("glStencilOp", userData);
|
||||
glTexCoordPointer = (glTexCoordPointer_client_proc_t) getProc("glTexCoordPointer", userData);
|
||||
glTexEnvi = (glTexEnvi_client_proc_t) getProc("glTexEnvi", userData);
|
||||
glTexEnvx = (glTexEnvx_client_proc_t) getProc("glTexEnvx", userData);
|
||||
glTexEnviv = (glTexEnviv_client_proc_t) getProc("glTexEnviv", userData);
|
||||
glTexEnvxv = (glTexEnvxv_client_proc_t) getProc("glTexEnvxv", userData);
|
||||
glTexImage2D = (glTexImage2D_client_proc_t) getProc("glTexImage2D", userData);
|
||||
glTexParameteri = (glTexParameteri_client_proc_t) getProc("glTexParameteri", userData);
|
||||
glTexParameterx = (glTexParameterx_client_proc_t) getProc("glTexParameterx", userData);
|
||||
glTexParameteriv = (glTexParameteriv_client_proc_t) getProc("glTexParameteriv", userData);
|
||||
glTexParameterxv = (glTexParameterxv_client_proc_t) getProc("glTexParameterxv", userData);
|
||||
glTexSubImage2D = (glTexSubImage2D_client_proc_t) getProc("glTexSubImage2D", userData);
|
||||
glTranslatex = (glTranslatex_client_proc_t) getProc("glTranslatex", userData);
|
||||
glVertexPointer = (glVertexPointer_client_proc_t) getProc("glVertexPointer", userData);
|
||||
glViewport = (glViewport_client_proc_t) getProc("glViewport", userData);
|
||||
glPointSizePointerOES = (glPointSizePointerOES_client_proc_t) getProc("glPointSizePointerOES", userData);
|
||||
glVertexPointerOffset = (glVertexPointerOffset_client_proc_t) getProc("glVertexPointerOffset", userData);
|
||||
glColorPointerOffset = (glColorPointerOffset_client_proc_t) getProc("glColorPointerOffset", userData);
|
||||
glNormalPointerOffset = (glNormalPointerOffset_client_proc_t) getProc("glNormalPointerOffset", userData);
|
||||
glPointSizePointerOffset = (glPointSizePointerOffset_client_proc_t) getProc("glPointSizePointerOffset", userData);
|
||||
glTexCoordPointerOffset = (glTexCoordPointerOffset_client_proc_t) getProc("glTexCoordPointerOffset", userData);
|
||||
glWeightPointerOffset = (glWeightPointerOffset_client_proc_t) getProc("glWeightPointerOffset", userData);
|
||||
glMatrixIndexPointerOffset = (glMatrixIndexPointerOffset_client_proc_t) getProc("glMatrixIndexPointerOffset", userData);
|
||||
glVertexPointerData = (glVertexPointerData_client_proc_t) getProc("glVertexPointerData", userData);
|
||||
glColorPointerData = (glColorPointerData_client_proc_t) getProc("glColorPointerData", userData);
|
||||
glNormalPointerData = (glNormalPointerData_client_proc_t) getProc("glNormalPointerData", userData);
|
||||
glTexCoordPointerData = (glTexCoordPointerData_client_proc_t) getProc("glTexCoordPointerData", userData);
|
||||
glPointSizePointerData = (glPointSizePointerData_client_proc_t) getProc("glPointSizePointerData", userData);
|
||||
glWeightPointerData = (glWeightPointerData_client_proc_t) getProc("glWeightPointerData", userData);
|
||||
glMatrixIndexPointerData = (glMatrixIndexPointerData_client_proc_t) getProc("glMatrixIndexPointerData", userData);
|
||||
glDrawElementsOffset = (glDrawElementsOffset_client_proc_t) getProc("glDrawElementsOffset", userData);
|
||||
glDrawElementsData = (glDrawElementsData_client_proc_t) getProc("glDrawElementsData", userData);
|
||||
glGetCompressedTextureFormats = (glGetCompressedTextureFormats_client_proc_t) getProc("glGetCompressedTextureFormats", userData);
|
||||
glFinishRoundTrip = (glFinishRoundTrip_client_proc_t) getProc("glFinishRoundTrip", userData);
|
||||
glBlendEquationSeparateOES = (glBlendEquationSeparateOES_client_proc_t) getProc("glBlendEquationSeparateOES", userData);
|
||||
glBlendFuncSeparateOES = (glBlendFuncSeparateOES_client_proc_t) getProc("glBlendFuncSeparateOES", userData);
|
||||
glBlendEquationOES = (glBlendEquationOES_client_proc_t) getProc("glBlendEquationOES", userData);
|
||||
glDrawTexsOES = (glDrawTexsOES_client_proc_t) getProc("glDrawTexsOES", userData);
|
||||
glDrawTexiOES = (glDrawTexiOES_client_proc_t) getProc("glDrawTexiOES", userData);
|
||||
glDrawTexxOES = (glDrawTexxOES_client_proc_t) getProc("glDrawTexxOES", userData);
|
||||
glDrawTexsvOES = (glDrawTexsvOES_client_proc_t) getProc("glDrawTexsvOES", userData);
|
||||
glDrawTexivOES = (glDrawTexivOES_client_proc_t) getProc("glDrawTexivOES", userData);
|
||||
glDrawTexxvOES = (glDrawTexxvOES_client_proc_t) getProc("glDrawTexxvOES", userData);
|
||||
glDrawTexfOES = (glDrawTexfOES_client_proc_t) getProc("glDrawTexfOES", userData);
|
||||
glDrawTexfvOES = (glDrawTexfvOES_client_proc_t) getProc("glDrawTexfvOES", userData);
|
||||
glEGLImageTargetTexture2DOES = (glEGLImageTargetTexture2DOES_client_proc_t) getProc("glEGLImageTargetTexture2DOES", userData);
|
||||
glEGLImageTargetRenderbufferStorageOES = (glEGLImageTargetRenderbufferStorageOES_client_proc_t) getProc("glEGLImageTargetRenderbufferStorageOES", userData);
|
||||
glAlphaFuncxOES = (glAlphaFuncxOES_client_proc_t) getProc("glAlphaFuncxOES", userData);
|
||||
glClearColorxOES = (glClearColorxOES_client_proc_t) getProc("glClearColorxOES", userData);
|
||||
glClearDepthxOES = (glClearDepthxOES_client_proc_t) getProc("glClearDepthxOES", userData);
|
||||
glClipPlanexOES = (glClipPlanexOES_client_proc_t) getProc("glClipPlanexOES", userData);
|
||||
glClipPlanexIMG = (glClipPlanexIMG_client_proc_t) getProc("glClipPlanexIMG", userData);
|
||||
glColor4xOES = (glColor4xOES_client_proc_t) getProc("glColor4xOES", userData);
|
||||
glDepthRangexOES = (glDepthRangexOES_client_proc_t) getProc("glDepthRangexOES", userData);
|
||||
glFogxOES = (glFogxOES_client_proc_t) getProc("glFogxOES", userData);
|
||||
glFogxvOES = (glFogxvOES_client_proc_t) getProc("glFogxvOES", userData);
|
||||
glFrustumxOES = (glFrustumxOES_client_proc_t) getProc("glFrustumxOES", userData);
|
||||
glGetClipPlanexOES = (glGetClipPlanexOES_client_proc_t) getProc("glGetClipPlanexOES", userData);
|
||||
glGetClipPlanex = (glGetClipPlanex_client_proc_t) getProc("glGetClipPlanex", userData);
|
||||
glGetFixedvOES = (glGetFixedvOES_client_proc_t) getProc("glGetFixedvOES", userData);
|
||||
glGetLightxvOES = (glGetLightxvOES_client_proc_t) getProc("glGetLightxvOES", userData);
|
||||
glGetMaterialxvOES = (glGetMaterialxvOES_client_proc_t) getProc("glGetMaterialxvOES", userData);
|
||||
glGetTexEnvxvOES = (glGetTexEnvxvOES_client_proc_t) getProc("glGetTexEnvxvOES", userData);
|
||||
glGetTexParameterxvOES = (glGetTexParameterxvOES_client_proc_t) getProc("glGetTexParameterxvOES", userData);
|
||||
glLightModelxOES = (glLightModelxOES_client_proc_t) getProc("glLightModelxOES", userData);
|
||||
glLightModelxvOES = (glLightModelxvOES_client_proc_t) getProc("glLightModelxvOES", userData);
|
||||
glLightxOES = (glLightxOES_client_proc_t) getProc("glLightxOES", userData);
|
||||
glLightxvOES = (glLightxvOES_client_proc_t) getProc("glLightxvOES", userData);
|
||||
glLineWidthxOES = (glLineWidthxOES_client_proc_t) getProc("glLineWidthxOES", userData);
|
||||
glLoadMatrixxOES = (glLoadMatrixxOES_client_proc_t) getProc("glLoadMatrixxOES", userData);
|
||||
glMaterialxOES = (glMaterialxOES_client_proc_t) getProc("glMaterialxOES", userData);
|
||||
glMaterialxvOES = (glMaterialxvOES_client_proc_t) getProc("glMaterialxvOES", userData);
|
||||
glMultMatrixxOES = (glMultMatrixxOES_client_proc_t) getProc("glMultMatrixxOES", userData);
|
||||
glMultiTexCoord4xOES = (glMultiTexCoord4xOES_client_proc_t) getProc("glMultiTexCoord4xOES", userData);
|
||||
glNormal3xOES = (glNormal3xOES_client_proc_t) getProc("glNormal3xOES", userData);
|
||||
glOrthoxOES = (glOrthoxOES_client_proc_t) getProc("glOrthoxOES", userData);
|
||||
glPointParameterxOES = (glPointParameterxOES_client_proc_t) getProc("glPointParameterxOES", userData);
|
||||
glPointParameterxvOES = (glPointParameterxvOES_client_proc_t) getProc("glPointParameterxvOES", userData);
|
||||
glPointSizexOES = (glPointSizexOES_client_proc_t) getProc("glPointSizexOES", userData);
|
||||
glPolygonOffsetxOES = (glPolygonOffsetxOES_client_proc_t) getProc("glPolygonOffsetxOES", userData);
|
||||
glRotatexOES = (glRotatexOES_client_proc_t) getProc("glRotatexOES", userData);
|
||||
glSampleCoveragexOES = (glSampleCoveragexOES_client_proc_t) getProc("glSampleCoveragexOES", userData);
|
||||
glScalexOES = (glScalexOES_client_proc_t) getProc("glScalexOES", userData);
|
||||
glTexEnvxOES = (glTexEnvxOES_client_proc_t) getProc("glTexEnvxOES", userData);
|
||||
glTexEnvxvOES = (glTexEnvxvOES_client_proc_t) getProc("glTexEnvxvOES", userData);
|
||||
glTexParameterxOES = (glTexParameterxOES_client_proc_t) getProc("glTexParameterxOES", userData);
|
||||
glTexParameterxvOES = (glTexParameterxvOES_client_proc_t) getProc("glTexParameterxvOES", userData);
|
||||
glTranslatexOES = (glTranslatexOES_client_proc_t) getProc("glTranslatexOES", userData);
|
||||
glIsRenderbufferOES = (glIsRenderbufferOES_client_proc_t) getProc("glIsRenderbufferOES", userData);
|
||||
glBindRenderbufferOES = (glBindRenderbufferOES_client_proc_t) getProc("glBindRenderbufferOES", userData);
|
||||
glDeleteRenderbuffersOES = (glDeleteRenderbuffersOES_client_proc_t) getProc("glDeleteRenderbuffersOES", userData);
|
||||
glGenRenderbuffersOES = (glGenRenderbuffersOES_client_proc_t) getProc("glGenRenderbuffersOES", userData);
|
||||
glRenderbufferStorageOES = (glRenderbufferStorageOES_client_proc_t) getProc("glRenderbufferStorageOES", userData);
|
||||
glGetRenderbufferParameterivOES = (glGetRenderbufferParameterivOES_client_proc_t) getProc("glGetRenderbufferParameterivOES", userData);
|
||||
glIsFramebufferOES = (glIsFramebufferOES_client_proc_t) getProc("glIsFramebufferOES", userData);
|
||||
glBindFramebufferOES = (glBindFramebufferOES_client_proc_t) getProc("glBindFramebufferOES", userData);
|
||||
glDeleteFramebuffersOES = (glDeleteFramebuffersOES_client_proc_t) getProc("glDeleteFramebuffersOES", userData);
|
||||
glGenFramebuffersOES = (glGenFramebuffersOES_client_proc_t) getProc("glGenFramebuffersOES", userData);
|
||||
glCheckFramebufferStatusOES = (glCheckFramebufferStatusOES_client_proc_t) getProc("glCheckFramebufferStatusOES", userData);
|
||||
glFramebufferRenderbufferOES = (glFramebufferRenderbufferOES_client_proc_t) getProc("glFramebufferRenderbufferOES", userData);
|
||||
glFramebufferTexture2DOES = (glFramebufferTexture2DOES_client_proc_t) getProc("glFramebufferTexture2DOES", userData);
|
||||
glGetFramebufferAttachmentParameterivOES = (glGetFramebufferAttachmentParameterivOES_client_proc_t) getProc("glGetFramebufferAttachmentParameterivOES", userData);
|
||||
glGenerateMipmapOES = (glGenerateMipmapOES_client_proc_t) getProc("glGenerateMipmapOES", userData);
|
||||
glMapBufferOES = (glMapBufferOES_client_proc_t) getProc("glMapBufferOES", userData);
|
||||
glUnmapBufferOES = (glUnmapBufferOES_client_proc_t) getProc("glUnmapBufferOES", userData);
|
||||
glGetBufferPointervOES = (glGetBufferPointervOES_client_proc_t) getProc("glGetBufferPointervOES", userData);
|
||||
glCurrentPaletteMatrixOES = (glCurrentPaletteMatrixOES_client_proc_t) getProc("glCurrentPaletteMatrixOES", userData);
|
||||
glLoadPaletteFromModelViewMatrixOES = (glLoadPaletteFromModelViewMatrixOES_client_proc_t) getProc("glLoadPaletteFromModelViewMatrixOES", userData);
|
||||
glMatrixIndexPointerOES = (glMatrixIndexPointerOES_client_proc_t) getProc("glMatrixIndexPointerOES", userData);
|
||||
glWeightPointerOES = (glWeightPointerOES_client_proc_t) getProc("glWeightPointerOES", userData);
|
||||
glQueryMatrixxOES = (glQueryMatrixxOES_client_proc_t) getProc("glQueryMatrixxOES", userData);
|
||||
glDepthRangefOES = (glDepthRangefOES_client_proc_t) getProc("glDepthRangefOES", userData);
|
||||
glFrustumfOES = (glFrustumfOES_client_proc_t) getProc("glFrustumfOES", userData);
|
||||
glOrthofOES = (glOrthofOES_client_proc_t) getProc("glOrthofOES", userData);
|
||||
glClipPlanefOES = (glClipPlanefOES_client_proc_t) getProc("glClipPlanefOES", userData);
|
||||
glClipPlanefIMG = (glClipPlanefIMG_client_proc_t) getProc("glClipPlanefIMG", userData);
|
||||
glGetClipPlanefOES = (glGetClipPlanefOES_client_proc_t) getProc("glGetClipPlanefOES", userData);
|
||||
glClearDepthfOES = (glClearDepthfOES_client_proc_t) getProc("glClearDepthfOES", userData);
|
||||
glTexGenfOES = (glTexGenfOES_client_proc_t) getProc("glTexGenfOES", userData);
|
||||
glTexGenfvOES = (glTexGenfvOES_client_proc_t) getProc("glTexGenfvOES", userData);
|
||||
glTexGeniOES = (glTexGeniOES_client_proc_t) getProc("glTexGeniOES", userData);
|
||||
glTexGenivOES = (glTexGenivOES_client_proc_t) getProc("glTexGenivOES", userData);
|
||||
glTexGenxOES = (glTexGenxOES_client_proc_t) getProc("glTexGenxOES", userData);
|
||||
glTexGenxvOES = (glTexGenxvOES_client_proc_t) getProc("glTexGenxvOES", userData);
|
||||
glGetTexGenfvOES = (glGetTexGenfvOES_client_proc_t) getProc("glGetTexGenfvOES", userData);
|
||||
glGetTexGenivOES = (glGetTexGenivOES_client_proc_t) getProc("glGetTexGenivOES", userData);
|
||||
glGetTexGenxvOES = (glGetTexGenxvOES_client_proc_t) getProc("glGetTexGenxvOES", userData);
|
||||
glBindVertexArrayOES = (glBindVertexArrayOES_client_proc_t) getProc("glBindVertexArrayOES", userData);
|
||||
glDeleteVertexArraysOES = (glDeleteVertexArraysOES_client_proc_t) getProc("glDeleteVertexArraysOES", userData);
|
||||
glGenVertexArraysOES = (glGenVertexArraysOES_client_proc_t) getProc("glGenVertexArraysOES", userData);
|
||||
glIsVertexArrayOES = (glIsVertexArrayOES_client_proc_t) getProc("glIsVertexArrayOES", userData);
|
||||
glDiscardFramebufferEXT = (glDiscardFramebufferEXT_client_proc_t) getProc("glDiscardFramebufferEXT", userData);
|
||||
glMultiDrawArraysEXT = (glMultiDrawArraysEXT_client_proc_t) getProc("glMultiDrawArraysEXT", userData);
|
||||
glMultiDrawElementsEXT = (glMultiDrawElementsEXT_client_proc_t) getProc("glMultiDrawElementsEXT", userData);
|
||||
glMultiDrawArraysSUN = (glMultiDrawArraysSUN_client_proc_t) getProc("glMultiDrawArraysSUN", userData);
|
||||
glMultiDrawElementsSUN = (glMultiDrawElementsSUN_client_proc_t) getProc("glMultiDrawElementsSUN", userData);
|
||||
glRenderbufferStorageMultisampleIMG = (glRenderbufferStorageMultisampleIMG_client_proc_t) getProc("glRenderbufferStorageMultisampleIMG", userData);
|
||||
glFramebufferTexture2DMultisampleIMG = (glFramebufferTexture2DMultisampleIMG_client_proc_t) getProc("glFramebufferTexture2DMultisampleIMG", userData);
|
||||
glDeleteFencesNV = (glDeleteFencesNV_client_proc_t) getProc("glDeleteFencesNV", userData);
|
||||
glGenFencesNV = (glGenFencesNV_client_proc_t) getProc("glGenFencesNV", userData);
|
||||
glIsFenceNV = (glIsFenceNV_client_proc_t) getProc("glIsFenceNV", userData);
|
||||
glTestFenceNV = (glTestFenceNV_client_proc_t) getProc("glTestFenceNV", userData);
|
||||
glGetFenceivNV = (glGetFenceivNV_client_proc_t) getProc("glGetFenceivNV", userData);
|
||||
glFinishFenceNV = (glFinishFenceNV_client_proc_t) getProc("glFinishFenceNV", userData);
|
||||
glSetFenceNV = (glSetFenceNV_client_proc_t) getProc("glSetFenceNV", userData);
|
||||
glGetDriverControlsQCOM = (glGetDriverControlsQCOM_client_proc_t) getProc("glGetDriverControlsQCOM", userData);
|
||||
glGetDriverControlStringQCOM = (glGetDriverControlStringQCOM_client_proc_t) getProc("glGetDriverControlStringQCOM", userData);
|
||||
glEnableDriverControlQCOM = (glEnableDriverControlQCOM_client_proc_t) getProc("glEnableDriverControlQCOM", userData);
|
||||
glDisableDriverControlQCOM = (glDisableDriverControlQCOM_client_proc_t) getProc("glDisableDriverControlQCOM", userData);
|
||||
glExtGetTexturesQCOM = (glExtGetTexturesQCOM_client_proc_t) getProc("glExtGetTexturesQCOM", userData);
|
||||
glExtGetBuffersQCOM = (glExtGetBuffersQCOM_client_proc_t) getProc("glExtGetBuffersQCOM", userData);
|
||||
glExtGetRenderbuffersQCOM = (glExtGetRenderbuffersQCOM_client_proc_t) getProc("glExtGetRenderbuffersQCOM", userData);
|
||||
glExtGetFramebuffersQCOM = (glExtGetFramebuffersQCOM_client_proc_t) getProc("glExtGetFramebuffersQCOM", userData);
|
||||
glExtGetTexLevelParameterivQCOM = (glExtGetTexLevelParameterivQCOM_client_proc_t) getProc("glExtGetTexLevelParameterivQCOM", userData);
|
||||
glExtTexObjectStateOverrideiQCOM = (glExtTexObjectStateOverrideiQCOM_client_proc_t) getProc("glExtTexObjectStateOverrideiQCOM", userData);
|
||||
glExtGetTexSubImageQCOM = (glExtGetTexSubImageQCOM_client_proc_t) getProc("glExtGetTexSubImageQCOM", userData);
|
||||
glExtGetBufferPointervQCOM = (glExtGetBufferPointervQCOM_client_proc_t) getProc("glExtGetBufferPointervQCOM", userData);
|
||||
glExtGetShadersQCOM = (glExtGetShadersQCOM_client_proc_t) getProc("glExtGetShadersQCOM", userData);
|
||||
glExtGetProgramsQCOM = (glExtGetProgramsQCOM_client_proc_t) getProc("glExtGetProgramsQCOM", userData);
|
||||
glExtIsProgramBinaryQCOM = (glExtIsProgramBinaryQCOM_client_proc_t) getProc("glExtIsProgramBinaryQCOM", userData);
|
||||
glExtGetProgramBinarySourceQCOM = (glExtGetProgramBinarySourceQCOM_client_proc_t) getProc("glExtGetProgramBinarySourceQCOM", userData);
|
||||
glStartTilingQCOM = (glStartTilingQCOM_client_proc_t) getProc("glStartTilingQCOM", userData);
|
||||
glEndTilingQCOM = (glEndTilingQCOM_client_proc_t) getProc("glEndTilingQCOM", userData);
|
||||
return 0;
|
||||
}
|
||||
|
||||
313
android/opengl/system/GLESv1_enc/gl_client_context.h
Normal file
313
android/opengl/system/GLESv1_enc/gl_client_context.h
Normal file
|
|
@ -0,0 +1,313 @@
|
|||
// Generated Code - DO NOT EDIT !!
|
||||
// generated by 'emugen'
|
||||
#ifndef __gl_client_context_t_h
|
||||
#define __gl_client_context_t_h
|
||||
|
||||
#include "gl_client_proc.h"
|
||||
|
||||
#include "gl_types.h"
|
||||
|
||||
|
||||
struct gl_client_context_t {
|
||||
|
||||
glAlphaFunc_client_proc_t glAlphaFunc;
|
||||
glClearColor_client_proc_t glClearColor;
|
||||
glClearDepthf_client_proc_t glClearDepthf;
|
||||
glClipPlanef_client_proc_t glClipPlanef;
|
||||
glColor4f_client_proc_t glColor4f;
|
||||
glDepthRangef_client_proc_t glDepthRangef;
|
||||
glFogf_client_proc_t glFogf;
|
||||
glFogfv_client_proc_t glFogfv;
|
||||
glFrustumf_client_proc_t glFrustumf;
|
||||
glGetClipPlanef_client_proc_t glGetClipPlanef;
|
||||
glGetFloatv_client_proc_t glGetFloatv;
|
||||
glGetLightfv_client_proc_t glGetLightfv;
|
||||
glGetMaterialfv_client_proc_t glGetMaterialfv;
|
||||
glGetTexEnvfv_client_proc_t glGetTexEnvfv;
|
||||
glGetTexParameterfv_client_proc_t glGetTexParameterfv;
|
||||
glLightModelf_client_proc_t glLightModelf;
|
||||
glLightModelfv_client_proc_t glLightModelfv;
|
||||
glLightf_client_proc_t glLightf;
|
||||
glLightfv_client_proc_t glLightfv;
|
||||
glLineWidth_client_proc_t glLineWidth;
|
||||
glLoadMatrixf_client_proc_t glLoadMatrixf;
|
||||
glMaterialf_client_proc_t glMaterialf;
|
||||
glMaterialfv_client_proc_t glMaterialfv;
|
||||
glMultMatrixf_client_proc_t glMultMatrixf;
|
||||
glMultiTexCoord4f_client_proc_t glMultiTexCoord4f;
|
||||
glNormal3f_client_proc_t glNormal3f;
|
||||
glOrthof_client_proc_t glOrthof;
|
||||
glPointParameterf_client_proc_t glPointParameterf;
|
||||
glPointParameterfv_client_proc_t glPointParameterfv;
|
||||
glPointSize_client_proc_t glPointSize;
|
||||
glPolygonOffset_client_proc_t glPolygonOffset;
|
||||
glRotatef_client_proc_t glRotatef;
|
||||
glScalef_client_proc_t glScalef;
|
||||
glTexEnvf_client_proc_t glTexEnvf;
|
||||
glTexEnvfv_client_proc_t glTexEnvfv;
|
||||
glTexParameterf_client_proc_t glTexParameterf;
|
||||
glTexParameterfv_client_proc_t glTexParameterfv;
|
||||
glTranslatef_client_proc_t glTranslatef;
|
||||
glActiveTexture_client_proc_t glActiveTexture;
|
||||
glAlphaFuncx_client_proc_t glAlphaFuncx;
|
||||
glBindBuffer_client_proc_t glBindBuffer;
|
||||
glBindTexture_client_proc_t glBindTexture;
|
||||
glBlendFunc_client_proc_t glBlendFunc;
|
||||
glBufferData_client_proc_t glBufferData;
|
||||
glBufferSubData_client_proc_t glBufferSubData;
|
||||
glClear_client_proc_t glClear;
|
||||
glClearColorx_client_proc_t glClearColorx;
|
||||
glClearDepthx_client_proc_t glClearDepthx;
|
||||
glClearStencil_client_proc_t glClearStencil;
|
||||
glClientActiveTexture_client_proc_t glClientActiveTexture;
|
||||
glColor4ub_client_proc_t glColor4ub;
|
||||
glColor4x_client_proc_t glColor4x;
|
||||
glColorMask_client_proc_t glColorMask;
|
||||
glColorPointer_client_proc_t glColorPointer;
|
||||
glCompressedTexImage2D_client_proc_t glCompressedTexImage2D;
|
||||
glCompressedTexSubImage2D_client_proc_t glCompressedTexSubImage2D;
|
||||
glCopyTexImage2D_client_proc_t glCopyTexImage2D;
|
||||
glCopyTexSubImage2D_client_proc_t glCopyTexSubImage2D;
|
||||
glCullFace_client_proc_t glCullFace;
|
||||
glDeleteBuffers_client_proc_t glDeleteBuffers;
|
||||
glDeleteTextures_client_proc_t glDeleteTextures;
|
||||
glDepthFunc_client_proc_t glDepthFunc;
|
||||
glDepthMask_client_proc_t glDepthMask;
|
||||
glDepthRangex_client_proc_t glDepthRangex;
|
||||
glDisable_client_proc_t glDisable;
|
||||
glDisableClientState_client_proc_t glDisableClientState;
|
||||
glDrawArrays_client_proc_t glDrawArrays;
|
||||
glDrawElements_client_proc_t glDrawElements;
|
||||
glEnable_client_proc_t glEnable;
|
||||
glEnableClientState_client_proc_t glEnableClientState;
|
||||
glFinish_client_proc_t glFinish;
|
||||
glFlush_client_proc_t glFlush;
|
||||
glFogx_client_proc_t glFogx;
|
||||
glFogxv_client_proc_t glFogxv;
|
||||
glFrontFace_client_proc_t glFrontFace;
|
||||
glFrustumx_client_proc_t glFrustumx;
|
||||
glGetBooleanv_client_proc_t glGetBooleanv;
|
||||
glGetBufferParameteriv_client_proc_t glGetBufferParameteriv;
|
||||
glClipPlanex_client_proc_t glClipPlanex;
|
||||
glGenBuffers_client_proc_t glGenBuffers;
|
||||
glGenTextures_client_proc_t glGenTextures;
|
||||
glGetError_client_proc_t glGetError;
|
||||
glGetFixedv_client_proc_t glGetFixedv;
|
||||
glGetIntegerv_client_proc_t glGetIntegerv;
|
||||
glGetLightxv_client_proc_t glGetLightxv;
|
||||
glGetMaterialxv_client_proc_t glGetMaterialxv;
|
||||
glGetPointerv_client_proc_t glGetPointerv;
|
||||
glGetString_client_proc_t glGetString;
|
||||
glGetTexEnviv_client_proc_t glGetTexEnviv;
|
||||
glGetTexEnvxv_client_proc_t glGetTexEnvxv;
|
||||
glGetTexParameteriv_client_proc_t glGetTexParameteriv;
|
||||
glGetTexParameterxv_client_proc_t glGetTexParameterxv;
|
||||
glHint_client_proc_t glHint;
|
||||
glIsBuffer_client_proc_t glIsBuffer;
|
||||
glIsEnabled_client_proc_t glIsEnabled;
|
||||
glIsTexture_client_proc_t glIsTexture;
|
||||
glLightModelx_client_proc_t glLightModelx;
|
||||
glLightModelxv_client_proc_t glLightModelxv;
|
||||
glLightx_client_proc_t glLightx;
|
||||
glLightxv_client_proc_t glLightxv;
|
||||
glLineWidthx_client_proc_t glLineWidthx;
|
||||
glLoadIdentity_client_proc_t glLoadIdentity;
|
||||
glLoadMatrixx_client_proc_t glLoadMatrixx;
|
||||
glLogicOp_client_proc_t glLogicOp;
|
||||
glMaterialx_client_proc_t glMaterialx;
|
||||
glMaterialxv_client_proc_t glMaterialxv;
|
||||
glMatrixMode_client_proc_t glMatrixMode;
|
||||
glMultMatrixx_client_proc_t glMultMatrixx;
|
||||
glMultiTexCoord4x_client_proc_t glMultiTexCoord4x;
|
||||
glNormal3x_client_proc_t glNormal3x;
|
||||
glNormalPointer_client_proc_t glNormalPointer;
|
||||
glOrthox_client_proc_t glOrthox;
|
||||
glPixelStorei_client_proc_t glPixelStorei;
|
||||
glPointParameterx_client_proc_t glPointParameterx;
|
||||
glPointParameterxv_client_proc_t glPointParameterxv;
|
||||
glPointSizex_client_proc_t glPointSizex;
|
||||
glPolygonOffsetx_client_proc_t glPolygonOffsetx;
|
||||
glPopMatrix_client_proc_t glPopMatrix;
|
||||
glPushMatrix_client_proc_t glPushMatrix;
|
||||
glReadPixels_client_proc_t glReadPixels;
|
||||
glRotatex_client_proc_t glRotatex;
|
||||
glSampleCoverage_client_proc_t glSampleCoverage;
|
||||
glSampleCoveragex_client_proc_t glSampleCoveragex;
|
||||
glScalex_client_proc_t glScalex;
|
||||
glScissor_client_proc_t glScissor;
|
||||
glShadeModel_client_proc_t glShadeModel;
|
||||
glStencilFunc_client_proc_t glStencilFunc;
|
||||
glStencilMask_client_proc_t glStencilMask;
|
||||
glStencilOp_client_proc_t glStencilOp;
|
||||
glTexCoordPointer_client_proc_t glTexCoordPointer;
|
||||
glTexEnvi_client_proc_t glTexEnvi;
|
||||
glTexEnvx_client_proc_t glTexEnvx;
|
||||
glTexEnviv_client_proc_t glTexEnviv;
|
||||
glTexEnvxv_client_proc_t glTexEnvxv;
|
||||
glTexImage2D_client_proc_t glTexImage2D;
|
||||
glTexParameteri_client_proc_t glTexParameteri;
|
||||
glTexParameterx_client_proc_t glTexParameterx;
|
||||
glTexParameteriv_client_proc_t glTexParameteriv;
|
||||
glTexParameterxv_client_proc_t glTexParameterxv;
|
||||
glTexSubImage2D_client_proc_t glTexSubImage2D;
|
||||
glTranslatex_client_proc_t glTranslatex;
|
||||
glVertexPointer_client_proc_t glVertexPointer;
|
||||
glViewport_client_proc_t glViewport;
|
||||
glPointSizePointerOES_client_proc_t glPointSizePointerOES;
|
||||
glVertexPointerOffset_client_proc_t glVertexPointerOffset;
|
||||
glColorPointerOffset_client_proc_t glColorPointerOffset;
|
||||
glNormalPointerOffset_client_proc_t glNormalPointerOffset;
|
||||
glPointSizePointerOffset_client_proc_t glPointSizePointerOffset;
|
||||
glTexCoordPointerOffset_client_proc_t glTexCoordPointerOffset;
|
||||
glWeightPointerOffset_client_proc_t glWeightPointerOffset;
|
||||
glMatrixIndexPointerOffset_client_proc_t glMatrixIndexPointerOffset;
|
||||
glVertexPointerData_client_proc_t glVertexPointerData;
|
||||
glColorPointerData_client_proc_t glColorPointerData;
|
||||
glNormalPointerData_client_proc_t glNormalPointerData;
|
||||
glTexCoordPointerData_client_proc_t glTexCoordPointerData;
|
||||
glPointSizePointerData_client_proc_t glPointSizePointerData;
|
||||
glWeightPointerData_client_proc_t glWeightPointerData;
|
||||
glMatrixIndexPointerData_client_proc_t glMatrixIndexPointerData;
|
||||
glDrawElementsOffset_client_proc_t glDrawElementsOffset;
|
||||
glDrawElementsData_client_proc_t glDrawElementsData;
|
||||
glGetCompressedTextureFormats_client_proc_t glGetCompressedTextureFormats;
|
||||
glFinishRoundTrip_client_proc_t glFinishRoundTrip;
|
||||
glBlendEquationSeparateOES_client_proc_t glBlendEquationSeparateOES;
|
||||
glBlendFuncSeparateOES_client_proc_t glBlendFuncSeparateOES;
|
||||
glBlendEquationOES_client_proc_t glBlendEquationOES;
|
||||
glDrawTexsOES_client_proc_t glDrawTexsOES;
|
||||
glDrawTexiOES_client_proc_t glDrawTexiOES;
|
||||
glDrawTexxOES_client_proc_t glDrawTexxOES;
|
||||
glDrawTexsvOES_client_proc_t glDrawTexsvOES;
|
||||
glDrawTexivOES_client_proc_t glDrawTexivOES;
|
||||
glDrawTexxvOES_client_proc_t glDrawTexxvOES;
|
||||
glDrawTexfOES_client_proc_t glDrawTexfOES;
|
||||
glDrawTexfvOES_client_proc_t glDrawTexfvOES;
|
||||
glEGLImageTargetTexture2DOES_client_proc_t glEGLImageTargetTexture2DOES;
|
||||
glEGLImageTargetRenderbufferStorageOES_client_proc_t glEGLImageTargetRenderbufferStorageOES;
|
||||
glAlphaFuncxOES_client_proc_t glAlphaFuncxOES;
|
||||
glClearColorxOES_client_proc_t glClearColorxOES;
|
||||
glClearDepthxOES_client_proc_t glClearDepthxOES;
|
||||
glClipPlanexOES_client_proc_t glClipPlanexOES;
|
||||
glClipPlanexIMG_client_proc_t glClipPlanexIMG;
|
||||
glColor4xOES_client_proc_t glColor4xOES;
|
||||
glDepthRangexOES_client_proc_t glDepthRangexOES;
|
||||
glFogxOES_client_proc_t glFogxOES;
|
||||
glFogxvOES_client_proc_t glFogxvOES;
|
||||
glFrustumxOES_client_proc_t glFrustumxOES;
|
||||
glGetClipPlanexOES_client_proc_t glGetClipPlanexOES;
|
||||
glGetClipPlanex_client_proc_t glGetClipPlanex;
|
||||
glGetFixedvOES_client_proc_t glGetFixedvOES;
|
||||
glGetLightxvOES_client_proc_t glGetLightxvOES;
|
||||
glGetMaterialxvOES_client_proc_t glGetMaterialxvOES;
|
||||
glGetTexEnvxvOES_client_proc_t glGetTexEnvxvOES;
|
||||
glGetTexParameterxvOES_client_proc_t glGetTexParameterxvOES;
|
||||
glLightModelxOES_client_proc_t glLightModelxOES;
|
||||
glLightModelxvOES_client_proc_t glLightModelxvOES;
|
||||
glLightxOES_client_proc_t glLightxOES;
|
||||
glLightxvOES_client_proc_t glLightxvOES;
|
||||
glLineWidthxOES_client_proc_t glLineWidthxOES;
|
||||
glLoadMatrixxOES_client_proc_t glLoadMatrixxOES;
|
||||
glMaterialxOES_client_proc_t glMaterialxOES;
|
||||
glMaterialxvOES_client_proc_t glMaterialxvOES;
|
||||
glMultMatrixxOES_client_proc_t glMultMatrixxOES;
|
||||
glMultiTexCoord4xOES_client_proc_t glMultiTexCoord4xOES;
|
||||
glNormal3xOES_client_proc_t glNormal3xOES;
|
||||
glOrthoxOES_client_proc_t glOrthoxOES;
|
||||
glPointParameterxOES_client_proc_t glPointParameterxOES;
|
||||
glPointParameterxvOES_client_proc_t glPointParameterxvOES;
|
||||
glPointSizexOES_client_proc_t glPointSizexOES;
|
||||
glPolygonOffsetxOES_client_proc_t glPolygonOffsetxOES;
|
||||
glRotatexOES_client_proc_t glRotatexOES;
|
||||
glSampleCoveragexOES_client_proc_t glSampleCoveragexOES;
|
||||
glScalexOES_client_proc_t glScalexOES;
|
||||
glTexEnvxOES_client_proc_t glTexEnvxOES;
|
||||
glTexEnvxvOES_client_proc_t glTexEnvxvOES;
|
||||
glTexParameterxOES_client_proc_t glTexParameterxOES;
|
||||
glTexParameterxvOES_client_proc_t glTexParameterxvOES;
|
||||
glTranslatexOES_client_proc_t glTranslatexOES;
|
||||
glIsRenderbufferOES_client_proc_t glIsRenderbufferOES;
|
||||
glBindRenderbufferOES_client_proc_t glBindRenderbufferOES;
|
||||
glDeleteRenderbuffersOES_client_proc_t glDeleteRenderbuffersOES;
|
||||
glGenRenderbuffersOES_client_proc_t glGenRenderbuffersOES;
|
||||
glRenderbufferStorageOES_client_proc_t glRenderbufferStorageOES;
|
||||
glGetRenderbufferParameterivOES_client_proc_t glGetRenderbufferParameterivOES;
|
||||
glIsFramebufferOES_client_proc_t glIsFramebufferOES;
|
||||
glBindFramebufferOES_client_proc_t glBindFramebufferOES;
|
||||
glDeleteFramebuffersOES_client_proc_t glDeleteFramebuffersOES;
|
||||
glGenFramebuffersOES_client_proc_t glGenFramebuffersOES;
|
||||
glCheckFramebufferStatusOES_client_proc_t glCheckFramebufferStatusOES;
|
||||
glFramebufferRenderbufferOES_client_proc_t glFramebufferRenderbufferOES;
|
||||
glFramebufferTexture2DOES_client_proc_t glFramebufferTexture2DOES;
|
||||
glGetFramebufferAttachmentParameterivOES_client_proc_t glGetFramebufferAttachmentParameterivOES;
|
||||
glGenerateMipmapOES_client_proc_t glGenerateMipmapOES;
|
||||
glMapBufferOES_client_proc_t glMapBufferOES;
|
||||
glUnmapBufferOES_client_proc_t glUnmapBufferOES;
|
||||
glGetBufferPointervOES_client_proc_t glGetBufferPointervOES;
|
||||
glCurrentPaletteMatrixOES_client_proc_t glCurrentPaletteMatrixOES;
|
||||
glLoadPaletteFromModelViewMatrixOES_client_proc_t glLoadPaletteFromModelViewMatrixOES;
|
||||
glMatrixIndexPointerOES_client_proc_t glMatrixIndexPointerOES;
|
||||
glWeightPointerOES_client_proc_t glWeightPointerOES;
|
||||
glQueryMatrixxOES_client_proc_t glQueryMatrixxOES;
|
||||
glDepthRangefOES_client_proc_t glDepthRangefOES;
|
||||
glFrustumfOES_client_proc_t glFrustumfOES;
|
||||
glOrthofOES_client_proc_t glOrthofOES;
|
||||
glClipPlanefOES_client_proc_t glClipPlanefOES;
|
||||
glClipPlanefIMG_client_proc_t glClipPlanefIMG;
|
||||
glGetClipPlanefOES_client_proc_t glGetClipPlanefOES;
|
||||
glClearDepthfOES_client_proc_t glClearDepthfOES;
|
||||
glTexGenfOES_client_proc_t glTexGenfOES;
|
||||
glTexGenfvOES_client_proc_t glTexGenfvOES;
|
||||
glTexGeniOES_client_proc_t glTexGeniOES;
|
||||
glTexGenivOES_client_proc_t glTexGenivOES;
|
||||
glTexGenxOES_client_proc_t glTexGenxOES;
|
||||
glTexGenxvOES_client_proc_t glTexGenxvOES;
|
||||
glGetTexGenfvOES_client_proc_t glGetTexGenfvOES;
|
||||
glGetTexGenivOES_client_proc_t glGetTexGenivOES;
|
||||
glGetTexGenxvOES_client_proc_t glGetTexGenxvOES;
|
||||
glBindVertexArrayOES_client_proc_t glBindVertexArrayOES;
|
||||
glDeleteVertexArraysOES_client_proc_t glDeleteVertexArraysOES;
|
||||
glGenVertexArraysOES_client_proc_t glGenVertexArraysOES;
|
||||
glIsVertexArrayOES_client_proc_t glIsVertexArrayOES;
|
||||
glDiscardFramebufferEXT_client_proc_t glDiscardFramebufferEXT;
|
||||
glMultiDrawArraysEXT_client_proc_t glMultiDrawArraysEXT;
|
||||
glMultiDrawElementsEXT_client_proc_t glMultiDrawElementsEXT;
|
||||
glMultiDrawArraysSUN_client_proc_t glMultiDrawArraysSUN;
|
||||
glMultiDrawElementsSUN_client_proc_t glMultiDrawElementsSUN;
|
||||
glRenderbufferStorageMultisampleIMG_client_proc_t glRenderbufferStorageMultisampleIMG;
|
||||
glFramebufferTexture2DMultisampleIMG_client_proc_t glFramebufferTexture2DMultisampleIMG;
|
||||
glDeleteFencesNV_client_proc_t glDeleteFencesNV;
|
||||
glGenFencesNV_client_proc_t glGenFencesNV;
|
||||
glIsFenceNV_client_proc_t glIsFenceNV;
|
||||
glTestFenceNV_client_proc_t glTestFenceNV;
|
||||
glGetFenceivNV_client_proc_t glGetFenceivNV;
|
||||
glFinishFenceNV_client_proc_t glFinishFenceNV;
|
||||
glSetFenceNV_client_proc_t glSetFenceNV;
|
||||
glGetDriverControlsQCOM_client_proc_t glGetDriverControlsQCOM;
|
||||
glGetDriverControlStringQCOM_client_proc_t glGetDriverControlStringQCOM;
|
||||
glEnableDriverControlQCOM_client_proc_t glEnableDriverControlQCOM;
|
||||
glDisableDriverControlQCOM_client_proc_t glDisableDriverControlQCOM;
|
||||
glExtGetTexturesQCOM_client_proc_t glExtGetTexturesQCOM;
|
||||
glExtGetBuffersQCOM_client_proc_t glExtGetBuffersQCOM;
|
||||
glExtGetRenderbuffersQCOM_client_proc_t glExtGetRenderbuffersQCOM;
|
||||
glExtGetFramebuffersQCOM_client_proc_t glExtGetFramebuffersQCOM;
|
||||
glExtGetTexLevelParameterivQCOM_client_proc_t glExtGetTexLevelParameterivQCOM;
|
||||
glExtTexObjectStateOverrideiQCOM_client_proc_t glExtTexObjectStateOverrideiQCOM;
|
||||
glExtGetTexSubImageQCOM_client_proc_t glExtGetTexSubImageQCOM;
|
||||
glExtGetBufferPointervQCOM_client_proc_t glExtGetBufferPointervQCOM;
|
||||
glExtGetShadersQCOM_client_proc_t glExtGetShadersQCOM;
|
||||
glExtGetProgramsQCOM_client_proc_t glExtGetProgramsQCOM;
|
||||
glExtIsProgramBinaryQCOM_client_proc_t glExtIsProgramBinaryQCOM;
|
||||
glExtGetProgramBinarySourceQCOM_client_proc_t glExtGetProgramBinarySourceQCOM;
|
||||
glStartTilingQCOM_client_proc_t glStartTilingQCOM;
|
||||
glEndTilingQCOM_client_proc_t glEndTilingQCOM;
|
||||
virtual ~gl_client_context_t() {}
|
||||
|
||||
typedef gl_client_context_t *CONTEXT_ACCESSOR_TYPE(void);
|
||||
static void setContextAccessor(CONTEXT_ACCESSOR_TYPE *f);
|
||||
int initDispatchByName( void *(*getProc)(const char *name, void *userData), void *userData);
|
||||
virtual void setError(unsigned int error){ (void)error; };
|
||||
virtual unsigned int getError(){ return 0; };
|
||||
};
|
||||
|
||||
#endif
|
||||
305
android/opengl/system/GLESv1_enc/gl_client_proc.h
Normal file
305
android/opengl/system/GLESv1_enc/gl_client_proc.h
Normal file
|
|
@ -0,0 +1,305 @@
|
|||
// Generated Code - DO NOT EDIT !!
|
||||
// generated by 'emugen'
|
||||
#ifndef __gl_client_proc_t_h
|
||||
#define __gl_client_proc_t_h
|
||||
|
||||
|
||||
|
||||
#include "gl_types.h"
|
||||
#ifndef gl_APIENTRY
|
||||
#define gl_APIENTRY
|
||||
#endif
|
||||
typedef void (gl_APIENTRY *glAlphaFunc_client_proc_t) (void * ctx, GLenum, GLclampf);
|
||||
typedef void (gl_APIENTRY *glClearColor_client_proc_t) (void * ctx, GLclampf, GLclampf, GLclampf, GLclampf);
|
||||
typedef void (gl_APIENTRY *glClearDepthf_client_proc_t) (void * ctx, GLclampf);
|
||||
typedef void (gl_APIENTRY *glClipPlanef_client_proc_t) (void * ctx, GLenum, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glColor4f_client_proc_t) (void * ctx, GLfloat, GLfloat, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glDepthRangef_client_proc_t) (void * ctx, GLclampf, GLclampf);
|
||||
typedef void (gl_APIENTRY *glFogf_client_proc_t) (void * ctx, GLenum, GLfloat);
|
||||
typedef void (gl_APIENTRY *glFogfv_client_proc_t) (void * ctx, GLenum, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glFrustumf_client_proc_t) (void * ctx, GLfloat, GLfloat, GLfloat, GLfloat, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glGetClipPlanef_client_proc_t) (void * ctx, GLenum, GLfloat*);
|
||||
typedef void (gl_APIENTRY *glGetFloatv_client_proc_t) (void * ctx, GLenum, GLfloat*);
|
||||
typedef void (gl_APIENTRY *glGetLightfv_client_proc_t) (void * ctx, GLenum, GLenum, GLfloat*);
|
||||
typedef void (gl_APIENTRY *glGetMaterialfv_client_proc_t) (void * ctx, GLenum, GLenum, GLfloat*);
|
||||
typedef void (gl_APIENTRY *glGetTexEnvfv_client_proc_t) (void * ctx, GLenum, GLenum, GLfloat*);
|
||||
typedef void (gl_APIENTRY *glGetTexParameterfv_client_proc_t) (void * ctx, GLenum, GLenum, GLfloat*);
|
||||
typedef void (gl_APIENTRY *glLightModelf_client_proc_t) (void * ctx, GLenum, GLfloat);
|
||||
typedef void (gl_APIENTRY *glLightModelfv_client_proc_t) (void * ctx, GLenum, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glLightf_client_proc_t) (void * ctx, GLenum, GLenum, GLfloat);
|
||||
typedef void (gl_APIENTRY *glLightfv_client_proc_t) (void * ctx, GLenum, GLenum, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glLineWidth_client_proc_t) (void * ctx, GLfloat);
|
||||
typedef void (gl_APIENTRY *glLoadMatrixf_client_proc_t) (void * ctx, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glMaterialf_client_proc_t) (void * ctx, GLenum, GLenum, GLfloat);
|
||||
typedef void (gl_APIENTRY *glMaterialfv_client_proc_t) (void * ctx, GLenum, GLenum, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glMultMatrixf_client_proc_t) (void * ctx, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glMultiTexCoord4f_client_proc_t) (void * ctx, GLenum, GLfloat, GLfloat, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glNormal3f_client_proc_t) (void * ctx, GLfloat, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glOrthof_client_proc_t) (void * ctx, GLfloat, GLfloat, GLfloat, GLfloat, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glPointParameterf_client_proc_t) (void * ctx, GLenum, GLfloat);
|
||||
typedef void (gl_APIENTRY *glPointParameterfv_client_proc_t) (void * ctx, GLenum, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glPointSize_client_proc_t) (void * ctx, GLfloat);
|
||||
typedef void (gl_APIENTRY *glPolygonOffset_client_proc_t) (void * ctx, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glRotatef_client_proc_t) (void * ctx, GLfloat, GLfloat, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glScalef_client_proc_t) (void * ctx, GLfloat, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glTexEnvf_client_proc_t) (void * ctx, GLenum, GLenum, GLfloat);
|
||||
typedef void (gl_APIENTRY *glTexEnvfv_client_proc_t) (void * ctx, GLenum, GLenum, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glTexParameterf_client_proc_t) (void * ctx, GLenum, GLenum, GLfloat);
|
||||
typedef void (gl_APIENTRY *glTexParameterfv_client_proc_t) (void * ctx, GLenum, GLenum, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glTranslatef_client_proc_t) (void * ctx, GLfloat, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glActiveTexture_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glAlphaFuncx_client_proc_t) (void * ctx, GLenum, GLclampx);
|
||||
typedef void (gl_APIENTRY *glBindBuffer_client_proc_t) (void * ctx, GLenum, GLuint);
|
||||
typedef void (gl_APIENTRY *glBindTexture_client_proc_t) (void * ctx, GLenum, GLuint);
|
||||
typedef void (gl_APIENTRY *glBlendFunc_client_proc_t) (void * ctx, GLenum, GLenum);
|
||||
typedef void (gl_APIENTRY *glBufferData_client_proc_t) (void * ctx, GLenum, GLsizeiptr, const GLvoid*, GLenum);
|
||||
typedef void (gl_APIENTRY *glBufferSubData_client_proc_t) (void * ctx, GLenum, GLintptr, GLsizeiptr, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glClear_client_proc_t) (void * ctx, GLbitfield);
|
||||
typedef void (gl_APIENTRY *glClearColorx_client_proc_t) (void * ctx, GLclampx, GLclampx, GLclampx, GLclampx);
|
||||
typedef void (gl_APIENTRY *glClearDepthx_client_proc_t) (void * ctx, GLclampx);
|
||||
typedef void (gl_APIENTRY *glClearStencil_client_proc_t) (void * ctx, GLint);
|
||||
typedef void (gl_APIENTRY *glClientActiveTexture_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glColor4ub_client_proc_t) (void * ctx, GLubyte, GLubyte, GLubyte, GLubyte);
|
||||
typedef void (gl_APIENTRY *glColor4x_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glColorMask_client_proc_t) (void * ctx, GLboolean, GLboolean, GLboolean, GLboolean);
|
||||
typedef void (gl_APIENTRY *glColorPointer_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glCompressedTexImage2D_client_proc_t) (void * ctx, GLenum, GLint, GLenum, GLsizei, GLsizei, GLint, GLsizei, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glCompressedTexSubImage2D_client_proc_t) (void * ctx, GLenum, GLint, GLint, GLint, GLsizei, GLsizei, GLenum, GLsizei, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glCopyTexImage2D_client_proc_t) (void * ctx, GLenum, GLint, GLenum, GLint, GLint, GLsizei, GLsizei, GLint);
|
||||
typedef void (gl_APIENTRY *glCopyTexSubImage2D_client_proc_t) (void * ctx, GLenum, GLint, GLint, GLint, GLint, GLint, GLsizei, GLsizei);
|
||||
typedef void (gl_APIENTRY *glCullFace_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glDeleteBuffers_client_proc_t) (void * ctx, GLsizei, const GLuint*);
|
||||
typedef void (gl_APIENTRY *glDeleteTextures_client_proc_t) (void * ctx, GLsizei, const GLuint*);
|
||||
typedef void (gl_APIENTRY *glDepthFunc_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glDepthMask_client_proc_t) (void * ctx, GLboolean);
|
||||
typedef void (gl_APIENTRY *glDepthRangex_client_proc_t) (void * ctx, GLclampx, GLclampx);
|
||||
typedef void (gl_APIENTRY *glDisable_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glDisableClientState_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glDrawArrays_client_proc_t) (void * ctx, GLenum, GLint, GLsizei);
|
||||
typedef void (gl_APIENTRY *glDrawElements_client_proc_t) (void * ctx, GLenum, GLsizei, GLenum, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glEnable_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glEnableClientState_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glFinish_client_proc_t) (void * ctx);
|
||||
typedef void (gl_APIENTRY *glFlush_client_proc_t) (void * ctx);
|
||||
typedef void (gl_APIENTRY *glFogx_client_proc_t) (void * ctx, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glFogxv_client_proc_t) (void * ctx, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glFrontFace_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glFrustumx_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glGetBooleanv_client_proc_t) (void * ctx, GLenum, GLboolean*);
|
||||
typedef void (gl_APIENTRY *glGetBufferParameteriv_client_proc_t) (void * ctx, GLenum, GLenum, GLint*);
|
||||
typedef void (gl_APIENTRY *glClipPlanex_client_proc_t) (void * ctx, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGenBuffers_client_proc_t) (void * ctx, GLsizei, GLuint*);
|
||||
typedef void (gl_APIENTRY *glGenTextures_client_proc_t) (void * ctx, GLsizei, GLuint*);
|
||||
typedef GLenum (gl_APIENTRY *glGetError_client_proc_t) (void * ctx);
|
||||
typedef void (gl_APIENTRY *glGetFixedv_client_proc_t) (void * ctx, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGetIntegerv_client_proc_t) (void * ctx, GLenum, GLint*);
|
||||
typedef void (gl_APIENTRY *glGetLightxv_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGetMaterialxv_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGetPointerv_client_proc_t) (void * ctx, GLenum, GLvoid**);
|
||||
typedef const GLubyte* (gl_APIENTRY *glGetString_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glGetTexEnviv_client_proc_t) (void * ctx, GLenum, GLenum, GLint*);
|
||||
typedef void (gl_APIENTRY *glGetTexEnvxv_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGetTexParameteriv_client_proc_t) (void * ctx, GLenum, GLenum, GLint*);
|
||||
typedef void (gl_APIENTRY *glGetTexParameterxv_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glHint_client_proc_t) (void * ctx, GLenum, GLenum);
|
||||
typedef GLboolean (gl_APIENTRY *glIsBuffer_client_proc_t) (void * ctx, GLuint);
|
||||
typedef GLboolean (gl_APIENTRY *glIsEnabled_client_proc_t) (void * ctx, GLenum);
|
||||
typedef GLboolean (gl_APIENTRY *glIsTexture_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glLightModelx_client_proc_t) (void * ctx, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glLightModelxv_client_proc_t) (void * ctx, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glLightx_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glLightxv_client_proc_t) (void * ctx, GLenum, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glLineWidthx_client_proc_t) (void * ctx, GLfixed);
|
||||
typedef void (gl_APIENTRY *glLoadIdentity_client_proc_t) (void * ctx);
|
||||
typedef void (gl_APIENTRY *glLoadMatrixx_client_proc_t) (void * ctx, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glLogicOp_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glMaterialx_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glMaterialxv_client_proc_t) (void * ctx, GLenum, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glMatrixMode_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glMultMatrixx_client_proc_t) (void * ctx, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glMultiTexCoord4x_client_proc_t) (void * ctx, GLenum, GLfixed, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glNormal3x_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glNormalPointer_client_proc_t) (void * ctx, GLenum, GLsizei, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glOrthox_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glPixelStorei_client_proc_t) (void * ctx, GLenum, GLint);
|
||||
typedef void (gl_APIENTRY *glPointParameterx_client_proc_t) (void * ctx, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glPointParameterxv_client_proc_t) (void * ctx, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glPointSizex_client_proc_t) (void * ctx, GLfixed);
|
||||
typedef void (gl_APIENTRY *glPolygonOffsetx_client_proc_t) (void * ctx, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glPopMatrix_client_proc_t) (void * ctx);
|
||||
typedef void (gl_APIENTRY *glPushMatrix_client_proc_t) (void * ctx);
|
||||
typedef void (gl_APIENTRY *glReadPixels_client_proc_t) (void * ctx, GLint, GLint, GLsizei, GLsizei, GLenum, GLenum, GLvoid*);
|
||||
typedef void (gl_APIENTRY *glRotatex_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glSampleCoverage_client_proc_t) (void * ctx, GLclampf, GLboolean);
|
||||
typedef void (gl_APIENTRY *glSampleCoveragex_client_proc_t) (void * ctx, GLclampx, GLboolean);
|
||||
typedef void (gl_APIENTRY *glScalex_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glScissor_client_proc_t) (void * ctx, GLint, GLint, GLsizei, GLsizei);
|
||||
typedef void (gl_APIENTRY *glShadeModel_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glStencilFunc_client_proc_t) (void * ctx, GLenum, GLint, GLuint);
|
||||
typedef void (gl_APIENTRY *glStencilMask_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glStencilOp_client_proc_t) (void * ctx, GLenum, GLenum, GLenum);
|
||||
typedef void (gl_APIENTRY *glTexCoordPointer_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glTexEnvi_client_proc_t) (void * ctx, GLenum, GLenum, GLint);
|
||||
typedef void (gl_APIENTRY *glTexEnvx_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glTexEnviv_client_proc_t) (void * ctx, GLenum, GLenum, const GLint*);
|
||||
typedef void (gl_APIENTRY *glTexEnvxv_client_proc_t) (void * ctx, GLenum, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glTexImage2D_client_proc_t) (void * ctx, GLenum, GLint, GLint, GLsizei, GLsizei, GLint, GLenum, GLenum, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glTexParameteri_client_proc_t) (void * ctx, GLenum, GLenum, GLint);
|
||||
typedef void (gl_APIENTRY *glTexParameterx_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glTexParameteriv_client_proc_t) (void * ctx, GLenum, GLenum, const GLint*);
|
||||
typedef void (gl_APIENTRY *glTexParameterxv_client_proc_t) (void * ctx, GLenum, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glTexSubImage2D_client_proc_t) (void * ctx, GLenum, GLint, GLint, GLint, GLsizei, GLsizei, GLenum, GLenum, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glTranslatex_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glVertexPointer_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glViewport_client_proc_t) (void * ctx, GLint, GLint, GLsizei, GLsizei);
|
||||
typedef void (gl_APIENTRY *glPointSizePointerOES_client_proc_t) (void * ctx, GLenum, GLsizei, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glVertexPointerOffset_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, GLuint);
|
||||
typedef void (gl_APIENTRY *glColorPointerOffset_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, GLuint);
|
||||
typedef void (gl_APIENTRY *glNormalPointerOffset_client_proc_t) (void * ctx, GLenum, GLsizei, GLuint);
|
||||
typedef void (gl_APIENTRY *glPointSizePointerOffset_client_proc_t) (void * ctx, GLenum, GLsizei, GLuint);
|
||||
typedef void (gl_APIENTRY *glTexCoordPointerOffset_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, GLuint);
|
||||
typedef void (gl_APIENTRY *glWeightPointerOffset_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, GLuint);
|
||||
typedef void (gl_APIENTRY *glMatrixIndexPointerOffset_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, GLuint);
|
||||
typedef void (gl_APIENTRY *glVertexPointerData_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, void*, GLuint);
|
||||
typedef void (gl_APIENTRY *glColorPointerData_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, void*, GLuint);
|
||||
typedef void (gl_APIENTRY *glNormalPointerData_client_proc_t) (void * ctx, GLenum, GLsizei, void*, GLuint);
|
||||
typedef void (gl_APIENTRY *glTexCoordPointerData_client_proc_t) (void * ctx, GLint, GLint, GLenum, GLsizei, void*, GLuint);
|
||||
typedef void (gl_APIENTRY *glPointSizePointerData_client_proc_t) (void * ctx, GLenum, GLsizei, void*, GLuint);
|
||||
typedef void (gl_APIENTRY *glWeightPointerData_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, void*, GLuint);
|
||||
typedef void (gl_APIENTRY *glMatrixIndexPointerData_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, void*, GLuint);
|
||||
typedef void (gl_APIENTRY *glDrawElementsOffset_client_proc_t) (void * ctx, GLenum, GLsizei, GLenum, GLuint);
|
||||
typedef void (gl_APIENTRY *glDrawElementsData_client_proc_t) (void * ctx, GLenum, GLsizei, GLenum, void*, GLuint);
|
||||
typedef void (gl_APIENTRY *glGetCompressedTextureFormats_client_proc_t) (void * ctx, int, GLint*);
|
||||
typedef int (gl_APIENTRY *glFinishRoundTrip_client_proc_t) (void * ctx);
|
||||
typedef void (gl_APIENTRY *glBlendEquationSeparateOES_client_proc_t) (void * ctx, GLenum, GLenum);
|
||||
typedef void (gl_APIENTRY *glBlendFuncSeparateOES_client_proc_t) (void * ctx, GLenum, GLenum, GLenum, GLenum);
|
||||
typedef void (gl_APIENTRY *glBlendEquationOES_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glDrawTexsOES_client_proc_t) (void * ctx, GLshort, GLshort, GLshort, GLshort, GLshort);
|
||||
typedef void (gl_APIENTRY *glDrawTexiOES_client_proc_t) (void * ctx, GLint, GLint, GLint, GLint, GLint);
|
||||
typedef void (gl_APIENTRY *glDrawTexxOES_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glDrawTexsvOES_client_proc_t) (void * ctx, const GLshort*);
|
||||
typedef void (gl_APIENTRY *glDrawTexivOES_client_proc_t) (void * ctx, const GLint*);
|
||||
typedef void (gl_APIENTRY *glDrawTexxvOES_client_proc_t) (void * ctx, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glDrawTexfOES_client_proc_t) (void * ctx, GLfloat, GLfloat, GLfloat, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glDrawTexfvOES_client_proc_t) (void * ctx, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glEGLImageTargetTexture2DOES_client_proc_t) (void * ctx, GLenum, GLeglImageOES);
|
||||
typedef void (gl_APIENTRY *glEGLImageTargetRenderbufferStorageOES_client_proc_t) (void * ctx, GLenum, GLeglImageOES);
|
||||
typedef void (gl_APIENTRY *glAlphaFuncxOES_client_proc_t) (void * ctx, GLenum, GLclampx);
|
||||
typedef void (gl_APIENTRY *glClearColorxOES_client_proc_t) (void * ctx, GLclampx, GLclampx, GLclampx, GLclampx);
|
||||
typedef void (gl_APIENTRY *glClearDepthxOES_client_proc_t) (void * ctx, GLclampx);
|
||||
typedef void (gl_APIENTRY *glClipPlanexOES_client_proc_t) (void * ctx, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glClipPlanexIMG_client_proc_t) (void * ctx, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glColor4xOES_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glDepthRangexOES_client_proc_t) (void * ctx, GLclampx, GLclampx);
|
||||
typedef void (gl_APIENTRY *glFogxOES_client_proc_t) (void * ctx, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glFogxvOES_client_proc_t) (void * ctx, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glFrustumxOES_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glGetClipPlanexOES_client_proc_t) (void * ctx, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGetClipPlanex_client_proc_t) (void * ctx, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGetFixedvOES_client_proc_t) (void * ctx, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGetLightxvOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGetMaterialxvOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGetTexEnvxvOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGetTexParameterxvOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glLightModelxOES_client_proc_t) (void * ctx, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glLightModelxvOES_client_proc_t) (void * ctx, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glLightxOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glLightxvOES_client_proc_t) (void * ctx, GLenum, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glLineWidthxOES_client_proc_t) (void * ctx, GLfixed);
|
||||
typedef void (gl_APIENTRY *glLoadMatrixxOES_client_proc_t) (void * ctx, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glMaterialxOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glMaterialxvOES_client_proc_t) (void * ctx, GLenum, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glMultMatrixxOES_client_proc_t) (void * ctx, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glMultiTexCoord4xOES_client_proc_t) (void * ctx, GLenum, GLfixed, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glNormal3xOES_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glOrthoxOES_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glPointParameterxOES_client_proc_t) (void * ctx, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glPointParameterxvOES_client_proc_t) (void * ctx, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glPointSizexOES_client_proc_t) (void * ctx, GLfixed);
|
||||
typedef void (gl_APIENTRY *glPolygonOffsetxOES_client_proc_t) (void * ctx, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glRotatexOES_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glSampleCoveragexOES_client_proc_t) (void * ctx, GLclampx, GLboolean);
|
||||
typedef void (gl_APIENTRY *glScalexOES_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed);
|
||||
typedef void (gl_APIENTRY *glTexEnvxOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glTexEnvxvOES_client_proc_t) (void * ctx, GLenum, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glTexParameterxOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glTexParameterxvOES_client_proc_t) (void * ctx, GLenum, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glTranslatexOES_client_proc_t) (void * ctx, GLfixed, GLfixed, GLfixed);
|
||||
typedef GLboolean (gl_APIENTRY *glIsRenderbufferOES_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glBindRenderbufferOES_client_proc_t) (void * ctx, GLenum, GLuint);
|
||||
typedef void (gl_APIENTRY *glDeleteRenderbuffersOES_client_proc_t) (void * ctx, GLsizei, const GLuint*);
|
||||
typedef void (gl_APIENTRY *glGenRenderbuffersOES_client_proc_t) (void * ctx, GLsizei, GLuint*);
|
||||
typedef void (gl_APIENTRY *glRenderbufferStorageOES_client_proc_t) (void * ctx, GLenum, GLenum, GLsizei, GLsizei);
|
||||
typedef void (gl_APIENTRY *glGetRenderbufferParameterivOES_client_proc_t) (void * ctx, GLenum, GLenum, GLint*);
|
||||
typedef GLboolean (gl_APIENTRY *glIsFramebufferOES_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glBindFramebufferOES_client_proc_t) (void * ctx, GLenum, GLuint);
|
||||
typedef void (gl_APIENTRY *glDeleteFramebuffersOES_client_proc_t) (void * ctx, GLsizei, const GLuint*);
|
||||
typedef void (gl_APIENTRY *glGenFramebuffersOES_client_proc_t) (void * ctx, GLsizei, GLuint*);
|
||||
typedef GLenum (gl_APIENTRY *glCheckFramebufferStatusOES_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glFramebufferRenderbufferOES_client_proc_t) (void * ctx, GLenum, GLenum, GLenum, GLuint);
|
||||
typedef void (gl_APIENTRY *glFramebufferTexture2DOES_client_proc_t) (void * ctx, GLenum, GLenum, GLenum, GLuint, GLint);
|
||||
typedef void (gl_APIENTRY *glGetFramebufferAttachmentParameterivOES_client_proc_t) (void * ctx, GLenum, GLenum, GLenum, GLint*);
|
||||
typedef void (gl_APIENTRY *glGenerateMipmapOES_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void* (gl_APIENTRY *glMapBufferOES_client_proc_t) (void * ctx, GLenum, GLenum);
|
||||
typedef GLboolean (gl_APIENTRY *glUnmapBufferOES_client_proc_t) (void * ctx, GLenum);
|
||||
typedef void (gl_APIENTRY *glGetBufferPointervOES_client_proc_t) (void * ctx, GLenum, GLenum, GLvoid**);
|
||||
typedef void (gl_APIENTRY *glCurrentPaletteMatrixOES_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glLoadPaletteFromModelViewMatrixOES_client_proc_t) (void * ctx);
|
||||
typedef void (gl_APIENTRY *glMatrixIndexPointerOES_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, const GLvoid*);
|
||||
typedef void (gl_APIENTRY *glWeightPointerOES_client_proc_t) (void * ctx, GLint, GLenum, GLsizei, const GLvoid*);
|
||||
typedef GLbitfield (gl_APIENTRY *glQueryMatrixxOES_client_proc_t) (void * ctx, GLfixed*, GLint*);
|
||||
typedef void (gl_APIENTRY *glDepthRangefOES_client_proc_t) (void * ctx, GLclampf, GLclampf);
|
||||
typedef void (gl_APIENTRY *glFrustumfOES_client_proc_t) (void * ctx, GLfloat, GLfloat, GLfloat, GLfloat, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glOrthofOES_client_proc_t) (void * ctx, GLfloat, GLfloat, GLfloat, GLfloat, GLfloat, GLfloat);
|
||||
typedef void (gl_APIENTRY *glClipPlanefOES_client_proc_t) (void * ctx, GLenum, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glClipPlanefIMG_client_proc_t) (void * ctx, GLenum, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glGetClipPlanefOES_client_proc_t) (void * ctx, GLenum, GLfloat*);
|
||||
typedef void (gl_APIENTRY *glClearDepthfOES_client_proc_t) (void * ctx, GLclampf);
|
||||
typedef void (gl_APIENTRY *glTexGenfOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfloat);
|
||||
typedef void (gl_APIENTRY *glTexGenfvOES_client_proc_t) (void * ctx, GLenum, GLenum, const GLfloat*);
|
||||
typedef void (gl_APIENTRY *glTexGeniOES_client_proc_t) (void * ctx, GLenum, GLenum, GLint);
|
||||
typedef void (gl_APIENTRY *glTexGenivOES_client_proc_t) (void * ctx, GLenum, GLenum, const GLint*);
|
||||
typedef void (gl_APIENTRY *glTexGenxOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed);
|
||||
typedef void (gl_APIENTRY *glTexGenxvOES_client_proc_t) (void * ctx, GLenum, GLenum, const GLfixed*);
|
||||
typedef void (gl_APIENTRY *glGetTexGenfvOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfloat*);
|
||||
typedef void (gl_APIENTRY *glGetTexGenivOES_client_proc_t) (void * ctx, GLenum, GLenum, GLint*);
|
||||
typedef void (gl_APIENTRY *glGetTexGenxvOES_client_proc_t) (void * ctx, GLenum, GLenum, GLfixed*);
|
||||
typedef void (gl_APIENTRY *glBindVertexArrayOES_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glDeleteVertexArraysOES_client_proc_t) (void * ctx, GLsizei, const GLuint*);
|
||||
typedef void (gl_APIENTRY *glGenVertexArraysOES_client_proc_t) (void * ctx, GLsizei, GLuint*);
|
||||
typedef GLboolean (gl_APIENTRY *glIsVertexArrayOES_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glDiscardFramebufferEXT_client_proc_t) (void * ctx, GLenum, GLsizei, const GLenum*);
|
||||
typedef void (gl_APIENTRY *glMultiDrawArraysEXT_client_proc_t) (void * ctx, GLenum, const GLint*, const GLsizei*, GLsizei);
|
||||
typedef void (gl_APIENTRY *glMultiDrawElementsEXT_client_proc_t) (void * ctx, GLenum, const GLsizei*, GLenum, const GLvoid* const*, GLsizei);
|
||||
typedef void (gl_APIENTRY *glMultiDrawArraysSUN_client_proc_t) (void * ctx, GLenum, GLint*, GLsizei*, GLsizei);
|
||||
typedef void (gl_APIENTRY *glMultiDrawElementsSUN_client_proc_t) (void * ctx, GLenum, const GLsizei*, GLenum, const GLvoid**, GLsizei);
|
||||
typedef void (gl_APIENTRY *glRenderbufferStorageMultisampleIMG_client_proc_t) (void * ctx, GLenum, GLsizei, GLenum, GLsizei, GLsizei);
|
||||
typedef void (gl_APIENTRY *glFramebufferTexture2DMultisampleIMG_client_proc_t) (void * ctx, GLenum, GLenum, GLenum, GLuint, GLint, GLsizei);
|
||||
typedef void (gl_APIENTRY *glDeleteFencesNV_client_proc_t) (void * ctx, GLsizei, const GLuint*);
|
||||
typedef void (gl_APIENTRY *glGenFencesNV_client_proc_t) (void * ctx, GLsizei, GLuint*);
|
||||
typedef GLboolean (gl_APIENTRY *glIsFenceNV_client_proc_t) (void * ctx, GLuint);
|
||||
typedef GLboolean (gl_APIENTRY *glTestFenceNV_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glGetFenceivNV_client_proc_t) (void * ctx, GLuint, GLenum, GLint*);
|
||||
typedef void (gl_APIENTRY *glFinishFenceNV_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glSetFenceNV_client_proc_t) (void * ctx, GLuint, GLenum);
|
||||
typedef void (gl_APIENTRY *glGetDriverControlsQCOM_client_proc_t) (void * ctx, GLint*, GLsizei, GLuint*);
|
||||
typedef void (gl_APIENTRY *glGetDriverControlStringQCOM_client_proc_t) (void * ctx, GLuint, GLsizei, GLsizei*, GLchar*);
|
||||
typedef void (gl_APIENTRY *glEnableDriverControlQCOM_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glDisableDriverControlQCOM_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glExtGetTexturesQCOM_client_proc_t) (void * ctx, GLuint*, GLint, GLint*);
|
||||
typedef void (gl_APIENTRY *glExtGetBuffersQCOM_client_proc_t) (void * ctx, GLuint*, GLint, GLint*);
|
||||
typedef void (gl_APIENTRY *glExtGetRenderbuffersQCOM_client_proc_t) (void * ctx, GLuint*, GLint, GLint*);
|
||||
typedef void (gl_APIENTRY *glExtGetFramebuffersQCOM_client_proc_t) (void * ctx, GLuint*, GLint, GLint*);
|
||||
typedef void (gl_APIENTRY *glExtGetTexLevelParameterivQCOM_client_proc_t) (void * ctx, GLuint, GLenum, GLint, GLenum, GLint*);
|
||||
typedef void (gl_APIENTRY *glExtTexObjectStateOverrideiQCOM_client_proc_t) (void * ctx, GLenum, GLenum, GLint);
|
||||
typedef void (gl_APIENTRY *glExtGetTexSubImageQCOM_client_proc_t) (void * ctx, GLenum, GLint, GLint, GLint, GLint, GLsizei, GLsizei, GLsizei, GLenum, GLenum, GLvoid*);
|
||||
typedef void (gl_APIENTRY *glExtGetBufferPointervQCOM_client_proc_t) (void * ctx, GLenum, GLvoid**);
|
||||
typedef void (gl_APIENTRY *glExtGetShadersQCOM_client_proc_t) (void * ctx, GLuint*, GLint, GLint*);
|
||||
typedef void (gl_APIENTRY *glExtGetProgramsQCOM_client_proc_t) (void * ctx, GLuint*, GLint, GLint*);
|
||||
typedef GLboolean (gl_APIENTRY *glExtIsProgramBinaryQCOM_client_proc_t) (void * ctx, GLuint);
|
||||
typedef void (gl_APIENTRY *glExtGetProgramBinarySourceQCOM_client_proc_t) (void * ctx, GLuint, GLenum, GLchar*, GLint*);
|
||||
typedef void (gl_APIENTRY *glStartTilingQCOM_client_proc_t) (void * ctx, GLuint, GLuint, GLuint, GLuint, GLbitfield);
|
||||
typedef void (gl_APIENTRY *glEndTilingQCOM_client_proc_t) (void * ctx, GLbitfield);
|
||||
|
||||
|
||||
#endif
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue