Android虚拟相机

修改libcameraservice映射内存

修改libcameraservice,映射一块普通应用能读写的内存,方便写入yuv数据。

// frameworks/av/camera/aidl/android/hardware/ICameraService.aidl
// Virtual-camera methods added to the camera service binder interface.
// Set the virtual-camera mode; values interpreted by Camera3OutputStream
// (0 = off, 1 = read frames from shared memory, 2 = read from a file).
boolean setVcamMode(in int data);
// Return the currently active virtual-camera mode.
int getVcamMode();
// Return a dup of the shared ashmem region that apps fill with YUV frames.
ParcelFileDescriptor getSharedMemoryFd();
// Dimensions of the most recent preview buffer, published by the service.
int getWidth();
int getHeight();
// frameworks/av/services/camera/libcameraservice/CameraService.h

#include <binder/ParcelFileDescriptor.h>
#include <unistd.h>
#include <android/sharedmem.h>
#include <sys/mman.h>

// Virtual-camera state, shared process-wide.
// NOTE(review): these statics are written from binder threads and read from
// the stream thread without synchronization — confirm whether std::atomic
// is needed.
static int32_t vcam_mode;       // 0 = off, 1 = shared memory, 2 = file
static int32_t yuv_width;       // width of the latest preview buffer
static int32_t yuv_height;      // height of the latest preview buffer
// FIX: was `static int`; use int32_t to match the definition in
// CameraService.cpp (`int32_t CameraService::sharedMemoryFd = -1;`).
static int32_t sharedMemoryFd;
static void* sharedMemoryPtr;   // mmap()ed base address, nullptr until mapped
static size_t sharedMemorySize; // size of the shared region in bytes

// Shared-memory helpers.
// FIX: mapSharedMemory is now declared static for consistency — like the
// other helpers it operates only on the static state above.
static android::status_t mapSharedMemory();
static void unmapSharedMemory();
static android::status_t writeToSharedMemory(const void* data, size_t size, size_t offset = 0);
static android::status_t readFromSharedMemory(void* buffer, size_t size, size_t offset = 0);

// Binder entry points backing the additions to ICameraService.aidl.
virtual binder::Status setVcamMode(int32_t data, bool* return_value) override;
virtual binder::Status getVcamMode(int32_t * return_value) override;
virtual binder::Status getWidth(int32_t * return_value) override;
virtual binder::Status getHeight(int32_t * return_value) override;
virtual binder::Status getSharedMemoryFd(::android::os::ParcelFileDescriptor* _aidl_return) override;
// frameworks/av/services/camera/libcameraservice/CameraService.cpp

// Definitions of the virtual-camera statics declared in CameraService.h.
int32_t CameraService::vcam_mode = 0;
int32_t CameraService::yuv_width = 0;
int32_t CameraService::yuv_height = 0;
// -1 marks "not created yet"; set by the constructor via ASharedMemory_create.
int32_t CameraService::sharedMemoryFd = -1;
// nullptr marks "not mapped"; set by mapSharedMemory().
void* CameraService::sharedMemoryPtr = nullptr;
size_t CameraService::sharedMemorySize = 0;

//------------------

// Constructor: besides the stock AOSP initialization, allocates and maps
// the 16 MB ashmem region that ordinary apps write YUV frames into.
CameraService::CameraService() :
mEventLog(DEFAULT_EVENT_LOG_LENGTH),
mNumberOfCameras(0),
mNumberOfCamerasWithoutSystemCamera(0),
mSoundRef(0), mInitialized(false),
mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE) {
ALOGI("CameraService started (pid=%d)", getpid());
mServiceLockWrapper = std::make_shared<WaitableMutexWrapper>(&mServiceLock);
// Pre-existing AOSP code: sealable memfd used elsewhere in the service.
mMemFd = memfd_create(sFileName, MFD_ALLOW_SEALING);
if (mMemFd == -1) {
ALOGE("%s: Error while creating the file: %s", __FUNCTION__, sFileName);
}
// Virtual-camera addition: create the shared YUV frame buffer.
size_t memorySize = 16 * 1024 * 1024; // 16MB
CameraService::sharedMemorySize = memorySize;
// ASharedMemory_create from "android/sharedmem.h"; requires linking libandroid.
CameraService::sharedMemoryFd = ASharedMemory_create("CameraSharedMemory", memorySize);
if (CameraService::sharedMemoryFd < 0) {
// Failure is logged but not fatal: the camera service still starts,
// the virtual camera simply stays unavailable.
ALOGE("Failed to create shared memory");
} else {
ALOGI("CameraService create mSharedMemoryFd: %d", CameraService::sharedMemoryFd);
// Map the region into this process so the stream code can read frames.
if (mapSharedMemory() != OK) {
ALOGE("Failed to map shared memory");
}
}

}

//------------------
/**
 * Binder entry point: select the virtual-camera mode.
 * @param data          new mode (0 = off, 1 = shared memory, 2 = file).
 * @param return_value  out: always set to true.
 */
binder::Status CameraService::setVcamMode(int32_t data, bool* return_value) {
    vcam_mode = data;      // picked up by Camera3OutputStream on the next frame
    *return_value = true;  // the assignment cannot fail
    return binder::Status::ok();
}


/**
 * Binder entry point: report the currently active virtual-camera mode.
 * @param return_value  out: receives the current mode value.
 */
binder::Status CameraService::getVcamMode(int32_t * return_value) {
    const int32_t mode = vcam_mode;
    *return_value = mode;
    return binder::Status::ok();
}

/**
 * Binder entry point: hand the caller a dup of the shared YUV region's fd.
 * The service keeps its own descriptor; the caller owns the duplicate.
 * @param _aidl_return  out: receives the duplicated descriptor on success.
 * @return ok() on success; a service-specific error when the region was
 *         never created or dup() fails.
 */
binder::Status CameraService::getSharedMemoryFd(::android::os::ParcelFileDescriptor* _aidl_return) {
    if (CameraService::sharedMemoryFd < 0) {
        return binder::Status::fromServiceSpecificError(ERROR_INVALID_OPERATION, "Shared memory not available");
    }

    int dupFd = dup(CameraService::sharedMemoryFd);
    if (dupFd < 0) {
        // BUG FIX: the original returned Status::ok() here, leaving
        // _aidl_return empty while signalling success — callers would then
        // dereference a null ParcelFileDescriptor. Report the failure.
        return binder::Status::fromServiceSpecificError(ERROR_INVALID_OPERATION, "Failed to dup shared memory fd");
    }
    _aidl_return->reset(android::base::unique_fd(dupFd));
    return binder::Status::ok();
}

/**
 * Binder entry point: width of the most recent preview buffer.
 * The value is published by Camera3OutputStream; 0 until a frame has flowed.
 */
binder::Status CameraService::getWidth(int32_t * return_value) {
    const int32_t w = yuv_width;
    *return_value = w;
    return binder::Status::ok();
}

/**
 * Binder entry point: height of the most recent preview buffer.
 * The value is published by Camera3OutputStream; 0 until a frame has flowed.
 */
binder::Status CameraService::getHeight(int32_t * return_value) {
    const int32_t h = yuv_height;
    *return_value = h;
    return binder::Status::ok();
}

// 共享内存操作方法实现
/**
 * Map the shared ashmem region into this process.
 * Idempotent: a second call while already mapped is a no-op returning OK.
 * @return OK on success (or already mapped), BAD_VALUE for a missing
 *         fd/size, UNKNOWN_ERROR when mmap fails.
 */
android::status_t CameraService::mapSharedMemory() {
    if (sharedMemoryFd < 0 || sharedMemorySize == 0) {
        ALOGE("Invalid shared memory fd or size");
        return android::BAD_VALUE;
    }

    if (sharedMemoryPtr != nullptr) {
        ALOGW("Shared memory already mapped");
        return android::OK;
    }

    // Map read/write and shared so writes from apps holding the same fd
    // are visible here.
    void* mapped = mmap(nullptr, sharedMemorySize, PROT_READ | PROT_WRITE, MAP_SHARED, sharedMemoryFd, 0);
    if (mapped == MAP_FAILED) {
        ALOGE("Failed to map shared memory: %s", strerror(errno));
        sharedMemoryPtr = nullptr;
        return android::UNKNOWN_ERROR;
    }

    sharedMemoryPtr = mapped;
    return android::OK;
}

/**
 * Unmap the shared region if it is currently mapped. Safe to call when
 * nothing is mapped (no-op). Does not close the underlying fd.
 */
void CameraService::unmapSharedMemory() {
    if (sharedMemoryPtr == nullptr) {
        return;  // nothing to do
    }
    munmap(sharedMemoryPtr, sharedMemorySize);
    sharedMemoryPtr = nullptr;  // mark as unmapped for future map calls
    ALOGI("Unmapped shared memory");
}

/**
 * Copy `size` bytes from `data` into the shared region at `offset`.
 * @return OK on success, INVALID_OPERATION when the region is not mapped,
 *         BAD_VALUE for a null source or an out-of-bounds range.
 */
android::status_t CameraService::writeToSharedMemory(const void* data, size_t size, size_t offset) {
    if (sharedMemoryPtr == nullptr) {
        ALOGE("Shared memory not mapped");
        return android::INVALID_OPERATION;
    }

    // FIX: null-source check, mirroring the buffer check that
    // readFromSharedMemory already performs.
    if (data == nullptr) {
        ALOGE("Invalid data pointer");
        return android::BAD_VALUE;
    }

    // FIX: overflow-safe bounds check. The original `offset + size >
    // sharedMemorySize` can wrap size_t, letting a huge offset/size pair
    // pass the test and memcpy out of bounds.
    if (size > sharedMemorySize || offset > sharedMemorySize - size) {
        ALOGE("Write would exceed shared memory bounds: offset=%zu, size=%zu, total=%zu",
              offset, size, sharedMemorySize);
        return android::BAD_VALUE;
    }

    memcpy(static_cast<char*>(sharedMemoryPtr) + offset, data, size);
    // ALOGD("Wrote %zu bytes to shared memory at offset %zu", size, offset);
    return android::OK;
}

/**
 * Copy `size` bytes out of the shared region at `offset` into `buffer`.
 * @return OK on success, INVALID_OPERATION when the region is not mapped,
 *         BAD_VALUE for a null destination or an out-of-bounds range.
 */
android::status_t CameraService::readFromSharedMemory(void* buffer, size_t size, size_t offset) {
    if (sharedMemoryPtr == nullptr) {
        ALOGE("Shared memory not mapped");
        return android::INVALID_OPERATION;
    }

    if (buffer == nullptr) {
        ALOGE("Invalid buffer pointer");
        return android::BAD_VALUE;
    }

    // FIX: overflow-safe bounds check — `offset + size` can wrap size_t in
    // the original comparison, bypassing the bounds test entirely.
    if (size > sharedMemorySize || offset > sharedMemorySize - size) {
        ALOGE("Read would exceed shared memory bounds: offset=%zu, size=%zu, total=%zu",
              offset, size, sharedMemorySize);
        return android::BAD_VALUE;
    }

    memcpy(buffer, static_cast<char*>(sharedMemoryPtr) + offset, size);
    // ALOGD("Read %zu bytes from shared memory at offset %zu", size, offset);
    return android::OK;
}

// frameworks/av/services/camera/libcameraservice/Android.bp
shared_libs: [
"libbase",
"libdl",
"libexif",
//...
// libandroid provides ASharedMemory_create() used in CameraService.cpp
"libandroid",
]
// frameworks/av/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
/**
 * Return a filled output buffer from the HAL back to its consumer surface,
 * optionally overwriting the frame with virtual-camera YUV data first
 * (controlled by CameraService::vcam_mode).
 *
 * @param buffer           the stream buffer being returned by the HAL.
 * @param timestamp        capture timestamp (0 is treated as an error).
 * @param readoutTimestamp sensor readout timestamp, may be 0.
 * @param output           must be true; asserted.
 * @param transform        surface transform to apply before queueing.
 * @param surface_ids      target surfaces for shared streams.
 * @param releaseFenceOut  out: fence the caller must wait on.
 * @return status of the cancel/queue operation.
 */
status_t Camera3OutputStream::returnBufferCheckedLocked(
        const camera_stream_buffer &buffer,
        nsecs_t timestamp,
        nsecs_t readoutTimestamp,
        bool output,
        int32_t transform,
        const std::vector<size_t> &surface_ids,
        /*out*/
        sp<Fence> *releaseFenceOut)
{
    (void)output;
    ALOG_ASSERT(output, "Expected output to be true");

    status_t res;

    // Fence management - always honor release fence from HAL
    sp<Fence> releaseFence = new Fence(buffer.release_fence);
    int anwReleaseFence = releaseFence->dup();

    /**
     * Release the lock briefly to avoid deadlock with
     * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this
     * thread will go into StreamingProcessor::onFrameAvailable) during
     * queueBuffer
     */
    sp<ANativeWindow> currentConsumer = mConsumer;
    StreamState state = mState;
    mLock.unlock();

    ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);

    /******************************************************************
     * [Virtual camera: replace the preview frame]
     ******************************************************************/
    android_ycbcr ycbcr = {};
    GraphicBufferMapper &gmapper = GraphicBufferMapper::get();
    res = gmapper.lockYCbCr(*buffer.buffer,
                            GRALLOC_USAGE_SW_WRITE_OFTEN,
                            Rect(anwBuffer->width, anwBuffer->height),
                            &ycbcr);
    // BUG FIX: the original called gmapper.unlock() right here — BEFORE the
    // frame was written through the ycbcr pointers — and then unlocked a
    // second time inside the success branch below. Writing an unlocked
    // buffer is undefined and the double unlock is an error; the single
    // unlock after the write (below) is the correct one.

    if (res == OK)
    {
        ALOGD("Buffer format is: 0x%x", getFormat());
        ALOGD("YCbCr lock success. w=%d, h=%d, y=%p, cb=%p, cr=%p, ystride=%zu, cstride=%zu", anwBuffer->width, anwBuffer->height, ycbcr.y, ycbcr.cb, ycbcr.cr, ycbcr.ystride, ycbcr.cstride);

        uint8_t *dstUV = static_cast<uint8_t *>(ycbcr.cb);
        if (dstUV != NULL)
        {
            // Publish preview dimensions so apps can size their YUV frames.
            CameraService::yuv_width = anwBuffer->width;
            CameraService::yuv_height = anwBuffer->height;

            if (CameraService::vcam_mode == 0)
            {
                // switch off, do nothing
            }
            else if (CameraService::vcam_mode == 1)
            {
                // read data from shared memory
                gImageReplacer.replaceYUVBuffer(ycbcr, anwBuffer->width, anwBuffer->height);
            }
            else if (CameraService::vcam_mode == 2)
            {
                // read data from /data/local/tmp/1.yuv
                // NOTE(review): this open/close is only an existence probe;
                // replaceYUVBuffer2 re-opens the file (minor TOCTOU window).
                int fd = open(YUV_FILENAME, O_RDONLY | O_CLOEXEC);
                if (fd < 0)
                {
                    ALOGE("Failed to open: %s, error: %s", YUV_FILENAME, strerror(errno));
                }
                else
                {
                    close(fd);
                    gImageReplacer.replaceYUVBuffer2(ycbcr, anwBuffer->width, anwBuffer->height);
                }
            }
            else
            {
                // unknown mode, do nothing
            }
        }

        gmapper.unlock(*buffer.buffer);
    }
    /******************************************************************
     * [Virtual camera: end of preview-frame replacement]
     ******************************************************************/

    bool bufferDeferred = false;
    /**
     * Return buffer back to ANativeWindow
     */
    if (buffer.status == CAMERA_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0)
    {
        // Cancel buffer
        if (mDropBuffers)
        {
            ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
        }
        else if (buffer.status == CAMERA_BUFFER_STATUS_ERROR)
        {
            ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
        }
        else
        {
            ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
        }

        res = currentConsumer->cancelBuffer(currentConsumer.get(),
                anwBuffer,
                anwReleaseFence);
        if (shouldLogError(res, state))
        {
            ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
                  " %s (%d)",
                  __FUNCTION__, mId, strerror(-res), res);
        }

        notifyBufferReleased(anwBuffer);
        if (mUseBufferManager)
        {
            // Return this buffer back to buffer manager.
            mBufferProducerListener->onBufferReleased();
        }
    }
    else
    {
        if (mTraceFirstBuffer && (stream_type == CAMERA_STREAM_OUTPUT))
        {
            {
                char traceLog[48];
                snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
                ATRACE_NAME(traceLog);
            }
            mTraceFirstBuffer = false;
        }
        // Fix CameraBlob id type discrepancy between HIDL and AIDL, details : http://b/229688810
        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF)
        {
            if (mIPCTransport == IPCTransport::HIDL)
            {
                fixUpHidlJpegBlobHeader(anwBuffer, anwReleaseFence);
            }
            // If this is a JPEG output, and image dump mask is set, save image to
            // disk.
            if (mImageDumpMask)
            {
                dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
            }
        }

        nsecs_t captureTime = (mUseReadoutTime && readoutTimestamp != 0 ? readoutTimestamp : timestamp) - mTimestampOffset;
        if (mPreviewFrameSpacer != nullptr)
        {
            nsecs_t readoutTime = (readoutTimestamp != 0 ? readoutTimestamp : timestamp) - mTimestampOffset;
            res = mPreviewFrameSpacer->queuePreviewBuffer(captureTime, readoutTime,
                    transform, anwBuffer, anwReleaseFence);
            if (res != OK)
            {
                ALOGE("%s: Stream %d: Error queuing buffer to preview buffer spacer: %s (%d)",
                      __FUNCTION__, mId, strerror(-res), res);
                // NOTE(review): returns while mLock is released (preserved
                // from the original code) — verify callers tolerate this.
                return res;
            }
            bufferDeferred = true;
        }
        else
        {
            nsecs_t presentTime = mSyncToDisplay ? syncTimestampToDisplayLocked(captureTime) : captureTime;

            setTransform(transform, true /*mayChangeMirror*/);
            res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
            if (res != OK)
            {
                ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                      __FUNCTION__, mId, strerror(-res), res);
                return res;
            }

            queueHDRMetadata(anwBuffer->handle, currentConsumer, dynamic_range_profile);

            res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
            if (shouldLogError(res, state))
            {
                ALOGE("%s: Stream %d: Error queueing buffer to native window:"
                      " %s (%d)",
                      __FUNCTION__, mId, strerror(-res), res);
            }
        }
    }
    mLock.lock();

    if (bufferDeferred)
    {
        mCachedOutputBufferCount++;
    }

    // Once a valid buffer has been returned to the queue, can no longer
    // dequeue all buffers for preallocation.
    if (buffer.status != CAMERA_BUFFER_STATUS_ERROR)
    {
        mStreamUnpreparable = true;
    }

    if (res != OK)
    {
        close(anwReleaseFence);
    }

    *releaseFenceOut = releaseFence;

    return res;
}
/** Sets the virtual-camera mode via the camera service (hidden API). */
public boolean setVcamMode(int yuvData) throws CameraAccessException {
    // FIX: the original log message said "setYuvData called..." which names
    // a different method; log the actual method name.
    Log.d("CameraManager.java", "setVcamMode called...");
    synchronized (mLock) {
        try {
            return CameraManagerGlobal.get().getCameraService().setVcamMode(yuvData);
        } catch (RemoteException e) {
            // Binder failure: rethrow unchecked, matching the other wrappers.
            throw e.rethrowAsRuntimeException();
        }
    }
}
/** Returns the current virtual-camera mode from the camera service (hidden API). */
public int getVcamMode() throws CameraAccessException {
synchronized (mLock) {
try {
return CameraManagerGlobal.get().getCameraService().getVcamMode();
} catch (RemoteException e) {
// Binder failure: rethrow as an unchecked exception.
throw e.rethrowAsRuntimeException();
}
}
}
/** Width of the most recent preview buffer, as published by the camera service. */
public int getWidth() throws CameraAccessException {
synchronized (mLock) {
try {
return CameraManagerGlobal.get().getCameraService().getWidth();
} catch (RemoteException e) {
// Binder failure: rethrow as an unchecked exception.
throw e.rethrowAsRuntimeException();
}
}
}
/** Height of the most recent preview buffer, as published by the camera service. */
public int getHeight() throws CameraAccessException {
synchronized (mLock) {
try {
return CameraManagerGlobal.get().getCameraService().getHeight();
} catch (RemoteException e) {
// Binder failure: rethrow as an unchecked exception.
throw e.rethrowAsRuntimeException();
}
}
}
/**
 * Returns a dup of the shared YUV region's file descriptor from the camera
 * service. The caller owns (and must eventually close) the descriptor.
 */
@NonNull
public ParcelFileDescriptor getSharedMemoryFd() throws CameraAccessException {
synchronized (mLock) {
try {
return CameraManagerGlobal.get().getCameraService().getSharedMemoryFd();
} catch (RemoteException e) {
// Binder failure: rethrow as an unchecked exception.
throw e.rethrowAsRuntimeException();
}
}
}

JAVA层读写共享内存

// App-side demo: map the camera service's shared region and fill it with a
// raw YUV frame bundled as an asset.
CameraManager mCameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
    // getSharedMemoryFd is a hidden API; reach it reflectively.
    Method method = mCameraManager.getClass().getMethod("getSharedMemoryFd");
    // FIX: invoke(obj, null) is an ambiguous varargs call; a no-arg method
    // takes no argument array at all.
    ParcelFileDescriptor pfd = (ParcelFileDescriptor) method.invoke(mCameraManager);
    if (pfd != null) {
        SharedMemory mSharedMemory = null;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
            mSharedMemory = SharedMemory.fromFileDescriptor(pfd);
        }
        // FIX: guard against mSharedMemory staying null on pre-Tiramisu
        // devices — the original would NPE on mapReadWrite().
        if (mSharedMemory != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
            ByteBuffer mMappedBuffer = mSharedMemory.mapReadWrite();
            mMappedBuffer.position(0);
            AssetManager assetManager = getAssets();
            byte[] yuv_data = null;
            // FIX: try-with-resources so the stream is closed even if
            // reading throws (the original leaked it on exception).
            try (InputStream inputStream = assetManager.open("test_1920_1080.yuv")) {
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
                    yuv_data = inputStream.readAllBytes();
                }
            }
            // FIX: put(null) would throw NPE when readAllBytes never ran.
            if (yuv_data != null) {
                mMappedBuffer.put(yuv_data);
            }
        }
    }
} catch (Exception e) {
    throw new RuntimeException(e);
}
// App-side demo: map the shared region and read back the first few bytes to
// verify the mapping works.
CameraManager mCameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
    // getSharedMemoryFd is a hidden API; reach it reflectively.
    Method method = mCameraManager.getClass().getMethod("getSharedMemoryFd");
    // FIX: invoke(obj, null) is an ambiguous varargs call; a no-arg method
    // takes no argument array at all.
    ParcelFileDescriptor pfd = (ParcelFileDescriptor) method.invoke(mCameraManager);
    if (pfd != null) {
        SharedMemory mSharedMemory = null;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
            mSharedMemory = SharedMemory.fromFileDescriptor(pfd);
        }
        // FIX: guard against mSharedMemory staying null on pre-Tiramisu
        // devices — the original would NPE on mapReadWrite().
        if (mSharedMemory != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O_MR1) {
            ByteBuffer mMappedBuffer = mSharedMemory.mapReadWrite();
            byte[] buffer = new byte[10];
            mMappedBuffer.position(0);
            mMappedBuffer.get(buffer);
            // FIX: "" + buffer logs the array's identity hash, not its
            // contents; Arrays.toString shows the actual bytes.
            Log.d(TAG, "buffer: " + java.util.Arrays.toString(buffer));
        }
    }
} catch (Exception e) {
    throw new RuntimeException(e);
}

Kiro

Q:应用不知道各个厂商实现HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED格式的像素布局,应用层是怎样解析获取到的数据的?

A:
这是一个非常好的问题!你触及了Android相机架构的核心设计原理。让我详细解释应用层是如何处理HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED格式的。

应用层不直接处理IMPLEMENTATION_DEFINED格式
关键点是:应用层通常不会直接接收到HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED格式的数据。Android相机架构通过以下机制来解决这个问题:

1. 格式转换层次结构
让我查看相关的代码来说明这个机制:

Searched workspace
搜索ImageFormat相关的定义,了解应用层使用的格式
让我查看Android框架中的相关文件:

Searched workspace
查找ImageFormat相关的文件
让我查看相机服务中的格式转换相关代码:

Searched workspace
搜索格式转换相关的代码
让我查看Camera3Device相关的代码,了解格式处理:

Searched workspace
查找Camera3Device相关文件
Read file(s)

Camera3Device.h 1 - 201
现在让我基于Android相机架构的知识来详细解释这个问题:

应用层如何处理IMPLEMENTATION_DEFINED格式
1. 关键原理:应用层不直接处理IMPLEMENTATION_DEFINED
重要概念:应用层实际上从不直接接收HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED格式的数据。Android相机架构通过以下层次来处理:

应用层 (ImageFormat.YUV_420_888, NV21等)
↓ (格式转换)
相机服务层 (Camera3OutputStream)
↓ (缓冲区管理)
HAL层 (HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)
↓ (硬件实现)
硬件层 (实际的NV12/NV21/I420等)
2. 具体的处理机制
让我创建一个示例来说明这个机制:

Created

format_conversion_example.md


3. 在你的虚拟摄像头实现中的含义
现在我明白了你遇到的问题。让我查看你的代码并解释:

Read file(s)

Camera3OutputStream.cpp 1200 - 1301
4. 问题的核心解答
现在我可以回答你的问题了:

应用层实际上不需要解析HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED格式的数据,因为:

A. 应用层从不直接接收这种格式
// 应用层代码示例
ImageReader reader = ImageReader.newInstance(width, height,
ImageFormat.YUV_420_888, maxImages); // 应用请求标准格式

// 应用收到的Image对象
Image image = reader.acquireLatestImage();
Image.Plane[] planes = image.getPlanes(); // 总是标准的YUV平面布局
B. Surface系统负责格式转换
当应用创建一个Surface时,Surface系统会:

记录应用期望的格式(如YUV_420_888)
与HAL协商最佳格式(可能选择IMPLEMENTATION_DEFINED)
在数据返回给应用前进行格式转换
C. 你的虚拟摄像头实现的位置
你在Camera3OutputStream::returnBufferCheckedLocked中修改数据是正确的,因为:

这里是HAL返回数据后,应用接收数据前的关键位置
此时缓冲区还是HAL格式(可能是IMPLEMENTATION_DEFINED)
之后Surface系统会将其转换为应用期望的格式
5. 改进你的实现
基于这个理解,我建议改进你的代码:

Accepted edits to

Camera3OutputStream.cpp


6. 总结
关键理解:

应用层永远不会直接处理IMPLEMENTATION_DEFINED格式
Surface/GraphicBuffer系统负责格式转换
你的修改位置是正确的 - 在HAL层和应用层之间
如果lockYCbCr成功,就按YUV格式处理,无论报告的格式是什么
实际的数据流:

HAL输出 → 你的修改 → Surface格式转换 → 应用接收
(0x22) (YUV数据) (标准格式) (ImageFormat.YUV_420_888)
这就是为什么你的虚拟摄像头实现能够工作的原因 - 你在正确的位置修改了数据,Surface系统会处理剩下的格式转换工作。
当应用使用SurfaceView或TextureView时,系统可能会自动选择IMPLEMENTATION_DEFINED格式
这个选择发生在Surface创建时,而不是在Parameters
当应用使用SurfaceView或TextureView时,系统可能会自动选择IMPLEMENTATION_DEFINED格式,如何修改可以实现不使用IMPLEMENTATION_DEFINED格式