Merge commit '28d94e8b86cef2f86bff054565179fc2027db8cd' into dev

This commit is contained in:
SimoZ64
2025-04-15 09:23:03 +02:00
751 changed files with 60484 additions and 32976 deletions

View File

@@ -72,6 +72,7 @@ const char *SDL_GetCameraDriver(int index)
if (index >= 0 && index < SDL_GetNumCameraDrivers()) {
return bootstrap[index]->name;
}
SDL_InvalidParamError("index");
return NULL;
}
@@ -277,23 +278,10 @@ static void ClosePhysicalCamera(SDL_Camera *device)
device->base_timestamp = 0;
device->adjust_timestamp = 0;
}
// this must not be called while `device` is still in a device list, or while a device's camera thread is still running.
static void DestroyPhysicalCamera(SDL_Camera *device)
{
if (device) {
// Destroy any logical devices that still exist...
ClosePhysicalCamera(device);
camera_driver.impl.FreeDeviceHandle(device);
SDL_DestroyMutex(device->lock);
SDL_free(device->all_specs);
SDL_free(device->name);
SDL_free(device);
}
SDL_zero(device->spec);
}
// Don't hold the device lock when calling this, as we may destroy the device!
void UnrefPhysicalCamera(SDL_Camera *device)
{
@@ -304,7 +292,6 @@ void UnrefPhysicalCamera(SDL_Camera *device)
SDL_AddAtomicInt(&camera_driver.device_count, -1);
}
SDL_UnlockRWLock(camera_driver.device_hash_lock);
DestroyPhysicalCamera(device); // ...and nuke it.
}
}
@@ -497,7 +484,7 @@ SDL_Camera *SDL_AddCamera(const char *name, SDL_CameraPosition position, int num
RefPhysicalCamera(device);
SDL_LockRWLockForWriting(camera_driver.device_hash_lock);
if (SDL_InsertIntoHashTable(camera_driver.device_hash, (const void *) (uintptr_t) device->instance_id, device)) {
if (SDL_InsertIntoHashTable(camera_driver.device_hash, (const void *) (uintptr_t) device->instance_id, device, false)) {
SDL_AddAtomicInt(&camera_driver.device_count, 1);
} else {
SDL_DestroyMutex(device->lock);
@@ -621,7 +608,25 @@ void SDL_CameraPermissionOutcome(SDL_Camera *device, bool approved)
}
}
typedef struct FindOnePhysicalCameraByCallbackData
{
    bool (*callback)(SDL_Camera *device, void *userdata);
    void *userdata;
    SDL_Camera *device;
} FindOnePhysicalCameraByCallbackData;

// Hash table iterator: run the caller-supplied predicate against each camera
// and remember the first one it accepts. Returning false from this callback
// stops SDL_IterateHashTable early once a match is found.
static bool SDLCALL FindOnePhysicalCameraByCallback(void *userdata, const SDL_HashTable *table, const void *key, const void *value)
{
    FindOnePhysicalCameraByCallbackData *state = (FindOnePhysicalCameraByCallbackData *) userdata;
    SDL_Camera *candidate = (SDL_Camera *) value;
    if (!state->callback(candidate, state->userdata)) {
        return true; // not the one; keep iterating.
    }
    state->device = candidate; // found it; record and stop iterating.
    return false;
}
// !!! FIXME: this doesn't follow SDL convention of `userdata` being the first param of the callback.
SDL_Camera *SDL_FindPhysicalCameraByCallback(bool (*callback)(SDL_Camera *device, void *userdata), void *userdata)
{
if (!SDL_GetCurrentCameraDriver()) {
@@ -629,28 +634,22 @@ SDL_Camera *SDL_FindPhysicalCameraByCallback(bool (*callback)(SDL_Camera *device
return NULL;
}
const void *key;
const void *value;
void *iter = NULL;
FindOnePhysicalCameraByCallbackData data = { callback, userdata, NULL };
SDL_LockRWLockForReading(camera_driver.device_hash_lock);
while (SDL_IterateHashTable(camera_driver.device_hash, &key, &value, &iter)) {
SDL_Camera *device = (SDL_Camera *) value;
if (callback(device, userdata)) { // found it?
SDL_UnlockRWLock(camera_driver.device_hash_lock);
return device;
}
}
SDL_IterateHashTable(camera_driver.device_hash, FindOnePhysicalCameraByCallback, &data);
SDL_UnlockRWLock(camera_driver.device_hash_lock);
SDL_SetError("Device not found");
return NULL;
if (!data.device) {
SDL_SetError("Device not found");
}
return data.device;
}
void SDL_CloseCamera(SDL_Camera *camera)
{
SDL_Camera *device = (SDL_Camera *) camera; // currently there's no separation between physical and logical device.
SDL_Camera *device = camera; // currently there's no separation between physical and logical device.
ClosePhysicalCamera(device);
}
@@ -664,7 +663,7 @@ bool SDL_GetCameraFormat(SDL_Camera *camera, SDL_CameraSpec *spec)
return SDL_InvalidParamError("spec");
}
SDL_Camera *device = (SDL_Camera *) camera; // currently there's no separation between physical and logical device.
SDL_Camera *device = camera; // currently there's no separation between physical and logical device.
ObtainPhysicalCameraObj(device);
if (device->permission > 0) {
SDL_copyp(spec, &device->spec);
@@ -701,6 +700,19 @@ SDL_CameraPosition SDL_GetCameraPosition(SDL_CameraID instance_id)
}
typedef struct GetOneCameraData
{
    SDL_CameraID *result;
    int devs_seen;
} GetOneCameraData;

// Hash table iterator: append each camera's instance ID (stored as the hash
// key) to the caller's result array, counting entries as we go. Always
// returns true so the whole table is visited.
static bool SDLCALL GetOneCamera(void *userdata, const SDL_HashTable *table, const void *key, const void *value)
{
    GetOneCameraData *state = (GetOneCameraData *) userdata;
    const SDL_CameraID id = (SDL_CameraID) (uintptr_t) key;
    state->result[state->devs_seen] = id;
    state->devs_seen += 1;
    return true; // keep iterating.
}
SDL_CameraID *SDL_GetCameras(int *count)
{
int dummy_count;
@@ -722,16 +734,10 @@ SDL_CameraID *SDL_GetCameras(int *count)
if (!result) {
num_devices = 0;
} else {
int devs_seen = 0;
const void *key;
const void *value;
void *iter = NULL;
while (SDL_IterateHashTable(camera_driver.device_hash, &key, &value, &iter)) {
result[devs_seen++] = (SDL_CameraID) (uintptr_t) key;
}
SDL_assert(devs_seen == num_devices);
result[devs_seen] = 0; // null-terminated.
GetOneCameraData data = { result, 0 };
SDL_IterateHashTable(camera_driver.device_hash, GetOneCamera, &data);
SDL_assert(data.devs_seen == num_devices);
result[num_devices] = 0; // null-terminated.
}
SDL_UnlockRWLock(camera_driver.device_hash_lock);
@@ -886,7 +892,7 @@ bool SDL_CameraThreadIterate(SDL_Camera *device)
SDL_Surface *srcsurf = acquired;
if (device->needs_scaling == -1) { // downscaling? Do it first. -1: downscale, 0: no scaling, 1: upscale
SDL_Surface *dstsurf = device->needs_conversion ? device->conversion_surface : output_surface;
SDL_SoftStretch(srcsurf, NULL, dstsurf, NULL, SDL_SCALEMODE_NEAREST); // !!! FIXME: linear scale? letterboxing?
SDL_StretchSurface(srcsurf, NULL, dstsurf, NULL, SDL_SCALEMODE_NEAREST); // !!! FIXME: linear scale? letterboxing?
srcsurf = dstsurf;
}
if (device->needs_conversion) {
@@ -897,7 +903,7 @@ bool SDL_CameraThreadIterate(SDL_Camera *device)
srcsurf = dstsurf;
}
if (device->needs_scaling == 1) { // upscaling? Do it last. -1: downscale, 0: no scaling, 1: upscale
SDL_SoftStretch(srcsurf, NULL, output_surface, NULL, SDL_SCALEMODE_NEAREST); // !!! FIXME: linear scale? letterboxing?
SDL_StretchSurface(srcsurf, NULL, output_surface, NULL, SDL_SCALEMODE_NEAREST); // !!! FIXME: linear scale? letterboxing?
}
// we made a copy, so we can give the driver back its resources.
@@ -952,6 +958,110 @@ static int SDLCALL CameraThread(void *devicep)
return 0;
}
// Build the surface pipeline for an opened camera device.
//
// Fills in any fields the app left unspecified in device->spec from the
// hardware's actual_spec, classifies the frame path (scaling direction and
// whether a pixel-format conversion is needed), then allocates:
//   - acquire_surface: a pixels==NULL wrapper in the hardware's native format
//     (the backend points it at driver memory each frame),
//   - conversion_surface: an intermediate buffer, only when BOTH scaling and
//     format conversion are required (they can't be done in one pass),
//   - output_surfaces[]: the pool handed to the app, in the app's format.
// On any allocation failure, everything allocated so far is destroyed and
// false is returned with the SDL error already set by the failing call.
// NOTE(review): asserts require the backend to have set actual_spec first —
// backends that learn their format late (e.g. Emscripten) call this later.
bool SDL_PrepareCameraSurfaces(SDL_Camera *device)
{
    SDL_CameraSpec *appspec = &device->spec; // the app wants this format.
    const SDL_CameraSpec *devspec = &device->actual_spec; // the hardware is set to this format.

    SDL_assert(device->acquire_surface == NULL); // shouldn't call this function twice on an opened camera!
    SDL_assert(devspec->format != SDL_PIXELFORMAT_UNKNOWN); // fix the backend, it should have an actual format by now.
    SDL_assert(devspec->width >= 0); // fix the backend, it should have an actual format by now.
    SDL_assert(devspec->height >= 0); // fix the backend, it should have an actual format by now.

    // Anything the app didn't request inherits the hardware's actual value.
    if (appspec->width <= 0 || appspec->height <= 0) {
        appspec->width = devspec->width;
        appspec->height = devspec->height;
    }
    if (appspec->format == SDL_PIXELFORMAT_UNKNOWN) {
        appspec->format = devspec->format;
    }
    if (appspec->framerate_denominator == 0) {
        appspec->framerate_numerator = devspec->framerate_numerator;
        appspec->framerate_denominator = devspec->framerate_denominator;
    }

    // Decide the scaling direction by comparing total pixel counts.
    if ((devspec->width == appspec->width) && (devspec->height == appspec->height)) {
        device->needs_scaling = 0;
    } else {
        const Uint64 srcarea = ((Uint64) devspec->width) * ((Uint64) devspec->height);
        const Uint64 dstarea = ((Uint64) appspec->width) * ((Uint64) appspec->height);
        if (dstarea <= srcarea) {
            device->needs_scaling = -1; // downscaling (or changing to new aspect ratio with same area)
        } else {
            device->needs_scaling = 1; // upscaling
        }
    }

    device->needs_conversion = (devspec->format != appspec->format);

    // pixels==NULL/pitch==0: no backing store — the backend supplies pointers per frame.
    device->acquire_surface = SDL_CreateSurfaceFrom(devspec->width, devspec->height, devspec->format, NULL, 0);
    if (!device->acquire_surface) {
        goto failed;
    }
    SDL_SetSurfaceColorspace(device->acquire_surface, devspec->colorspace);

    // if we have to scale _and_ convert, we need a middleman surface, since we can't do both changes at once.
    if (device->needs_scaling && device->needs_conversion) {
        // If downscaling happens first, the middleman has the app's size but
        // still the device's format; otherwise device size with app format.
        const bool downscaling_first = (device->needs_scaling < 0);
        const SDL_CameraSpec *s = downscaling_first ? appspec : devspec;
        const SDL_PixelFormat fmt = downscaling_first ? devspec->format : appspec->format;
        device->conversion_surface = SDL_CreateSurface(s->width, s->height, fmt);
        if (!device->conversion_surface) {
            goto failed;
        }
        SDL_SetSurfaceColorspace(device->conversion_surface, devspec->colorspace);
    }

    // output surfaces are in the app-requested format. If no conversion is necessary, we'll just use the pointers
    // the backend fills into acquired_surface, and you can get all the way from DMA access in the camera hardware
    // to the app without a single copy. Otherwise, these will be full surfaces that hold converted/scaled copies.

    // Link the pool entries into the empty (available) free list.
    for (int i = 0; i < (SDL_arraysize(device->output_surfaces) - 1); i++) {
        device->output_surfaces[i].next = &device->output_surfaces[i + 1];
    }
    device->empty_output_surfaces.next = device->output_surfaces;

    for (int i = 0; i < SDL_arraysize(device->output_surfaces); i++) {
        SDL_Surface *surf;
        if (device->needs_scaling || device->needs_conversion) {
            surf = SDL_CreateSurface(appspec->width, appspec->height, appspec->format); // real backing store for the copy.
        } else {
            surf = SDL_CreateSurfaceFrom(appspec->width, appspec->height, appspec->format, NULL, 0); // zero-copy wrapper.
        }
        if (!surf) {
            goto failed;
        }
        SDL_SetSurfaceColorspace(surf, devspec->colorspace);
        device->output_surfaces[i].surface = surf;
    }

    return true;

failed:
    // Tear down anything we managed to allocate before the failure.
    if (device->acquire_surface) {
        SDL_DestroySurface(device->acquire_surface);
        device->acquire_surface = NULL;
    }
    if (device->conversion_surface) {
        SDL_DestroySurface(device->conversion_surface);
        device->conversion_surface = NULL;
    }
    for (int i = 0; i < SDL_arraysize(device->output_surfaces); i++) {
        SDL_Surface *surf = device->output_surfaces[i].surface;
        if (surf) {
            SDL_DestroySurface(surf);
        }
    }
    SDL_zeroa(device->output_surfaces);
    return false;
}
static void ChooseBestCameraSpec(SDL_Camera *device, const SDL_CameraSpec *spec, SDL_CameraSpec *closest)
{
// Find the closest available native format/size...
@@ -1104,85 +1214,19 @@ SDL_Camera *SDL_OpenCamera(SDL_CameraID instance_id, const SDL_CameraSpec *spec)
return NULL;
}
if (spec) {
SDL_copyp(&device->spec, spec);
if (spec->width <= 0 || spec->height <= 0) {
device->spec.width = closest.width;
device->spec.height = closest.height;
}
if (spec->format == SDL_PIXELFORMAT_UNKNOWN) {
device->spec.format = closest.format;
}
if (spec->framerate_denominator == 0) {
device->spec.framerate_numerator = closest.framerate_numerator;
device->spec.framerate_denominator = closest.framerate_denominator;
}
} else {
SDL_copyp(&device->spec, &closest);
}
SDL_copyp(&device->spec, spec ? spec : &closest);
SDL_copyp(&device->actual_spec, &closest);
if ((closest.width == device->spec.width) && (closest.height == device->spec.height)) {
device->needs_scaling = 0;
} else {
const Uint64 srcarea = ((Uint64) closest.width) * ((Uint64) closest.height);
const Uint64 dstarea = ((Uint64) device->spec.width) * ((Uint64) device->spec.height);
if (dstarea <= srcarea) {
device->needs_scaling = -1; // downscaling (or changing to new aspect ratio with same area)
} else {
device->needs_scaling = 1; // upscaling
}
}
device->needs_conversion = (closest.format != device->spec.format);
device->acquire_surface = SDL_CreateSurfaceFrom(closest.width, closest.height, closest.format, NULL, 0);
if (!device->acquire_surface) {
ClosePhysicalCamera(device);
ReleaseCamera(device);
return NULL;
}
SDL_SetSurfaceColorspace(device->acquire_surface, closest.colorspace);
// if we have to scale _and_ convert, we need a middleman surface, since we can't do both changes at once.
if (device->needs_scaling && device->needs_conversion) {
const bool downsampling_first = (device->needs_scaling < 0);
const SDL_CameraSpec *s = downsampling_first ? &device->spec : &closest;
const SDL_PixelFormat fmt = downsampling_first ? closest.format : device->spec.format;
device->conversion_surface = SDL_CreateSurface(s->width, s->height, fmt);
if (!device->conversion_surface) {
// SDL_PIXELFORMAT_UNKNOWN here is taken as a signal that the backend
// doesn't know its format yet (Emscripten waiting for user permission,
// in this case), and the backend will call SDL_PrepareCameraSurfaces()
// itself, later but before the app is allowed to acquire images.
if (closest.format != SDL_PIXELFORMAT_UNKNOWN) {
if (!SDL_PrepareCameraSurfaces(device)) {
ClosePhysicalCamera(device);
ReleaseCamera(device);
return NULL;
}
SDL_SetSurfaceColorspace(device->conversion_surface, closest.colorspace);
}
// output surfaces are in the app-requested format. If no conversion is necessary, we'll just use the pointers
// the backend fills into acquired_surface, and you can get all the way from DMA access in the camera hardware
// to the app without a single copy. Otherwise, these will be full surfaces that hold converted/scaled copies.
for (int i = 0; i < (SDL_arraysize(device->output_surfaces) - 1); i++) {
device->output_surfaces[i].next = &device->output_surfaces[i + 1];
}
device->empty_output_surfaces.next = device->output_surfaces;
for (int i = 0; i < SDL_arraysize(device->output_surfaces); i++) {
SDL_Surface *surf;
if (device->needs_scaling || device->needs_conversion) {
surf = SDL_CreateSurface(device->spec.width, device->spec.height, device->spec.format);
} else {
surf = SDL_CreateSurfaceFrom(device->spec.width, device->spec.height, device->spec.format, NULL, 0);
}
if (!surf) {
ClosePhysicalCamera(device);
ReleaseCamera(device);
return NULL;
}
SDL_SetSurfaceColorspace(surf, closest.colorspace);
device->output_surfaces[i].surface = surf;
}
device->drop_frames = 1;
@@ -1202,7 +1246,7 @@ SDL_Camera *SDL_OpenCamera(SDL_CameraID instance_id, const SDL_CameraSpec *spec)
ReleaseCamera(device); // unlock, we're good to go!
return (SDL_Camera *) device; // currently there's no separation between physical and logical device.
return device; // currently there's no separation between physical and logical device.
}
SDL_Surface *SDL_AcquireCameraFrame(SDL_Camera *camera, Uint64 *timestampNS)
@@ -1216,7 +1260,7 @@ SDL_Surface *SDL_AcquireCameraFrame(SDL_Camera *camera, Uint64 *timestampNS)
return NULL;
}
SDL_Camera *device = (SDL_Camera *) camera; // currently there's no separation between physical and logical device.
SDL_Camera *device = camera; // currently there's no separation between physical and logical device.
ObtainPhysicalCameraObj(device);
@@ -1258,7 +1302,7 @@ void SDL_ReleaseCameraFrame(SDL_Camera *camera, SDL_Surface *frame)
return;
}
SDL_Camera *device = (SDL_Camera *) camera; // currently there's no separation between physical and logical device.
SDL_Camera *device = camera; // currently there's no separation between physical and logical device.
ObtainPhysicalCameraObj(device);
SurfaceList *slistprev = &device->app_held_output_surfaces;
@@ -1300,7 +1344,7 @@ SDL_CameraID SDL_GetCameraID(SDL_Camera *camera)
if (!camera) {
SDL_InvalidParamError("camera");
} else {
SDL_Camera *device = (SDL_Camera *) camera; // currently there's no separation between physical and logical device.
SDL_Camera *device = camera; // currently there's no separation between physical and logical device.
ObtainPhysicalCameraObj(device);
result = device->instance_id;
ReleaseCamera(device);
@@ -1315,7 +1359,7 @@ SDL_PropertiesID SDL_GetCameraProperties(SDL_Camera *camera)
if (!camera) {
SDL_InvalidParamError("camera");
} else {
SDL_Camera *device = (SDL_Camera *) camera; // currently there's no separation between physical and logical device.
SDL_Camera *device = camera; // currently there's no separation between physical and logical device.
ObtainPhysicalCameraObj(device);
if (device->props == 0) {
device->props = SDL_CreateProperties();
@@ -1334,7 +1378,7 @@ int SDL_GetCameraPermissionState(SDL_Camera *camera)
SDL_InvalidParamError("camera");
result = -1;
} else {
SDL_Camera *device = (SDL_Camera *) camera; // currently there's no separation between physical and logical device.
SDL_Camera *device = camera; // currently there's no separation between physical and logical device.
ObtainPhysicalCameraObj(device);
result = device->permission;
ReleaseCamera(device);
@@ -1378,37 +1422,26 @@ void SDL_QuitCamera(void)
SDL_free(i);
}
const void *key;
const void *value;
void *iter = NULL;
while (SDL_IterateHashTable(device_hash, &key, &value, &iter)) {
DestroyPhysicalCamera((SDL_Camera *) value);
}
SDL_DestroyHashTable(device_hash);
// Free the driver data
camera_driver.impl.Deinitialize();
SDL_DestroyRWLock(camera_driver.device_hash_lock);
SDL_DestroyHashTable(device_hash);
SDL_zero(camera_driver);
}
static Uint32 HashCameraID(const void *key, void *data)
// Physical camera objects are only destroyed when removed from the device hash.
static void SDLCALL DestroyCameraHashItem(void *userdata, const void *key, const void *value)
{
// The values are unique incrementing integers, starting at 1, so just return minus 1 to start with bucket zero.
return ((Uint32) ((uintptr_t) key)) - 1;
}
static bool MatchCameraID(const void *a, const void *b, void *data)
{
return (a == b); // simple integers, just compare them as pointer values.
}
static void NukeCameraHashItem(const void *key, const void *value, void *data)
{
// no-op, keys and values in this hashtable are treated as Plain Old Data and don't get freed here.
SDL_Camera *device = (SDL_Camera *) value;
ClosePhysicalCamera(device);
camera_driver.impl.FreeDeviceHandle(device);
SDL_DestroyMutex(device->lock);
SDL_free(device->all_specs);
SDL_free(device->name);
SDL_free(device);
}
bool SDL_CameraInit(const char *driver_name)
@@ -1422,7 +1455,7 @@ bool SDL_CameraInit(const char *driver_name)
return false;
}
SDL_HashTable *device_hash = SDL_CreateHashTable(NULL, 8, HashCameraID, MatchCameraID, NukeCameraHashItem, false, false);
SDL_HashTable *device_hash = SDL_CreateHashTable(0, false, SDL_HashID, SDL_KeyMatchID, DestroyCameraHashItem, NULL);
if (!device_hash) {
SDL_DestroyRWLock(device_hash_lock);
return false;

View File

@@ -54,6 +54,10 @@ extern void SDL_CameraThreadSetup(SDL_Camera *device);
extern bool SDL_CameraThreadIterate(SDL_Camera *device);
extern void SDL_CameraThreadShutdown(SDL_Camera *device);
// Backends can call this if they have to finish initializing later, like Emscripten. Most backends should _not_ call this directly!
extern bool SDL_PrepareCameraSurfaces(SDL_Camera *device);
// common utility functionality to gather up camera specs. Not required!
typedef struct CameraFormatAddData
{
@@ -190,7 +194,7 @@ typedef struct SDL_CameraDriver
const char *desc; // The description of this camera driver
SDL_CameraDriverImpl impl; // the backend's interface
SDL_RWLock *device_hash_lock; // A rwlock that protects `device_hash`
SDL_RWLock *device_hash_lock; // A rwlock that protects `device_hash` // !!! FIXME: device_hash _also_ has a rwlock, see if we still need this one.
SDL_HashTable *device_hash; // the collection of currently-available camera devices
SDL_PendingCameraEvent pending_events;
SDL_PendingCameraEvent *pending_events_tail;

View File

@@ -98,17 +98,24 @@ static void EMSCRIPTENCAMERA_CloseDevice(SDL_Camera *device)
}
}
static void SDLEmscriptenCameraPermissionOutcome(SDL_Camera *device, int approved, int w, int h, int fps)
static int SDLEmscriptenCameraPermissionOutcome(SDL_Camera *device, int approved, int w, int h, int fps)
{
device->spec.width = device->actual_spec.width = w;
device->spec.height = device->actual_spec.height = h;
device->spec.framerate_numerator = device->actual_spec.framerate_numerator = fps;
device->spec.framerate_denominator = device->actual_spec.framerate_denominator = 1;
if (device->acquire_surface) {
device->acquire_surface->w = w;
device->acquire_surface->h = h;
if (approved) {
device->actual_spec.format = SDL_PIXELFORMAT_RGBA32;
device->actual_spec.width = w;
device->actual_spec.height = h;
device->actual_spec.framerate_numerator = fps;
device->actual_spec.framerate_denominator = 1;
if (!SDL_PrepareCameraSurfaces(device)) {
// uhoh, we're in trouble. Probably ran out of memory.
SDL_LogError(SDL_LOG_CATEGORY_ERROR, "Camera could not prepare surfaces: %s ... revoking approval!", SDL_GetError());
approved = 0; // disconnecting the SDL camera might not be safe here, just mark it as denied by user.
}
}
SDL_CameraPermissionOutcome(device, approved ? true : false);
return approved;
}
static bool EMSCRIPTENCAMERA_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *spec)
@@ -167,40 +174,40 @@ static bool EMSCRIPTENCAMERA_OpenDevice(SDL_Camera *device, const SDL_CameraSpec
const actualfps = settings.frameRate;
console.log("Camera is opened! Actual spec: (" + actualw + "x" + actualh + "), fps=" + actualfps);
dynCall('viiiii', outcome, [device, 1, actualw, actualh, actualfps]);
if (dynCall('iiiiii', outcome, [device, 1, actualw, actualh, actualfps])) {
const video = document.createElement("video");
video.width = actualw;
video.height = actualh;
video.style.display = 'none'; // we need to attach this to a hidden video node so we can read it as pixels.
video.srcObject = stream;
const video = document.createElement("video");
video.width = actualw;
video.height = actualh;
video.style.display = 'none'; // we need to attach this to a hidden video node so we can read it as pixels.
video.srcObject = stream;
const canvas = document.createElement("canvas");
canvas.width = actualw;
canvas.height = actualh;
canvas.style.display = 'none'; // we need to attach this to a hidden video node so we can read it as pixels.
const canvas = document.createElement("canvas");
canvas.width = actualw;
canvas.height = actualh;
canvas.style.display = 'none'; // we need to attach this to a hidden video node so we can read it as pixels.
const ctx2d = canvas.getContext('2d');
const ctx2d = canvas.getContext('2d');
const SDL3 = Module['SDL3'];
SDL3.camera.width = actualw;
SDL3.camera.height = actualh;
SDL3.camera.fps = actualfps;
SDL3.camera.fpsincrms = 1000.0 / actualfps;
SDL3.camera.stream = stream;
SDL3.camera.video = video;
SDL3.camera.canvas = canvas;
SDL3.camera.ctx2d = ctx2d;
SDL3.camera.next_frame_time = performance.now();
const SDL3 = Module['SDL3'];
SDL3.camera.width = actualw;
SDL3.camera.height = actualh;
SDL3.camera.fps = actualfps;
SDL3.camera.fpsincrms = 1000.0 / actualfps;
SDL3.camera.stream = stream;
SDL3.camera.video = video;
SDL3.camera.canvas = canvas;
SDL3.camera.ctx2d = ctx2d;
SDL3.camera.next_frame_time = performance.now();
video.play();
video.addEventListener('loadedmetadata', () => {
grabNextCameraFrame(); // start this loop going.
});
video.play();
video.addEventListener('loadedmetadata', () => {
grabNextCameraFrame(); // start this loop going.
});
}
})
.catch((err) => {
console.error("Tried to open camera but it threw an error! " + err.name + ": " + err.message);
dynCall('viiiii', outcome, [device, 0, 0, 0, 0]); // we call this a permission error, because it probably is.
dynCall('iiiiii', outcome, [device, 0, 0, 0, 0]); // we call this a permission error, because it probably is.
});
}, device, spec->width, spec->height, spec->framerate_numerator, spec->framerate_denominator, SDLEmscriptenCameraPermissionOutcome, SDL_CameraThreadIterate);

View File

@@ -76,6 +76,7 @@ SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_UYVY, FCC('UYVY'));
SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_YVYU, FCC('YVYU'));
SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_NV12, FCC('NV12'));
SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_NV21, FCC('NV21'));
SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_MJPG, FCC('MJPG'));
#undef SDL_DEFINE_MEDIATYPE_GUID
#ifdef __GNUC__
@@ -102,7 +103,8 @@ static const struct
{ &SDL_MFVideoFormat_UYVY, SDL_PIXELFORMAT_UYVY, SDL_COLORSPACE_BT709_LIMITED },
{ &SDL_MFVideoFormat_YVYU, SDL_PIXELFORMAT_YVYU, SDL_COLORSPACE_BT709_LIMITED },
{ &SDL_MFVideoFormat_NV12, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED },
{ &SDL_MFVideoFormat_NV21, SDL_PIXELFORMAT_NV21, SDL_COLORSPACE_BT709_LIMITED }
{ &SDL_MFVideoFormat_NV21, SDL_PIXELFORMAT_NV21, SDL_COLORSPACE_BT709_LIMITED },
{ &SDL_MFVideoFormat_MJPG, SDL_PIXELFORMAT_MJPG, SDL_COLORSPACE_SRGB }
};
static SDL_Colorspace GetMediaTypeColorspace(IMFMediaType *mediatype, SDL_Colorspace default_colorspace)
@@ -296,6 +298,13 @@ static void MediaTypeToSDLFmt(IMFMediaType *mediatype, SDL_PixelFormat *format,
}
}
}
#if DEBUG_CAMERA
SDL_Log("Unknown media type: 0x%x (%c%c%c%c)", type.Data1,
(char)(Uint8)(type.Data1 >> 0),
(char)(Uint8)(type.Data1 >> 8),
(char)(Uint8)(type.Data1 >> 16),
(char)(Uint8)(type.Data1 >> 24));
#endif
*format = SDL_PIXELFORMAT_UNKNOWN;
*colorspace = SDL_COLORSPACE_UNKNOWN;
}
@@ -424,7 +433,7 @@ static void SDLCALL CleanupIMFMediaBuffer(void *userdata, void *value)
static SDL_CameraFrameResult MEDIAFOUNDATION_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS)
{
SDL_assert(device->hidden->current_sample != NULL);
SDL_CameraFrameResult result = SDL_CAMERA_FRAME_READY;
HRESULT ret;
LONGLONG timestamp100NS = 0;
@@ -457,46 +466,60 @@ static SDL_CameraFrameResult MEDIAFOUNDATION_AcquireFrame(SDL_Camera *device, SD
} else {
BYTE *pixels = NULL;
LONG pitch = 0;
DWORD buflen = 0;
if (SUCCEEDED(IMFMediaBuffer_QueryInterface(objs->buffer, &SDL_IID_IMF2DBuffer2, (void **)&objs->buffer2d2))) {
BYTE *bufstart = NULL;
DWORD buflen = 0;
ret = IMF2DBuffer2_Lock2DSize(objs->buffer2d2, MF2DBuffer_LockFlags_Read, &pixels, &pitch, &bufstart, &buflen);
if (FAILED(ret)) {
result = SDL_CAMERA_FRAME_ERROR;
CleanupIMF2DBuffer2(NULL, objs);
} else {
if (frame->format == SDL_PIXELFORMAT_MJPG) {
pitch = (LONG)buflen;
}
if (pitch < 0) { // image rows are reversed.
pixels += -pitch * (frame->h - 1);
}
frame->pixels = pixels;
frame->pitch = (int) pitch;
frame->pitch = (int)pitch;
if (!SDL_SetPointerPropertyWithCleanup(surfprops, PROP_SURFACE_IMFOBJS_POINTER, objs, CleanupIMF2DBuffer2, NULL)) {
result = SDL_CAMERA_FRAME_ERROR;
}
}
} else if (SUCCEEDED(IMFMediaBuffer_QueryInterface(objs->buffer, &SDL_IID_IMF2DBuffer, (void **)&objs->buffer2d))) {
} else if (frame->format != SDL_PIXELFORMAT_MJPG &&
SUCCEEDED(IMFMediaBuffer_QueryInterface(objs->buffer, &SDL_IID_IMF2DBuffer, (void **)&objs->buffer2d))) {
ret = IMF2DBuffer_Lock2D(objs->buffer2d, &pixels, &pitch);
if (FAILED(ret)) {
CleanupIMF2DBuffer(NULL, objs);
result = SDL_CAMERA_FRAME_ERROR;
} else {
if (pitch < 0) { // image rows are reversed.
pixels += -pitch * (frame->h - 1);
}
frame->pixels = pixels;
frame->pitch = (int) pitch;
frame->pitch = (int)pitch;
if (!SDL_SetPointerPropertyWithCleanup(surfprops, PROP_SURFACE_IMFOBJS_POINTER, objs, CleanupIMF2DBuffer, NULL)) {
result = SDL_CAMERA_FRAME_ERROR;
}
}
} else {
DWORD maxlen = 0, currentlen = 0;
ret = IMFMediaBuffer_Lock(objs->buffer, &pixels, &maxlen, &currentlen);
DWORD maxlen = 0;
ret = IMFMediaBuffer_Lock(objs->buffer, &pixels, &maxlen, &buflen);
if (FAILED(ret)) {
CleanupIMFMediaBuffer(NULL, objs);
result = SDL_CAMERA_FRAME_ERROR;
} else {
pitch = (LONG) device->hidden->pitch;
if (pitch < 0) { // image rows are reversed.
if (frame->format == SDL_PIXELFORMAT_MJPG) {
pitch = (LONG)buflen;
} else {
pitch = (LONG)device->hidden->pitch;
}
if (pitch < 0) { // image rows are reversed.
pixels += -pitch * (frame->h - 1);
}
frame->pixels = pixels;
frame->pitch = (int) pitch;
frame->pitch = (int)pitch;
if (!SDL_SetPointerPropertyWithCleanup(surfprops, PROP_SURFACE_IMFOBJS_POINTER, objs, CleanupIMFMediaBuffer, NULL)) {
result = SDL_CAMERA_FRAME_ERROR;
}
@@ -522,6 +545,23 @@ static void MEDIAFOUNDATION_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame)
#else
// Copy a locked Media Foundation buffer into a fresh SIMD-aligned allocation
// owned by the SDL surface. A negative pitch means the rows are stored in
// reverse order, so the copy source starts at the row highest in memory.
// Returns SDL_CAMERA_FRAME_READY on success, SDL_CAMERA_FRAME_ERROR if the
// allocation fails.
static SDL_CameraFrameResult MEDIAFOUNDATION_CopyFrame(SDL_Surface *frame, const BYTE *pixels, LONG pitch, DWORD buflen)
{
    void *dst = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen);
    if (dst == NULL) {
        return SDL_CAMERA_FRAME_ERROR;
    }

    const BYTE *src = pixels;
    if (pitch < 0) { // image rows are reversed.
        src += ((LONG) (frame->h - 1)) * -pitch;
    }
    SDL_memcpy(dst, src, buflen);

    frame->pixels = dst;
    frame->pitch = (int) pitch;
    return SDL_CAMERA_FRAME_READY;
}
static SDL_CameraFrameResult MEDIAFOUNDATION_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS)
{
SDL_assert(device->hidden->current_sample != NULL);
@@ -555,63 +595,44 @@ static SDL_CameraFrameResult MEDIAFOUNDATION_AcquireFrame(SDL_Camera *device, SD
IMF2DBuffer2 *buffer2d2 = NULL;
BYTE *pixels = NULL;
LONG pitch = 0;
DWORD buflen = 0;
if (SUCCEEDED(IMFMediaBuffer_QueryInterface(buffer, &SDL_IID_IMF2DBuffer2, (void **)&buffer2d2))) {
BYTE *bufstart = NULL;
DWORD buflen = 0;
ret = IMF2DBuffer2_Lock2DSize(buffer2d2, MF2DBuffer_LockFlags_Read, &pixels, &pitch, &bufstart, &buflen);
if (FAILED(ret)) {
result = SDL_CAMERA_FRAME_ERROR;
} else {
frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen);
if (frame->pixels == NULL) {
result = SDL_CAMERA_FRAME_ERROR;
} else {
SDL_memcpy(frame->pixels, pixels, buflen);
frame->pitch = (int)pitch;
if (frame->format == SDL_PIXELFORMAT_MJPG) {
pitch = (LONG)buflen;
}
result = MEDIAFOUNDATION_CopyFrame(frame, pixels, pitch, buflen);
IMF2DBuffer2_Unlock2D(buffer2d2);
}
IMF2DBuffer2_Release(buffer2d2);
} else if (SUCCEEDED(IMFMediaBuffer_QueryInterface(buffer, &SDL_IID_IMF2DBuffer, (void **)&buffer2d))) {
} else if (frame->format != SDL_PIXELFORMAT_MJPG &&
SUCCEEDED(IMFMediaBuffer_QueryInterface(buffer, &SDL_IID_IMF2DBuffer, (void **)&buffer2d))) {
ret = IMF2DBuffer_Lock2D(buffer2d, &pixels, &pitch);
if (FAILED(ret)) {
result = SDL_CAMERA_FRAME_ERROR;
} else {
BYTE *bufstart = pixels;
const DWORD buflen = (SDL_abs((int)pitch) * frame->w) * frame->h;
if (pitch < 0) { // image rows are reversed.
bufstart += -pitch * (frame->h - 1);
}
frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen);
if (frame->pixels == NULL) {
result = SDL_CAMERA_FRAME_ERROR;
} else {
SDL_memcpy(frame->pixels, bufstart, buflen);
frame->pitch = (int)pitch;
}
buflen = SDL_abs((int)pitch) * frame->h;
result = MEDIAFOUNDATION_CopyFrame(frame, pixels, pitch, buflen);
IMF2DBuffer_Unlock2D(buffer2d);
}
IMF2DBuffer_Release(buffer2d);
} else {
DWORD maxlen = 0, currentlen = 0;
ret = IMFMediaBuffer_Lock(buffer, &pixels, &maxlen, &currentlen);
DWORD maxlen = 0;
ret = IMFMediaBuffer_Lock(buffer, &pixels, &maxlen, &buflen);
if (FAILED(ret)) {
result = SDL_CAMERA_FRAME_ERROR;
} else {
BYTE *bufstart = pixels;
pitch = (LONG)device->hidden->pitch;
const DWORD buflen = (SDL_abs((int)pitch) * frame->w) * frame->h;
if (pitch < 0) { // image rows are reversed.
bufstart += -pitch * (frame->h - 1);
}
frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen);
if (frame->pixels == NULL) {
result = SDL_CAMERA_FRAME_ERROR;
if (frame->format == SDL_PIXELFORMAT_MJPG) {
pitch = (LONG)buflen;
} else {
SDL_memcpy(frame->pixels, bufstart, buflen);
frame->pitch = (int)pitch;
pitch = (LONG)device->hidden->pitch;
}
result = MEDIAFOUNDATION_CopyFrame(frame, pixels, pitch, buflen);
IMFMediaBuffer_Unlock(buffer);
}
}

View File

@@ -25,6 +25,10 @@
#include "../SDL_syscamera.h"
#ifdef HAVE_DBUS_DBUS_H
#include "../../core/linux/SDL_dbus.h"
#endif
#include <spa/utils/type.h>
#include <spa/pod/builder.h>
#include <spa/pod/iter.h>
@@ -58,7 +62,9 @@ static bool pipewire_initialized = false;
// Pipewire entry points
static const char *(*PIPEWIRE_pw_get_library_version)(void);
#if PW_CHECK_VERSION(0, 3, 75)
static bool (*PIPEWIRE_pw_check_library_version)(int major, int minor, int micro);
#endif
static void (*PIPEWIRE_pw_init)(int *, char ***);
static void (*PIPEWIRE_pw_deinit)(void);
static struct pw_main_loop *(*PIPEWIRE_pw_main_loop_new)(const struct spa_dict *loop);
@@ -78,6 +84,9 @@ static int (*PIPEWIRE_pw_thread_loop_start)(struct pw_thread_loop *);
static struct pw_context *(*PIPEWIRE_pw_context_new)(struct pw_loop *, struct pw_properties *, size_t);
static void (*PIPEWIRE_pw_context_destroy)(struct pw_context *);
static struct pw_core *(*PIPEWIRE_pw_context_connect)(struct pw_context *, struct pw_properties *, size_t);
#ifdef SDL_USE_LIBDBUS
static struct pw_core *(*PIPEWIRE_pw_context_connect_fd)(struct pw_context *, int, struct pw_properties *, size_t);
#endif
static void (*PIPEWIRE_pw_proxy_add_object_listener)(struct pw_proxy *, struct spa_hook *, const void *, void *);
static void (*PIPEWIRE_pw_proxy_add_listener)(struct pw_proxy *, struct spa_hook *, const struct pw_proxy_events *, void *);
static void *(*PIPEWIRE_pw_proxy_get_user_data)(struct pw_proxy *);
@@ -151,7 +160,9 @@ static void unload_pipewire_library(void)
static bool load_pipewire_syms(void)
{
SDL_PIPEWIRE_SYM(pw_get_library_version);
#if PW_CHECK_VERSION(0, 3, 75)
SDL_PIPEWIRE_SYM(pw_check_library_version);
#endif
SDL_PIPEWIRE_SYM(pw_init);
SDL_PIPEWIRE_SYM(pw_deinit);
SDL_PIPEWIRE_SYM(pw_main_loop_new);
@@ -171,6 +182,9 @@ static bool load_pipewire_syms(void)
SDL_PIPEWIRE_SYM(pw_context_new);
SDL_PIPEWIRE_SYM(pw_context_destroy);
SDL_PIPEWIRE_SYM(pw_context_connect);
#ifdef SDL_USE_LIBDBUS
SDL_PIPEWIRE_SYM(pw_context_connect_fd);
#endif
SDL_PIPEWIRE_SYM(pw_proxy_add_listener);
SDL_PIPEWIRE_SYM(pw_proxy_add_object_listener);
SDL_PIPEWIRE_SYM(pw_proxy_get_user_data);
@@ -359,21 +373,14 @@ static struct sdl_video_format {
SDL_Colorspace colorspace;
uint32_t id;
} sdl_video_formats[] = {
#if SDL_BYTEORDER == SDL_BIG_ENDIAN
{ SDL_PIXELFORMAT_RGBX8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGBx },
{ SDL_PIXELFORMAT_BGRX8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGRx },
{ SDL_PIXELFORMAT_RGBA8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGBA },
{ SDL_PIXELFORMAT_ARGB8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ARGB },
{ SDL_PIXELFORMAT_BGRA8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGRA },
{ SDL_PIXELFORMAT_ABGR8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ABGR },
#else
{ SDL_PIXELFORMAT_RGBX8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_xBGR },
{ SDL_PIXELFORMAT_BGRX8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_xRGB },
{ SDL_PIXELFORMAT_RGBA8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ABGR },
{ SDL_PIXELFORMAT_ARGB8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGRA },
{ SDL_PIXELFORMAT_BGRA8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ARGB },
{ SDL_PIXELFORMAT_ABGR8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGBA },
#endif
{ SDL_PIXELFORMAT_RGBX32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGBx },
{ SDL_PIXELFORMAT_XRGB32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_xRGB },
{ SDL_PIXELFORMAT_BGRX32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGRx },
{ SDL_PIXELFORMAT_XBGR32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_xBGR },
{ SDL_PIXELFORMAT_RGBA32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGBA },
{ SDL_PIXELFORMAT_ARGB32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ARGB },
{ SDL_PIXELFORMAT_BGRA32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGRA },
{ SDL_PIXELFORMAT_ABGR32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ABGR },
{ SDL_PIXELFORMAT_RGB24, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGB },
{ SDL_PIXELFORMAT_BGR24, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGR },
{ SDL_PIXELFORMAT_YV12, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_YV12 },
@@ -381,10 +388,8 @@ static struct sdl_video_format {
{ SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_YUY2 },
{ SDL_PIXELFORMAT_UYVY, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_UYVY },
{ SDL_PIXELFORMAT_YVYU, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_YVYU },
#if SDL_VERSION_ATLEAST(2,0,4)
{ SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_NV12 },
{ SDL_PIXELFORMAT_NV21, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_NV21 },
#endif
{ SDL_PIXELFORMAT_NV21, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_NV21 }
};
static uint32_t sdl_format_to_id(SDL_PixelFormat format)
@@ -507,14 +512,24 @@ static bool PIPEWIRECAMERA_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *
&device->hidden->stream_listener,
&stream_events, device);
params[n_params++] = spa_pod_builder_add_object(&b,
SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat,
SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video),
SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw),
SPA_FORMAT_VIDEO_format, SPA_POD_Id(sdl_format_to_id(spec->format)),
SPA_FORMAT_VIDEO_size, SPA_POD_Rectangle(&SPA_RECTANGLE(spec->width, spec->height)),
SPA_FORMAT_VIDEO_framerate,
SPA_POD_Fraction(&SPA_FRACTION(spec->framerate_numerator, spec->framerate_denominator)));
if (spec->format == SDL_PIXELFORMAT_MJPG) {
params[n_params++] = spa_pod_builder_add_object(&b,
SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat,
SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video),
SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_mjpg),
SPA_FORMAT_VIDEO_size, SPA_POD_Rectangle(&SPA_RECTANGLE(spec->width, spec->height)),
SPA_FORMAT_VIDEO_framerate,
SPA_POD_Fraction(&SPA_FRACTION(spec->framerate_numerator, spec->framerate_denominator)));
} else {
params[n_params++] = spa_pod_builder_add_object(&b,
SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat,
SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video),
SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw),
SPA_FORMAT_VIDEO_format, SPA_POD_Id(sdl_format_to_id(spec->format)),
SPA_FORMAT_VIDEO_size, SPA_POD_Rectangle(&SPA_RECTANGLE(spec->width, spec->height)),
SPA_FORMAT_VIDEO_framerate,
SPA_POD_Fraction(&SPA_FRACTION(spec->framerate_numerator, spec->framerate_denominator)));
}
if ((res = PIPEWIRE_pw_stream_connect(device->hidden->stream,
PW_DIRECTION_INPUT,
@@ -580,7 +595,11 @@ static SDL_CameraFrameResult PIPEWIRECAMERA_AcquireFrame(SDL_Camera *device, SDL
*timestampNS = SDL_GetTicksNS();
#endif
frame->pixels = b->buffer->datas[0].data;
frame->pitch = b->buffer->datas[0].chunk->stride;
if (frame->format == SDL_PIXELFORMAT_MJPG) {
frame->pitch = b->buffer->datas[0].chunk->size;
} else {
frame->pitch = b->buffer->datas[0].chunk->stride;
}
PIPEWIRE_pw_thread_loop_unlock(hotplug.loop);
@@ -619,7 +638,7 @@ static void collect_rates(CameraFormatAddData *data, struct param *p, SDL_PixelF
switch (choice) {
case SPA_CHOICE_None:
n_vals = 1;
SPA_FALLTHROUGH;
SDL_FALLTHROUGH;
case SPA_CHOICE_Enum:
for (i = 0; i < n_vals; i++) {
if (!SDL_AddCameraFormat(data, sdlfmt, colorspace, size->width, size->height, rates[i].num, rates[i].denom)) {
@@ -652,7 +671,7 @@ static void collect_size(CameraFormatAddData *data, struct param *p, SDL_PixelFo
switch (choice) {
case SPA_CHOICE_None:
n_vals = 1;
SPA_FALLTHROUGH;
SDL_FALLTHROUGH;
case SPA_CHOICE_Enum:
for (i = 0; i < n_vals; i++) {
collect_rates(data, p, sdlfmt, colorspace, &rectangles[i]);
@@ -664,7 +683,7 @@ static void collect_size(CameraFormatAddData *data, struct param *p, SDL_PixelFo
}
}
static void collect_format(CameraFormatAddData *data, struct param *p)
static void collect_raw(CameraFormatAddData *data, struct param *p)
{
const struct spa_pod_prop *prop;
SDL_PixelFormat sdlfmt;
@@ -684,7 +703,7 @@ static void collect_format(CameraFormatAddData *data, struct param *p)
switch (choice) {
case SPA_CHOICE_None:
n_vals = 1;
SPA_FALLTHROUGH;
SDL_FALLTHROUGH;
case SPA_CHOICE_Enum:
for (i = 0; i < n_vals; i++) {
id_to_sdl_format(ids[i], &sdlfmt, &colorspace);
@@ -695,7 +714,47 @@ static void collect_format(CameraFormatAddData *data, struct param *p)
}
break;
default:
SDL_Log("CAMERA: unimplemented choice:%d", choice);
SDL_Log("CAMERA: unimplemented choice: %d", choice);
break;
}
}
static void collect_format(CameraFormatAddData *data, struct param *p)
{
const struct spa_pod_prop *prop;
struct spa_pod * values;
uint32_t i, n_vals, choice, *ids;
prop = spa_pod_find_prop(p->param, NULL, SPA_FORMAT_mediaSubtype);
if (prop == NULL)
return;
values = spa_pod_get_values(&prop->value, &n_vals, &choice);
if (values->type != SPA_TYPE_Id || n_vals == 0)
return;
ids = SPA_POD_BODY(values);
switch (choice) {
case SPA_CHOICE_None:
n_vals = 1;
SDL_FALLTHROUGH;
case SPA_CHOICE_Enum:
for (i = 0; i < n_vals; i++) {
switch (ids[i]) {
case SPA_MEDIA_SUBTYPE_raw:
collect_raw(data, p);
break;
case SPA_MEDIA_SUBTYPE_mjpg:
collect_size(data, p, SDL_PIXELFORMAT_MJPG, SDL_COLORSPACE_JPEG);
break;
default:
// Unsupported format
break;
}
}
break;
default:
SDL_Log("CAMERA: unimplemented choice: %d", choice);
break;
}
}
@@ -976,10 +1035,21 @@ static bool pipewire_server_version_at_least(int major, int minor, int patch)
static bool hotplug_loop_init(void)
{
int res;
#ifdef SDL_USE_LIBDBUS
int fd;
fd = SDL_DBus_CameraPortalRequestAccess();
if (fd == -1)
return false;
#endif
spa_list_init(&hotplug.global_list);
#if PW_CHECK_VERSION(0, 3, 75)
hotplug.have_1_0_5 = PIPEWIRE_pw_check_library_version(1,0,5);
#else
hotplug.have_1_0_5 = false;
#endif
hotplug.loop = PIPEWIRE_pw_thread_loop_new("SDLPwCameraPlug", NULL);
if (!hotplug.loop) {
@@ -990,8 +1060,15 @@ static bool hotplug_loop_init(void)
if (!hotplug.context) {
return SDL_SetError("Pipewire: Failed to create hotplug detection context (%i)", errno);
}
#ifdef SDL_USE_LIBDBUS
if (fd >= 0) {
hotplug.core = PIPEWIRE_pw_context_connect_fd(hotplug.context, fd, NULL, 0);
} else {
hotplug.core = PIPEWIRE_pw_context_connect(hotplug.context, NULL, 0);
}
#else
hotplug.core = PIPEWIRE_pw_context_connect(hotplug.context, NULL, 0);
#endif
if (!hotplug.core) {
return SDL_SetError("Pipewire: Failed to connect hotplug detection context (%i)", errno);
}

View File

@@ -128,10 +128,11 @@ static SDL_CameraFrameResult V4L2_AcquireFrame(SDL_Camera *device, SDL_Surface *
const io_method io = device->hidden->io;
size_t size = device->hidden->buffers[0].length;
struct v4l2_buffer buf;
ssize_t amount;
switch (io) {
case IO_METHOD_READ:
if (read(fd, device->hidden->buffers[0].start, size) == -1) {
if ((amount = read(fd, device->hidden->buffers[0].start, size)) == -1) {
switch (errno) {
case EAGAIN:
return SDL_CAMERA_FRAME_SKIP;
@@ -148,7 +149,11 @@ static SDL_CameraFrameResult V4L2_AcquireFrame(SDL_Camera *device, SDL_Surface *
*timestampNS = SDL_GetTicksNS(); // oh well, close enough.
frame->pixels = device->hidden->buffers[0].start;
frame->pitch = device->hidden->driver_pitch;
if (device->hidden->driver_pitch) {
frame->pitch = device->hidden->driver_pitch;
} else {
frame->pitch = (int)amount;
}
break;
case IO_METHOD_MMAP:
@@ -178,7 +183,11 @@ static SDL_CameraFrameResult V4L2_AcquireFrame(SDL_Camera *device, SDL_Surface *
}
frame->pixels = device->hidden->buffers[buf.index].start;
frame->pitch = device->hidden->driver_pitch;
if (device->hidden->driver_pitch) {
frame->pitch = device->hidden->driver_pitch;
} else {
frame->pitch = buf.bytesused;
}
device->hidden->buffers[buf.index].available = 1;
*timestampNS = (((Uint64) buf.timestamp.tv_sec) * SDL_NS_PER_SECOND) + SDL_US_TO_NS(buf.timestamp.tv_usec);
@@ -222,7 +231,11 @@ static SDL_CameraFrameResult V4L2_AcquireFrame(SDL_Camera *device, SDL_Surface *
}
frame->pixels = (void*)buf.m.userptr;
frame->pitch = device->hidden->driver_pitch;
if (device->hidden->driver_pitch) {
frame->pitch = device->hidden->driver_pitch;
} else {
frame->pitch = buf.bytesused;
}
device->hidden->buffers[i].available = 1;
*timestampNS = (((Uint64) buf.timestamp.tv_sec) * SDL_NS_PER_SECOND) + SDL_US_TO_NS(buf.timestamp.tv_usec);
@@ -404,10 +417,15 @@ static void format_v4l2_to_sdl(Uint32 fmt, SDL_PixelFormat *format, SDL_Colorspa
switch (fmt) {
#define CASE(x, y, z) case x: *format = y; *colorspace = z; return
CASE(V4L2_PIX_FMT_YUYV, SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED);
CASE(V4L2_PIX_FMT_MJPEG, SDL_PIXELFORMAT_MJPG, SDL_COLORSPACE_SRGB);
#undef CASE
default:
#if DEBUG_CAMERA
SDL_Log("CAMERA: Unknown format V4L2_PIX_FORMAT '%d'", fmt);
SDL_Log("CAMERA: Unknown format V4L2_PIX_FORMAT '%c%c%c%c' (0x%x)",
(char)(Uint8)(fmt >> 0),
(char)(Uint8)(fmt >> 8),
(char)(Uint8)(fmt >> 16),
(char)(Uint8)(fmt >> 24), fmt);
#endif
break;
}
@@ -420,10 +438,10 @@ static Uint32 format_sdl_to_v4l2(SDL_PixelFormat fmt)
switch (fmt) {
#define CASE(y, x) case x: return y
CASE(V4L2_PIX_FMT_YUYV, SDL_PIXELFORMAT_YUY2);
CASE(V4L2_PIX_FMT_MJPEG, SDL_PIXELFORMAT_UNKNOWN);
CASE(V4L2_PIX_FMT_MJPEG, SDL_PIXELFORMAT_MJPG);
#undef CASE
default:
return true;
return 0;
}
}