applemedia: always fill GstBuffers with GstMemory
Always fill buffers with our custom memory. The custom memory will avoid mapping CV/CM buffers unless necessary.
parent 8577224c74
commit f6154b4cc0
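The "avoid mapping CV/CM buffers unless necessary" part relies on the plugin's custom GstAppleCoreVideoMemory: every GstBuffer is always filled with GstMemory objects wrapping the CVPixelBuffer planes, but the pixel buffer is only locked when that memory is actually mapped, so GL paths that consume the IOSurface or texture cache never touch the pixels. The sketch below is a minimal illustration of that idea under those assumptions, not the plugin's code; MyCoreVideoMemory and the my_core_video_mem_* names are hypothetical.

/* Illustrative sketch only (not the plugin's implementation): a GstMemory that
 * wraps one CVPixelBuffer plane and defers CVPixelBufferLockBaseAddress()
 * until gst_memory_map() is called. Type and function names are hypothetical. */

#include <gst/gst.h>
#include <CoreVideo/CoreVideo.h>

typedef struct
{
  GstMemory mem;            /* parent GstMemory, must be the first member */
  CVPixelBufferRef pixbuf;  /* retained backing CVPixelBuffer */
  size_t plane;             /* plane index exposed by this memory */
} MyCoreVideoMemory;

/* GstMemoryMapFunction: lock the pixel buffer only when a consumer really
 * needs CPU access to the pixels. */
static gpointer
my_core_video_mem_map (GstMemory * gmem, gsize maxsize, GstMapFlags flags)
{
  MyCoreVideoMemory *mem = (MyCoreVideoMemory *) gmem;

  /* A real implementation would pass kCVPixelBufferLock_ReadOnly for
   * read-only maps and remember the flags for the matching unlock. */
  if (CVPixelBufferLockBaseAddress (mem->pixbuf, 0) != kCVReturnSuccess)
    return NULL;

  return CVPixelBufferGetBaseAddressOfPlane (mem->pixbuf, mem->plane);
}

/* GstMemoryUnmapFunction: balance the lock taken in the map function. */
static void
my_core_video_mem_unmap (GstMemory * gmem)
{
  MyCoreVideoMemory *mem = (MyCoreVideoMemory *) gmem;

  CVPixelBufferUnlockBaseAddress (mem->pixbuf, 0);
}

A custom GstAllocator would install such functions as its mem_map/mem_unmap hooks; the diff below then appends one wrapped memory per plane via gst_apple_core_video_memory_new_wrapped () unconditionally, instead of only when a map flag was set.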
@@ -1079,7 +1079,7 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
     return NULL;
   }
 
-  buf = gst_core_media_buffer_new (cmbuf, FALSE, TRUE);
+  buf = gst_core_media_buffer_new (cmbuf, FALSE);
   CFRelease (cmbuf);
   if (buf == NULL)
     return NULL;
@@ -939,7 +939,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     }
   }
 
-  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta, textureCache == NULL);
+  *buf = gst_core_media_buffer_new (sbuf, useVideoMeta);
   if (*buf == NULL) {
     CFRelease (sbuf);
     return GST_FLOW_ERROR;
@@ -228,7 +228,7 @@ gst_video_info_init_from_pixel_buffer (GstVideoInfo * info,
 
 GstBuffer *
 gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
-    gboolean use_video_meta, gboolean map)
+    gboolean use_video_meta)
 {
   CVImageBufferRef image_buf;
   CMBlockBufferRef block_buf;
@@ -250,11 +250,11 @@ gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
       goto error;
     }
 
-    gst_core_video_wrap_pixel_buffer (buf, &info, pixel_buf, &has_padding, map);
+    gst_core_video_wrap_pixel_buffer (buf, &info, pixel_buf, &has_padding);
 
     /* If the video meta API is not supported, remove padding by
      * copying the core media buffer to a system memory buffer */
-    if (map && has_padding && !use_video_meta) {
+    if (has_padding && !use_video_meta) {
       GstBuffer *copy_buf;
       copy_buf = gst_core_media_buffer_new_from_buffer (buf, &info);
       if (!copy_buf) {
@@ -266,7 +266,7 @@ gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
     }
 
   } else if (block_buf != NULL) {
-    if (map && !gst_core_media_buffer_wrap_block_buffer (buf, block_buf)) {
+    if (!gst_core_media_buffer_wrap_block_buffer (buf, block_buf)) {
       goto error;
     }
   } else {
@@ -43,8 +43,7 @@ typedef struct _GstCoreMediaMeta
 
 
 GstBuffer * gst_core_media_buffer_new (CMSampleBufferRef sample_buf,
-    gboolean use_video_meta,
-    gboolean map);
+    gboolean use_video_meta);
 CVPixelBufferRef gst_core_media_buffer_get_pixel_buffer
     (GstBuffer * buf);
 GType gst_core_media_meta_api_get_type (void);
@@ -85,22 +85,19 @@ gst_core_video_meta_get_info (void)
 
 void
 gst_core_video_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
-    CVPixelBufferRef pixel_buf, gboolean * has_padding, gboolean map)
+    CVPixelBufferRef pixel_buf, gboolean * has_padding)
 {
   guint n_planes;
   gsize offset[GST_VIDEO_MAX_PLANES] = { 0 };
   gint stride[GST_VIDEO_MAX_PLANES] = { 0 };
   UInt32 size;
+  GstAppleCoreVideoPixelBuffer *gpixbuf;
 
+  gpixbuf = gst_apple_core_video_pixel_buffer_new (pixel_buf);
   *has_padding = FALSE;
 
   if (CVPixelBufferIsPlanar (pixel_buf)) {
     gint i, size = 0, plane_offset = 0;
-    GstAppleCoreVideoPixelBuffer *gpixbuf;
-
-    if (map) {
-      gpixbuf = gst_apple_core_video_pixel_buffer_new (pixel_buf);
-    }
 
     n_planes = CVPixelBufferGetPlaneCount (pixel_buf);
     for (i = 0; i < n_planes; i++) {
@@ -114,33 +111,22 @@ gst_core_video_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
       offset[i] = plane_offset;
       plane_offset += size;
 
-      if (map) {
-        gst_buffer_append_memory (buf,
-            gst_apple_core_video_memory_new_wrapped (gpixbuf, i, size));
-      }
-    }
-
-    if (map) {
-      gst_apple_core_video_pixel_buffer_unref (gpixbuf);
+      gst_buffer_append_memory (buf,
+          gst_apple_core_video_memory_new_wrapped (gpixbuf, i, size));
     }
   } else {
 
     n_planes = 1;
     stride[0] = CVPixelBufferGetBytesPerRow (pixel_buf);
     offset[0] = 0;
     size = stride[0] * CVPixelBufferGetHeight (pixel_buf);
 
-    if (map) {
-      GstAppleCoreVideoPixelBuffer *gpixbuf;
-
-      gpixbuf = gst_apple_core_video_pixel_buffer_new (pixel_buf);
-      gst_buffer_append_memory (buf,
-          gst_apple_core_video_memory_new_wrapped (gpixbuf,
-              GST_APPLE_CORE_VIDEO_NO_PLANE, size));
-      gst_apple_core_video_pixel_buffer_unref (gpixbuf);
-    }
+    gst_buffer_append_memory (buf,
+        gst_apple_core_video_memory_new_wrapped (gpixbuf,
+            GST_APPLE_CORE_VIDEO_NO_PLANE, size));
   }
 
+  gst_apple_core_video_pixel_buffer_unref (gpixbuf);
 
   if (info) {
     GstVideoMeta *video_meta;
@@ -152,8 +138,7 @@ gst_core_video_wrap_pixel_buffer (GstBuffer * buf, GstVideoInfo * info,
 }
 
 GstBuffer *
-gst_core_video_buffer_new (CVBufferRef cvbuf, GstVideoInfo * vinfo,
-    gboolean map)
+gst_core_video_buffer_new (CVBufferRef cvbuf, GstVideoInfo * vinfo)
 {
   CVPixelBufferRef pixbuf = NULL;
   GstBuffer *buf;
@@ -174,7 +159,7 @@ gst_core_video_buffer_new (CVBufferRef cvbuf, GstVideoInfo * vinfo,
   meta->cvbuf = CVBufferRetain (cvbuf);
   meta->pixbuf = pixbuf;
 
-  gst_core_video_wrap_pixel_buffer (buf, vinfo, pixbuf, &has_padding, map);
+  gst_core_video_wrap_pixel_buffer (buf, vinfo, pixbuf, &has_padding);
 
   return buf;
 }
@@ -41,13 +41,11 @@ typedef struct _GstCoreVideoMeta
 } GstCoreVideoMeta;
 
 GstBuffer * gst_core_video_buffer_new (CVBufferRef cvbuf,
-    GstVideoInfo *info,
-    gboolean map);
+    GstVideoInfo *info);
 void gst_core_video_wrap_pixel_buffer (GstBuffer * buf,
     GstVideoInfo * info,
     CVPixelBufferRef pixel_buf,
-    gboolean * has_padding,
-    gboolean map);
+    gboolean * has_padding);
 GType gst_core_video_meta_api_get_type (void);
 
 G_END_DECLS
@@ -434,7 +434,7 @@ openFailed:
   [queueLock unlockWithCondition:
       ([queue count] == 0) ? NO_FRAMES : HAS_FRAME_OR_STOP_REQUEST];
 
-  *buf = gst_core_video_buffer_new ((CVBufferRef)frame, NULL, TRUE);
+  *buf = gst_core_video_buffer_new ((CVBufferRef)frame, NULL);
   CVBufferRelease (frame);
 
   [self timestampBuffer:*buf];
@@ -145,20 +145,20 @@ cv_pixel_buffer_from_gst_buffer (GstBuffer * buffer)
 }
 
 #if HAVE_IOS
-static gboolean
-gl_mem_from_buffer (GstVideoTextureCache * cache,
-    GstBuffer * buffer, GstMemory **mem1, GstMemory **mem2)
+static void
+_do_get_gl_buffer (GstGLContext * context, ContextThreadData * data)
 {
   CVOpenGLESTextureRef texture = NULL;
-  CVPixelBufferRef pixel_buf = cv_pixel_buffer_from_gst_buffer (buffer);
+  GstVideoTextureCache *cache = data->cache;
+  CVPixelBufferRef pixel_buf = cv_pixel_buffer_from_gst_buffer (data->input_buffer);
   GstGLTextureTarget gl_target;
   GstGLBaseMemoryAllocator *base_mem_alloc;
   GstGLVideoAllocationParams *params;
+  GstBuffer *output_buffer;
 
   base_mem_alloc = GST_GL_BASE_MEMORY_ALLOCATOR (gst_gl_memory_allocator_get_default (cache->ctx));
-  *mem1 = NULL;
-  *mem2 = NULL;
+  output_buffer = gst_buffer_new ();
+  gst_buffer_copy_into (output_buffer, data->input_buffer, GST_BUFFER_COPY_METADATA, 0, -1);
 
   CVOpenGLESTextureCacheFlush (cache->cache, 0);
 
@@ -178,8 +178,9 @@ gl_mem_from_buffer (GstVideoTextureCache * cache,
           CVOpenGLESTextureGetName (texture), texture,
           (GDestroyNotify) CFRelease);
 
-      *mem1 = (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
-          (GstGLAllocationParams *) params);
+      gst_buffer_append_memory (output_buffer,
+          (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
+              (GstGLAllocationParams *) params));
       gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
       break;
     case GST_VIDEO_FORMAT_NV12: {
@@ -204,8 +205,9 @@ gl_mem_from_buffer (GstVideoTextureCache * cache,
           CVOpenGLESTextureGetName (texture), texture,
           (GDestroyNotify) CFRelease);
 
-      *mem1 = (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
-          (GstGLAllocationParams *) params);
+      gst_buffer_append_memory (output_buffer,
+          (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
+              (GstGLAllocationParams *) params));
       gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
 
       textype = gst_gl_texture_type_from_format (cache->ctx, GST_VIDEO_FORMAT_NV12, 1);
@@ -225,8 +227,9 @@ gl_mem_from_buffer (GstVideoTextureCache * cache,
           CVOpenGLESTextureGetName (texture), texture,
           (GDestroyNotify) CFRelease);
 
-      *mem2 = (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
-          (GstGLAllocationParams *) params);
+      gst_buffer_append_memory (output_buffer,
+          (GstMemory *) gst_gl_base_memory_alloc (base_mem_alloc,
+              (GstGLAllocationParams *) params));
       gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
       break;
     }
@@ -237,21 +240,23 @@ gl_mem_from_buffer (GstVideoTextureCache * cache,
 
   gst_object_unref (base_mem_alloc);
 
-  return TRUE;
+  data->output_buffer = output_buffer;
 
+  return;
 
 error:
-  return FALSE;
+  data->output_buffer = NULL;
 }
 #else /* !HAVE_IOS */
-static gboolean
-gl_mem_from_buffer (GstVideoTextureCache * cache,
-    GstBuffer * buffer, GstMemory **mem1, GstMemory **mem2)
+static void
+_do_get_gl_buffer (GstGLContext * context, ContextThreadData * data)
 {
-  CVPixelBufferRef pixel_buf = cv_pixel_buffer_from_gst_buffer (buffer);
-  IOSurfaceRef surface = CVPixelBufferGetIOSurface(pixel_buf);
+  GstVideoTextureCache *cache = data->cache;
+  CVPixelBufferRef pixel_buf = cv_pixel_buffer_from_gst_buffer (data->input_buffer);
+  IOSurfaceRef surface = CVPixelBufferGetIOSurface (pixel_buf);
 
-  *mem1 = *mem2 = NULL;
+  data->output_buffer = gst_buffer_new ();
+  gst_buffer_copy_into (data->output_buffer, data->input_buffer, GST_BUFFER_COPY_METADATA, 0, -1);
   for (int i = 0; i < GST_VIDEO_INFO_N_PLANES (&cache->input_info); i++) {
     GstIOSurfaceMemory *mem;
 
@@ -260,41 +265,21 @@ gl_mem_from_buffer (GstVideoTextureCache * cache,
         surface, GST_GL_TEXTURE_TARGET_RECTANGLE, &cache->input_info,
         i, NULL, pixel_buf, (GDestroyNotify) CFRelease);
 
-    if (i == 0)
-      *mem1 = (GstMemory *) mem;
-    else
-      *mem2 = (GstMemory *) mem;
+    gst_buffer_append_memory (data->output_buffer, (GstMemory *) mem);
   }
-
-  return TRUE;
 }
 #endif
 
-static void
-_do_get_gl_buffer (GstGLContext * context, ContextThreadData * data)
-{
-  GstMemory *mem1 = NULL, *mem2 = NULL;
-  GstVideoTextureCache *cache = data->cache;
-  GstBuffer *buffer = data->input_buffer;
-
-  if (!gl_mem_from_buffer (cache, buffer, &mem1, &mem2)) {
-    gst_buffer_unref (buffer);
-    return;
-  }
-
-  gst_buffer_append_memory (buffer, mem1);
-  if (mem2)
-    gst_buffer_append_memory (buffer, mem2);
-
-  data->output_buffer = buffer;
-}
-
 GstBuffer *
 gst_video_texture_cache_get_gl_buffer (GstVideoTextureCache * cache,
     GstBuffer * cv_buffer)
 {
   ContextThreadData data = {cache, cv_buffer, NULL};
 
   gst_gl_context_thread_add (cache->ctx,
       (GstGLContextThreadFunc) _do_get_gl_buffer, &data);
 
+  gst_buffer_unref (cv_buffer);
+
   return data.output_buffer;
 }
@@ -767,9 +767,7 @@ gst_vtdec_session_output_callback (void *decompression_output_ref_con,
     GST_WARNING_OBJECT (vtdec, "Output state not configured, release buffer");
     frame->flags &= VTDEC_FRAME_FLAG_SKIP;
   } else {
-    buf =
-        gst_core_video_buffer_new (image_buffer, &state->info,
-        vtdec->texture_cache == NULL);
+    buf = gst_core_video_buffer_new (image_buffer, &state->info);
     gst_video_codec_state_unref (state);
     GST_BUFFER_PTS (buf) = pts.value;
     GST_BUFFER_DURATION (buf) = duration.value;
@@ -1295,7 +1295,7 @@ gst_vtenc_enqueue_buffer (void *outputCallbackRefCon,
 
   /* We are dealing with block buffers here, so we don't need
    * to enable the use of the video meta API on the core media buffer */
-  frame->output_buffer = gst_core_media_buffer_new (sampleBuffer, FALSE, TRUE);
+  frame->output_buffer = gst_core_media_buffer_new (sampleBuffer, FALSE);
 
 beach:
   /* needed anyway so the frame will be released */