diff --git a/gst-libs/gst/vulkan/gstvkformat.c b/gst-libs/gst/vulkan/gstvkformat.c
new file mode 100644
index 0000000000..c57ba37edf
--- /dev/null
+++ b/gst-libs/gst/vulkan/gstvkformat.c
@@ -0,0 +1,423 @@
+/*
+ * GStreamer
+ * Copyright (C) 2019 Matthew Waters
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstvkformat.h"
+
+#define FORMAT(name, scaling) \
+  G_PASTE(G_PASTE(VK_FORMAT_,name),G_PASTE(_,scaling)), G_STRINGIFY(G_PASTE(G_PASTE(name,_),scaling)), G_PASTE(GST_VULKAN_FORMAT_SCALING_, scaling)
+#define FLAG(v) \
+  G_PASTE(GST_VULKAN_FORMAT_FLAG_,v)
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define NE FLAG(LE)
+#define OE 0
+#elif G_BYTE_ORDER == G_BIG_ENDIAN
+#define NE 0
+#define OE FLAG(LE)
+#endif
+
+/* depths: bits, n_components, shift, depth */
+#define DPTH0 0, 0, { 0, 0, 0, 0 }, { 0, 0, 0, 0 }
+#define DPTH8 8, 1, { 0, 0, 0, 0 }, { 8, 0, 0, 0 }
+#define DPTH88 8, 2, { 0, 0, 0, 0 }, { 8, 8, 0, 0 }
+#define DPTH8_32 8, 2, { 0, 0, 0, 0 }, { 8, 32, 0, 0 }
+#define DPTH888 8, 3, { 0, 0, 0, 0 }, { 8, 8, 8, 0 }
+#define DPTH8888 8, 4, { 0, 0, 0, 0 }, { 8, 8, 8, 8 }
+#define DPTH8880 8, 4, { 0, 0, 0, 0 }, { 8, 8, 8, 0 }
+#define DPTH10 10, 1, { 0, 0, 0, 0 }, { 10, 0, 0, 0 }
+#define DPTH10_10_10 10, 3, { 0, 0, 0, 0 }, { 10, 10, 10, 0 }
+#define DPTH10_10_10_10 10, 4, { 0, 0, 0, 0 }, { 10, 10, 10, 10 }
+#define DPTH10_10_10_HI 16, 3, { 6, 6, 6, 0 }, { 10, 10, 10, 0 }
+#define DPTH10_10_10_2 10, 4, { 0, 0, 0, 0 }, { 10, 10, 10, 2 }
+#define DPTH12_12_12 12, 3, { 0, 0, 0, 0 }, { 12, 12, 12, 0 }
+#define DPTH12_12_12_12 12, 4, { 0, 0, 0, 0 }, { 12, 12, 12, 12 }
+#define DPTH16 16, 1, { 0, 0, 0, 0 }, { 16, 0, 0, 0 }
+#define DPTH16_16_16 16, 3, { 0, 0, 0, 0 }, { 16, 16, 16, 0 }
+#define DPTH16_16_16_16 16, 4, { 0, 0, 0, 0 }, { 16, 16, 16, 16 }
+#define DPTH555 16, 3, { 10, 5, 0, 0 }, { 5, 5, 5, 0 }
+#define DPTH565 16, 3, { 11, 5, 0, 0 }, { 5, 6, 5, 0 }
+
+/* pixel strides */
+#define PSTR0 { 0, 0, 0, 0 }
+#define PSTR1 { 1, 0, 0, 0 }
+#define PSTR14 { 1, 4, 0, 0 }
+#define PSTR111 { 1, 1, 1, 0 }
+#define PSTR1111 { 1, 1, 1, 1 }
+#define PSTR122 { 1, 2, 2, 0 }
+#define PSTR2 { 2, 0, 0, 0 }
+#define PSTR222 { 2, 2, 2, 0 }
+#define PSTR2222 { 2, 2, 2, 2 }
+#define PSTR244 { 2, 4, 4, 0 }
+#define PSTR4 { 4, 4, 0, 0 }
+#define PSTR44 { 4, 4, 0, 0 }
+#define PSTR444 { 4, 4, 4, 0 }
+#define PSTR4444 { 4, 4, 4, 4 }
+#define PSTR333 { 3, 3, 3, 0 }
+#define PSTR488 { 4, 8, 8, 0 }
+#define PSTR8888 { 8, 8, 8, 8 }
+
+/* planes, in what plane do we find component N */
+#define PLANE_NA 0, { 0, 0, 0, 0 }
+#define PLANE0 1, { 0, 0, 0, 0 }
+#define PLANE01 2, { 0, 1, 0, 0 }
+#define PLANE011 2, { 0, 1, 1, 0 }
+#define PLANE012 3, { 0, 1, 2, 0 }
+#define PLANE0123 4, { 0, 1, 2, 3 }
+#define PLANE021 3, { 0, 2, 1, 0 }
+#define PLANE201 3, { 2, 0, 1, 0 }
+#define PLANE2013 4, { 2, 0, 1, 3 }
+
+/* offsets, what's the offset of the component N in the plane */
+#define OFFS0 { 0, 0, 0, 0 }
+#define OFFS013 { 0, 1, 3, 0 }
+#define OFFS102 { 1, 0, 2, 0 }
+#define OFFS1230 { 1, 2, 3, 0 }
+#define OFFS012 { 0, 1, 2, 0 }
+#define OFFS210 { 2, 1, 0, 0 }
+#define OFFS123 { 1, 2, 3, 0 }
+#define OFFS321 { 3, 2, 1, 0 }
+#define OFFS0123 { 0, 1, 2, 3 }
+#define OFFS2103 { 2, 1, 0, 3 }
+#define OFFS3210 { 3, 2, 1, 0 }
+#define OFFS031 { 0, 3, 1, 0 }
+#define OFFS204 { 2, 0, 4, 0 }
+#define OFFS001 { 0, 0, 1, 0 }
+#define OFFS010 { 0, 1, 0, 0 }
+#define OFFS01 { 0, 1, 0, 0 }
+#define OFFS104 { 1, 0, 4, 0 }
+#define OFFS2460 { 2, 4, 6, 0 }
+
+/* subsampling, w_sub, h_sub */
+#define SUB410 { 0, 2, 2, 0 }, { 0, 2, 2, 0 }
+#define SUB411 { 0, 2, 2, 0 }, { 0, 0, 0, 0 }
+#define SUB420 { 0, 1, 1, 0 }, { 0, 1, 1, 0 }
+#define SUB422 { 0, 1, 1, 0 }, { 0, 0, 0, 0 }
+#define SUB4 { 0, 0, 0, 0 }, { 0, 0, 0, 0 }
+#define SUB44 { 0, 0, 0, 0 }, { 0, 0, 0, 0 }
+#define SUB444 { 0, 0, 0, 0 }, { 0, 0, 0, 0 }
+#define SUB4444 { 0, 0, 0, 0 }, { 0, 0, 0, 0 }
+#define SUB4204 { 0, 1, 1, 0 }, { 0, 1, 1, 0 }
+#define SUB4224 { 0, 1, 1, 0 }, { 0, 0, 0, 0 }
+
+static GstVulkanFormatInfo formats[] = {
+  {FORMAT (R8G8B8A8, UNORM), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888, PSTR4444,
+      PLANE0, OFFS0123, SUB444},
+  {FORMAT (R8G8B8A8, SNORM), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888, PSTR4444,
+      PLANE0, OFFS0123, SUB444},
+  {FORMAT (R8G8B8A8, USCALED), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888,
+      PSTR4444, PLANE0, OFFS0123, SUB444},
+  {FORMAT (R8G8B8A8, SSCALED), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888,
+      PSTR4444, PLANE0, OFFS0123, SUB444},
+  {FORMAT (R8G8B8A8, UINT), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888, PSTR4444,
+      PLANE0, OFFS0123, SUB444},
+  {FORMAT (R8G8B8A8, SINT), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888, PSTR4444,
+      PLANE0, OFFS0123, SUB444},
+  {FORMAT (R8G8B8A8, SRGB), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888, PSTR4444,
+      PLANE0, OFFS0123, SUB444},
+  {FORMAT (B8G8R8A8, UNORM), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888, PSTR4444,
+      PLANE0, OFFS2103, SUB444},
+  {FORMAT (B8G8R8A8, SNORM), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888, PSTR4444,
+      PLANE0, OFFS2103, SUB444},
+  {FORMAT (B8G8R8A8, USCALED), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888,
+      PSTR4444, PLANE0, OFFS2103, SUB444},
+  {FORMAT (B8G8R8A8, SSCALED), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888,
+      PSTR4444, PLANE0, OFFS2103, SUB444},
+  {FORMAT (B8G8R8A8, UINT), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888, PSTR4444,
+      PLANE0, OFFS2103, SUB444},
+  {FORMAT (B8G8R8A8, SINT), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888, PSTR4444,
+      PLANE0, OFFS2103, SUB444},
+  {FORMAT (B8G8R8A8, SRGB), FLAG (RGB) | FLAG (ALPHA) | NE, DPTH8888, PSTR4444,
+      PLANE0, OFFS2103, SUB444},
+  {FORMAT (R8G8B8, UNORM), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS012,
+      SUB444},
+  {FORMAT (R8G8B8, SNORM), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS012,
+      SUB444},
+  {FORMAT (R8G8B8, USCALED), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS012,
+      SUB444},
+  {FORMAT (R8G8B8, SSCALED), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS012,
+      SUB444},
+  {FORMAT (R8G8B8, UINT), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS012,
+      SUB444},
+  {FORMAT (R8G8B8, SINT), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS012,
+      SUB444},
+  {FORMAT (R8G8B8, SRGB), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS012,
+      SUB444},
+  {FORMAT (B8G8R8, UNORM), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS210,
+      SUB444},
+  {FORMAT (B8G8R8, SNORM), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS210,
+      SUB444},
+  {FORMAT (B8G8R8, USCALED), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS210,
+      SUB444},
+  {FORMAT (B8G8R8, SSCALED), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS210,
+      SUB444},
+  {FORMAT (B8G8R8, UINT), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS210,
+      SUB444},
+  {FORMAT (B8G8R8, SINT), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS210,
+      SUB444},
+  {FORMAT (B8G8R8, SRGB), FLAG (RGB) | NE, DPTH888, PSTR444, PLANE0, OFFS210,
+      SUB444},
+  {FORMAT (R8G8, UNORM), FLAG (RGB) | NE, DPTH88, PSTR44, PLANE0, OFFS01,
+      SUB44},
+  {FORMAT (R8G8, SNORM), FLAG (RGB) | NE, DPTH88, PSTR44, PLANE0, OFFS01,
+      SUB44},
+  {FORMAT (R8G8, USCALED), FLAG (RGB) | NE, DPTH88, PSTR44, PLANE0, OFFS01,
+      SUB44},
+  {FORMAT (R8G8, SSCALED), FLAG (RGB) | NE, DPTH88, PSTR44, PLANE0, OFFS01,
+      SUB44},
+  {FORMAT (R8G8, UINT), FLAG (RGB) | NE, DPTH88, PSTR44, PLANE0, OFFS01, SUB44},
+  {FORMAT (R8G8, SINT), FLAG (RGB) | NE, DPTH88, PSTR44, PLANE0, OFFS01, SUB44},
+  {FORMAT (R8G8, SRGB), FLAG (RGB) | NE, DPTH88, PSTR44, PLANE0, OFFS01, SUB44},
+  {FORMAT (R8, UNORM), FLAG (RGB) | NE, DPTH8, PSTR4, PLANE0, OFFS0, SUB4},
+  {FORMAT (R8, SNORM), FLAG (RGB) | NE, DPTH8, PSTR4, PLANE0, OFFS0, SUB4},
+  {FORMAT (R8, USCALED), FLAG (RGB) | NE, DPTH8, PSTR4, PLANE0, OFFS0, SUB4},
+  {FORMAT (R8, SSCALED), FLAG (RGB) | NE, DPTH8, PSTR4, PLANE0, OFFS0, SUB4},
+  {FORMAT (R8, UINT), FLAG (RGB) | NE, DPTH8, PSTR4, PLANE0, OFFS0, SUB4},
+  {FORMAT (R8, SINT), FLAG (RGB) | NE, DPTH8, PSTR4, PLANE0, OFFS0, SUB4},
+  {FORMAT (R8, SRGB), FLAG (RGB) | NE, DPTH8, PSTR4, PLANE0, OFFS0, SUB4},
+
+#if 0
+FIXME: implement:
+  {VK_FORMAT_R4G4_UNORM_PACK8, {0, 1, -1, -1}},
+  {VK_FORMAT_R4G4B4A4_UNORM_PACK16, {0, 1, 2, 3}},
+  {VK_FORMAT_B4G4R4A4_UNORM_PACK16, {2, 1, 0, 3}},
+  {VK_FORMAT_R5G6B5_UNORM_PACK16, {0, 1, 2, -1}},
+  {VK_FORMAT_B5G6R5_UNORM_PACK16, {2, 1, 0, -1}},
+  {VK_FORMAT_R5G5B5A1_UNORM_PACK16, {0, 1, 2, 3}},
+  {VK_FORMAT_B5G5R5A1_UNORM_PACK16, {2, 1, 0, 3}},
+  {VK_FORMAT_A1R5G5B5_UNORM_PACK16, {3, 0, 1, 2}},
+#if G_BYTE_ORDER == G_BIG_ENDIAN
+  {VK_FORMAT_A8B8G8R8_UNORM_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A8B8G8R8_SNORM_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A8B8G8R8_USCALED_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A8B8G8R8_SSCALED_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A8B8G8R8_UINT_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A8B8G8R8_SINT_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A8B8G8R8_SRGB_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A2R10G10B10_UNORM_PACK32, {3, 0, 1, 2}},
+  {VK_FORMAT_A2R10G10B10_SNORM_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A2R10G10B10_USCALED_PACK32, {3, 0, 1, 2}},
+  {VK_FORMAT_A2R10G10B10_SSCALED_PACK32, {3, 0, 1, 2}},
+  {VK_FORMAT_A2R10G10B10_UINT_PACK32, {3, 0, 1, 2}},
+  {VK_FORMAT_A2R10G10B10_SINT_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A2B10G10R10_UNORM_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A2B10G10R10_SNORM_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A2B10G10R10_USCALED_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A2B10G10R10_SSCALED_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A2B10G10R10_UINT_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A2B10G10R10_SINT_PACK32, {3, 2, 1, 0}},
+  {VK_FORMAT_A2B10G10R10_UNORM_PACK32, {3, 2, 1, 0}},
+#elif G_BYTE_ORDER == G_LITTLE_ENDIAN
+  {VK_FORMAT_A8B8G8R8_UNORM_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A8B8G8R8_SNORM_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A8B8G8R8_USCALED_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A8B8G8R8_SSCALED_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A8B8G8R8_UINT_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A8B8G8R8_SINT_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A8B8G8R8_SRGB_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A2R10G10B10_UNORM_PACK32, {2, 1, 0, 3}},
+  {VK_FORMAT_A2R10G10B10_SNORM_PACK32, {2, 1, 0, 3}},
+  {VK_FORMAT_A2R10G10B10_USCALED_PACK32, {2, 1, 0, 3}},
+  {VK_FORMAT_A2R10G10B10_SSCALED_PACK32, {2, 1, 0, 3}},
+  {VK_FORMAT_A2R10G10B10_UINT_PACK32, {2, 1, 0, 3}},
+  {VK_FORMAT_A2R10G10B10_SINT_PACK32, {2, 1, 0, 3}},
+  {VK_FORMAT_A2B10G10R10_UNORM_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A2B10G10R10_SNORM_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A2B10G10R10_USCALED_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A2B10G10R10_SSCALED_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A2B10G10R10_UINT_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A2B10G10R10_SINT_PACK32, {0, 1, 2, 3}},
+  {VK_FORMAT_A2B10G10R10_UNORM_PACK32, {0, 1, 2, 3}},
+#endif
+  VK_FORMAT_R16_UNORM = 70,
+  VK_FORMAT_R16_SNORM = 71,
+  VK_FORMAT_R16_USCALED = 72,
+  VK_FORMAT_R16_SSCALED = 73,
+  VK_FORMAT_R16_UINT = 74,
+  VK_FORMAT_R16_SINT = 75,
+  VK_FORMAT_R16_SFLOAT = 76,
+  VK_FORMAT_R16G16_UNORM = 77,
+  VK_FORMAT_R16G16_SNORM = 78,
+  VK_FORMAT_R16G16_USCALED = 79,
+  VK_FORMAT_R16G16_SSCALED = 80,
+  VK_FORMAT_R16G16_UINT = 81,
+  VK_FORMAT_R16G16_SINT = 82,
+  VK_FORMAT_R16G16_SFLOAT = 83,
+  VK_FORMAT_R16G16B16_UNORM = 84,
+  VK_FORMAT_R16G16B16_SNORM = 85,
+  VK_FORMAT_R16G16B16_USCALED = 86,
+  VK_FORMAT_R16G16B16_SSCALED = 87,
+  VK_FORMAT_R16G16B16_UINT = 88,
+  VK_FORMAT_R16G16B16_SINT = 89,
+  VK_FORMAT_R16G16B16_SFLOAT = 90,
+  VK_FORMAT_R16G16B16A16_UNORM = 91,
+  VK_FORMAT_R16G16B16A16_SNORM = 92,
+  VK_FORMAT_R16G16B16A16_USCALED = 93,
+  VK_FORMAT_R16G16B16A16_SSCALED = 94,
+  VK_FORMAT_R16G16B16A16_UINT = 95,
+  VK_FORMAT_R16G16B16A16_SINT = 96,
+  VK_FORMAT_R16G16B16A16_SFLOAT = 97,
+  VK_FORMAT_R32_UINT = 98,
+  VK_FORMAT_R32_SINT = 99,
+  VK_FORMAT_R32_SFLOAT = 100,
+  VK_FORMAT_R32G32_UINT = 101,
+  VK_FORMAT_R32G32_SINT = 102,
+  VK_FORMAT_R32G32_SFLOAT = 103,
+  VK_FORMAT_R32G32B32_UINT = 104,
+  VK_FORMAT_R32G32B32_SINT = 105,
+  VK_FORMAT_R32G32B32_SFLOAT = 106,
+  VK_FORMAT_R32G32B32A32_UINT = 107,
+  VK_FORMAT_R32G32B32A32_SINT = 108,
+  VK_FORMAT_R32G32B32A32_SFLOAT = 109,
+  VK_FORMAT_R64_UINT = 110,
+  VK_FORMAT_R64_SINT = 111,
+  VK_FORMAT_R64_SFLOAT = 112,
+  VK_FORMAT_R64G64_UINT = 113,
+  VK_FORMAT_R64G64_SINT = 114,
+  VK_FORMAT_R64G64_SFLOAT = 115,
+  VK_FORMAT_R64G64B64_UINT = 116,
+  VK_FORMAT_R64G64B64_SINT = 117,
+  VK_FORMAT_R64G64B64_SFLOAT = 118,
+  VK_FORMAT_R64G64B64A64_UINT = 119,
+  VK_FORMAT_R64G64B64A64_SINT = 120,
+  VK_FORMAT_R64G64B64A64_SFLOAT = 121,
+  VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122,
+  VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123,
+  VK_FORMAT_D16_UNORM = 124,
+  VK_FORMAT_X8_D24_UNORM_PACK32 = 125,
+  VK_FORMAT_D32_SFLOAT = 126,
+  VK_FORMAT_S8_UINT = 127,
+  VK_FORMAT_D16_UNORM_S8_UINT = 128,
+  VK_FORMAT_D24_UNORM_S8_UINT = 129,
+  VK_FORMAT_D32_SFLOAT_S8_UINT = 130,
+  VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131,
+  VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132,
+  VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133,
+  VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134,
+  VK_FORMAT_BC2_UNORM_BLOCK = 135,
+  VK_FORMAT_BC2_SRGB_BLOCK = 136,
+  VK_FORMAT_BC3_UNORM_BLOCK = 137,
+  VK_FORMAT_BC3_SRGB_BLOCK = 138,
+  VK_FORMAT_BC4_UNORM_BLOCK = 139,
+  VK_FORMAT_BC4_SNORM_BLOCK = 140,
+  VK_FORMAT_BC5_UNORM_BLOCK = 141,
+  VK_FORMAT_BC5_SNORM_BLOCK = 142,
+  VK_FORMAT_BC6H_UFLOAT_BLOCK = 143,
+  VK_FORMAT_BC6H_SFLOAT_BLOCK = 144,
+  VK_FORMAT_BC7_UNORM_BLOCK = 145,
+  VK_FORMAT_BC7_SRGB_BLOCK = 146,
+  VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147,
+  VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148,
+  VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149,
+  VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150,
+  VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151,
+  VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152,
+  VK_FORMAT_EAC_R11_UNORM_BLOCK = 153,
+  VK_FORMAT_EAC_R11_SNORM_BLOCK = 154,
+  VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155,
+  VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156,
+  VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157,
+  VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158,
+  VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159,
+  VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160,
+  VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161,
+  VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162,
+  VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163,
+  VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164,
+  VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165,
+  VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166,
+  VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167,
+  VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168,
+  VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169,
+  VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170,
+  VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171,
+  VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172,
+  VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173,
+  VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174,
+  VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175,
+  VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176,
+  VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177,
+  VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178,
+  VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179,
+  VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180,
+  VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181,
+  VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182,
+  VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183,
+  VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184,
+  VK_FORMAT_G8B8G8R8_422_UNORM = 1000156000,
+  VK_FORMAT_B8G8R8G8_422_UNORM = 1000156001,
+  VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM = 1000156002,
+  VK_FORMAT_G8_B8R8_2PLANE_420_UNORM = 1000156003,
+  VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM = 1000156004,
+  VK_FORMAT_G8_B8R8_2PLANE_422_UNORM = 1000156005,
+  VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM = 1000156006,
+  VK_FORMAT_R10X6_UNORM_PACK16 = 1000156007,
+  VK_FORMAT_R10X6G10X6_UNORM_2PACK16 = 1000156008,
+  VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16 = 1000156009,
+  VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = 1000156010,
+  VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = 1000156011,
+  VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = 1000156012,
+  VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = 1000156013,
+  VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = 1000156014,
+  VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = 1000156015,
+  VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = 1000156016,
+  VK_FORMAT_R12X4_UNORM_PACK16 = 1000156017,
+  VK_FORMAT_R12X4G12X4_UNORM_2PACK16 = 1000156018,
+  VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 = 1000156019,
+  VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = 1000156020,
+  VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = 1000156021,
+  VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = 1000156022,
+  VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = 1000156023,
+  VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = 1000156024,
+  VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = 1000156025,
+  VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = 1000156026,
+  VK_FORMAT_G16B16G16R16_422_UNORM = 1000156027,
+  VK_FORMAT_B16G16R16G16_422_UNORM = 1000156028,
+  VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM = 1000156029,
+  VK_FORMAT_G16_B16R16_2PLANE_420_UNORM = 1000156030,
+  VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031,
+  VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032,
+  VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033,
+  VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000,
+  VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001,
+  VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002,
+  VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003,
+  VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004,
+  VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005,
+  VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006,
+  VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007,
+#endif
+};
+
+/**
+ * gst_vulkan_format_get_info:
+ * @format: the #VkFormat to look up
+ *
+ * Returns: the #GstVulkanFormatInfo for @format, or %NULL if @format is not
+ *     described in the format table
+ */
+const GstVulkanFormatInfo *
+gst_vulkan_format_get_info (VkFormat format)
+{
+  int i;
+
+  for (i = 0; i < G_N_ELEMENTS (formats); i++) {
+    if (formats[i].format == format) {
+      return &formats[i];
+    }
+  }
+
+  return NULL;
+}
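Note (editor's sketch, not part of the patch): each row of the formats[] table above expands, via the FORMAT/DPTH/PSTR/PLANE/OFFS/SUB macros, into a flat GstVulkanFormatInfo initializer. On a little-endian build the VK_FORMAT_R8G8B8A8_UNORM row is roughly equivalent to:

    {
      VK_FORMAT_R8G8B8A8_UNORM, "R8G8B8A8_UNORM",
      GST_VULKAN_FORMAT_SCALING_UNORM,
      GST_VULKAN_FORMAT_FLAG_RGB | GST_VULKAN_FORMAT_FLAG_ALPHA |
          GST_VULKAN_FORMAT_FLAG_LE,
      8, 4,                               /* bits, n_components */
      { 0, 0, 0, 0 }, { 8, 8, 8, 8 },     /* shift, depth */
      { 4, 4, 4, 4 },                     /* pixel_stride */
      1, { 0, 0, 0, 0 },                  /* n_planes, plane */
      { 0, 1, 2, 3 },                     /* poffset */
      { 0, 0, 0, 0 }, { 0, 0, 0, 0 },     /* w_sub, h_sub */
    },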
diff --git a/gst-libs/gst/vulkan/gstvkformat.h b/gst-libs/gst/vulkan/gstvkformat.h
new file mode 100644
index 0000000000..e2238fe132
--- /dev/null
+++ b/gst-libs/gst/vulkan/gstvkformat.h
@@ -0,0 +1,87 @@
+/*
+ * GStreamer
+ * Copyright (C) 2016 Matthew Waters
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VULKAN_FORMAT_H__
+#define __GST_VULKAN_FORMAT_H__
+
+#include
+
+G_BEGIN_DECLS
+
+typedef struct _GstVulkanFormatInfo GstVulkanFormatInfo;
+
+#define GST_VULKAN_MAX_COMPONENTS 4
+
+typedef enum
+{
+  GST_VULKAN_FORMAT_SCALING_UNORM = 1,
+  GST_VULKAN_FORMAT_SCALING_SNORM,
+  GST_VULKAN_FORMAT_SCALING_USCALED,
+  GST_VULKAN_FORMAT_SCALING_SSCALED,
+  GST_VULKAN_FORMAT_SCALING_UINT,
+  GST_VULKAN_FORMAT_SCALING_SINT,
+  GST_VULKAN_FORMAT_SCALING_SRGB,
+} GstVulkanFormatScaling;
+
+typedef enum
+{
+  GST_VULKAN_FORMAT_FLAG_YUV = (1 << 0),
+  GST_VULKAN_FORMAT_FLAG_RGB = (1 << 1),
+  GST_VULKAN_FORMAT_FLAG_ALPHA = (1 << 2),
+  GST_VULKAN_FORMAT_FLAG_LE = (1 << 3),
+  GST_VULKAN_FORMAT_FLAG_COMPLEX = (1 << 4),
+} GstVulkanFormatFlags;
+
+/**
+ * GstVulkanFormatInfo:
+ * @format: the Vulkan format being described
+ * @name: name of this format
+ * @scaling: how raw data is interpreted and scaled
+ * @flags: the #GstVulkanFormatFlags for this format
+ * @bits: the number of bits used to pack data items
+ * @n_components: number of components in this format. The 'R' component is
+ *    at index 0, the 'G' component at index 1, etc
+ * @shift: the number of bits to shift away the least significant bits of
+ *    each component
+ * @depth: the number of bits used by each component
+ * @pixel_stride: the number of bytes to the pixel immediately to the right,
+ *    for each component
+ * @n_planes: the number of planes in this format
+ * @plane: the plane where each component can be found
+ * @poffset: the offset in the plane where the first pixel of each component
+ *    can be found
+ * @w_sub: subsampling factor of the width for each component
+ * @h_sub: subsampling factor of the height for each component
+ */
+struct _GstVulkanFormatInfo
+{
+  VkFormat format;
+  const gchar *name;
+  GstVulkanFormatScaling scaling;
+  GstVulkanFormatFlags flags;
+  guint bits;
+  guint n_components;
+  guint8 shift[GST_VULKAN_MAX_COMPONENTS];
+  guint8 depth[GST_VULKAN_MAX_COMPONENTS];
+  gint8 pixel_stride[GST_VULKAN_MAX_COMPONENTS];
+  guint n_planes;
+  guint8 plane[GST_VULKAN_MAX_COMPONENTS];
+  guint8 poffset[GST_VULKAN_MAX_COMPONENTS];
+  guint8 w_sub[GST_VULKAN_MAX_COMPONENTS];
+  guint8 h_sub[GST_VULKAN_MAX_COMPONENTS];
+};
+
+GST_VULKAN_API
+const GstVulkanFormatInfo * gst_vulkan_format_get_info (VkFormat format);
+
+G_END_DECLS
+
+#endif /* __GST_VULKAN_FORMAT_H__ */
diff --git a/gst-libs/gst/vulkan/meson.build b/gst-libs/gst/vulkan/meson.build
index bd0dfbb912..1ada67e182 100644
--- a/gst-libs/gst/vulkan/meson.build
+++ b/gst-libs/gst/vulkan/meson.build
@@ -11,6 +11,7 @@ vulkan_sources = [
   'gstvkdisplay.c',
   'gstvkerror.c',
   'gstvkfence.c',
+  'gstvkformat.c',
   'gstvkimagememory.c',
   'gstvkimagebufferpool.c',
   'gstvkinstance.c',
@@ -29,6 +30,7 @@ vulkan_headers = [
   'gstvkdisplay.h',
   'gstvkerror.h',
   'gstvkfence.h',
+  'gstvkformat.h',
   'gstvkimagememory.h',
   'gstvkimagebufferpool.h',
   'gstvkinstance.h',
diff --git a/gst-libs/gst/vulkan/vulkan.h b/gst-libs/gst/vulkan/vulkan.h
index 9f13d7e6db..62f6828d3b 100644
--- a/gst-libs/gst/vulkan/vulkan.h
+++ b/gst-libs/gst/vulkan/vulkan.h
@@ -30,6 +30,7 @@
 #include <gst/vulkan/gstvkdisplay.h>
 #include <gst/vulkan/gstvkerror.h>
 #include <gst/vulkan/gstvkfence.h>
+#include <gst/vulkan/gstvkformat.h>
 #include <gst/vulkan/gstvkimagememory.h>
 #include <gst/vulkan/gstvkimagebufferpool.h>
 #include <gst/vulkan/gstvkinstance.h>
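Note (editor's sketch, not part of the patch): a minimal usage example of the new API. The helper name is hypothetical; the umbrella include is taken from the vulkan.h hunk above. It looks up a format and returns the byte distance between two horizontally adjacent pixels in the first plane:

    #include <gst/vulkan/vulkan.h>

    /* hypothetical helper, for illustration only */
    static guint
    plane0_pixel_stride (VkFormat format)
    {
      const GstVulkanFormatInfo *finfo = gst_vulkan_format_get_info (format);
      guint i;

      if (finfo == NULL)
        return 0;               /* format not described in the table (yet) */

      for (i = 0; i < finfo->n_components; i++) {
        /* report the stride of the first component stored in plane 0 */
        if (finfo->plane[i] == 0)
          return finfo->pixel_stride[i];
      }

      return 0;
    }

With the table above, plane0_pixel_stride (VK_FORMAT_B8G8R8A8_UNORM) would return 4.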