diff --git a/subprojects/gst-plugins-base/docs/plugins/gst_plugins_cache.json b/subprojects/gst-plugins-base/docs/plugins/gst_plugins_cache.json
index 52cc0fc30e..6827f94171 100644
--- a/subprojects/gst-plugins-base/docs/plugins/gst_plugins_cache.json
+++ b/subprojects/gst-plugins-base/docs/plugins/gst_plugins_cache.json
@@ -13609,14 +13609,15 @@
         "tracers": {},
         "url": "Unknown package origin"
     },
-    "videoconvert": {
-        "description": "Colorspace conversion",
+    "videoconvertscale": {
+        "description": "Convert video colorspaces and resizes video frames",
         "elements": {
             "videoconvert": {
-                "author": "GStreamer maintainers <gstreamer-devel@lists.freedesktop.org>",
-                "description": "Converts video from one colorspace to another",
+                "author": "Wim Taymans <wim.taymans@gmail.com>",
+                "description": "Resizes video and allow color conversion",
                 "hierarchy": [
                     "GstVideoConvert",
+                    "GstVideoConvertScale",
                     "GstVideoFilter",
                     "GstBaseTransform",
                     "GstElement",
@@ -13624,21 +13625,61 @@
                     "GInitiallyUnowned",
                     "GObject"
                 ],
-                "klass": "Filter/Converter/Video",
-                "long-name": "Colorspace converter",
+                "klass": "Filter/Converter/Video/Scaler/Colorspace",
+                "long-name": "Video colorspace converter and scaler",
                 "pad-templates": {
                     "sink": {
-                        "caps": "video/x-raw:\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 2147483647 ]\n         height: [ 1, 2147483647 ]\n      framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(ANY):\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 2147483647 ]\n         height: [ 1, 2147483647 ]\n      framerate: [ 0/1, 2147483647/1 ]\n",
+                        "caps": "video/x-raw:\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(ANY):\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n",
                         "direction": "sink",
                         "presence": "always"
                     },
                     "src": {
-                        "caps": "video/x-raw:\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 2147483647 ]\n         height: [ 1, 2147483647 ]\n      framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(ANY):\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 2147483647 ]\n         height: [ 1, 2147483647 ]\n      framerate: [ 0/1, 2147483647/1 ]\n",
+                        "caps": "video/x-raw:\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(ANY):\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n",
+                        "direction": "src",
+                        "presence": "always"
+                    }
+                },
+                "rank": "marginal"
+            },
+            "videoconvertscale": {
+                "author": "Wim Taymans <wim.taymans@gmail.com>",
+                "description": "Resizes video and allow color conversion",
+                "hierarchy": [
+                    "GstVideoConvertScale",
+                    "GstVideoFilter",
+                    "GstBaseTransform",
+                    "GstElement",
+                    "GstObject",
+                    "GInitiallyUnowned",
+                    "GObject"
+                ],
+                "klass": "Filter/Converter/Video/Scaler/Colorspace",
+                "long-name": "Video colorspace converter and scaler",
+                "pad-templates": {
+                    "sink": {
+                        "caps": "video/x-raw:\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(ANY):\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n",
+                        "direction": "sink",
+                        "presence": "always"
+                    },
+                    "src": {
+                        "caps": "video/x-raw:\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(ANY):\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n",
                         "direction": "src",
                         "presence": "always"
                     }
                 },
                 "properties": {
+                    "add-borders": {
+                        "blurb": "Add black borders if necessary to keep the display aspect ratio",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "true",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "gboolean",
+                        "writable": true
+                    },
                     "alpha-mode": {
                         "blurb": "Alpha Mode to use",
                         "conditionally-available": false,
@@ -13715,6 +13756,20 @@
                         "type": "guint",
                         "writable": true
                     },
+                    "envelope": {
+                        "blurb": "Size of filter envelope",
+                        "conditionally-available": false,
+                        "construct": true,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "2",
+                        "max": "5",
+                        "min": "1",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "gdouble",
+                        "writable": true
+                    },
                     "gamma-mode": {
                         "blurb": "Gamma Conversion Mode",
                         "conditionally-available": false,
@@ -13739,10 +13794,22 @@
                         "type": "GstVideoMatrixMode",
                         "writable": true
                     },
+                    "method": {
+                        "blurb": "method",
+                        "conditionally-available": false,
+                        "construct": false,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "bilinear (1)",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "GstVideoScaleMethod",
+                        "writable": true
+                    },
                     "n-threads": {
                         "blurb": "Maximum number of threads to use",
                         "conditionally-available": false,
-                        "construct": false,
+                        "construct": true,
                         "construct-only": false,
                         "controllable": false,
                         "default": "1",
@@ -13764,14 +13831,141 @@
                         "readable": true,
                         "type": "GstVideoPrimariesMode",
                         "writable": true
+                    },
+                    "sharpen": {
+                        "blurb": "Sharpening",
+                        "conditionally-available": false,
+                        "construct": true,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "0",
+                        "max": "1",
+                        "min": "0",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "gdouble",
+                        "writable": true
+                    },
+                    "sharpness": {
+                        "blurb": "Sharpness of filter",
+                        "conditionally-available": false,
+                        "construct": true,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "1",
+                        "max": "1.5",
+                        "min": "0.5",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "gdouble",
+                        "writable": true
                     }
                 },
-                "rank": "none"
+                "rank": "secondary"
+            },
+            "videoscale": {
+                "author": "Wim Taymans <wim.taymans@gmail.com>",
+                "description": "Resizes video and allow color conversion",
+                "hierarchy": [
+                    "GstVideoScale",
+                    "GstVideoConvertScale",
+                    "GstVideoFilter",
+                    "GstBaseTransform",
+                    "GstElement",
+                    "GstObject",
+                    "GInitiallyUnowned",
+                    "GObject"
+                ],
+                "klass": "Filter/Converter/Video/Scaler/Colorspace",
+                "long-name": "Video colorspace converter and scaler",
+                "pad-templates": {
+                    "sink": {
+                        "caps": "video/x-raw:\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(ANY):\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n",
+                        "direction": "sink",
+                        "presence": "always"
+                    },
+                    "src": {
+                        "caps": "video/x-raw:\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(ANY):\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n",
+                        "direction": "src",
+                        "presence": "always"
+                    }
+                },
+                "properties": {
+                    "gamma-decode": {
+                        "blurb": "Decode gamma before scaling",
+                        "conditionally-available": false,
+                        "construct": true,
+                        "construct-only": false,
+                        "controllable": false,
+                        "default": "false",
+                        "mutable": "null",
+                        "readable": true,
+                        "type": "gboolean",
+                        "writable": true
+                    }
+                },
+                "rank": "marginal"
             }
         },
-        "filename": "gstvideoconvert",
+        "filename": "gstvideoconvertscale",
         "license": "LGPL",
-        "other-types": {},
+        "other-types": {
+            "GstVideoScaleMethod": {
+                "kind": "enum",
+                "values": [
+                    {
+                        "desc": "Nearest Neighbour",
+                        "name": "nearest-neighbour",
+                        "value": "0"
+                    },
+                    {
+                        "desc": "Bilinear (2-tap)",
+                        "name": "bilinear",
+                        "value": "1"
+                    },
+                    {
+                        "desc": "4-tap Sinc",
+                        "name": "4-tap",
+                        "value": "2"
+                    },
+                    {
+                        "desc": "Lanczos",
+                        "name": "lanczos",
+                        "value": "3"
+                    },
+                    {
+                        "desc": "Bilinear (multi-tap)",
+                        "name": "bilinear2",
+                        "value": "4"
+                    },
+                    {
+                        "desc": "Sinc (multi-tap)",
+                        "name": "sinc",
+                        "value": "5"
+                    },
+                    {
+                        "desc": "Hermite (multi-tap)",
+                        "name": "hermite",
+                        "value": "6"
+                    },
+                    {
+                        "desc": "Spline (multi-tap)",
+                        "name": "spline",
+                        "value": "7"
+                    },
+                    {
+                        "desc": "Catmull-Rom (multi-tap)",
+                        "name": "catrom",
+                        "value": "8"
+                    },
+                    {
+                        "desc": "Mitchell (multi-tap)",
+                        "name": "mitchell",
+                        "value": "9"
+                    }
+                ]
+            }
+        },
         "package": "GStreamer Base Plug-ins",
         "source": "gst-plugins-base",
         "tracers": {},
@@ -13980,208 +14174,6 @@
         "tracers": {},
         "url": "Unknown package origin"
     },
-    "videoscale": {
-        "description": "Resizes video",
-        "elements": {
-            "videoscale": {
-                "author": "Wim Taymans <wim.taymans@gmail.com>",
-                "description": "Resizes video",
-                "hierarchy": [
-                    "GstVideoScale",
-                    "GstVideoFilter",
-                    "GstBaseTransform",
-                    "GstElement",
-                    "GstObject",
-                    "GInitiallyUnowned",
-                    "GObject"
-                ],
-                "klass": "Filter/Converter/Video/Scaler",
-                "long-name": "Video scaler",
-                "pad-templates": {
-                    "sink": {
-                        "caps": "video/x-raw:\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(ANY):\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n",
-                        "direction": "sink",
-                        "presence": "always"
-                    },
-                    "src": {
-                        "caps": "video/x-raw:\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(ANY):\n         format: { ABGR64_LE, BGRA64_LE, AYUV64, ARGB64_LE, ARGB64, RGBA64_LE, ABGR64_BE, BGRA64_BE, ARGB64_BE, RGBA64_BE, GBRA_12LE, GBRA_12BE, Y412_LE, Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, NV12_10BE_8L128, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, YV12, NV21, NV12, NV12_8L128, NV12_64Z32, NV12_4L4, NV12_32L32, NV12_16L32S, Y41B, IYU1, YVU9, YUV9, RGB16, BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }\n          width: [ 1, 32767 ]\n         height: [ 1, 32767 ]\n      framerate: [ 0/1, 2147483647/1 ]\n",
-                        "direction": "src",
-                        "presence": "always"
-                    }
-                },
-                "properties": {
-                    "add-borders": {
-                        "blurb": "Add black borders if necessary to keep the display aspect ratio",
-                        "conditionally-available": false,
-                        "construct": false,
-                        "construct-only": false,
-                        "controllable": false,
-                        "default": "true",
-                        "mutable": "null",
-                        "readable": true,
-                        "type": "gboolean",
-                        "writable": true
-                    },
-                    "dither": {
-                        "blurb": "Add dither (only used for Lanczos method)",
-                        "conditionally-available": false,
-                        "construct": true,
-                        "construct-only": false,
-                        "controllable": false,
-                        "default": "false",
-                        "mutable": "null",
-                        "readable": true,
-                        "type": "gboolean",
-                        "writable": true
-                    },
-                    "envelope": {
-                        "blurb": "Size of filter envelope",
-                        "conditionally-available": false,
-                        "construct": true,
-                        "construct-only": false,
-                        "controllable": false,
-                        "default": "2",
-                        "max": "5",
-                        "min": "1",
-                        "mutable": "null",
-                        "readable": true,
-                        "type": "gdouble",
-                        "writable": true
-                    },
-                    "gamma-decode": {
-                        "blurb": "Decode gamma before scaling",
-                        "conditionally-available": false,
-                        "construct": true,
-                        "construct-only": false,
-                        "controllable": false,
-                        "default": "false",
-                        "mutable": "null",
-                        "readable": true,
-                        "type": "gboolean",
-                        "writable": true
-                    },
-                    "method": {
-                        "blurb": "method",
-                        "conditionally-available": false,
-                        "construct": false,
-                        "construct-only": false,
-                        "controllable": false,
-                        "default": "bilinear (1)",
-                        "mutable": "null",
-                        "readable": true,
-                        "type": "GstVideoScaleMethod",
-                        "writable": true
-                    },
-                    "n-threads": {
-                        "blurb": "Maximum number of threads to use",
-                        "conditionally-available": false,
-                        "construct": true,
-                        "construct-only": false,
-                        "controllable": false,
-                        "default": "1",
-                        "max": "-1",
-                        "min": "0",
-                        "mutable": "null",
-                        "readable": true,
-                        "type": "guint",
-                        "writable": true
-                    },
-                    "sharpen": {
-                        "blurb": "Sharpening",
-                        "conditionally-available": false,
-                        "construct": true,
-                        "construct-only": false,
-                        "controllable": false,
-                        "default": "0",
-                        "max": "1",
-                        "min": "0",
-                        "mutable": "null",
-                        "readable": true,
-                        "type": "gdouble",
-                        "writable": true
-                    },
-                    "sharpness": {
-                        "blurb": "Sharpness of filter",
-                        "conditionally-available": false,
-                        "construct": true,
-                        "construct-only": false,
-                        "controllable": false,
-                        "default": "1",
-                        "max": "1.5",
-                        "min": "0.5",
-                        "mutable": "null",
-                        "readable": true,
-                        "type": "gdouble",
-                        "writable": true
-                    }
-                },
-                "rank": "none"
-            }
-        },
-        "filename": "gstvideoscale",
-        "license": "LGPL",
-        "other-types": {
-            "GstVideoScaleMethod": {
-                "kind": "enum",
-                "values": [
-                    {
-                        "desc": "Nearest Neighbour",
-                        "name": "nearest-neighbour",
-                        "value": "0"
-                    },
-                    {
-                        "desc": "Bilinear (2-tap)",
-                        "name": "bilinear",
-                        "value": "1"
-                    },
-                    {
-                        "desc": "4-tap Sinc",
-                        "name": "4-tap",
-                        "value": "2"
-                    },
-                    {
-                        "desc": "Lanczos",
-                        "name": "lanczos",
-                        "value": "3"
-                    },
-                    {
-                        "desc": "Bilinear (multi-tap)",
-                        "name": "bilinear2",
-                        "value": "4"
-                    },
-                    {
-                        "desc": "Sinc (multi-tap)",
-                        "name": "sinc",
-                        "value": "5"
-                    },
-                    {
-                        "desc": "Hermite (multi-tap)",
-                        "name": "hermite",
-                        "value": "6"
-                    },
-                    {
-                        "desc": "Spline (multi-tap)",
-                        "name": "spline",
-                        "value": "7"
-                    },
-                    {
-                        "desc": "Catmull-Rom (multi-tap)",
-                        "name": "catrom",
-                        "value": "8"
-                    },
-                    {
-                        "desc": "Mitchell (multi-tap)",
-                        "name": "mitchell",
-                        "value": "9"
-                    }
-                ]
-            }
-        },
-        "package": "GStreamer Base Plug-ins",
-        "source": "gst-plugins-base",
-        "tracers": {},
-        "url": "Unknown package origin"
-    },
     "videotestsrc": {
         "description": "Creates a test video stream",
         "elements": {
diff --git a/subprojects/gst-plugins-base/gst/meson.build b/subprojects/gst-plugins-base/gst/meson.build
index 91be2a00b7..2d14191ce9 100644
--- a/subprojects/gst-plugins-base/gst/meson.build
+++ b/subprojects/gst-plugins-base/gst/meson.build
@@ -1,6 +1,6 @@
 foreach plugin : ['adder', 'app', 'audioconvert', 'audiomixer', 'audiorate', 'audioresample',
                   'audiotestsrc', 'compositor', 'encoding', 'gio', 'overlaycomposition', 'pbtypes', 'playback',
-                  'rawparse', 'subparse', 'tcp', 'typefind', 'videoconvert', 'videorate', 'videoscale',
+                  'rawparse', 'subparse', 'tcp', 'typefind', 'videoconvertscale', 'videorate',
                   'videotestsrc', 'volume']
   if not get_option(plugin).disabled()
     subdir(plugin)
diff --git a/subprojects/gst-plugins-base/gst/videoconvert/gstvideoconvert.c b/subprojects/gst-plugins-base/gst/videoconvert/gstvideoconvert.c
deleted file mode 100644
index 0f65ee2db8..0000000000
--- a/subprojects/gst-plugins-base/gst/videoconvert/gstvideoconvert.c
+++ /dev/null
@@ -1,870 +0,0 @@
-/* GStreamer
- * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
- * This file:
- * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
- * Copyright (C) 2010 David Schleef <ds@schleef.org>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- * Boston, MA 02110-1301, USA.
- */
-
-/**
- * SECTION:element-videoconvert
- * @title: videoconvert
- *
- * Convert video frames between a great variety of video formats.
- *
- * ## Example launch line
- * |[
- * gst-launch-1.0 -v videotestsrc ! video/x-raw,format=YUY2 ! videoconvert ! autovideosink
- * ]|
- *  This will output a test video (generated in YUY2 format) in a video
- * window. If the video sink selected does not support YUY2 videoconvert will
- * automatically convert the video to a format understood by the video sink.
- *
- */
-
-#ifdef HAVE_CONFIG_H
-#  include "config.h"
-#endif
-
-#include "gstvideoconvert.h"
-
-#include <gst/video/video.h>
-#include <gst/video/gstvideometa.h>
-#include <gst/video/gstvideopool.h>
-
-#include <string.h>
-
-GST_DEBUG_CATEGORY (videoconvert_debug);
-#define GST_CAT_DEFAULT videoconvert_debug
-GST_DEBUG_CATEGORY_STATIC (CAT_PERFORMANCE);
-
-static GQuark _colorspace_quark;
-
-#define gst_video_convert_parent_class parent_class
-G_DEFINE_TYPE (GstVideoConvert, gst_video_convert, GST_TYPE_VIDEO_FILTER);
-GST_ELEMENT_REGISTER_DEFINE (videoconvert, "videoconvert",
-    GST_RANK_NONE, GST_TYPE_VIDEO_CONVERT);
-
-#define DEFAULT_PROP_DITHER      GST_VIDEO_DITHER_BAYER
-#define DEFAULT_PROP_DITHER_QUANTIZATION 1
-#define DEFAULT_PROP_CHROMA_RESAMPLER	GST_VIDEO_RESAMPLER_METHOD_LINEAR
-#define DEFAULT_PROP_ALPHA_MODE GST_VIDEO_ALPHA_MODE_COPY
-#define DEFAULT_PROP_ALPHA_VALUE 1.0
-#define DEFAULT_PROP_CHROMA_MODE GST_VIDEO_CHROMA_MODE_FULL
-#define DEFAULT_PROP_MATRIX_MODE GST_VIDEO_MATRIX_MODE_FULL
-#define DEFAULT_PROP_GAMMA_MODE GST_VIDEO_GAMMA_MODE_NONE
-#define DEFAULT_PROP_PRIMARIES_MODE GST_VIDEO_PRIMARIES_MODE_NONE
-#define DEFAULT_PROP_N_THREADS 1
-
-enum
-{
-  PROP_0,
-  PROP_DITHER,
-  PROP_DITHER_QUANTIZATION,
-  PROP_CHROMA_RESAMPLER,
-  PROP_ALPHA_MODE,
-  PROP_ALPHA_VALUE,
-  PROP_CHROMA_MODE,
-  PROP_MATRIX_MODE,
-  PROP_GAMMA_MODE,
-  PROP_PRIMARIES_MODE,
-  PROP_N_THREADS
-};
-
-#define CSP_VIDEO_CAPS GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS_ALL) ";" \
-    GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
-
-static GstStaticPadTemplate gst_video_convert_src_template =
-GST_STATIC_PAD_TEMPLATE ("src",
-    GST_PAD_SRC,
-    GST_PAD_ALWAYS,
-    GST_STATIC_CAPS (CSP_VIDEO_CAPS)
-    );
-
-static GstStaticPadTemplate gst_video_convert_sink_template =
-GST_STATIC_PAD_TEMPLATE ("sink",
-    GST_PAD_SINK,
-    GST_PAD_ALWAYS,
-    GST_STATIC_CAPS (CSP_VIDEO_CAPS)
-    );
-
-static void gst_video_convert_set_property (GObject * object,
-    guint property_id, const GValue * value, GParamSpec * pspec);
-static void gst_video_convert_get_property (GObject * object,
-    guint property_id, GValue * value, GParamSpec * pspec);
-
-static gboolean gst_video_convert_set_info (GstVideoFilter * filter,
-    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
-    GstVideoInfo * out_info);
-static GstFlowReturn gst_video_convert_transform_frame (GstVideoFilter * filter,
-    GstVideoFrame * in_frame, GstVideoFrame * out_frame);
-
-static GstCapsFeatures *features_format_interlaced,
-    *features_format_interlaced_sysmem;
-
-/* copies the given caps */
-static GstCaps *
-gst_video_convert_caps_remove_format_info (GstCaps * caps)
-{
-  GstStructure *st;
-  GstCapsFeatures *f;
-  gint i, n;
-  GstCaps *res;
-
-  res = gst_caps_new_empty ();
-
-  n = gst_caps_get_size (caps);
-  for (i = 0; i < n; i++) {
-    st = gst_caps_get_structure (caps, i);
-    f = gst_caps_get_features (caps, i);
-
-    /* If this is already expressed by the existing caps
-     * skip this structure */
-    if (i > 0 && gst_caps_is_subset_structure_full (res, st, f))
-      continue;
-
-    st = gst_structure_copy (st);
-    /* Only remove format info for the cases when we can actually convert */
-    if (!gst_caps_features_is_any (f)
-        && (gst_caps_features_is_equal (f,
-                GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY)
-            || gst_caps_features_is_equal (f, features_format_interlaced)
-            || gst_caps_features_is_equal (f,
-                features_format_interlaced_sysmem))) {
-      gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
-          NULL);
-    }
-
-    gst_caps_append_structure_full (res, st, gst_caps_features_copy (f));
-  }
-
-  return res;
-}
-
-/*
- * This is an incomplete matrix of in formats and a score for the preferred output
- * format.
- *
- *         out: RGB24   RGB16  ARGB  AYUV  YUV444  YUV422 YUV420 YUV411 YUV410  PAL  GRAY
- *  in
- * RGB24          0      2       1     2     2       3      4      5      6      7    8
- * RGB16          1      0       1     2     2       3      4      5      6      7    8
- * ARGB           2      3       0     1     4       5      6      7      8      9    10
- * AYUV           3      4       1     0     2       5      6      7      8      9    10
- * YUV444         2      4       3     1     0       5      6      7      8      9    10
- * YUV422         3      5       4     2     1       0      6      7      8      9    10
- * YUV420         4      6       5     3     2       1      0      7      8      9    10
- * YUV411         4      6       5     3     2       1      7      0      8      9    10
- * YUV410         6      8       7     5     4       3      2      1      0      9    10
- * PAL            1      3       2     6     4       6      7      8      9      0    10
- * GRAY           1      4       3     2     1       5      6      7      8      9    0
- *
- * PAL or GRAY are never preferred, if we can we would convert to PAL instead
- * of GRAY, though
- * less subsampling is preferred and if any, preferably horizontal
- * We would like to keep the alpha, even if we would need to to colorspace conversion
- * or lose depth.
- */
-#define SCORE_FORMAT_CHANGE       1
-#define SCORE_DEPTH_CHANGE        1
-#define SCORE_ALPHA_CHANGE        1
-#define SCORE_CHROMA_W_CHANGE     1
-#define SCORE_CHROMA_H_CHANGE     1
-#define SCORE_PALETTE_CHANGE      1
-
-#define SCORE_COLORSPACE_LOSS     2     /* RGB <-> YUV */
-#define SCORE_DEPTH_LOSS          4     /* change bit depth */
-#define SCORE_ALPHA_LOSS          8     /* lose the alpha channel */
-#define SCORE_CHROMA_W_LOSS      16     /* vertical subsample */
-#define SCORE_CHROMA_H_LOSS      32     /* horizontal subsample */
-#define SCORE_PALETTE_LOSS       64     /* convert to palette format */
-#define SCORE_COLOR_LOSS        128     /* convert to GRAY */
-
-#define COLORSPACE_MASK (GST_VIDEO_FORMAT_FLAG_YUV | \
-                         GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_GRAY)
-#define ALPHA_MASK      (GST_VIDEO_FORMAT_FLAG_ALPHA)
-#define PALETTE_MASK    (GST_VIDEO_FORMAT_FLAG_PALETTE)
-
-/* calculate how much loss a conversion would be */
-static void
-score_value (GstBaseTransform * base, const GstVideoFormatInfo * in_info,
-    const GValue * val, gint * min_loss, const GstVideoFormatInfo ** out_info)
-{
-  const gchar *fname;
-  const GstVideoFormatInfo *t_info;
-  GstVideoFormatFlags in_flags, t_flags;
-  gint loss;
-
-  fname = g_value_get_string (val);
-  t_info = gst_video_format_get_info (gst_video_format_from_string (fname));
-  if (!t_info)
-    return;
-
-  /* accept input format immediately without loss */
-  if (in_info == t_info) {
-    *min_loss = 0;
-    *out_info = t_info;
-    return;
-  }
-
-  loss = SCORE_FORMAT_CHANGE;
-
-  in_flags = GST_VIDEO_FORMAT_INFO_FLAGS (in_info);
-  in_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
-  in_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
-  in_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
-
-  t_flags = GST_VIDEO_FORMAT_INFO_FLAGS (t_info);
-  t_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
-  t_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
-  t_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
-
-  if ((t_flags & PALETTE_MASK) != (in_flags & PALETTE_MASK)) {
-    loss += SCORE_PALETTE_CHANGE;
-    if (t_flags & PALETTE_MASK)
-      loss += SCORE_PALETTE_LOSS;
-  }
-
-  if ((t_flags & COLORSPACE_MASK) != (in_flags & COLORSPACE_MASK)) {
-    loss += SCORE_COLORSPACE_LOSS;
-    if (t_flags & GST_VIDEO_FORMAT_FLAG_GRAY)
-      loss += SCORE_COLOR_LOSS;
-  }
-
-  if ((t_flags & ALPHA_MASK) != (in_flags & ALPHA_MASK)) {
-    loss += SCORE_ALPHA_CHANGE;
-    if (in_flags & ALPHA_MASK)
-      loss += SCORE_ALPHA_LOSS;
-  }
-
-  if ((in_info->h_sub[1]) != (t_info->h_sub[1])) {
-    loss += SCORE_CHROMA_H_CHANGE;
-    if ((in_info->h_sub[1]) < (t_info->h_sub[1]))
-      loss += SCORE_CHROMA_H_LOSS;
-  }
-  if ((in_info->w_sub[1]) != (t_info->w_sub[1])) {
-    loss += SCORE_CHROMA_W_CHANGE;
-    if ((in_info->w_sub[1]) < (t_info->w_sub[1]))
-      loss += SCORE_CHROMA_W_LOSS;
-  }
-
-  if ((in_info->bits) != (t_info->bits)) {
-    loss += SCORE_DEPTH_CHANGE;
-    if ((in_info->bits) > (t_info->bits))
-      loss += SCORE_DEPTH_LOSS;
-  }
-
-  GST_DEBUG_OBJECT (base, "score %s -> %s = %d",
-      GST_VIDEO_FORMAT_INFO_NAME (in_info),
-      GST_VIDEO_FORMAT_INFO_NAME (t_info), loss);
-
-  if (loss < *min_loss) {
-    GST_DEBUG_OBJECT (base, "found new best %d", loss);
-    *out_info = t_info;
-    *min_loss = loss;
-  }
-}
-
-static void
-gst_video_convert_fixate_format (GstBaseTransform * base, GstCaps * caps,
-    GstCaps * result)
-{
-  GstStructure *ins, *outs;
-  const gchar *in_format;
-  const GstVideoFormatInfo *in_info, *out_info = NULL;
-  gint min_loss = G_MAXINT;
-  guint i, capslen;
-
-  ins = gst_caps_get_structure (caps, 0);
-  in_format = gst_structure_get_string (ins, "format");
-  if (!in_format)
-    return;
-
-  GST_DEBUG_OBJECT (base, "source format %s", in_format);
-
-  in_info =
-      gst_video_format_get_info (gst_video_format_from_string (in_format));
-  if (!in_info)
-    return;
-
-  outs = gst_caps_get_structure (result, 0);
-
-  capslen = gst_caps_get_size (result);
-  GST_DEBUG_OBJECT (base, "iterate %d structures", capslen);
-  for (i = 0; i < capslen; i++) {
-    GstStructure *tests;
-    const GValue *format;
-
-    tests = gst_caps_get_structure (result, i);
-    format = gst_structure_get_value (tests, "format");
-    /* should not happen */
-    if (format == NULL)
-      continue;
-
-    if (GST_VALUE_HOLDS_LIST (format)) {
-      gint j, len;
-
-      len = gst_value_list_get_size (format);
-      GST_DEBUG_OBJECT (base, "have %d formats", len);
-      for (j = 0; j < len; j++) {
-        const GValue *val;
-
-        val = gst_value_list_get_value (format, j);
-        if (G_VALUE_HOLDS_STRING (val)) {
-          score_value (base, in_info, val, &min_loss, &out_info);
-          if (min_loss == 0)
-            break;
-        }
-      }
-    } else if (G_VALUE_HOLDS_STRING (format)) {
-      score_value (base, in_info, format, &min_loss, &out_info);
-    }
-  }
-  if (out_info)
-    gst_structure_set (outs, "format", G_TYPE_STRING,
-        GST_VIDEO_FORMAT_INFO_NAME (out_info), NULL);
-}
-
-static gboolean
-subsampling_unchanged (GstVideoInfo * in_info, GstVideoInfo * out_info)
-{
-  gint i;
-  const GstVideoFormatInfo *in_format, *out_format;
-
-  if (GST_VIDEO_INFO_N_COMPONENTS (in_info) !=
-      GST_VIDEO_INFO_N_COMPONENTS (out_info))
-    return FALSE;
-
-  in_format = in_info->finfo;
-  out_format = out_info->finfo;
-
-  for (i = 0; i < GST_VIDEO_INFO_N_COMPONENTS (in_info); i++) {
-    if (GST_VIDEO_FORMAT_INFO_W_SUB (in_format,
-            i) != GST_VIDEO_FORMAT_INFO_W_SUB (out_format, i))
-      return FALSE;
-    if (GST_VIDEO_FORMAT_INFO_H_SUB (in_format,
-            i) != GST_VIDEO_FORMAT_INFO_H_SUB (out_format, i))
-      return FALSE;
-  }
-
-  return TRUE;
-}
-
-static void
-transfer_colorimetry_from_input (GstBaseTransform * trans, GstCaps * in_caps,
-    GstCaps * out_caps)
-{
-  GstStructure *out_caps_s = gst_caps_get_structure (out_caps, 0);
-  GstStructure *in_caps_s = gst_caps_get_structure (in_caps, 0);
-  gboolean have_colorimetry =
-      gst_structure_has_field (out_caps_s, "colorimetry");
-  gboolean have_chroma_site =
-      gst_structure_has_field (out_caps_s, "chroma-site");
-
-  /* If the output already has colorimetry and chroma-site, stop,
-   * otherwise try and transfer what we can from the input caps */
-  if (have_colorimetry && have_chroma_site)
-    return;
-
-  {
-    GstVideoInfo in_info, out_info;
-    const GValue *in_colorimetry =
-        gst_structure_get_value (in_caps_s, "colorimetry");
-
-    if (!gst_video_info_from_caps (&in_info, in_caps)) {
-      GST_WARNING_OBJECT (trans,
-          "Failed to convert sink pad caps to video info");
-      return;
-    }
-    if (!gst_video_info_from_caps (&out_info, out_caps)) {
-      GST_WARNING_OBJECT (trans,
-          "Failed to convert src pad caps to video info");
-      return;
-    }
-
-    if (!have_colorimetry && in_colorimetry != NULL) {
-      if ((GST_VIDEO_INFO_IS_YUV (&out_info)
-              && GST_VIDEO_INFO_IS_YUV (&in_info))
-          || (GST_VIDEO_INFO_IS_RGB (&out_info)
-              && GST_VIDEO_INFO_IS_RGB (&in_info))
-          || (GST_VIDEO_INFO_IS_GRAY (&out_info)
-              && GST_VIDEO_INFO_IS_GRAY (&in_info))) {
-        /* Can transfer the colorimetry intact from the input if it has it */
-        gst_structure_set_value (out_caps_s, "colorimetry", in_colorimetry);
-      } else {
-        gchar *colorimetry_str;
-
-        /* Changing between YUV/RGB - forward primaries and transfer function, but use
-         * default range and matrix.
-         * the primaries is used for conversion between RGB and XYZ (CIE 1931 coordinate).
-         * the transfer function could be another reference (e.g., HDR)
-         */
-        out_info.colorimetry.primaries = in_info.colorimetry.primaries;
-        out_info.colorimetry.transfer = in_info.colorimetry.transfer;
-
-        colorimetry_str =
-            gst_video_colorimetry_to_string (&out_info.colorimetry);
-        gst_caps_set_simple (out_caps, "colorimetry", G_TYPE_STRING,
-            colorimetry_str, NULL);
-        g_free (colorimetry_str);
-      }
-    }
-
-    /* Only YUV output needs chroma-site. If the input was also YUV and had the same chroma
-     * subsampling, transfer the siting. If the sub-sampling is changing, then the planes get
-     * scaled anyway so there's no real reason to prefer the input siting. */
-    if (!have_chroma_site && GST_VIDEO_INFO_IS_YUV (&out_info)) {
-      if (GST_VIDEO_INFO_IS_YUV (&in_info)) {
-        const GValue *in_chroma_site =
-            gst_structure_get_value (in_caps_s, "chroma-site");
-        if (in_chroma_site != NULL
-            && subsampling_unchanged (&in_info, &out_info))
-          gst_structure_set_value (out_caps_s, "chroma-site", in_chroma_site);
-      }
-    }
-  }
-}
-
-static GstCaps *
-gst_video_convert_fixate_caps (GstBaseTransform * trans,
-    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
-{
-  GstCaps *result;
-
-  GST_DEBUG_OBJECT (trans, "trying to fixate othercaps %" GST_PTR_FORMAT
-      " based on caps %" GST_PTR_FORMAT, othercaps, caps);
-
-  result = gst_caps_intersect (othercaps, caps);
-  if (gst_caps_is_empty (result)) {
-    gst_caps_unref (result);
-    result = othercaps;
-  } else {
-    gst_caps_unref (othercaps);
-  }
-
-  GST_DEBUG_OBJECT (trans, "now fixating %" GST_PTR_FORMAT, result);
-
-  result = gst_caps_make_writable (result);
-  gst_video_convert_fixate_format (trans, caps, result);
-
-  /* fixate remaining fields */
-  result = gst_caps_fixate (result);
-
-  if (direction == GST_PAD_SINK) {
-    if (gst_caps_is_subset (caps, result)) {
-      gst_caps_replace (&result, caps);
-    } else {
-      /* Try and preserve input colorimetry / chroma information */
-      transfer_colorimetry_from_input (trans, caps, result);
-    }
-  }
-
-  return result;
-}
-
-static gboolean
-gst_video_convert_filter_meta (GstBaseTransform * trans, GstQuery * query,
-    GType api, const GstStructure * params)
-{
-  /* This element cannot passthrough the crop meta, because it would convert the
-   * wrong sub-region of the image, and worst, our output image may not be large
-   * enough for the crop to be applied later */
-  if (api == GST_VIDEO_CROP_META_API_TYPE)
-    return FALSE;
-
-  /* propose all other metadata upstream */
-  return TRUE;
-}
-
-/* The caps can be transformed into any other caps with format info removed.
- * However, we should prefer passthrough, so if passthrough is possible,
- * put it first in the list. */
-static GstCaps *
-gst_video_convert_transform_caps (GstBaseTransform * btrans,
-    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
-{
-  GstCaps *tmp, *tmp2;
-  GstCaps *result;
-
-  /* Get all possible caps that we can transform to */
-  tmp = gst_video_convert_caps_remove_format_info (caps);
-
-  if (filter) {
-    tmp2 = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
-    gst_caps_unref (tmp);
-    tmp = tmp2;
-  }
-
-  result = tmp;
-
-  GST_DEBUG_OBJECT (btrans, "transformed %" GST_PTR_FORMAT " into %"
-      GST_PTR_FORMAT, caps, result);
-
-  return result;
-}
-
-static gboolean
-gst_video_convert_transform_meta (GstBaseTransform * trans, GstBuffer * outbuf,
-    GstMeta * meta, GstBuffer * inbuf)
-{
-  const GstMetaInfo *info = meta->info;
-  gboolean ret;
-
-  if (gst_meta_api_type_has_tag (info->api, _colorspace_quark)) {
-    /* don't copy colorspace specific metadata, FIXME, we need a MetaTransform
-     * for the colorspace metadata. */
-    ret = FALSE;
-  } else {
-    /* copy other metadata */
-    ret = TRUE;
-  }
-  return ret;
-}
-
-static gboolean
-gst_video_convert_set_info (GstVideoFilter * filter,
-    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
-    GstVideoInfo * out_info)
-{
-  GstVideoConvert *space;
-  GstBaseTransformClass *gstbasetransform_class =
-      GST_BASE_TRANSFORM_GET_CLASS (filter);
-  GstVideoInfo tmp_info;
-
-  space = GST_VIDEO_CONVERT_CAST (filter);
-
-  if (space->convert) {
-    gst_video_converter_free (space->convert);
-    space->convert = NULL;
-  }
-
-  /* these must match */
-  if (in_info->width != out_info->width || in_info->height != out_info->height
-      || in_info->fps_n != out_info->fps_n || in_info->fps_d != out_info->fps_d)
-    goto format_mismatch;
-
-  /* if present, these must match too */
-  if (in_info->par_n != out_info->par_n || in_info->par_d != out_info->par_d)
-    goto format_mismatch;
-
-  /* if present, these must match too */
-  if (in_info->interlace_mode != out_info->interlace_mode)
-    goto format_mismatch;
-
-  /* if the only thing different in the caps is the transfer function, and
-   * we're converting between equivalent transfer functions, do passthrough */
-  tmp_info = *in_info;
-  tmp_info.colorimetry.transfer = out_info->colorimetry.transfer;
-  if (gst_video_info_is_equal (&tmp_info, out_info)) {
-    if (gst_video_transfer_function_is_equivalent (in_info->
-            colorimetry.transfer, in_info->finfo->bits,
-            out_info->colorimetry.transfer, out_info->finfo->bits)) {
-      gstbasetransform_class->passthrough_on_same_caps = FALSE;
-      gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
-      return TRUE;
-    }
-  }
-  gstbasetransform_class->passthrough_on_same_caps = TRUE;
-  gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), FALSE);
-
-  space->convert = gst_video_converter_new (in_info, out_info,
-      gst_structure_new ("GstVideoConvertConfig",
-          GST_VIDEO_CONVERTER_OPT_DITHER_METHOD, GST_TYPE_VIDEO_DITHER_METHOD,
-          space->dither,
-          GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION, G_TYPE_UINT,
-          space->dither_quantization,
-          GST_VIDEO_CONVERTER_OPT_CHROMA_RESAMPLER_METHOD,
-          GST_TYPE_VIDEO_RESAMPLER_METHOD, space->chroma_resampler,
-          GST_VIDEO_CONVERTER_OPT_ALPHA_MODE,
-          GST_TYPE_VIDEO_ALPHA_MODE, space->alpha_mode,
-          GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE,
-          G_TYPE_DOUBLE, space->alpha_value,
-          GST_VIDEO_CONVERTER_OPT_CHROMA_MODE,
-          GST_TYPE_VIDEO_CHROMA_MODE, space->chroma_mode,
-          GST_VIDEO_CONVERTER_OPT_MATRIX_MODE,
-          GST_TYPE_VIDEO_MATRIX_MODE, space->matrix_mode,
-          GST_VIDEO_CONVERTER_OPT_GAMMA_MODE,
-          GST_TYPE_VIDEO_GAMMA_MODE, space->gamma_mode,
-          GST_VIDEO_CONVERTER_OPT_PRIMARIES_MODE,
-          GST_TYPE_VIDEO_PRIMARIES_MODE, space->primaries_mode,
-          GST_VIDEO_CONVERTER_OPT_THREADS, G_TYPE_UINT,
-          space->n_threads, NULL));
-  if (space->convert == NULL)
-    goto no_convert;
-
-  GST_DEBUG_OBJECT (filter, "converting format %s -> %s",
-      gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (in_info)),
-      gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (out_info)));
-
-  return TRUE;
-
-  /* ERRORS */
-format_mismatch:
-  {
-    GST_ERROR_OBJECT (space, "input and output formats do not match");
-    return FALSE;
-  }
-no_convert:
-  {
-    GST_ERROR_OBJECT (space, "could not create converter");
-    return FALSE;
-  }
-}
-
-static void
-gst_video_convert_finalize (GObject * obj)
-{
-  GstVideoConvert *space = GST_VIDEO_CONVERT (obj);
-
-  if (space->convert) {
-    gst_video_converter_free (space->convert);
-  }
-
-  G_OBJECT_CLASS (parent_class)->finalize (obj);
-}
-
-static void
-gst_video_convert_class_init (GstVideoConvertClass * klass)
-{
-  GObjectClass *gobject_class = (GObjectClass *) klass;
-  GstElementClass *gstelement_class = (GstElementClass *) klass;
-  GstBaseTransformClass *gstbasetransform_class =
-      (GstBaseTransformClass *) klass;
-  GstVideoFilterClass *gstvideofilter_class = (GstVideoFilterClass *) klass;
-
-  gobject_class->set_property = gst_video_convert_set_property;
-  gobject_class->get_property = gst_video_convert_get_property;
-  gobject_class->finalize = gst_video_convert_finalize;
-
-  gst_element_class_add_static_pad_template (gstelement_class,
-      &gst_video_convert_src_template);
-  gst_element_class_add_static_pad_template (gstelement_class,
-      &gst_video_convert_sink_template);
-
-  gst_element_class_set_static_metadata (gstelement_class,
-      "Colorspace converter", "Filter/Converter/Video",
-      "Converts video from one colorspace to another",
-      "GStreamer maintainers <gstreamer-devel@lists.freedesktop.org>");
-
-  gstbasetransform_class->transform_caps =
-      GST_DEBUG_FUNCPTR (gst_video_convert_transform_caps);
-  gstbasetransform_class->fixate_caps =
-      GST_DEBUG_FUNCPTR (gst_video_convert_fixate_caps);
-  gstbasetransform_class->filter_meta =
-      GST_DEBUG_FUNCPTR (gst_video_convert_filter_meta);
-  gstbasetransform_class->transform_meta =
-      GST_DEBUG_FUNCPTR (gst_video_convert_transform_meta);
-
-  gstbasetransform_class->passthrough_on_same_caps = TRUE;
-
-  gstvideofilter_class->set_info =
-      GST_DEBUG_FUNCPTR (gst_video_convert_set_info);
-  gstvideofilter_class->transform_frame =
-      GST_DEBUG_FUNCPTR (gst_video_convert_transform_frame);
-
-  g_object_class_install_property (gobject_class, PROP_DITHER,
-      g_param_spec_enum ("dither", "Dither", "Apply dithering while converting",
-          gst_video_dither_method_get_type (), DEFAULT_PROP_DITHER,
-          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-  g_object_class_install_property (gobject_class, PROP_DITHER_QUANTIZATION,
-      g_param_spec_uint ("dither-quantization", "Dither Quantize",
-          "Quantizer to use", 0, G_MAXUINT, DEFAULT_PROP_DITHER_QUANTIZATION,
-          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-  g_object_class_install_property (gobject_class, PROP_CHROMA_RESAMPLER,
-      g_param_spec_enum ("chroma-resampler", "Chroma resampler",
-          "Chroma resampler method", gst_video_resampler_method_get_type (),
-          DEFAULT_PROP_CHROMA_RESAMPLER,
-          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-  g_object_class_install_property (gobject_class, PROP_ALPHA_MODE,
-      g_param_spec_enum ("alpha-mode", "Alpha Mode",
-          "Alpha Mode to use", gst_video_alpha_mode_get_type (),
-          DEFAULT_PROP_ALPHA_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-  g_object_class_install_property (gobject_class, PROP_ALPHA_VALUE,
-      g_param_spec_double ("alpha-value", "Alpha Value",
-          "Alpha Value to use", 0.0, 1.0,
-          DEFAULT_PROP_ALPHA_VALUE,
-          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-  g_object_class_install_property (gobject_class, PROP_CHROMA_MODE,
-      g_param_spec_enum ("chroma-mode", "Chroma Mode", "Chroma Resampling Mode",
-          gst_video_chroma_mode_get_type (), DEFAULT_PROP_CHROMA_MODE,
-          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-  g_object_class_install_property (gobject_class, PROP_MATRIX_MODE,
-      g_param_spec_enum ("matrix-mode", "Matrix Mode", "Matrix Conversion Mode",
-          gst_video_matrix_mode_get_type (), DEFAULT_PROP_MATRIX_MODE,
-          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-  g_object_class_install_property (gobject_class, PROP_GAMMA_MODE,
-      g_param_spec_enum ("gamma-mode", "Gamma Mode", "Gamma Conversion Mode",
-          gst_video_gamma_mode_get_type (), DEFAULT_PROP_GAMMA_MODE,
-          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-  g_object_class_install_property (gobject_class, PROP_PRIMARIES_MODE,
-      g_param_spec_enum ("primaries-mode", "Primaries Mode",
-          "Primaries Conversion Mode", gst_video_primaries_mode_get_type (),
-          DEFAULT_PROP_PRIMARIES_MODE,
-          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-  g_object_class_install_property (gobject_class, PROP_N_THREADS,
-      g_param_spec_uint ("n-threads", "Threads",
-          "Maximum number of threads to use", 0, G_MAXUINT,
-          DEFAULT_PROP_N_THREADS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-}
-
-static void
-gst_video_convert_init (GstVideoConvert * space)
-{
-  space->dither = DEFAULT_PROP_DITHER;
-  space->dither_quantization = DEFAULT_PROP_DITHER_QUANTIZATION;
-  space->chroma_resampler = DEFAULT_PROP_CHROMA_RESAMPLER;
-  space->alpha_mode = DEFAULT_PROP_ALPHA_MODE;
-  space->alpha_value = DEFAULT_PROP_ALPHA_VALUE;
-  space->chroma_mode = DEFAULT_PROP_CHROMA_MODE;
-  space->matrix_mode = DEFAULT_PROP_MATRIX_MODE;
-  space->gamma_mode = DEFAULT_PROP_GAMMA_MODE;
-  space->primaries_mode = DEFAULT_PROP_PRIMARIES_MODE;
-  space->n_threads = DEFAULT_PROP_N_THREADS;
-}
-
-void
-gst_video_convert_set_property (GObject * object, guint property_id,
-    const GValue * value, GParamSpec * pspec)
-{
-  GstVideoConvert *csp;
-
-  csp = GST_VIDEO_CONVERT (object);
-
-  switch (property_id) {
-    case PROP_DITHER:
-      csp->dither = g_value_get_enum (value);
-      break;
-    case PROP_CHROMA_RESAMPLER:
-      csp->chroma_resampler = g_value_get_enum (value);
-      break;
-    case PROP_ALPHA_MODE:
-      csp->alpha_mode = g_value_get_enum (value);
-      break;
-    case PROP_ALPHA_VALUE:
-      csp->alpha_value = g_value_get_double (value);
-      break;
-    case PROP_CHROMA_MODE:
-      csp->chroma_mode = g_value_get_enum (value);
-      break;
-    case PROP_MATRIX_MODE:
-      csp->matrix_mode = g_value_get_enum (value);
-      break;
-    case PROP_GAMMA_MODE:
-      csp->gamma_mode = g_value_get_enum (value);
-      break;
-    case PROP_PRIMARIES_MODE:
-      csp->primaries_mode = g_value_get_enum (value);
-      break;
-    case PROP_DITHER_QUANTIZATION:
-      csp->dither_quantization = g_value_get_uint (value);
-      break;
-    case PROP_N_THREADS:
-      csp->n_threads = g_value_get_uint (value);
-      break;
-    default:
-      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
-      break;
-  }
-}
-
-void
-gst_video_convert_get_property (GObject * object, guint property_id,
-    GValue * value, GParamSpec * pspec)
-{
-  GstVideoConvert *csp;
-
-  csp = GST_VIDEO_CONVERT (object);
-
-  switch (property_id) {
-    case PROP_DITHER:
-      g_value_set_enum (value, csp->dither);
-      break;
-    case PROP_CHROMA_RESAMPLER:
-      g_value_set_enum (value, csp->chroma_resampler);
-      break;
-    case PROP_ALPHA_MODE:
-      g_value_set_enum (value, csp->alpha_mode);
-      break;
-    case PROP_ALPHA_VALUE:
-      g_value_set_double (value, csp->alpha_value);
-      break;
-    case PROP_CHROMA_MODE:
-      g_value_set_enum (value, csp->chroma_mode);
-      break;
-    case PROP_MATRIX_MODE:
-      g_value_set_enum (value, csp->matrix_mode);
-      break;
-    case PROP_GAMMA_MODE:
-      g_value_set_enum (value, csp->gamma_mode);
-      break;
-    case PROP_PRIMARIES_MODE:
-      g_value_set_enum (value, csp->primaries_mode);
-      break;
-    case PROP_DITHER_QUANTIZATION:
-      g_value_set_uint (value, csp->dither_quantization);
-      break;
-    case PROP_N_THREADS:
-      g_value_set_uint (value, csp->n_threads);
-      break;
-    default:
-      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
-      break;
-  }
-}
-
-static GstFlowReturn
-gst_video_convert_transform_frame (GstVideoFilter * filter,
-    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
-{
-  GstVideoConvert *space;
-
-  space = GST_VIDEO_CONVERT_CAST (filter);
-
-  GST_CAT_DEBUG_OBJECT (CAT_PERFORMANCE, filter,
-      "doing colorspace conversion from %s -> to %s",
-      GST_VIDEO_INFO_NAME (&filter->in_info),
-      GST_VIDEO_INFO_NAME (&filter->out_info));
-
-  gst_video_converter_frame (space->convert, in_frame, out_frame);
-
-  return GST_FLOW_OK;
-}
-
-static gboolean
-plugin_init (GstPlugin * plugin)
-{
-  GST_DEBUG_CATEGORY_INIT (videoconvert_debug, "videoconvert", 0,
-      "Colorspace Converter");
-
-  GST_DEBUG_CATEGORY_GET (CAT_PERFORMANCE, "GST_PERFORMANCE");
-
-  _colorspace_quark = g_quark_from_static_string ("colorspace");
-
-  features_format_interlaced =
-      gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL);
-  features_format_interlaced_sysmem =
-      gst_caps_features_copy (features_format_interlaced);
-  gst_caps_features_add (features_format_interlaced_sysmem,
-      GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY);
-
-  return GST_ELEMENT_REGISTER (videoconvert, plugin);
-}
-
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
-    GST_VERSION_MINOR,
-    videoconvert, "Colorspace conversion", plugin_init, VERSION, GST_LICENSE,
-    GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/subprojects/gst-plugins-base/gst/videoconvert/meson.build b/subprojects/gst-plugins-base/gst/videoconvert/meson.build
deleted file mode 100644
index 2798f8d018..0000000000
--- a/subprojects/gst-plugins-base/gst/videoconvert/meson.build
+++ /dev/null
@@ -1,14 +0,0 @@
-vconvert_sources = [
-  'gstvideoconvert.c',
-]
-
-gstvideoconvert = library('gstvideoconvert',
-  vconvert_sources,
-  c_args : gst_plugins_base_args,
-  include_directories: [configinc, libsinc],
-  dependencies : [video_dep],
-  install : true,
-  install_dir : plugins_install_dir,
-)
-pkgconfig.generate(gstvideoconvert, install_dir : plugins_pkgconfig_install_dir)
-plugins += [gstvideoconvert]
diff --git a/subprojects/gst-plugins-base/gst/videoconvertscale/README b/subprojects/gst-plugins-base/gst/videoconvertscale/README
new file mode 100644
index 0000000000..731f17db3a
--- /dev/null
+++ b/subprojects/gst-plugins-base/gst/videoconvertscale/README
@@ -0,0 +1,5 @@
+- test different strides using:
+gst-launch-1.0 -v videotestsrc ! video/x-raw,width=320,height=240,format=UYVY ! videoconvertscale ! video/x-raw,width=328,height=240 ! xvimagesink
+gst-launch-1.0 -v videotestsrc ! video/x-raw,width=320,height=240,format=UYVY ! videoconvertscale ! video/x-raw,width=324,height=240 ! xvimagesink
+gst-launch-1.0 -v videotestsrc ! video/x-raw,width=320,height=240,format=UYVY ! videoconvertscale ! video/x-raw,width=322,height=240 ! xvimagesink
+gst-launch-1.0 -v videotestsrc ! video/x-raw,width=320,height=240,format=UYVY ! videoconvertscale ! video/x-raw,width=321,height=240 ! xvimagesink
diff --git a/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvert.c b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvert.c
new file mode 100644
index 0000000000..8424bbfe2b
--- /dev/null
+++ b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvert.c
@@ -0,0 +1,59 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * This file:
+ * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2010 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-videoconvert
+ * @title: videoconvert
+ *
+ * Convert video frames between a great variety of video formats.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! video/x-raw,format=YUY2 ! videoconvert ! autovideosink
+ * ]|
+ *  This will output a test video (generated in YUY2 format) in a video
+ * window. If the selected video sink does not support YUY2, videoconvert will
+ * automatically convert the video to a format understood by the video sink.
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstvideoconvert.h"
+
+G_DEFINE_TYPE (GstVideoConvert, gst_video_convert,
+    GST_TYPE_VIDEO_CONVERT_SCALE);
+GST_ELEMENT_REGISTER_DEFINE (videoconvert, "videoconvert",
+    GST_RANK_MARGINAL, gst_video_convert_get_type ());
+
+static void
+gst_video_convert_class_init (GstVideoConvertClass * klass)
+{
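+  /* nothing to do here: all of the conversion and scaling logic lives in the
+   * GstVideoConvertScale base class */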
+}
+
+static void
+gst_video_convert_init (GstVideoConvert * self)
+{
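+  /* no instance state of its own; the base class holds all properties */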
+}
diff --git a/subprojects/gst-plugins-base/gst/videoconvert/gstvideoconvert.h b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvert.h
similarity index 57%
rename from subprojects/gst-plugins-base/gst/videoconvert/gstvideoconvert.h
rename to subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvert.h
index 0f093f4f7d..143b6d4265 100644
--- a/subprojects/gst-plugins-base/gst/videoconvert/gstvideoconvert.h
+++ b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvert.h
@@ -19,43 +19,20 @@
  * Boston, MA 02110-1301, USA.
  */
 
-#ifndef __GST_VIDEOCONVERT_H__
-#define __GST_VIDEOCONVERT_H__
+#pragma once
 
-#include <gst/gst.h>
-#include <gst/video/video.h>
-#include <gst/video/gstvideofilter.h>
+#include "gstvideoconvertscale.h"
 
 G_BEGIN_DECLS
 
-#define GST_TYPE_VIDEO_CONVERT (gst_video_convert_get_type())
-#define GST_VIDEO_CONVERT_CAST(obj) ((GstVideoConvert *)(obj))
 G_DECLARE_FINAL_TYPE (GstVideoConvert, gst_video_convert, GST, VIDEO_CONVERT,
-    GstVideoFilter)
+    GstVideoConvertScale);
 
-/**
- * GstVideoConvert:
- *
- * Opaque object data structure.
- */
-struct _GstVideoConvert {
-  GstVideoFilter element;
-
-  GstVideoConverter *convert;
-  GstVideoDitherMethod dither;
-  guint dither_quantization;
-  GstVideoResamplerMethod chroma_resampler;
-  GstVideoAlphaMode alpha_mode;
-  GstVideoChromaMode chroma_mode;
-  GstVideoMatrixMode matrix_mode;
-  GstVideoGammaMode gamma_mode;
-  GstVideoPrimariesMode primaries_mode;
-  gdouble alpha_value;
-  gint n_threads;
+struct _GstVideoConvert
+{
+  GstVideoConvertScale parent;
 };
 
 GST_ELEMENT_REGISTER_DECLARE (videoconvert);
 
 G_END_DECLS
-
-#endif /* __GST_VIDEOCONVERT_H__ */
diff --git a/subprojects/gst-plugins-base/gst/videoscale/gstvideoscale.c b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvertscale.c
similarity index 54%
rename from subprojects/gst-plugins-base/gst/videoscale/gstvideoscale.c
rename to subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvertscale.c
index 1b20c7311b..df168caf82 100644
--- a/subprojects/gst-plugins-base/gst/videoscale/gstvideoscale.c
+++ b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvertscale.c
@@ -1,6 +1,8 @@
 /* GStreamer
  * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
  * Copyright (C) 2005-2012 David Schleef <ds@schleef.org>
+ * Copyright (C) 2022 Thibault Saunier <tsaunier@igalia.com>
  *
  * This library is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Library General Public
@@ -19,14 +21,14 @@
  */
 
 /**
- * SECTION:element-videoscale
- * @title: videoscale
- * @see_also: videorate, videoconvert
+ * SECTION:element-videoconvertscale
+ * @title: videoconvertscale
  *
- * This element resizes video frames. By default the element will try to
- * negotiate to the same size on the source and sinkpad so that no scaling
- * is needed. It is therefore safe to insert this element in a pipeline to
- * get more robust behaviour without any cost if no scaling is needed.
+ * This element resizes video frames and can convert between colorspaces. By
+ * default the element will try to negotiate to the same size on the source
+ * and sink pads so that no scaling is needed. It is therefore safe to insert
+ * this element in a pipeline to get more robust behaviour without any cost if
+ * no scaling or conversion is needed.
  *
  * This element supports a wide range of color spaces including various YUV and
  * RGB formats and is therefore generally able to operate anywhere in a
@@ -34,17 +36,18 @@
  *
  * ## Example pipelines
  * |[
- * gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videoconvert ! videoscale ! autovideosink
+ * gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videoconvertscale ! autovideosink
  * ]|
  *  Decode an Ogg/Theora and display the video. If the video sink chosen
- * cannot perform scaling, the video scaling will be performed by videoscale
+ * cannot perform scaling, the video scaling will be performed by videoconvertscale
  * when you resize the video window.
  * To create the test Ogg/Theora file refer to the documentation of theoraenc.
  * |[
- * gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videoconvert ! videoscale ! video/x-raw,width=100 ! autovideosink
+ * gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videoconvertscale ! video/x-raw,width=100 ! autovideosink
  * ]|
  *  Decode an Ogg/Theora and display the video with a width of 100.
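+ *
+ * A single videoconvertscale instance can change both the resolution and the
+ * pixel format in one step, for example:
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! video/x-raw,format=NV12,width=320,height=240 ! videoconvertscale ! video/x-raw,format=I420,width=640,height=480 ! autovideosink
+ * ]|
+ *  Produce a 320x240 NV12 test stream and display it converted to 640x480 I420.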
  *
+ * Since: 1.22
  */
 
 /*
@@ -78,21 +81,63 @@
 #include <gst/video/gstvideometa.h>
 #include <gst/video/gstvideopool.h>
 
-#include "gstvideoscale.h"
+#include "gstvideoconvertscale.h"
 
-#define GST_CAT_DEFAULT video_scale_debug
-GST_DEBUG_CATEGORY_STATIC (video_scale_debug);
+typedef struct
+{
+  /* properties */
+  GstVideoScaleMethod method;
+  gboolean add_borders;
+  gdouble sharpness;
+  gdouble sharpen;
+  gint submethod;
+  gdouble envelope;
+  gint n_threads;
+  GstVideoDitherMethod dither;
+  guint dither_quantization;
+  GstVideoResamplerMethod chroma_resampler;
+  GstVideoAlphaMode alpha_mode;
+  GstVideoChromaMode chroma_mode;
+  GstVideoMatrixMode matrix_mode;
+  GstVideoGammaMode gamma_mode;
+  GstVideoPrimariesMode primaries_mode;
+  gdouble alpha_value;
+
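+  /* converter doing the actual scaling and colorspace conversion, created in
+   * set_info() */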
+  GstVideoConverter *convert;
+
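+  /* borders added to preserve the display aspect ratio */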
+  gint borders_h;
+  gint borders_w;
+} GstVideoConvertScalePrivate;
+
+#define gst_video_convert_scale_parent_class parent_class
+G_DEFINE_TYPE_WITH_PRIVATE (GstVideoConvertScale, gst_video_convert_scale,
+    GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (videoconvertscale, "videoconvertscale",
+    GST_RANK_SECONDARY, GST_TYPE_VIDEO_CONVERT_SCALE);
+
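+/* shortcut to fetch the private data of a GstVideoConvertScale (or subclass)
+ * instance */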
+#define PRIV(self) gst_video_convert_scale_get_instance_private(((GstVideoConvertScale*) self))
+
+#define GST_CAT_DEFAULT video_convertscale_debug
+GST_DEBUG_CATEGORY_STATIC (video_convertscale_debug);
 GST_DEBUG_CATEGORY_STATIC (CAT_PERFORMANCE);
 
 #define DEFAULT_PROP_METHOD       GST_VIDEO_SCALE_BILINEAR
 #define DEFAULT_PROP_ADD_BORDERS  TRUE
 #define DEFAULT_PROP_SHARPNESS    1.0
 #define DEFAULT_PROP_SHARPEN      0.0
-#define DEFAULT_PROP_DITHER       FALSE
-#define DEFAULT_PROP_SUBMETHOD    1
+#define DEFAULT_PROP_DITHER      GST_VIDEO_DITHER_BAYER
 #define DEFAULT_PROP_ENVELOPE     2.0
-#define DEFAULT_PROP_GAMMA_DECODE FALSE
-#define DEFAULT_PROP_N_THREADS    1
+#define DEFAULT_PROP_DITHER_QUANTIZATION 1
+#define DEFAULT_PROP_CHROMA_RESAMPLER GST_VIDEO_RESAMPLER_METHOD_LINEAR
+#define DEFAULT_PROP_ALPHA_MODE GST_VIDEO_ALPHA_MODE_COPY
+#define DEFAULT_PROP_ALPHA_VALUE 1.0
+#define DEFAULT_PROP_CHROMA_MODE GST_VIDEO_CHROMA_MODE_FULL
+#define DEFAULT_PROP_MATRIX_MODE GST_VIDEO_MATRIX_MODE_FULL
+#define DEFAULT_PROP_GAMMA_MODE GST_VIDEO_GAMMA_MODE_NONE
+#define DEFAULT_PROP_PRIMARIES_MODE GST_VIDEO_PRIMARIES_MODE_NONE
+#define DEFAULT_PROP_N_THREADS 1
+
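+/* meta tag used to recognise colorspace-specific metas, which cannot be
+ * copied across a conversion */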
+static GQuark _colorspace_quark;
 
 enum
 {
@@ -104,8 +149,15 @@ enum
   PROP_DITHER,
   PROP_SUBMETHOD,
   PROP_ENVELOPE,
-  PROP_GAMMA_DECODE,
-  PROP_N_THREADS
+  PROP_N_THREADS,
+  PROP_DITHER_QUANTIZATION,
+  PROP_CHROMA_RESAMPLER,
+  PROP_ALPHA_MODE,
+  PROP_ALPHA_VALUE,
+  PROP_CHROMA_MODE,
+  PROP_MATRIX_MODE,
+  PROP_GAMMA_MODE,
+  PROP_PRIMARIES_MODE,
 };
 
 #undef GST_VIDEO_SIZE_RANGE
@@ -123,7 +175,7 @@ enum
 
 #define GST_VIDEO_FORMATS GST_VIDEO_FORMATS_ALL
 
-static GstStaticCaps gst_video_scale_format_caps =
+static GstStaticCaps gst_video_convert_scale_format_caps =
     GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS) ";"
     GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS));
 
@@ -158,75 +210,98 @@ gst_video_scale_method_get_type (void)
 }
 
 static GstCaps *
-gst_video_scale_get_capslist (void)
+gst_video_convert_scale_get_capslist (void)
 {
   static GstCaps *caps = NULL;
   static gsize inited = 0;
 
   if (g_once_init_enter (&inited)) {
-    caps = gst_static_caps_get (&gst_video_scale_format_caps);
+    caps = gst_static_caps_get (&gst_video_convert_scale_format_caps);
     g_once_init_leave (&inited, 1);
   }
   return caps;
 }
 
 static GstPadTemplate *
-gst_video_scale_src_template_factory (void)
+gst_video_convert_scale_src_template_factory (void)
 {
   return gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
-      gst_video_scale_get_capslist ());
+      gst_video_convert_scale_get_capslist ());
 }
 
 static GstPadTemplate *
-gst_video_scale_sink_template_factory (void)
+gst_video_convert_scale_sink_template_factory (void)
 {
   return gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
-      gst_video_scale_get_capslist ());
+      gst_video_convert_scale_get_capslist ());
 }
 
 
-static void gst_video_scale_finalize (GstVideoScale * videoscale);
-static gboolean gst_video_scale_src_event (GstBaseTransform * trans,
+static void gst_video_convert_scale_finalize (GstVideoConvertScale * self);
+static gboolean gst_video_convert_scale_src_event (GstBaseTransform * trans,
     GstEvent * event);
 
 /* base transform vmethods */
-static GstCaps *gst_video_scale_transform_caps (GstBaseTransform * trans,
-    GstPadDirection direction, GstCaps * caps, GstCaps * filter);
-static GstCaps *gst_video_scale_fixate_caps (GstBaseTransform * base,
+static GstCaps *gst_video_convert_scale_transform_caps (GstBaseTransform *
+    trans, GstPadDirection direction, GstCaps * caps, GstCaps * filter);
+static GstCaps *gst_video_convert_scale_fixate_caps (GstBaseTransform * base,
     GstPadDirection direction, GstCaps * caps, GstCaps * othercaps);
-static gboolean gst_video_scale_transform_meta (GstBaseTransform * trans,
-    GstBuffer * outbuf, GstMeta * meta, GstBuffer * inbuf);
+static gboolean gst_video_convert_scale_transform_meta (GstBaseTransform *
+    trans, GstBuffer * outbuf, GstMeta * meta, GstBuffer * inbuf);
 
-static gboolean gst_video_scale_set_info (GstVideoFilter * filter,
+static gboolean gst_video_convert_scale_set_info (GstVideoFilter * filter,
     GstCaps * in, GstVideoInfo * in_info, GstCaps * out,
     GstVideoInfo * out_info);
-static GstFlowReturn gst_video_scale_transform_frame (GstVideoFilter * filter,
-    GstVideoFrame * in, GstVideoFrame * out);
+static GstFlowReturn gst_video_convert_scale_transform_frame (GstVideoFilter *
+    filter, GstVideoFrame * in, GstVideoFrame * out);
 
-static void gst_video_scale_set_property (GObject * object, guint prop_id,
-    const GValue * value, GParamSpec * pspec);
-static void gst_video_scale_get_property (GObject * object, guint prop_id,
-    GValue * value, GParamSpec * pspec);
-
-#define gst_video_scale_parent_class parent_class
-G_DEFINE_TYPE (GstVideoScale, gst_video_scale, GST_TYPE_VIDEO_FILTER);
-GST_ELEMENT_REGISTER_DEFINE (videoscale, "videoscale",
-    GST_RANK_NONE, GST_TYPE_VIDEO_SCALE);
+static void gst_video_convert_scale_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_video_convert_scale_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec);
 
 static GstCapsFeatures *features_format_interlaced,
     *features_format_interlaced_sysmem;
 
+static gboolean
+gst_video_convert_scale_filter_meta (GstBaseTransform * trans, GstQuery * query,
+    GType api, const GstStructure * params)
+{
+  /* This element cannot pass through the crop meta, because it would convert
+   * the wrong sub-region of the image and, worse, our output image may not be
+   * large enough for the crop to be applied later */
+  if (api == GST_VIDEO_CROP_META_API_TYPE)
+    return FALSE;
+
+  /* propose all other metadata upstream */
+  return TRUE;
+}
+
 static void
-gst_video_scale_class_init (GstVideoScaleClass * klass)
+gst_video_convert_scale_class_init (GstVideoConvertScaleClass * klass)
 {
   GObjectClass *gobject_class = (GObjectClass *) klass;
   GstElementClass *element_class = (GstElementClass *) klass;
   GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
   GstVideoFilterClass *filter_class = (GstVideoFilterClass *) klass;
 
-  gobject_class->finalize = (GObjectFinalizeFunc) gst_video_scale_finalize;
-  gobject_class->set_property = gst_video_scale_set_property;
-  gobject_class->get_property = gst_video_scale_get_property;
+  GST_DEBUG_CATEGORY_INIT (video_convertscale_debug, "videoconvertscale", 0,
+      "videoconvertscale element");
+  GST_DEBUG_CATEGORY_GET (CAT_PERFORMANCE, "GST_PERFORMANCE");
+
+  features_format_interlaced =
+      gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL);
+  features_format_interlaced_sysmem =
+      gst_caps_features_copy (features_format_interlaced);
+  gst_caps_features_add (features_format_interlaced_sysmem,
+      GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY);
+
+  _colorspace_quark = g_quark_from_static_string ("colorspace");
+
+  gobject_class->finalize =
+      (GObjectFinalizeFunc) gst_video_convert_scale_finalize;
+  gobject_class->set_property = gst_video_convert_scale_set_property;
+  gobject_class->get_property = gst_video_convert_scale_get_property;
 
   g_object_class_install_property (gobject_class, PROP_METHOD,
       g_param_spec_enum ("method", "method", "method",
@@ -250,216 +325,259 @@ gst_video_scale_class_init (GstVideoScaleClass * klass)
           G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
 
   g_object_class_install_property (gobject_class, PROP_DITHER,
-      g_param_spec_boolean ("dither", "Dither",
-          "Add dither (only used for Lanczos method)",
-          DEFAULT_PROP_DITHER,
-          G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
-#if 0
-  /* I am hiding submethod for now, since it's poorly named, poorly
-   * documented, and will probably just get people into trouble. */
-  g_object_class_install_property (gobject_class, PROP_SUBMETHOD,
-      g_param_spec_int ("submethod", "submethod",
-          "submethod", 0, 3, DEFAULT_PROP_SUBMETHOD,
-          G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-#endif
-
+      g_param_spec_enum ("dither", "Dither", "Apply dithering while converting",
+          gst_video_dither_method_get_type (), DEFAULT_PROP_DITHER,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
   g_object_class_install_property (gobject_class, PROP_ENVELOPE,
       g_param_spec_double ("envelope", "Envelope",
           "Size of filter envelope", 1.0, 5.0, DEFAULT_PROP_ENVELOPE,
           G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
 
-  g_object_class_install_property (gobject_class, PROP_GAMMA_DECODE,
-      g_param_spec_boolean ("gamma-decode", "Gamma Decode",
-          "Decode gamma before scaling", DEFAULT_PROP_GAMMA_DECODE,
-          G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
   g_object_class_install_property (gobject_class, PROP_N_THREADS,
       g_param_spec_uint ("n-threads", "Threads",
           "Maximum number of threads to use", 0, G_MAXUINT,
           DEFAULT_PROP_N_THREADS,
           G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
 
+  g_object_class_install_property (gobject_class, PROP_DITHER_QUANTIZATION,
+      g_param_spec_uint ("dither-quantization", "Dither Quantize",
+          "Quantizer to use", 0, G_MAXUINT, DEFAULT_PROP_DITHER_QUANTIZATION,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_CHROMA_RESAMPLER,
+      g_param_spec_enum ("chroma-resampler", "Chroma resampler",
+          "Chroma resampler method", gst_video_resampler_method_get_type (),
+          DEFAULT_PROP_CHROMA_RESAMPLER,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_ALPHA_MODE,
+      g_param_spec_enum ("alpha-mode", "Alpha Mode",
+          "Alpha Mode to use", gst_video_alpha_mode_get_type (),
+          DEFAULT_PROP_ALPHA_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_ALPHA_VALUE,
+      g_param_spec_double ("alpha-value", "Alpha Value",
+          "Alpha Value to use", 0.0, 1.0,
+          DEFAULT_PROP_ALPHA_VALUE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_CHROMA_MODE,
+      g_param_spec_enum ("chroma-mode", "Chroma Mode", "Chroma Resampling Mode",
+          gst_video_chroma_mode_get_type (), DEFAULT_PROP_CHROMA_MODE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_MATRIX_MODE,
+      g_param_spec_enum ("matrix-mode", "Matrix Mode", "Matrix Conversion Mode",
+          gst_video_matrix_mode_get_type (), DEFAULT_PROP_MATRIX_MODE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_GAMMA_MODE,
+      g_param_spec_enum ("gamma-mode", "Gamma Mode", "Gamma Conversion Mode",
+          gst_video_gamma_mode_get_type (), DEFAULT_PROP_GAMMA_MODE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_PRIMARIES_MODE,
+      g_param_spec_enum ("primaries-mode", "Primaries Mode",
+          "Primaries Conversion Mode", gst_video_primaries_mode_get_type (),
+          DEFAULT_PROP_PRIMARIES_MODE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
   gst_element_class_set_static_metadata (element_class,
-      "Video scaler", "Filter/Converter/Video/Scaler",
-      "Resizes video", "Wim Taymans <wim.taymans@gmail.com>");
+      "Video colorspace converter and scaler",
+      "Filter/Converter/Video/Scaler/Colorspace",
+      "Resizes video and allow color conversion",
+      "Wim Taymans <wim.taymans@gmail.com>");
 
   gst_element_class_add_pad_template (element_class,
-      gst_video_scale_sink_template_factory ());
+      gst_video_convert_scale_sink_template_factory ());
   gst_element_class_add_pad_template (element_class,
-      gst_video_scale_src_template_factory ());
-
-  trans_class->transform_caps =
-      GST_DEBUG_FUNCPTR (gst_video_scale_transform_caps);
-  trans_class->fixate_caps = GST_DEBUG_FUNCPTR (gst_video_scale_fixate_caps);
-  trans_class->src_event = GST_DEBUG_FUNCPTR (gst_video_scale_src_event);
-  trans_class->transform_meta =
-      GST_DEBUG_FUNCPTR (gst_video_scale_transform_meta);
-
-  filter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_scale_set_info);
-  filter_class->transform_frame =
-      GST_DEBUG_FUNCPTR (gst_video_scale_transform_frame);
+      gst_video_convert_scale_src_template_factory ());
 
   _size_quark = g_quark_from_static_string (GST_META_TAG_VIDEO_SIZE_STR);
   _scale_quark = gst_video_meta_transform_scale_get_quark ();
 
   gst_type_mark_as_plugin_api (GST_TYPE_VIDEO_SCALE_METHOD, 0);
+  trans_class->transform_caps =
+      GST_DEBUG_FUNCPTR (gst_video_convert_scale_transform_caps);
+  trans_class->fixate_caps =
+      GST_DEBUG_FUNCPTR (gst_video_convert_scale_fixate_caps);
+  trans_class->filter_meta =
+      GST_DEBUG_FUNCPTR (gst_video_convert_scale_filter_meta);
+  trans_class->src_event =
+      GST_DEBUG_FUNCPTR (gst_video_convert_scale_src_event);
+  trans_class->transform_meta =
+      GST_DEBUG_FUNCPTR (gst_video_convert_scale_transform_meta);
+
+  filter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_convert_scale_set_info);
+  filter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_video_convert_scale_transform_frame);
 }
 
 static void
-gst_video_scale_init (GstVideoScale * videoscale)
+gst_video_convert_scale_init (GstVideoConvertScale * self)
 {
-  videoscale->method = DEFAULT_PROP_METHOD;
-  videoscale->add_borders = DEFAULT_PROP_ADD_BORDERS;
-  videoscale->submethod = DEFAULT_PROP_SUBMETHOD;
-  videoscale->sharpness = DEFAULT_PROP_SHARPNESS;
-  videoscale->sharpen = DEFAULT_PROP_SHARPEN;
-  videoscale->dither = DEFAULT_PROP_DITHER;
-  videoscale->envelope = DEFAULT_PROP_ENVELOPE;
-  videoscale->gamma_decode = DEFAULT_PROP_GAMMA_DECODE;
-  videoscale->n_threads = DEFAULT_PROP_N_THREADS;
+  GstVideoConvertScalePrivate *priv = PRIV (self);
+
+  priv->method = DEFAULT_PROP_METHOD;
+  priv->add_borders = DEFAULT_PROP_ADD_BORDERS;
+  priv->sharpness = DEFAULT_PROP_SHARPNESS;
+  priv->sharpen = DEFAULT_PROP_SHARPEN;
+  priv->envelope = DEFAULT_PROP_ENVELOPE;
+  priv->n_threads = DEFAULT_PROP_N_THREADS;
+  priv->dither = DEFAULT_PROP_DITHER;
+  priv->dither_quantization = DEFAULT_PROP_DITHER_QUANTIZATION;
+  priv->chroma_resampler = DEFAULT_PROP_CHROMA_RESAMPLER;
+  priv->alpha_mode = DEFAULT_PROP_ALPHA_MODE;
+  priv->alpha_value = DEFAULT_PROP_ALPHA_VALUE;
+  priv->chroma_mode = DEFAULT_PROP_CHROMA_MODE;
+  priv->matrix_mode = DEFAULT_PROP_MATRIX_MODE;
+  priv->gamma_mode = DEFAULT_PROP_GAMMA_MODE;
+  priv->primaries_mode = DEFAULT_PROP_PRIMARIES_MODE;
 }
 
 static void
-gst_video_scale_finalize (GstVideoScale * videoscale)
+gst_video_convert_scale_finalize (GstVideoConvertScale * self)
 {
-  if (videoscale->convert)
-    gst_video_converter_free (videoscale->convert);
+  GstVideoConvertScalePrivate *priv = PRIV (self);
 
-  G_OBJECT_CLASS (parent_class)->finalize (G_OBJECT (videoscale));
+  if (priv->convert)
+    gst_video_converter_free (priv->convert);
+
+  G_OBJECT_CLASS (parent_class)->finalize (G_OBJECT (self));
 }
 
 static void
-gst_video_scale_set_property (GObject * object, guint prop_id,
+gst_video_convert_scale_set_property (GObject * object, guint prop_id,
     const GValue * value, GParamSpec * pspec)
 {
-  GstVideoScale *vscale = GST_VIDEO_SCALE (object);
+  GstVideoConvertScalePrivate *priv = PRIV (object);
 
+  GST_OBJECT_LOCK (object);
   switch (prop_id) {
     case PROP_METHOD:
-      GST_OBJECT_LOCK (vscale);
-      vscale->method = g_value_get_enum (value);
-      GST_OBJECT_UNLOCK (vscale);
+      priv->method = g_value_get_enum (value);
       break;
     case PROP_ADD_BORDERS:
-      GST_OBJECT_LOCK (vscale);
-      vscale->add_borders = g_value_get_boolean (value);
-      GST_OBJECT_UNLOCK (vscale);
-      gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM_CAST (vscale));
-      break;
+      priv->add_borders = g_value_get_boolean (value);
+      GST_OBJECT_UNLOCK (object);
+
+      gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM_CAST (object));
+      return;
     case PROP_SHARPNESS:
-      GST_OBJECT_LOCK (vscale);
-      vscale->sharpness = g_value_get_double (value);
-      GST_OBJECT_UNLOCK (vscale);
+      priv->sharpness = g_value_get_double (value);
       break;
     case PROP_SHARPEN:
-      GST_OBJECT_LOCK (vscale);
-      vscale->sharpen = g_value_get_double (value);
-      GST_OBJECT_UNLOCK (vscale);
-      break;
-    case PROP_DITHER:
-      GST_OBJECT_LOCK (vscale);
-      vscale->dither = g_value_get_boolean (value);
-      GST_OBJECT_UNLOCK (vscale);
+      priv->sharpen = g_value_get_double (value);
       break;
     case PROP_SUBMETHOD:
-      GST_OBJECT_LOCK (vscale);
-      vscale->submethod = g_value_get_int (value);
-      GST_OBJECT_UNLOCK (vscale);
+      priv->submethod = g_value_get_int (value);
       break;
     case PROP_ENVELOPE:
-      GST_OBJECT_LOCK (vscale);
-      vscale->envelope = g_value_get_double (value);
-      GST_OBJECT_UNLOCK (vscale);
-      break;
-    case PROP_GAMMA_DECODE:
-      GST_OBJECT_LOCK (vscale);
-      vscale->gamma_decode = g_value_get_boolean (value);
-      GST_OBJECT_UNLOCK (vscale);
+      priv->envelope = g_value_get_double (value);
       break;
     case PROP_N_THREADS:
-      GST_OBJECT_LOCK (vscale);
-      vscale->n_threads = g_value_get_uint (value);
-      GST_OBJECT_UNLOCK (vscale);
+      priv->n_threads = g_value_get_uint (value);
+      break;
+    case PROP_DITHER:
+      priv->dither = g_value_get_enum (value);
+      break;
+    case PROP_CHROMA_RESAMPLER:
+      priv->chroma_resampler = g_value_get_enum (value);
+      break;
+    case PROP_ALPHA_MODE:
+      priv->alpha_mode = g_value_get_enum (value);
+      break;
+    case PROP_ALPHA_VALUE:
+      priv->alpha_value = g_value_get_double (value);
+      break;
+    case PROP_CHROMA_MODE:
+      priv->chroma_mode = g_value_get_enum (value);
+      break;
+    case PROP_MATRIX_MODE:
+      priv->matrix_mode = g_value_get_enum (value);
+      break;
+    case PROP_GAMMA_MODE:
+      priv->gamma_mode = g_value_get_enum (value);
+      break;
+    case PROP_PRIMARIES_MODE:
+      priv->primaries_mode = g_value_get_enum (value);
+      break;
+    case PROP_DITHER_QUANTIZATION:
+      priv->dither_quantization = g_value_get_uint (value);
       break;
     default:
       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
       break;
   }
+  GST_OBJECT_UNLOCK (object);
 }
 
 static void
-gst_video_scale_get_property (GObject * object, guint prop_id, GValue * value,
-    GParamSpec * pspec)
+gst_video_convert_scale_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
 {
-  GstVideoScale *vscale = GST_VIDEO_SCALE (object);
+  GstVideoConvertScalePrivate *priv = PRIV (object);
 
+  GST_OBJECT_LOCK (object);
   switch (prop_id) {
     case PROP_METHOD:
-      GST_OBJECT_LOCK (vscale);
-      g_value_set_enum (value, vscale->method);
-      GST_OBJECT_UNLOCK (vscale);
+      g_value_set_enum (value, priv->method);
       break;
     case PROP_ADD_BORDERS:
-      GST_OBJECT_LOCK (vscale);
-      g_value_set_boolean (value, vscale->add_borders);
-      GST_OBJECT_UNLOCK (vscale);
+      g_value_set_boolean (value, priv->add_borders);
       break;
     case PROP_SHARPNESS:
-      GST_OBJECT_LOCK (vscale);
-      g_value_set_double (value, vscale->sharpness);
-      GST_OBJECT_UNLOCK (vscale);
+      g_value_set_double (value, priv->sharpness);
       break;
     case PROP_SHARPEN:
-      GST_OBJECT_LOCK (vscale);
-      g_value_set_double (value, vscale->sharpen);
-      GST_OBJECT_UNLOCK (vscale);
-      break;
-    case PROP_DITHER:
-      GST_OBJECT_LOCK (vscale);
-      g_value_set_boolean (value, vscale->dither);
-      GST_OBJECT_UNLOCK (vscale);
+      g_value_set_double (value, priv->sharpen);
       break;
     case PROP_SUBMETHOD:
-      GST_OBJECT_LOCK (vscale);
-      g_value_set_int (value, vscale->submethod);
-      GST_OBJECT_UNLOCK (vscale);
+      g_value_set_int (value, priv->submethod);
       break;
     case PROP_ENVELOPE:
-      GST_OBJECT_LOCK (vscale);
-      g_value_set_double (value, vscale->envelope);
-      GST_OBJECT_UNLOCK (vscale);
-      break;
-    case PROP_GAMMA_DECODE:
-      GST_OBJECT_LOCK (vscale);
-      g_value_set_boolean (value, vscale->gamma_decode);
-      GST_OBJECT_UNLOCK (vscale);
+      g_value_set_double (value, priv->envelope);
       break;
     case PROP_N_THREADS:
-      GST_OBJECT_LOCK (vscale);
-      g_value_set_uint (value, vscale->n_threads);
-      GST_OBJECT_UNLOCK (vscale);
+      g_value_set_uint (value, priv->n_threads);
+      break;
+    case PROP_DITHER:
+      g_value_set_enum (value, priv->dither);
+      break;
+    case PROP_CHROMA_RESAMPLER:
+      g_value_set_enum (value, priv->chroma_resampler);
+      break;
+    case PROP_ALPHA_MODE:
+      g_value_set_enum (value, priv->alpha_mode);
+      break;
+    case PROP_ALPHA_VALUE:
+      g_value_set_double (value, priv->alpha_value);
+      break;
+    case PROP_CHROMA_MODE:
+      g_value_set_enum (value, priv->chroma_mode);
+      break;
+    case PROP_MATRIX_MODE:
+      g_value_set_enum (value, priv->matrix_mode);
+      break;
+    case PROP_GAMMA_MODE:
+      g_value_set_enum (value, priv->gamma_mode);
+      break;
+    case PROP_PRIMARIES_MODE:
+      g_value_set_enum (value, priv->primaries_mode);
+      break;
+    case PROP_DITHER_QUANTIZATION:
+      g_value_set_uint (value, priv->dither_quantization);
       break;
     default:
       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
       break;
   }
+  GST_OBJECT_UNLOCK (object);
 }
 
 static GstCaps *
-gst_video_scale_transform_caps (GstBaseTransform * trans,
-    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
+gst_video_convert_caps_remove_format_and_rangify_size_info (GstCaps * caps)
 {
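+  /* For caps in (possibly interlaced) system memory, drop the fields this
+   * element can change (format, colorimetry, chroma-site) and turn width,
+   * height and pixel-aspect-ratio into full ranges; other caps are kept
+   * as-is */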
   GstCaps *ret;
   GstStructure *structure;
   GstCapsFeatures *features;
   gint i, n;
 
-  GST_DEBUG_OBJECT (trans,
-      "Transforming caps %" GST_PTR_FORMAT " in direction %s", caps,
-      (direction == GST_PAD_SINK) ? "sink" : "src");
-
   ret = gst_caps_new_empty ();
+
   n = gst_caps_get_size (caps);
   for (i = 0; i < n; i++) {
     structure = gst_caps_get_structure (caps, i);
@@ -470,10 +588,8 @@ gst_video_scale_transform_caps (GstBaseTransform * trans,
     if (i > 0 && gst_caps_is_subset_structure_full (ret, structure, features))
       continue;
 
-    /* make copy */
     structure = gst_structure_copy (structure);
-
-    /* If the features are non-sysmem we can only do passthrough */
+    /* Only remove format info for the cases when we can actually convert */
     if (!gst_caps_features_is_any (features)
         && (gst_caps_features_is_equal (features,
                 GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY)
@@ -482,17 +598,32 @@ gst_video_scale_transform_caps (GstBaseTransform * trans,
                 features_format_interlaced_sysmem))) {
       gst_structure_set (structure, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
           "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
-
       /* if pixel aspect ratio, make a range of it */
       if (gst_structure_has_field (structure, "pixel-aspect-ratio")) {
         gst_structure_set (structure, "pixel-aspect-ratio",
             GST_TYPE_FRACTION_RANGE, 1, G_MAXINT, G_MAXINT, 1, NULL);
       }
+      gst_structure_remove_fields (structure, "format", "colorimetry",
+          "chroma-site", NULL);
     }
     gst_caps_append_structure_full (ret, structure,
         gst_caps_features_copy (features));
   }
 
+  return ret;
+}
+
+static GstCaps *
+gst_video_convert_scale_transform_caps (GstBaseTransform * trans,
+    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
+{
+  GstCaps *ret;
+
+  GST_DEBUG_OBJECT (trans,
+      "Transforming caps %" GST_PTR_FORMAT " in direction %s", caps,
+      (direction == GST_PAD_SINK) ? "sink" : "src");
+
+  ret = gst_video_convert_caps_remove_format_and_rangify_size_info (caps);
   if (filter) {
     GstCaps *intersection;
 
@@ -508,18 +639,18 @@ gst_video_scale_transform_caps (GstBaseTransform * trans,
 }
 
 static gboolean
-gst_video_scale_transform_meta (GstBaseTransform * trans, GstBuffer * outbuf,
-    GstMeta * meta, GstBuffer * inbuf)
+gst_video_convert_scale_transform_meta (GstBaseTransform * trans,
+    GstBuffer * outbuf, GstMeta * meta, GstBuffer * inbuf)
 {
   GstVideoFilter *videofilter = GST_VIDEO_FILTER (trans);
   const GstMetaInfo *info = meta->info;
   const gchar *const *tags;
   const gchar *const *curr = NULL;
   gboolean should_copy = TRUE;
-  const gchar *const valid_tags[] = { GST_META_TAG_VIDEO_STR,
-    GST_META_TAG_VIDEO_COLORSPACE_STR,
+  const gchar *const valid_tags[] = {
+    GST_META_TAG_VIDEO_STR,
     GST_META_TAG_VIDEO_ORIENTATION_STR,
-    GST_META_TAG_VIDEO_SIZE_STR
+    GST_META_TAG_VIDEO_SIZE_STR,
   };
 
   tags = gst_meta_api_type_get_tags (info->api);
@@ -529,6 +660,12 @@ gst_video_scale_transform_meta (GstBaseTransform * trans, GstBuffer * outbuf,
     return TRUE;
   }
 
+  if (gst_meta_api_type_has_tag (info->api, _colorspace_quark)) {
+    /* don't copy colorspace specific metadata, FIXME, we need a MetaTransform
+     * for the colorspace metadata. */
+    return FALSE;
+  }
+
-  /* We are only changing size, we can preserve other metas tagged as
-     orientation and colorspace */
+  /* We are only changing size and colorspace; we can preserve other metas
+     tagged as orientation */
   for (curr = tags; *curr; ++curr) {
@@ -561,11 +698,18 @@ gst_video_scale_transform_meta (GstBaseTransform * trans, GstBuffer * outbuf,
 }
 
 static gboolean
-gst_video_scale_set_info (GstVideoFilter * filter, GstCaps * in,
+gst_video_convert_scale_set_info (GstVideoFilter * filter, GstCaps * in,
     GstVideoInfo * in_info, GstCaps * out, GstVideoInfo * out_info)
 {
-  GstVideoScale *videoscale = GST_VIDEO_SCALE (filter);
+  GstVideoConvertScale *self = GST_VIDEO_CONVERT_SCALE (filter);
+  GstVideoConvertScalePrivate *priv = PRIV (self);
   gint from_dar_n, from_dar_d, to_dar_n, to_dar_d;
+  GstVideoInfo tmp_info;
+
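+  /* free the converter from any previous negotiation; it is re-created below
+   * if conversion or scaling is needed */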
+  if (priv->convert) {
+    gst_video_converter_free (priv->convert);
+    priv->convert = NULL;
+  }
 
   if (!gst_util_fraction_multiply (in_info->width,
           in_info->height, in_info->par_n, in_info->par_d, &from_dar_n,
@@ -579,9 +723,9 @@ gst_video_scale_set_info (GstVideoFilter * filter, GstCaps * in,
     to_dar_n = to_dar_d = -1;
   }
 
-  videoscale->borders_w = videoscale->borders_h = 0;
+  priv->borders_w = priv->borders_h = 0;
   if (to_dar_n != from_dar_n || to_dar_d != from_dar_d) {
-    if (videoscale->add_borders) {
+    if (priv->add_borders) {
       gint n, d, to_h, to_w;
 
       if (from_dar_n != -1 && from_dar_d != -1
@@ -589,33 +733,44 @@ gst_video_scale_set_info (GstVideoFilter * filter, GstCaps * in,
               out_info->par_d, out_info->par_n, &n, &d)) {
         to_h = gst_util_uint64_scale_int (out_info->width, d, n);
         if (to_h <= out_info->height) {
-          videoscale->borders_h = out_info->height - to_h;
-          videoscale->borders_w = 0;
+          priv->borders_h = out_info->height - to_h;
+          priv->borders_w = 0;
         } else {
           to_w = gst_util_uint64_scale_int (out_info->height, n, d);
           g_assert (to_w <= out_info->width);
-          videoscale->borders_h = 0;
-          videoscale->borders_w = out_info->width - to_w;
+          priv->borders_h = 0;
+          priv->borders_w = out_info->width - to_w;
         }
       } else {
-        GST_WARNING_OBJECT (videoscale, "Can't calculate borders");
+        GST_WARNING_OBJECT (self, "Can't calculate borders");
       }
     } else {
-      GST_WARNING_OBJECT (videoscale, "Can't keep DAR!");
+      GST_WARNING_OBJECT (self, "Can't keep DAR!");
     }
   }
 
-  if (in_info->width == out_info->width && in_info->height == out_info->height
-      && videoscale->borders_w == 0 && videoscale->borders_h == 0) {
-    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
+  /* if present, these must match */
+  if (in_info->interlace_mode != out_info->interlace_mode)
+    goto format_mismatch;
+
+  /* if the only thing different in the caps is the transfer function, and
+   * we're converting between equivalent transfer functions, do passthrough */
+  tmp_info = *in_info;
+  tmp_info.colorimetry.transfer = out_info->colorimetry.transfer;
+  if (gst_video_info_is_equal (&tmp_info, out_info)
+      && gst_video_transfer_function_is_equivalent (in_info->
+          colorimetry.transfer, in_info->finfo->bits,
+          out_info->colorimetry.transfer, out_info->finfo->bits)) {
+    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
   } else {
     GstStructure *options;
     GST_CAT_DEBUG_OBJECT (CAT_PERFORMANCE, filter, "setup videoscaling");
     gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), FALSE);
 
-    options = gst_structure_new_empty ("videoscale");
+    options = gst_structure_new_empty ("videoconvertscale");
 
-    switch (videoscale->method) {
+    switch (priv->method) {
       case GST_VIDEO_SCALE_NEAREST:
         gst_structure_set (options,
             GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
@@ -686,48 +841,380 @@ gst_video_scale_set_info (GstVideoFilter * filter, GstCaps * in,
         break;
     }
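+    /* pass both the scaler settings and the colorspace-conversion properties
+     * on to the video converter */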
     gst_structure_set (options,
-        GST_VIDEO_RESAMPLER_OPT_ENVELOPE, G_TYPE_DOUBLE, videoscale->envelope,
-        GST_VIDEO_RESAMPLER_OPT_SHARPNESS, G_TYPE_DOUBLE, videoscale->sharpness,
-        GST_VIDEO_RESAMPLER_OPT_SHARPEN, G_TYPE_DOUBLE, videoscale->sharpen,
-        GST_VIDEO_CONVERTER_OPT_DEST_X, G_TYPE_INT, videoscale->borders_w / 2,
-        GST_VIDEO_CONVERTER_OPT_DEST_Y, G_TYPE_INT, videoscale->borders_h / 2,
+        GST_VIDEO_RESAMPLER_OPT_ENVELOPE, G_TYPE_DOUBLE, priv->envelope,
+        GST_VIDEO_RESAMPLER_OPT_SHARPNESS, G_TYPE_DOUBLE, priv->sharpness,
+        GST_VIDEO_RESAMPLER_OPT_SHARPEN, G_TYPE_DOUBLE, priv->sharpen,
+        GST_VIDEO_CONVERTER_OPT_DEST_X, G_TYPE_INT, priv->borders_w / 2,
+        GST_VIDEO_CONVERTER_OPT_DEST_Y, G_TYPE_INT, priv->borders_h / 2,
         GST_VIDEO_CONVERTER_OPT_DEST_WIDTH, G_TYPE_INT,
-        out_info->width - videoscale->borders_w,
-        GST_VIDEO_CONVERTER_OPT_DEST_HEIGHT, G_TYPE_INT,
-        out_info->height - videoscale->borders_h,
+        out_info->width - priv->borders_w, GST_VIDEO_CONVERTER_OPT_DEST_HEIGHT,
+        G_TYPE_INT, out_info->height - priv->borders_h,
+        GST_VIDEO_CONVERTER_OPT_DITHER_METHOD, GST_TYPE_VIDEO_DITHER_METHOD,
+        priv->dither, GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION, G_TYPE_UINT,
+        priv->dither_quantization,
+        GST_VIDEO_CONVERTER_OPT_CHROMA_RESAMPLER_METHOD,
+        GST_TYPE_VIDEO_RESAMPLER_METHOD, priv->chroma_resampler,
+        GST_VIDEO_CONVERTER_OPT_ALPHA_MODE, GST_TYPE_VIDEO_ALPHA_MODE,
+        priv->alpha_mode, GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE, G_TYPE_DOUBLE,
+        priv->alpha_value, GST_VIDEO_CONVERTER_OPT_CHROMA_MODE,
+        GST_TYPE_VIDEO_CHROMA_MODE, priv->chroma_mode,
         GST_VIDEO_CONVERTER_OPT_MATRIX_MODE, GST_TYPE_VIDEO_MATRIX_MODE,
-        GST_VIDEO_MATRIX_MODE_NONE, GST_VIDEO_CONVERTER_OPT_DITHER_METHOD,
-        GST_TYPE_VIDEO_DITHER_METHOD, GST_VIDEO_DITHER_NONE,
-        GST_VIDEO_CONVERTER_OPT_CHROMA_MODE, GST_TYPE_VIDEO_CHROMA_MODE,
-        GST_VIDEO_CHROMA_MODE_NONE,
-        GST_VIDEO_CONVERTER_OPT_THREADS, G_TYPE_UINT, videoscale->n_threads,
-        NULL);
+        priv->matrix_mode, GST_VIDEO_CONVERTER_OPT_GAMMA_MODE,
+        GST_TYPE_VIDEO_GAMMA_MODE, priv->gamma_mode,
+        GST_VIDEO_CONVERTER_OPT_PRIMARIES_MODE, GST_TYPE_VIDEO_PRIMARIES_MODE,
+        priv->primaries_mode, GST_VIDEO_CONVERTER_OPT_THREADS, G_TYPE_UINT,
+        priv->n_threads, NULL);
 
-    if (videoscale->gamma_decode) {
-      gst_structure_set (options,
-          GST_VIDEO_CONVERTER_OPT_GAMMA_MODE, GST_TYPE_VIDEO_GAMMA_MODE,
-          GST_VIDEO_GAMMA_MODE_REMAP, NULL);
-    }
-
-    if (videoscale->convert)
-      gst_video_converter_free (videoscale->convert);
-    videoscale->convert = gst_video_converter_new (in_info, out_info, options);
+    priv->convert = gst_video_converter_new (in_info, out_info, options);
+    if (priv->convert == NULL)
+      goto no_convert;
   }
 
-  GST_DEBUG_OBJECT (videoscale, "from=%dx%d (par=%d/%d dar=%d/%d), size %"
+  GST_DEBUG_OBJECT (filter, "converting format %s -> %s",
+      gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (in_info)),
+      gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (out_info)));
+  GST_DEBUG_OBJECT (self, "from=%dx%d (par=%d/%d dar=%d/%d), size %"
       G_GSIZE_FORMAT " -> to=%dx%d (par=%d/%d dar=%d/%d borders=%d:%d), "
       "size %" G_GSIZE_FORMAT,
       in_info->width, in_info->height, in_info->par_n, in_info->par_d,
       from_dar_n, from_dar_d, in_info->size, out_info->width,
       out_info->height, out_info->par_n, out_info->par_d, to_dar_n, to_dar_d,
-      videoscale->borders_w, videoscale->borders_h, out_info->size);
+      priv->borders_w, priv->borders_h, out_info->size);
+
+  return TRUE;
+
+  /* ERRORS */
+format_mismatch:
+  {
+    GST_ERROR_OBJECT (self, "input and output formats do not match");
+    return FALSE;
+  }
+no_convert:
+  {
+    GST_ERROR_OBJECT (self, "could not create converter");
+    return FALSE;
+  }
+}
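
The new passthrough shortcut above leans on gst_video_transfer_function_is_equivalent(); below is a minimal sketch (not part of the patch) that queries it directly for one pair of transfer functions at 8 bits, the kind of check that decides whether a transfer-only caps difference still allows passthrough. Which pairs count as equivalent depends on the library and the bit depth, so the program just prints whatever it reports.

#include <gst/video/video.h>

int
main (void)
{
  /* Ask the library whether converting between these two transfer curves
   * would actually change 8-bit pixel values. */
  gboolean eq = gst_video_transfer_function_is_equivalent (
      GST_VIDEO_TRANSFER_BT601, 8, GST_VIDEO_TRANSFER_BT709, 8);

  g_print ("BT601 vs BT709 at 8 bits: %s\n",
      eq ? "equivalent (passthrough possible)" : "different");
  return 0;
}
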
+
+/*
+ * This is an incomplete matrix of input formats and a score for each
+ * preferred output format.
+ *
+ *         out: RGB24   RGB16  ARGB  AYUV  YUV444  YUV422 YUV420 YUV411 YUV410  PAL  GRAY
+ *  in
+ * RGB24          0      2       1     2     2       3      4      5      6      7    8
+ * RGB16          1      0       1     2     2       3      4      5      6      7    8
+ * ARGB           2      3       0     1     4       5      6      7      8      9    10
+ * AYUV           3      4       1     0     2       5      6      7      8      9    10
+ * YUV444         2      4       3     1     0       5      6      7      8      9    10
+ * YUV422         3      5       4     2     1       0      6      7      8      9    10
+ * YUV420         4      6       5     3     2       1      0      7      8      9    10
+ * YUV411         4      6       5     3     2       1      7      0      8      9    10
+ * YUV410         6      8       7     5     4       3      2      1      0      9    10
+ * PAL            1      3       2     6     4       6      7      8      9      0    10
+ * GRAY           1      4       3     2     1       5      6      7      8      9    0
+ *
+ * PAL or GRAY are never preferred; if we had to pick one, we would convert to
+ * PAL instead of GRAY, though.
+ * Less subsampling is preferred and, if any, preferably horizontal.
+ * We would like to keep the alpha, even if we would need to do colorspace
+ * conversion or lose depth.
+ */
+#define SCORE_FORMAT_CHANGE       1
+#define SCORE_DEPTH_CHANGE        1
+#define SCORE_ALPHA_CHANGE        1
+#define SCORE_CHROMA_W_CHANGE     1
+#define SCORE_CHROMA_H_CHANGE     1
+#define SCORE_PALETTE_CHANGE      1
+
+#define SCORE_COLORSPACE_LOSS     2     /* RGB <-> YUV */
+#define SCORE_DEPTH_LOSS          4     /* change bit depth */
+#define SCORE_ALPHA_LOSS          8     /* lose the alpha channel */
+#define SCORE_CHROMA_W_LOSS      16     /* horizontal subsample */
+#define SCORE_CHROMA_H_LOSS      32     /* vertical subsample */
+#define SCORE_PALETTE_LOSS       64     /* convert to palette format */
+#define SCORE_COLOR_LOSS        128     /* convert to GRAY */
+
+#define COLORSPACE_MASK (GST_VIDEO_FORMAT_FLAG_YUV | \
+                         GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_GRAY)
+#define ALPHA_MASK      (GST_VIDEO_FORMAT_FLAG_ALPHA)
+#define PALETTE_MASK    (GST_VIDEO_FORMAT_FLAG_PALETTE)
+
+/* calculate how much loss a conversion would be */
+static void
+score_value (GstBaseTransform * base, const GstVideoFormatInfo * in_info,
+    const GValue * val, gint * min_loss, const GstVideoFormatInfo ** out_info)
+{
+  const gchar *fname;
+  const GstVideoFormatInfo *t_info;
+  GstVideoFormatFlags in_flags, t_flags;
+  gint loss;
+
+  fname = g_value_get_string (val);
+  t_info = gst_video_format_get_info (gst_video_format_from_string (fname));
+  if (!t_info || t_info->format == GST_VIDEO_FORMAT_UNKNOWN)
+    return;
+
+  /* accept input format immediately without loss */
+  if (in_info == t_info) {
+    *min_loss = 0;
+    *out_info = t_info;
+    return;
+  }
+
+  loss = SCORE_FORMAT_CHANGE;
+
+  in_flags = GST_VIDEO_FORMAT_INFO_FLAGS (in_info);
+  in_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
+  in_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
+  in_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
+
+  t_flags = GST_VIDEO_FORMAT_INFO_FLAGS (t_info);
+  t_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
+  t_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
+  t_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
+
+  if ((t_flags & PALETTE_MASK) != (in_flags & PALETTE_MASK)) {
+    loss += SCORE_PALETTE_CHANGE;
+    if (t_flags & PALETTE_MASK)
+      loss += SCORE_PALETTE_LOSS;
+  }
+
+  if ((t_flags & COLORSPACE_MASK) != (in_flags & COLORSPACE_MASK)) {
+    loss += SCORE_COLORSPACE_LOSS;
+    if (t_flags & GST_VIDEO_FORMAT_FLAG_GRAY)
+      loss += SCORE_COLOR_LOSS;
+  }
+
+  if ((t_flags & ALPHA_MASK) != (in_flags & ALPHA_MASK)) {
+    loss += SCORE_ALPHA_CHANGE;
+    if (in_flags & ALPHA_MASK)
+      loss += SCORE_ALPHA_LOSS;
+  }
+
+  if ((in_info->h_sub[1]) != (t_info->h_sub[1])) {
+    loss += SCORE_CHROMA_H_CHANGE;
+    if ((in_info->h_sub[1]) < (t_info->h_sub[1]))
+      loss += SCORE_CHROMA_H_LOSS;
+  }
+  if ((in_info->w_sub[1]) != (t_info->w_sub[1])) {
+    loss += SCORE_CHROMA_W_CHANGE;
+    if ((in_info->w_sub[1]) < (t_info->w_sub[1]))
+      loss += SCORE_CHROMA_W_LOSS;
+  }
+
+  if ((in_info->bits) != (t_info->bits)) {
+    loss += SCORE_DEPTH_CHANGE;
+    if ((in_info->bits) > (t_info->bits))
+      loss += SCORE_DEPTH_LOSS;
+  }
+
+  GST_DEBUG_OBJECT (base, "score %s -> %s = %d",
+      GST_VIDEO_FORMAT_INFO_NAME (in_info),
+      GST_VIDEO_FORMAT_INFO_NAME (t_info), loss);
+
+  if (loss < *min_loss) {
+    GST_DEBUG_OBJECT (base, "found new best %d", loss);
+    *out_info = t_info;
+    *min_loss = loss;
+  }
+}
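
For a concrete feel for the scoring: with the constants above, I420 -> NV12 costs only SCORE_FORMAT_CHANGE (1), while I420 -> RGBx adds the colorspace loss plus the two chroma "change" penalties (1 + 2 + 1 + 1 = 5), so NV12 wins. This small sketch (not part of the patch) dumps the fields score_value() compares for those formats.

#include <gst/video/video.h>

static void
dump_format (const gchar * name)
{
  const GstVideoFormatInfo *info =
      gst_video_format_get_info (gst_video_format_from_string (name));

  /* flags, bit depth and chroma subsampling of the second (U/Cb) component
   * are exactly what score_value() feeds into the loss computation */
  g_print ("%-5s flags=0x%05x bits=%u w_sub[1]=%u h_sub[1]=%u\n", name,
      (guint) GST_VIDEO_FORMAT_INFO_FLAGS (info), info->bits,
      info->w_sub[1], info->h_sub[1]);
}

int
main (void)
{
  dump_format ("I420");         /* YUV, 8-bit, 2x2 subsampled       */
  dump_format ("NV12");         /* same properties -> loss 1        */
  dump_format ("RGBx");         /* RGB, no subsampling -> loss 5    */
  return 0;
}
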
+
+static void
+gst_video_convert_scale_fixate_format (GstBaseTransform * base, GstCaps * caps,
+    GstCaps * result)
+{
+  GstStructure *ins, *outs;
+  const gchar *in_format;
+  const GstVideoFormatInfo *in_info, *out_info = NULL;
+  gint min_loss = G_MAXINT;
+  guint i, capslen;
+
+  ins = gst_caps_get_structure (caps, 0);
+  in_format = gst_structure_get_string (ins, "format");
+  if (!in_format)
+    return;
+
+  GST_DEBUG_OBJECT (base, "source format %s", in_format);
+
+  in_info =
+      gst_video_format_get_info (gst_video_format_from_string (in_format));
+  if (!in_info)
+    return;
+
+  outs = gst_caps_get_structure (result, 0);
+
+  capslen = gst_caps_get_size (result);
+  GST_DEBUG_OBJECT (base, "iterate %d structures", capslen);
+  for (i = 0; i < capslen; i++) {
+    GstStructure *tests;
+    const GValue *format;
+
+    tests = gst_caps_get_structure (result, i);
+    format = gst_structure_get_value (tests, "format");
+    gst_structure_remove_fields (tests, "height", "width", "pixel-aspect-ratio",
+        "display-aspect-ratio", NULL);
+    /* should not happen */
+    if (format == NULL)
+      continue;
+
+    if (GST_VALUE_HOLDS_LIST (format)) {
+      gint j, len;
+
+      len = gst_value_list_get_size (format);
+      GST_DEBUG_OBJECT (base, "have %d formats", len);
+      for (j = 0; j < len; j++) {
+        const GValue *val;
+
+        val = gst_value_list_get_value (format, j);
+        if (G_VALUE_HOLDS_STRING (val)) {
+          score_value (base, in_info, val, &min_loss, &out_info);
+          if (min_loss == 0)
+            break;
+        }
+      }
+    } else if (G_VALUE_HOLDS_STRING (format)) {
+      score_value (base, in_info, format, &min_loss, &out_info);
+    }
+  }
+  if (out_info)
+    gst_structure_set (outs, "format", G_TYPE_STRING,
+        GST_VIDEO_FORMAT_INFO_NAME (out_info), NULL);
+}
+
+static gboolean
+subsampling_unchanged (GstVideoInfo * in_info, GstVideoInfo * out_info)
+{
+  gint i;
+  const GstVideoFormatInfo *in_format, *out_format;
+
+  if (GST_VIDEO_INFO_N_COMPONENTS (in_info) !=
+      GST_VIDEO_INFO_N_COMPONENTS (out_info))
+    return FALSE;
+
+  in_format = in_info->finfo;
+  out_format = out_info->finfo;
+
+  for (i = 0; i < GST_VIDEO_INFO_N_COMPONENTS (in_info); i++) {
+    if (GST_VIDEO_FORMAT_INFO_W_SUB (in_format,
+            i) != GST_VIDEO_FORMAT_INFO_W_SUB (out_format, i))
+      return FALSE;
+    if (GST_VIDEO_FORMAT_INFO_H_SUB (in_format,
+            i) != GST_VIDEO_FORMAT_INFO_H_SUB (out_format, i))
+      return FALSE;
+  }
 
   return TRUE;
 }
 
+static void
+transfer_colorimetry_from_input (GstBaseTransform * trans, GstCaps * in_caps,
+    GstCaps * out_caps)
+{
+  GstStructure *out_caps_s = gst_caps_get_structure (out_caps, 0);
+  GstStructure *in_caps_s = gst_caps_get_structure (in_caps, 0);
+  gboolean have_colorimetry =
+      gst_structure_has_field (out_caps_s, "colorimetry");
+  gboolean have_chroma_site =
+      gst_structure_has_field (out_caps_s, "chroma-site");
+
+  /* If the output already has colorimetry and chroma-site, stop,
+   * otherwise try and transfer what we can from the input caps */
+  if (have_colorimetry && have_chroma_site)
+    return;
+
+  {
+    GstVideoInfo in_info, out_info;
+    const GValue *in_colorimetry =
+        gst_structure_get_value (in_caps_s, "colorimetry");
+
+    if (!gst_video_info_from_caps (&in_info, in_caps)) {
+      GST_WARNING_OBJECT (trans,
+          "Failed to convert sink pad caps to video info");
+      return;
+    }
+    if (!gst_video_info_from_caps (&out_info, out_caps)) {
+      GST_WARNING_OBJECT (trans,
+          "Failed to convert src pad caps to video info");
+      return;
+    }
+
+    if (!have_colorimetry && in_colorimetry != NULL) {
+      if ((GST_VIDEO_INFO_IS_YUV (&out_info)
+              && GST_VIDEO_INFO_IS_YUV (&in_info))
+          || (GST_VIDEO_INFO_IS_RGB (&out_info)
+              && GST_VIDEO_INFO_IS_RGB (&in_info))
+          || (GST_VIDEO_INFO_IS_GRAY (&out_info)
+              && GST_VIDEO_INFO_IS_GRAY (&in_info))) {
+        /* Can transfer the colorimetry intact from the input if it has it */
+        gst_structure_set_value (out_caps_s, "colorimetry", in_colorimetry);
+      } else {
+        gchar *colorimetry_str;
+
+        /* Changing between YUV/RGB - forward primaries and transfer function,
+         * but use default range and matrix.
+         * The primaries are used for conversion between RGB and XYZ (CIE 1931
+         * coordinates); the transfer function may carry a different reference
+         * (e.g., HDR). */
+        out_info.colorimetry.primaries = in_info.colorimetry.primaries;
+        out_info.colorimetry.transfer = in_info.colorimetry.transfer;
+
+        colorimetry_str =
+            gst_video_colorimetry_to_string (&out_info.colorimetry);
+        gst_caps_set_simple (out_caps, "colorimetry", G_TYPE_STRING,
+            colorimetry_str, NULL);
+        g_free (colorimetry_str);
+      }
+    }
+
+    /* Only YUV output needs chroma-site. If the input was also YUV and had the same chroma
+     * subsampling, transfer the siting. If the sub-sampling is changing, then the planes get
+     * scaled anyway so there's no real reason to prefer the input siting. */
+    if (!have_chroma_site && GST_VIDEO_INFO_IS_YUV (&out_info)) {
+      if (GST_VIDEO_INFO_IS_YUV (&in_info)) {
+        const GValue *in_chroma_site =
+            gst_structure_get_value (in_caps_s, "chroma-site");
+        if (in_chroma_site != NULL
+            && subsampling_unchanged (&in_info, &out_info))
+          gst_structure_set_value (out_caps_s, "chroma-site", in_chroma_site);
+      }
+    }
+  }
+}
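
As an illustration of the YUV<->RGB branch, the sketch below (not part of the patch) builds the colorimetry string that would end up on hypothetical RGB output caps when the input is BT.2020/PQ YUV: range and matrix come from a default (sRGB is assumed here purely for the example), while primaries and transfer are carried over from the input.

#include <gst/video/video.h>

int
main (void)
{
  GstVideoColorimetry cm;
  gchar *s;

  /* start from an assumed RGB default (full range, identity matrix) ... */
  gst_video_colorimetry_from_string (&cm, GST_VIDEO_COLORIMETRY_SRGB);

  /* ... but forward the input's primaries and transfer function */
  cm.primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
  cm.transfer = GST_VIDEO_TRANSFER_SMPTE2084;

  s = gst_video_colorimetry_to_string (&cm);
  g_print ("forwarded colorimetry: %s\n", s);
  g_free (s);
  return 0;
}
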
+
+
 static GstCaps *
-gst_video_scale_fixate_caps (GstBaseTransform * base, GstPadDirection direction,
-    GstCaps * caps, GstCaps * othercaps)
+gst_video_convert_scale_get_fixed_format (GstBaseTransform * trans,
+    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
+{
+  GstCaps *result;
+
+  result = gst_caps_intersect (othercaps, caps);
+  if (gst_caps_is_empty (result)) {
+    gst_caps_unref (result);
+    result = gst_caps_copy (othercaps);
+  }
+
+  gst_video_convert_scale_fixate_format (trans, caps, result);
+
+  /* fixate remaining fields */
+  result = gst_caps_fixate (result);
+
+  if (direction == GST_PAD_SINK) {
+    if (gst_caps_is_subset (caps, result)) {
+      gst_caps_replace (&result, caps);
+    } else {
+      /* Try and preserve input colorimetry / chroma information */
+      transfer_colorimetry_from_input (trans, caps, result);
+    }
+  }
+
+  return result;
+}
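
A minimal sketch (not part of the patch) of the intersect-or-copy step above, for a hypothetical downstream that only constrains the width, much like the video/x-raw,width=100 example in the videoscale docs; the format-scoring pass is skipped for brevity.

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstCaps *caps, *othercaps, *result;

  gst_init (&argc, &argv);

  caps = gst_caps_from_string
      ("video/x-raw, format=I420, width=320, height=240, framerate=30/1");
  othercaps = gst_caps_from_string ("video/x-raw, width=100");

  /* prefer the intersection, fall back to a copy of othercaps if disjoint */
  result = gst_caps_intersect (othercaps, caps);
  if (gst_caps_is_empty (result)) {
    gst_caps_unref (result);
    result = gst_caps_copy (othercaps);
  }

  /* fixate whatever fields remain */
  result = gst_caps_fixate (result);
  gst_print ("fixed caps: %" GST_PTR_FORMAT "\n", result);

  gst_caps_unref (caps);
  gst_caps_unref (othercaps);
  gst_caps_unref (result);
  return 0;
}
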
+
+static GstCaps *
+gst_video_convert_scale_fixate_size (GstBaseTransform * base,
+    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
 {
   GstStructure *ins, *outs;
   const GValue *from_par, *to_par;
@@ -736,10 +1223,6 @@ gst_video_scale_fixate_caps (GstBaseTransform * base, GstPadDirection direction,
 
   othercaps = gst_caps_truncate (othercaps);
   othercaps = gst_caps_make_writable (othercaps);
-
-  GST_DEBUG_OBJECT (base, "trying to fixate othercaps %" GST_PTR_FORMAT
-      " based on caps %" GST_PTR_FORMAT, othercaps, caps);
-
   ins = gst_caps_get_structure (caps, 0);
   outs = gst_caps_get_structure (othercaps, 0);
 
@@ -1169,34 +1652,78 @@ done:
   return othercaps;
 }
 
+static GstCaps *
+gst_video_convert_scale_fixate_caps (GstBaseTransform * base,
+    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
+{
+  GstCaps *format;
+
+  GST_DEBUG_OBJECT (base,
+      "trying to fixate othercaps %" GST_PTR_FORMAT " based on caps %"
+      GST_PTR_FORMAT, othercaps, caps);
+
+  format = gst_video_convert_scale_get_fixed_format (base, direction, caps,
+      othercaps);
+
+  if (gst_caps_is_empty (format)) {
+    GST_ERROR_OBJECT (base, "Could not convert formats");
+    return format;
+  }
+
+  othercaps =
+      gst_video_convert_scale_fixate_size (base, direction, caps, othercaps);
+  if (gst_caps_get_size (othercaps) == 1) {
+    gint i;
+    const gchar *format_fields[] = { "format", "colorimetry", "chroma-site" };
+    GstStructure *format_struct = gst_caps_get_structure (format, 0);
+    GstStructure *fixated_struct;
+
+    othercaps = gst_caps_make_writable (othercaps);
+    fixated_struct = gst_caps_get_structure (othercaps, 0);
+
+    for (i = 0; i < G_N_ELEMENTS (format_fields); i++) {
+      if (gst_structure_has_field (format_struct, format_fields[i])) {
+        gst_structure_set (fixated_struct, format_fields[i], G_TYPE_STRING,
+            gst_structure_get_string (format_struct, format_fields[i]), NULL);
+      } else {
+        gst_structure_remove_field (fixated_struct, format_fields[i]);
+      }
+    }
+  }
+  gst_caps_unref (format);
+
+  GST_DEBUG_OBJECT (base, "fixated othercaps to %" GST_PTR_FORMAT, othercaps);
+
+  return othercaps;
+}
+
 #define GET_LINE(frame, line) \
     (gpointer)(((guint8*)(GST_VIDEO_FRAME_PLANE_DATA (frame, 0))) + \
      GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0) * (line))
 
 static GstFlowReturn
-gst_video_scale_transform_frame (GstVideoFilter * filter,
+gst_video_convert_scale_transform_frame (GstVideoFilter * filter,
     GstVideoFrame * in_frame, GstVideoFrame * out_frame)
 {
-  GstVideoScale *videoscale = GST_VIDEO_SCALE_CAST (filter);
+  GstVideoConvertScalePrivate *priv = PRIV (filter);
   GstFlowReturn ret = GST_FLOW_OK;
 
   GST_CAT_DEBUG_OBJECT (CAT_PERFORMANCE, filter, "doing video scaling");
 
-  gst_video_converter_frame (videoscale->convert, in_frame, out_frame);
+  gst_video_converter_frame (priv->convert, in_frame, out_frame);
 
   return ret;
 }
 
 static gboolean
-gst_video_scale_src_event (GstBaseTransform * trans, GstEvent * event)
+gst_video_convert_scale_src_event (GstBaseTransform * trans, GstEvent * event)
 {
-  GstVideoScale *videoscale = GST_VIDEO_SCALE_CAST (trans);
+  GstVideoConvertScale *self = GST_VIDEO_CONVERT_SCALE_CAST (trans);
   GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
   gboolean ret;
   gdouble x, y;
 
-  GST_DEBUG_OBJECT (videoscale, "handling %s event",
-      GST_EVENT_TYPE_NAME (event));
+  GST_DEBUG_OBJECT (self, "handling %s event", GST_EVENT_TYPE_NAME (event));
 
   switch (GST_EVENT_TYPE (event)) {
     case GST_EVENT_NAVIGATION:
@@ -1219,26 +1746,3 @@ gst_video_scale_src_event (GstBaseTransform * trans, GstEvent * event)
 
   return ret;
 }
-
-static gboolean
-plugin_init (GstPlugin * plugin)
-{
-  features_format_interlaced =
-      gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL);
-  features_format_interlaced_sysmem =
-      gst_caps_features_copy (features_format_interlaced);
-  gst_caps_features_add (features_format_interlaced_sysmem,
-      GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY);
-
-  GST_DEBUG_CATEGORY_INIT (video_scale_debug, "videoscale", 0,
-      "videoscale element");
-  GST_DEBUG_CATEGORY_GET (CAT_PERFORMANCE, "GST_PERFORMANCE");
-
-  return GST_ELEMENT_REGISTER (videoscale, plugin);
-}
-
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
-    GST_VERSION_MINOR,
-    videoscale,
-    "Resizes video", plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME,
-    GST_PACKAGE_ORIGIN)
diff --git a/subprojects/gst-plugins-base/gst/videoscale/gstvideoscale.h b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvertscale.h
similarity index 74%
rename from subprojects/gst-plugins-base/gst/videoscale/gstvideoscale.h
rename to subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvertscale.h
index f518d5e797..9d25e039b6 100644
--- a/subprojects/gst-plugins-base/gst/videoscale/gstvideoscale.h
+++ b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvertscale.h
@@ -1,5 +1,6 @@
 /* GStreamer
  * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2020 Thibault Saunier <tsaunier@igalia.com>
  *
  * This library is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Library General Public
@@ -17,8 +18,8 @@
  * Boston, MA 02110-1301, USA.
  */
 
-#ifndef __GST_VIDEO_SCALE_H__
-#define __GST_VIDEO_SCALE_H__
+#ifndef __GST_VIDEO_CONVERT_SCALE_H__
+#define __GST_VIDEO_CONVERT_SCALE_H__
 
 #include <gst/gst.h>
 #include <gst/video/video.h>
@@ -26,11 +27,16 @@
 
 G_BEGIN_DECLS
 
-#define GST_TYPE_VIDEO_SCALE (gst_video_scale_get_type())
-#define GST_VIDEO_SCALE_CAST(obj) ((GstVideoScale *)(obj))
-G_DECLARE_FINAL_TYPE (GstVideoScale, gst_video_scale, GST, VIDEO_SCALE,
+#define GST_TYPE_VIDEO_CONVERT_SCALE (gst_video_convert_scale_get_type())
+#define GST_VIDEO_CONVERT_SCALE_CAST(obj) ((GstVideoConvertScale *)(obj))
+
+G_DECLARE_DERIVABLE_TYPE (GstVideoConvertScale, gst_video_convert_scale, GST, VIDEO_CONVERT_SCALE,
     GstVideoFilter)
 
+struct _GstVideoConvertScaleClass
+{
+  GstVideoFilterClass parent;
+};
 
 /**
  * GstVideoScaleMethod:
@@ -61,33 +67,8 @@ typedef enum {
   GST_VIDEO_SCALE_MITCHELL
 } GstVideoScaleMethod;
 
-/**
- * GstVideoScale:
- *
- * Opaque data structure
- */
-struct _GstVideoScale {
-  GstVideoFilter element;
-
-  /* properties */
-  GstVideoScaleMethod method;
-  gboolean add_borders;
-  double sharpness;
-  double sharpen;
-  gboolean dither;
-  int submethod;
-  double envelope;
-  gboolean gamma_decode;
-  gint n_threads;
-
-  GstVideoConverter *convert;
-
-  gint borders_h;
-  gint borders_w;
-};
-
-GST_ELEMENT_REGISTER_DECLARE (videoscale);
+GST_ELEMENT_REGISTER_DECLARE (videoconvertscale);
 
 G_END_DECLS
 
-#endif /* __GST_VIDEO_SCALE_H__ */
+#endif /* __GST_VIDEO_CONVERT_SCALE_H__ */
diff --git a/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvertscaleplugin.c b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvertscaleplugin.c
new file mode 100644
index 0000000000..222b6bbd23
--- /dev/null
+++ b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoconvertscaleplugin.c
@@ -0,0 +1,51 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+/**
+ * plugin-videoconvertscale:
+ *
+ * Since: 1.22
+ */
+
+#include "gstvideoscale.h"
+#include "gstvideoconvert.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  if (!GST_ELEMENT_REGISTER (videoscale, plugin))
+    return FALSE;
+
+  if (!GST_ELEMENT_REGISTER (videoconvert, plugin))
+    return FALSE;
+
+  if (!GST_ELEMENT_REGISTER (videoconvertscale, plugin))
+    return FALSE;
+
+  return TRUE;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    videoconvertscale,
+    "Convert video colorspaces and resizes video frames",
+    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoscale.c b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoscale.c
new file mode 100644
index 0000000000..b246bf97b1
--- /dev/null
+++ b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoscale.c
@@ -0,0 +1,125 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2005-2012 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-videoscale
+ * @title: videoscale
+ * @see_also: videorate, videoconvert
+ *
+ * This element resizes video frames. By default the element will try to
+ * negotiate to the same size on the source and sinkpad so that no scaling
+ * is needed. It is therefore safe to insert this element in a pipeline to
+ * get more robust behaviour without any cost if no scaling is needed.
+ *
+ * This element supports a wide range of color spaces including various YUV and
+ * RGB formats and is therefore generally able to operate anywhere in a
+ * pipeline.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videoconvert ! videoscale ! autovideosink
+ * ]|
+ *  Decode an Ogg/Theora file and display the video. If the chosen video sink
+ * cannot perform scaling, the video scaling will be performed by videoscale
+ * when you resize the video window.
+ * To create the test Ogg/Theora file, refer to the documentation of theoraenc.
+ * |[
+ * gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videoconvert ! videoscale ! video/x-raw,width=100 ! autovideosink
+ * ]|
+ *  Decode an Ogg/Theora file and display the video with a width of 100.
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#define DEFAULT_PROP_GAMMA_DECODE FALSE
+
+#include "gstvideoscale.h"
+
+G_DEFINE_TYPE (GstVideoScale, gst_video_scale, GST_TYPE_VIDEO_CONVERT_SCALE);
+GST_ELEMENT_REGISTER_DEFINE (videoscale, "videoscale",
+    GST_RANK_MARGINAL, gst_video_scale_get_type ());
+
+enum
+{
+  PROP_0,
+  PROP_GAMMA_DECODE
+};
+
+static void
+gst_video_scale_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  switch (prop_id) {
+    case PROP_GAMMA_DECODE:
+    {
+      gint mode;
+
+      g_object_get (object, "gamma-mode", &mode, NULL);
+      g_value_set_boolean (value, mode == GST_VIDEO_GAMMA_MODE_REMAP);
+
+      break;
+    }
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_video_scale_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  switch (prop_id) {
+    case PROP_GAMMA_DECODE:
+    {
+      if (g_value_get_boolean (value))
+        g_object_set (object, "gamma-mode", GST_VIDEO_GAMMA_MODE_REMAP, NULL);
+      else
+        g_object_set (object, "gamma-mode", GST_VIDEO_GAMMA_MODE_NONE, NULL);
+
+      break;
+    }
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_video_scale_class_init (GstVideoScaleClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+
+  gobject_class->set_property = gst_video_scale_set_property;
+  gobject_class->get_property = gst_video_scale_get_property;
+
+  g_object_class_install_property (gobject_class, PROP_GAMMA_DECODE,
+      g_param_spec_boolean ("gamma-decode", "Gamma Decode",
+          "Decode gamma before scaling", DEFAULT_PROP_GAMMA_DECODE,
+          G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+static void
+gst_video_scale_init (GstVideoScale * self)
+{
+
+}
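
A small usage sketch (not part of the patch), assuming the plugin is installed so that the videoscale factory resolves: the compatibility property simply toggles the base class' gamma-mode between none and remap, exactly as the setter above does.

#include <gst/video/video.h>

int
main (int argc, char **argv)
{
  GstElement *scale;
  gint mode = -1;

  gst_init (&argc, &argv);

  scale = gst_element_factory_make ("videoscale", NULL);
  if (scale == NULL)
    return 1;                   /* plugin not available */

  g_object_set (scale, "gamma-decode", TRUE, NULL);
  g_object_get (scale, "gamma-mode", &mode, NULL);

  g_print ("gamma-decode=TRUE maps to gamma-mode remap: %s\n",
      mode == GST_VIDEO_GAMMA_MODE_REMAP ? "yes" : "no");

  gst_object_unref (scale);
  return 0;
}
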
diff --git a/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoscale.h b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoscale.h
new file mode 100644
index 0000000000..c924b0eb5b
--- /dev/null
+++ b/subprojects/gst-plugins-base/gst/videoconvertscale/gstvideoscale.h
@@ -0,0 +1,36 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#pragma once
+
+#include "gstvideoconvertscale.h"
+
+G_BEGIN_DECLS
+
+G_DECLARE_FINAL_TYPE (GstVideoScale, gst_video_scale, GST, VIDEO_SCALE,
+    GstVideoConvertScale);
+
+struct _GstVideoScale
+{
+  GstVideoConvertScale parent;
+};
+
+GST_ELEMENT_REGISTER_DECLARE (videoscale);
+
+G_END_DECLS
diff --git a/subprojects/gst-plugins-base/gst/videoconvertscale/meson.build b/subprojects/gst-plugins-base/gst/videoconvertscale/meson.build
new file mode 100644
index 0000000000..6c73bf6804
--- /dev/null
+++ b/subprojects/gst-plugins-base/gst/videoconvertscale/meson.build
@@ -0,0 +1,18 @@
+videoconvertscale_sources = [
+  'gstvideoconvert.c',
+  'gstvideoconvertscale.c',
+  'gstvideoconvertscaleplugin.c',
+  'gstvideoscale.c',
+]
+
+gstvideoconvertscale = library('gstvideoconvertscale',
+  videoconvertscale_sources,
+  c_args : gst_plugins_base_args,
+  include_directories: [configinc, libsinc],
+  dependencies : [video_dep, gst_dep, gst_base_dep],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+
+pkgconfig.generate(gstvideoconvertscale, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstvideoconvertscale]
diff --git a/subprojects/gst-plugins-base/gst/videoscale/README b/subprojects/gst-plugins-base/gst/videoscale/README
deleted file mode 100644
index 50f1fe2792..0000000000
--- a/subprojects/gst-plugins-base/gst/videoscale/README
+++ /dev/null
@@ -1,5 +0,0 @@
-- test different strides using
-gst-launch -v videotestsrc ! video/x-raw,width=320,height=240,format=UYVY ! videoscale ! video/x-raw,width=328,height=240 ! xvimagesink 
-gst-launch -v videotestsrc ! video/x-raw,width=320,height=240,format=UYVY ! videoscale ! video/x-raw,width=324,height=240 ! xvimagesink 
-gst-launch -v videotestsrc ! video/x-raw,width=320,height=240,format=UYVY ! videoscale ! video/x-raw,width=322,height=240 ! xvimagesink 
-gst-launch -v videotestsrc ! video/x-raw,width=320,height=240,format=UYVY ! videoscale ! video/x-raw,width=321,height=240 ! xvimagesink 
diff --git a/subprojects/gst-plugins-base/gst/videoscale/meson.build b/subprojects/gst-plugins-base/gst/videoscale/meson.build
deleted file mode 100644
index 991fa19797..0000000000
--- a/subprojects/gst-plugins-base/gst/videoscale/meson.build
+++ /dev/null
@@ -1,14 +0,0 @@
-videoscale_sources = [
-  'gstvideoscale.c',
-]
-
-gstvideoscale = library('gstvideoscale',
-  videoscale_sources,
-  c_args : gst_plugins_base_args,
-  include_directories: [configinc, libsinc],
-  dependencies : [video_dep, gst_dep, gst_base_dep],
-  install : true,
-  install_dir : plugins_install_dir,
-)
-pkgconfig.generate(gstvideoscale, install_dir : plugins_pkgconfig_install_dir)
-plugins += [gstvideoscale]
diff --git a/subprojects/gst-plugins-base/meson_options.txt b/subprojects/gst-plugins-base/meson_options.txt
index 3c7641cd77..50ec6aae1f 100644
--- a/subprojects/gst-plugins-base/meson_options.txt
+++ b/subprojects/gst-plugins-base/meson_options.txt
@@ -46,9 +46,8 @@ option('rawparse', type : 'feature', value : 'auto')
 option('subparse', type : 'feature', value : 'auto')
 option('tcp', type : 'feature', value : 'auto')
 option('typefind', type : 'feature', value : 'auto')
-option('videoconvert', type : 'feature', value : 'auto')
+option('videoconvertscale', type : 'feature', value : 'auto')
 option('videorate', type : 'feature', value : 'auto')
-option('videoscale', type : 'feature', value : 'auto')
 option('videotestsrc', type : 'feature', value : 'auto')
 option('volume', type : 'feature', value : 'auto')