From 577e9f936bc6c4bde1b6438f70e3e1597f0b00a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 5 Feb 2021 19:03:21 +0200 Subject: [PATCH 01/73] Initial commit --- .clang-format | 182 ++++++ .gitignore | 3 + COPYING | 502 +++++++++++++++ gstajacommon.cpp | 477 ++++++++++++++ gstajacommon.h | 168 +++++ gstajasink.cpp | 1373 ++++++++++++++++++++++++++++++++++++++++ gstajasink.h | 95 +++ gstajasinkcombiner.cpp | 294 +++++++++ gstajasinkcombiner.h | 60 ++ meson.build | 97 +++ meson_options.txt | 2 + plugin.cpp | 38 ++ 12 files changed, 3291 insertions(+) create mode 100644 .clang-format create mode 100644 .gitignore create mode 100644 COPYING create mode 100644 gstajacommon.cpp create mode 100644 gstajacommon.h create mode 100644 gstajasink.cpp create mode 100644 gstajasink.h create mode 100644 gstajasinkcombiner.cpp create mode 100644 gstajasinkcombiner.h create mode 100644 meson.build create mode 100644 meson_options.txt create mode 100644 plugin.cpp diff --git a/.clang-format b/.clang-format new file mode 100644 index 0000000000..70da6add54 --- /dev/null +++ b/.clang-format @@ -0,0 +1,182 @@ +--- +Language: Cpp +# BasedOnStyle: Google +AccessModifierOffset: -1 +AlignAfterOpenBracket: Align +AlignConsecutiveMacros: false +AlignConsecutiveAssignments: false +AlignConsecutiveBitFields: false +AlignConsecutiveDeclarations: false +AlignEscapedNewlines: Left +AlignOperands: Align +AlignTrailingComments: true +AllowAllArgumentsOnNextLine: true +AllowAllConstructorInitializersOnNextLine: true +AllowAllParametersOfDeclarationOnNextLine: true +AllowShortEnumsOnASingleLine: true +AllowShortBlocksOnASingleLine: Never +AllowShortCaseLabelsOnASingleLine: false +AllowShortFunctionsOnASingleLine: All +AllowShortLambdasOnASingleLine: All +AllowShortIfStatementsOnASingleLine: WithoutElse +AllowShortLoopsOnASingleLine: true +AlwaysBreakAfterDefinitionReturnType: None +AlwaysBreakAfterReturnType: None +AlwaysBreakBeforeMultilineStrings: true +AlwaysBreakTemplateDeclarations: Yes +BinPackArguments: true +BinPackParameters: true +BraceWrapping: + AfterCaseLabel: false + AfterClass: false + AfterControlStatement: Never + AfterEnum: false + AfterFunction: false + AfterNamespace: false + AfterObjCDeclaration: false + AfterStruct: false + AfterUnion: false + AfterExternBlock: false + BeforeCatch: false + BeforeElse: false + BeforeLambdaBody: false + BeforeWhile: false + IndentBraces: false + SplitEmptyFunction: true + SplitEmptyRecord: true + SplitEmptyNamespace: true +BreakBeforeBinaryOperators: None +BreakBeforeBraces: Attach +BreakBeforeInheritanceComma: false +BreakInheritanceList: BeforeColon +BreakBeforeTernaryOperators: true +BreakConstructorInitializersBeforeComma: false +BreakConstructorInitializers: BeforeColon +BreakAfterJavaFieldAnnotations: false +BreakStringLiterals: true +ColumnLimit: 80 +CommentPragmas: '^ IWYU pragma:' +CompactNamespaces: false +ConstructorInitializerAllOnOneLineOrOnePerLine: true +ConstructorInitializerIndentWidth: 4 +ContinuationIndentWidth: 4 +Cpp11BracedListStyle: true +DeriveLineEnding: true +DerivePointerAlignment: true +DisableFormat: false +ExperimentalAutoDetectBinPacking: false +FixNamespaceComments: true +ForEachMacros: + - foreach + - Q_FOREACH + - BOOST_FOREACH +IncludeBlocks: Regroup +IncludeCategories: + - Regex: '^' + Priority: 2 + SortPriority: 0 + - Regex: '^<.*\.h>' + Priority: 1 + SortPriority: 0 + - Regex: '^<.*' + Priority: 2 + SortPriority: 0 + - Regex: '.*' + Priority: 3 + SortPriority: 0 +IncludeIsMainRegex: 
'([-_](test|unittest))?$' +IncludeIsMainSourceRegex: '' +IndentCaseLabels: true +IndentCaseBlocks: false +IndentGotoLabels: true +IndentPPDirectives: None +IndentExternBlock: AfterExternBlock +IndentWidth: 2 +IndentWrappedFunctionNames: false +InsertTrailingCommas: None +JavaScriptQuotes: Leave +JavaScriptWrapImports: true +KeepEmptyLinesAtTheStartOfBlocks: false +MacroBlockBegin: '' +MacroBlockEnd: '' +MaxEmptyLinesToKeep: 1 +NamespaceIndentation: None +ObjCBinPackProtocolList: Never +ObjCBlockIndentWidth: 2 +ObjCBreakBeforeNestedBlockParam: true +ObjCSpaceAfterProperty: false +ObjCSpaceBeforeProtocolList: true +PenaltyBreakAssignment: 2 +PenaltyBreakBeforeFirstCallParameter: 1 +PenaltyBreakComment: 300 +PenaltyBreakFirstLessLess: 120 +PenaltyBreakString: 1000 +PenaltyBreakTemplateDeclaration: 10 +PenaltyExcessCharacter: 1000000 +PenaltyReturnTypeOnItsOwnLine: 200 +PointerAlignment: Left +RawStringFormats: + - Language: Cpp + Delimiters: + - cc + - CC + - cpp + - Cpp + - CPP + - 'c++' + - 'C++' + CanonicalDelimiter: '' + BasedOnStyle: google + - Language: TextProto + Delimiters: + - pb + - PB + - proto + - PROTO + EnclosingFunctions: + - EqualsProto + - EquivToProto + - PARSE_PARTIAL_TEXT_PROTO + - PARSE_TEST_PROTO + - PARSE_TEXT_PROTO + - ParseTextOrDie + - ParseTextProtoOrDie + - ParseTestProto + - ParsePartialTestProto + CanonicalDelimiter: '' + BasedOnStyle: google +ReflowComments: true +SortIncludes: true +SortUsingDeclarations: true +SpaceAfterCStyleCast: false +SpaceAfterLogicalNot: false +SpaceAfterTemplateKeyword: true +SpaceBeforeAssignmentOperators: true +SpaceBeforeCpp11BracedList: false +SpaceBeforeCtorInitializerColon: true +SpaceBeforeInheritanceColon: true +SpaceBeforeParens: ControlStatements +SpaceBeforeRangeBasedForLoopColon: true +SpaceInEmptyBlock: false +SpaceInEmptyParentheses: false +SpacesBeforeTrailingComments: 2 +SpacesInAngles: false +SpacesInConditionalStatement: false +SpacesInContainerLiterals: true +SpacesInCStyleCastParentheses: false +SpacesInParentheses: false +SpacesInSquareBrackets: false +SpaceBeforeSquareBrackets: false +Standard: Auto +StatementMacros: + - Q_UNUSED + - QT_REQUIRE_VERSION +TabWidth: 8 +UseCRLF: false +UseTab: Never +WhitespaceSensitiveMacros: + - STRINGIZE + - PP_STRINGIZE + - BOOST_PP_STRINGIZE +... + diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000..c82e9a4fdc --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +*build*/ +.cache/ +compile_commands.json diff --git a/COPYING b/COPYING new file mode 100644 index 0000000000..4362b49151 --- /dev/null +++ b/COPYING @@ -0,0 +1,502 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. 
+ + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. 
+ + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. 
The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. 
To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. 
+ + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. 
+ + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. 
If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. 
It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! diff --git a/gstajacommon.cpp b/gstajacommon.cpp new file mode 100644 index 0000000000..2019f7295e --- /dev/null +++ b/gstajacommon.cpp @@ -0,0 +1,477 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include + +#include "gstajacommon.h" + +GST_DEBUG_CATEGORY_STATIC(gst_aja_debug); +#define GST_CAT_DEFAULT gst_aja_debug + +static const NTV2VideoFormat supported_video_formats[] = { + NTV2_FORMAT_1080i_5000, NTV2_FORMAT_1080i_5994, + NTV2_FORMAT_1080i_6000, NTV2_FORMAT_720p_5994, + NTV2_FORMAT_720p_6000, NTV2_FORMAT_1080p_2997, + NTV2_FORMAT_1080p_3000, NTV2_FORMAT_1080p_2500, + NTV2_FORMAT_1080p_2398, NTV2_FORMAT_1080p_2400, + NTV2_FORMAT_720p_5000, NTV2_FORMAT_720p_2398, + NTV2_FORMAT_720p_2500, NTV2_FORMAT_1080p_5000_A, + NTV2_FORMAT_1080p_5994_A, NTV2_FORMAT_1080p_6000_A, + NTV2_FORMAT_625_5000, NTV2_FORMAT_525_5994, + NTV2_FORMAT_525_2398, NTV2_FORMAT_525_2400}; + +GstCaps *gst_ntv2_supported_caps(NTV2DeviceID device_id) { + GstCaps *caps = gst_caps_new_empty(); + + for (gsize i = 0; i < G_N_ELEMENTS(supported_video_formats); i++) { + NTV2VideoFormat format = supported_video_formats[i]; + + if (device_id == DEVICE_ID_INVALID || + ::NTV2DeviceCanDoVideoFormat(device_id, format)) { + gst_caps_append(caps, gst_ntv2_video_format_to_caps(format)); + } + } + + return caps; +} + +GstCaps *gst_ntv2_video_format_to_caps(NTV2VideoFormat format) { + GstVideoInfo info; + + guint width = ::GetDisplayWidth(format); + guint height = ::GetDisplayHeight(format); + NTV2FrameRate fps = ::GetNTV2FrameRateFromVideoFormat(format); + guint fps_n, fps_d; + ::GetFramesPerSecond(fps, fps_n, fps_d); + + gst_video_info_set_format(&info, GST_VIDEO_FORMAT_v210, width, height); + info.fps_n = fps_n; + info.fps_d = fps_d; + if (NTV2_IS_525_FORMAT(format)) { + info.par_n = 10; + info.par_d = 11; + } else if (NTV2_IS_625_FORMAT(format)) { + info.par_n = 12; + info.par_d = 11; + } + info.interlace_mode = !::IsProgressiveTransport(format) + ? 
GST_VIDEO_INTERLACE_MODE_INTERLEAVED + : GST_VIDEO_INTERLACE_MODE_PROGRESSIVE; + + return gst_video_info_to_caps(&info); +} + +NTV2VideoFormat gst_ntv2_video_format_from_caps(GstCaps *caps) { + GstVideoInfo info; + + if (!gst_video_info_from_caps(&info, caps)) return NTV2_FORMAT_UNKNOWN; + + for (gsize i = 0; i < G_N_ELEMENTS(supported_video_formats); i++) { + NTV2VideoFormat format = supported_video_formats[i]; + + guint width = ::GetDisplayWidth(format); + guint height = ::GetDisplayHeight(format); + NTV2FrameRate fps = ::GetNTV2FrameRateFromVideoFormat(format); + guint fps_n, fps_d; + ::GetFramesPerSecond(fps, fps_n, fps_d); + + if (width == (guint)info.width && height == (guint)info.height && + (guint)info.fps_n == fps_n && (guint)info.fps_d == fps_d && + ((!::IsProgressiveTransport(format) && + info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED) || + (::IsProgressiveTransport(format) && + info.interlace_mode == GST_VIDEO_INTERLACE_MODE_PROGRESSIVE))) + return format; + } + + return NTV2_FORMAT_UNKNOWN; +} + +GType gst_aja_audio_meta_api_get_type(void) { + static volatile GType type; + + if (g_once_init_enter(&type)) { + static const gchar *tags[] = {NULL}; + GType _type = gst_meta_api_type_register("GstAjaAudioMetaAPI", tags); + GST_INFO("registering"); + g_once_init_leave(&type, _type); + } + return type; +} + +static gboolean gst_aja_audio_meta_transform(GstBuffer *dest, GstMeta *meta, + GstBuffer *buffer, GQuark type, + gpointer data) { + GstAjaAudioMeta *dmeta, *smeta; + + if (GST_META_TRANSFORM_IS_COPY(type)) { + smeta = (GstAjaAudioMeta *)meta; + + GST_DEBUG("copy AJA audio metadata"); + dmeta = gst_buffer_add_aja_audio_meta(dest, smeta->buffer); + if (!dmeta) return FALSE; + } else { + /* return FALSE, if transform type is not supported */ + return FALSE; + } + return TRUE; +} + +static gboolean gst_aja_audio_meta_init(GstMeta *meta, gpointer params, + GstBuffer *buffer) { + GstAjaAudioMeta *emeta = (GstAjaAudioMeta *)meta; + + emeta->buffer = NULL; + + return TRUE; +} + +static void gst_aja_audio_meta_free(GstMeta *meta, GstBuffer *buffer) { + GstAjaAudioMeta *emeta = (GstAjaAudioMeta *)meta; + + gst_buffer_replace(&emeta->buffer, NULL); +} + +const GstMetaInfo *gst_aja_audio_meta_get_info(void) { + static const GstMetaInfo *meta_info = NULL; + + if (g_once_init_enter((GstMetaInfo **)&meta_info)) { + const GstMetaInfo *mi = gst_meta_register( + GST_AJA_AUDIO_META_API_TYPE, "GstAjaAudioMeta", sizeof(GstAjaAudioMeta), + gst_aja_audio_meta_init, gst_aja_audio_meta_free, + gst_aja_audio_meta_transform); + g_once_init_leave((GstMetaInfo **)&meta_info, (GstMetaInfo *)mi); + } + return meta_info; +} + +GstAjaAudioMeta *gst_buffer_add_aja_audio_meta(GstBuffer *buffer, + GstBuffer *audio_buffer) { + GstAjaAudioMeta *meta; + + g_return_val_if_fail(buffer != NULL, NULL); + g_return_val_if_fail(audio_buffer != NULL, NULL); + + meta = (GstAjaAudioMeta *)gst_buffer_add_meta(buffer, GST_AJA_AUDIO_META_INFO, + NULL); + + meta->buffer = gst_buffer_ref(audio_buffer); + + return meta; +} + +typedef struct { + GstMemory mem; + + guint8 *data; +} GstAjaMemory; + +G_DEFINE_TYPE(GstAjaAllocator, gst_aja_allocator, GST_TYPE_ALLOCATOR); + +static inline void _aja_memory_init(GstAjaAllocator *alloc, GstAjaMemory *mem, + GstMemoryFlags flags, GstMemory *parent, + gpointer data, gsize maxsize, gsize offset, + gsize size) { + gst_memory_init(GST_MEMORY_CAST(mem), flags, GST_ALLOCATOR(alloc), parent, + maxsize, 4095, offset, size); + + mem->data = (guint8 *)data; +} + +static inline GstAjaMemory 
*_aja_memory_new(GstAjaAllocator *alloc, + GstMemoryFlags flags, + GstAjaMemory *parent, gpointer data, + gsize maxsize, gsize offset, + gsize size) { + GstAjaMemory *mem; + + mem = (GstAjaMemory *)g_slice_alloc(sizeof(GstAjaMemory)); + _aja_memory_init(alloc, mem, flags, (GstMemory *)parent, data, maxsize, + offset, size); + + return mem; +} + +static GstAjaMemory *_aja_memory_new_block(GstAjaAllocator *alloc, + GstMemoryFlags flags, gsize maxsize, + gsize offset, gsize size) { + GstAjaMemory *mem; + guint8 *data; + + mem = (GstAjaMemory *)g_slice_alloc(sizeof(GstAjaMemory)); + + data = (guint8 *)AJAMemory::AllocateAligned(maxsize, 4096); + GST_DEBUG_OBJECT(alloc, "Allocated %" G_GSIZE_FORMAT " at %p", maxsize, data); + if (!alloc->device->device->DMABufferLock((ULWord *)data, maxsize, true)) { + GST_WARNING_OBJECT(alloc, "Failed to pre-lock memory"); + } + + _aja_memory_init(alloc, mem, flags, NULL, data, maxsize, offset, size); + + return mem; +} + +static gpointer _aja_memory_map(GstAjaMemory *mem, gsize maxsize, + GstMapFlags flags) { + return mem->data; +} + +static gboolean _aja_memory_unmap(GstAjaMemory *mem) { return TRUE; } + +static GstMemory *_aja_memory_copy(GstAjaMemory *mem, gssize offset, + gsize size) { + GstMemory *copy; + GstMapInfo map; + + if (size == (gsize)-1) + size = mem->mem.size > (gsize)offset ? mem->mem.size - offset : 0; + + copy = gst_allocator_alloc(mem->mem.allocator, size, NULL); + gst_memory_map(copy, &map, GST_MAP_READ); + GST_DEBUG("memcpy %" G_GSIZE_FORMAT " memory %p -> %p", size, mem, copy); + memcpy(map.data, mem->data + mem->mem.offset + offset, size); + gst_memory_unmap(copy, &map); + + return copy; +} + +static GstAjaMemory *_aja_memory_share(GstAjaMemory *mem, gssize offset, + gsize size) { + GstAjaMemory *sub; + GstAjaMemory *parent; + + /* find the real parent */ + if ((parent = (GstAjaMemory *)mem->mem.parent) == NULL) + parent = (GstAjaMemory *)mem; + + if (size == (gsize)-1) size = mem->mem.size - offset; + + sub = _aja_memory_new(GST_AJA_ALLOCATOR(parent->mem.allocator), + (GstMemoryFlags)(GST_MINI_OBJECT_FLAGS(parent) | + GST_MINI_OBJECT_FLAG_LOCK_READONLY), + parent, parent->data, mem->mem.maxsize, + mem->mem.offset + offset, size); + + return sub; +} + +static GstMemory *gst_aja_allocator_alloc(GstAllocator *alloc, gsize size, + GstAllocationParams *params) { + g_warn_if_fail(params->prefix == 0); + g_warn_if_fail(params->padding == 0); + + return (GstMemory *)_aja_memory_new_block(GST_AJA_ALLOCATOR(alloc), + params->flags, size, 0, size); +} + +static void gst_aja_allocator_free(GstAllocator *alloc, GstMemory *mem) { + GstAjaMemory *dmem = (GstAjaMemory *)mem; + + if (!mem->parent) { + GstAjaAllocator *aja_alloc = GST_AJA_ALLOCATOR(alloc); + + GST_DEBUG_OBJECT(alloc, "Freeing memory at %p", dmem->data); + aja_alloc->device->device->DMABufferUnlock((ULWord *)dmem->data, + mem->maxsize); + AJAMemory::FreeAligned(dmem->data); + } + + g_slice_free1(sizeof(GstAjaMemory), dmem); +} + +static void gst_aja_allocator_finalize(GObject *alloc) { + GstAjaAllocator *aja_alloc = GST_AJA_ALLOCATOR(alloc); + + GST_DEBUG_OBJECT(alloc, "Freeing allocator"); + + gst_aja_device_unref(aja_alloc->device); + + G_OBJECT_CLASS(gst_aja_allocator_parent_class)->finalize(alloc); +} + +static void gst_aja_allocator_class_init(GstAjaAllocatorClass *klass) { + GObjectClass *gobject_class; + GstAllocatorClass *allocator_class; + + gobject_class = (GObjectClass *)klass; + allocator_class = (GstAllocatorClass *)klass; + + gobject_class->finalize = 
gst_aja_allocator_finalize; + + allocator_class->alloc = gst_aja_allocator_alloc; + allocator_class->free = gst_aja_allocator_free; +} + +static void gst_aja_allocator_init(GstAjaAllocator *aja_alloc) { + GstAllocator *alloc = GST_ALLOCATOR_CAST(aja_alloc); + + alloc->mem_type = GST_AJA_ALLOCATOR_MEMTYPE; + alloc->mem_map = (GstMemoryMapFunction)_aja_memory_map; + alloc->mem_unmap = (GstMemoryUnmapFunction)_aja_memory_unmap; + alloc->mem_copy = (GstMemoryCopyFunction)_aja_memory_copy; + alloc->mem_share = (GstMemoryShareFunction)_aja_memory_share; +} + +GstAllocator *gst_aja_allocator_new(GstAjaDevice *device) { + GstAjaAllocator *alloc = + (GstAjaAllocator *)g_object_new(GST_TYPE_AJA_ALLOCATOR, NULL); + + alloc->device = gst_aja_device_ref(device); + + GST_DEBUG_OBJECT(alloc, "Creating allocator for device %d", + device->device->GetIndexNumber()); + + return GST_ALLOCATOR(alloc); +} + +GstAjaDevice *gst_aja_device_obtain(const gchar *device_identifier) { + CNTV2Device *device = new CNTV2Device(); + + if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument(device_identifier, + *device)) { + delete device; + return NULL; + } + + GstAjaDevice *dev = g_atomic_rc_box_new0(GstAjaDevice); + dev->device = device; + + return dev; +} + +GstAjaDevice *gst_aja_device_ref(GstAjaDevice *device) { + return (GstAjaDevice *)g_atomic_rc_box_acquire(device); +} + +void gst_aja_device_unref(GstAjaDevice *device) { + g_atomic_rc_box_release_full(device, [](gpointer data) { + GstAjaDevice *dev = (GstAjaDevice *)data; + + delete dev->device; + }); +} + +static gpointer init_setup_mutex(gpointer data) { + sem_t *s = SEM_FAILED; + s = sem_open("/gstreamer-aja-sem", O_CREAT, S_IRUSR | S_IWUSR, 1); + if (s == SEM_FAILED) { + g_critical("Failed to create SHM semaphore for GStreamer AJA plugin: %s", + g_strerror(errno)); + } + return s; +} + +static sem_t *get_setup_mutex(void) { + static GOnce once = G_ONCE_INIT; + + g_once(&once, init_setup_mutex, NULL); + + return (sem_t *)once.retval; +} + +ShmMutexLocker::ShmMutexLocker() { + sem_t *s = get_setup_mutex(); + if (s != SEM_FAILED) sem_wait(s); +} + +ShmMutexLocker::~ShmMutexLocker() { + sem_t *s = get_setup_mutex(); + if (s != SEM_FAILED) sem_post(s); +} + +GType gst_aja_audio_system_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_AUDIO_SYSTEM_AUTO, "auto", "Auto (based on selected channel)"}, + {GST_AJA_AUDIO_SYSTEM_1, "1", "Audio system 1"}, + {GST_AJA_AUDIO_SYSTEM_2, "2", "Audio system 2"}, + {GST_AJA_AUDIO_SYSTEM_3, "3", "Audio system 3"}, + {GST_AJA_AUDIO_SYSTEM_4, "4", "Audio system 4"}, + {GST_AJA_AUDIO_SYSTEM_5, "5", "Audio system 5"}, + {GST_AJA_AUDIO_SYSTEM_6, "6", "Audio system 6"}, + {GST_AJA_AUDIO_SYSTEM_7, "7", "Audio system 7"}, + {GST_AJA_AUDIO_SYSTEM_8, "8", "Audio system 8"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaAudioSystem", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_output_destination_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_OUTPUT_DESTINATION_AUTO, "auto", + "Auto (based on selected channel)"}, + {GST_AJA_OUTPUT_DESTINATION_ANALOG, "analog", "Analog Output"}, + {GST_AJA_OUTPUT_DESTINATION_SDI1, "sdi-1", "SDI Output 1"}, + {GST_AJA_OUTPUT_DESTINATION_SDI2, "sdi-2", "SDI Output 2"}, + {GST_AJA_OUTPUT_DESTINATION_SDI3, "sdi-3", "SDI Output 3"}, + {GST_AJA_OUTPUT_DESTINATION_SDI4, "sdi-4", "SDI Output 4"}, + {GST_AJA_OUTPUT_DESTINATION_SDI5, "sdi-5", "SDI Output 
5"}, + {GST_AJA_OUTPUT_DESTINATION_SDI6, "sdi-6", "SDI Output 6"}, + {GST_AJA_OUTPUT_DESTINATION_SDI7, "sdi-7", "SDI Output 7"}, + {GST_AJA_OUTPUT_DESTINATION_SDI8, "sdi-8", "SDI Output 8"}, + {GST_AJA_OUTPUT_DESTINATION_HDMI, "hdmi", "HDMI Output"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaOutputDestination", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_reference_source_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_REFERENCE_SOURCE_AUTO, "auto", "Auto"}, + {GST_AJA_REFERENCE_SOURCE_FREERUN, "freerun", "Freerun"}, + {GST_AJA_REFERENCE_SOURCE_EXTERNAL, "external", "External"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_1, "input-1", "SDI Input 1"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_2, "input-2", "SDI Input 2"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_3, "input-3", "SDI Input 3"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_4, "input-4", "SDI Input 4"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_5, "input-5", "SDI Input 5"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_6, "input-6", "SDI Input 6"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_7, "input-7", "SDI Input 7"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_8, "input-8", "SDI Input 8"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaReferenceSource", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +void gst_aja_common_init(void) { + GST_DEBUG_CATEGORY_INIT(gst_aja_debug, "aja", 0, + "Debug category for AJA plugin"); +} diff --git a/gstajacommon.h b/gstajacommon.h new file mode 100644 index 0000000000..965229d505 --- /dev/null +++ b/gstajacommon.h @@ -0,0 +1,168 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +G_BEGIN_DECLS + +typedef struct { + GstMeta meta; + + GstBuffer *buffer; +} GstAjaAudioMeta; + +G_GNUC_INTERNAL +GType gst_aja_audio_meta_api_get_type(void); +#define GST_AJA_AUDIO_META_API_TYPE (gst_aja_audio_meta_api_get_type()) + +G_GNUC_INTERNAL +const GstMetaInfo *gst_aja_audio_meta_get_info(void); +#define GST_AJA_AUDIO_META_INFO (gst_aja_audio_meta_get_info()) + +#define gst_buffer_get_aja_audio_meta(b) \ + ((GstAjaAudioMeta *)gst_buffer_get_meta((b), GST_AJA_AUDIO_META_API_TYPE)) + +G_GNUC_INTERNAL +GstAjaAudioMeta *gst_buffer_add_aja_audio_meta(GstBuffer *buffer, + GstBuffer *audio_buffer); + +G_GNUC_INTERNAL +GstCaps *gst_ntv2_supported_caps(NTV2DeviceID device_id); +G_GNUC_INTERNAL +GstCaps *gst_ntv2_video_format_to_caps(NTV2VideoFormat format); +G_GNUC_INTERNAL +NTV2VideoFormat gst_ntv2_video_format_from_caps(GstCaps *caps); + +typedef struct { + CNTV2Card *device; +} GstAjaDevice; + +G_GNUC_INTERNAL +GstAjaDevice *gst_aja_device_obtain(const gchar *device_identifier); +G_GNUC_INTERNAL +GstAjaDevice *gst_aja_device_ref(GstAjaDevice *device); +G_GNUC_INTERNAL +void gst_aja_device_unref(GstAjaDevice *device); + +#define GST_AJA_ALLOCATOR_MEMTYPE "aja" + +#define GST_TYPE_AJA_ALLOCATOR (gst_aja_allocator_get_type()) +#define GST_AJA_ALLOCATOR(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_ALLOCATOR, GstAjaAllocator)) +#define GST_AJA_ALLOCATOR_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AJA_ALLOCATOR, \ + GstAjaAllocatorClass)) +#define GST_IS_Aja_ALLOCATOR(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AJA_ALLOCATOR)) +#define GST_IS_Aja_ALLOCATOR_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AJA_ALLOCATOR)) +#define GST_AJA_ALLOCATOR_CAST(obj) ((GstAjaAllocator *)(obj)) + +typedef struct _GstAjaAllocator GstAjaAllocator; +typedef struct _GstAjaAllocatorClass GstAjaAllocatorClass; + +struct _GstAjaAllocator { + GstAllocator allocator; + + GstAjaDevice *device; +}; + +struct _GstAjaAllocatorClass { + GstAllocatorClass parent_class; +}; + +G_GNUC_INTERNAL +GType gst_aja_allocator_get_type(void); +G_GNUC_INTERNAL +GstAllocator *gst_aja_allocator_new(GstAjaDevice *device); + +typedef enum { + GST_AJA_AUDIO_SYSTEM_AUTO, + GST_AJA_AUDIO_SYSTEM_1, + GST_AJA_AUDIO_SYSTEM_2, + GST_AJA_AUDIO_SYSTEM_3, + GST_AJA_AUDIO_SYSTEM_4, + GST_AJA_AUDIO_SYSTEM_5, + GST_AJA_AUDIO_SYSTEM_6, + GST_AJA_AUDIO_SYSTEM_7, + GST_AJA_AUDIO_SYSTEM_8, +} GstAjaAudioSystem; + +#define GST_TYPE_AJA_AUDIO_SYSTEM (gst_aja_audio_system_get_type()) +G_GNUC_INTERNAL +GType gst_aja_audio_system_get_type(void); + +typedef enum { + GST_AJA_OUTPUT_DESTINATION_AUTO, + GST_AJA_OUTPUT_DESTINATION_ANALOG, + GST_AJA_OUTPUT_DESTINATION_SDI1, + GST_AJA_OUTPUT_DESTINATION_SDI2, + GST_AJA_OUTPUT_DESTINATION_SDI3, + GST_AJA_OUTPUT_DESTINATION_SDI4, + GST_AJA_OUTPUT_DESTINATION_SDI5, + GST_AJA_OUTPUT_DESTINATION_SDI6, + GST_AJA_OUTPUT_DESTINATION_SDI7, + GST_AJA_OUTPUT_DESTINATION_SDI8, + GST_AJA_OUTPUT_DESTINATION_HDMI, +} GstAjaOutputDestination; + +#define GST_TYPE_AJA_OUTPUT_DESTINATION (gst_aja_output_destination_get_type()) +G_GNUC_INTERNAL +GType gst_aja_output_destination_get_type(void); + +typedef enum { + GST_AJA_REFERENCE_SOURCE_AUTO, + GST_AJA_REFERENCE_SOURCE_FREERUN, + GST_AJA_REFERENCE_SOURCE_EXTERNAL, + GST_AJA_REFERENCE_SOURCE_INPUT_1, + GST_AJA_REFERENCE_SOURCE_INPUT_2, + GST_AJA_REFERENCE_SOURCE_INPUT_3, + 
GST_AJA_REFERENCE_SOURCE_INPUT_4, + GST_AJA_REFERENCE_SOURCE_INPUT_5, + GST_AJA_REFERENCE_SOURCE_INPUT_6, + GST_AJA_REFERENCE_SOURCE_INPUT_7, + GST_AJA_REFERENCE_SOURCE_INPUT_8, +} GstAjaReferenceSource; + +#define GST_TYPE_AJA_REFERENCE_SOURCE (gst_aja_reference_source_get_type()) +G_GNUC_INTERNAL +GType gst_aja_reference_source_get_type(void); + +G_GNUC_INTERNAL +void gst_aja_common_init(void); + +G_END_DECLS + +class ShmMutexLocker { + public: + ShmMutexLocker(); + ~ShmMutexLocker(); +}; diff --git a/gstajasink.cpp b/gstajasink.cpp new file mode 100644 index 0000000000..5645d00274 --- /dev/null +++ b/gstajasink.cpp @@ -0,0 +1,1373 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include + +#include "gstajacommon.h" +#include "gstajasink.h" + +GST_DEBUG_CATEGORY_STATIC(gst_aja_sink_debug); +#define GST_CAT_DEFAULT gst_aja_sink_debug + +#define DEFAULT_DEVICE_IDENTIFIER ("0") +#define DEFAULT_CHANNEL (::NTV2_CHANNEL1) +#define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) +#define DEFAULT_OUTPUT_DESTINATION (GST_AJA_OUTPUT_DESTINATION_AUTO) +#define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_AUTO) +#define DEFAULT_QUEUE_SIZE (16) +#define DEFAULT_OUTPUT_CPU_CORE (G_MAXUINT) + +enum { + PROP_0, + PROP_DEVICE_IDENTIFIER, + PROP_CHANNEL, + PROP_AUDIO_SYSTEM, + PROP_OUTPUT_DESTINATION, + PROP_REFERENCE_SOURCE, + PROP_QUEUE_SIZE, + PROP_OUTPUT_CPU_CORE, +}; + +typedef enum { + QUEUE_ITEM_TYPE_FRAME, +} QueueItemType; + +typedef struct { + QueueItemType type; + + // For FRAME + GstVideoFrame frame; + GstBuffer *audio_buffer; + GstMapInfo audio_map; + NTV2_RP188 tc; + AJAAncillaryList *anc_packet_list; +} QueueItem; + +static void gst_aja_sink_set_property(GObject *object, guint property_id, + const GValue *value, GParamSpec *pspec); +static void gst_aja_sink_get_property(GObject *object, guint property_id, + GValue *value, GParamSpec *pspec); +static void gst_aja_sink_finalize(GObject *object); + +static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps); +static GstCaps *gst_aja_sink_get_caps(GstBaseSink *bsink, GstCaps *filter); +static gboolean gst_aja_sink_event(GstBaseSink *bsink, GstEvent *event); +static gboolean gst_aja_sink_propose_allocation(GstBaseSink *bsink, + GstQuery *query); +static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, GstBuffer *buffer); + +static gboolean gst_aja_sink_open(GstAjaSink *sink); +static gboolean gst_aja_sink_close(GstAjaSink *sink); +static gboolean gst_aja_sink_start(GstAjaSink *sink); +static gboolean gst_aja_sink_stop(GstAjaSink *sink); + +static GstStateChangeReturn gst_aja_sink_change_state( + GstElement *element, GstStateChange transition); + +static void output_thread_func(AJAThread 
*thread, void *data); + +#define parent_class gst_aja_sink_parent_class +G_DEFINE_TYPE(GstAjaSink, gst_aja_sink, GST_TYPE_BASE_SINK); + +static void gst_aja_sink_class_init(GstAjaSinkClass *klass) { + GObjectClass *gobject_class = G_OBJECT_CLASS(klass); + GstElementClass *element_class = GST_ELEMENT_CLASS(klass); + GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS(klass); + GstCaps *templ_caps; + + gobject_class->set_property = gst_aja_sink_set_property; + gobject_class->get_property = gst_aja_sink_get_property; + gobject_class->finalize = gst_aja_sink_finalize; + + g_object_class_install_property( + gobject_class, PROP_DEVICE_IDENTIFIER, + g_param_spec_string( + "device-identifier", "Device identifier", + "Input device instance to use", DEFAULT_DEVICE_IDENTIFIER, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_CHANNEL, + g_param_spec_uint( + "channel", "Channel", "Channel to use", 0, NTV2_MAX_NUM_CHANNELS - 1, + DEFAULT_CHANNEL, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_QUEUE_SIZE, + g_param_spec_uint( + "queue-size", "Queue Size", + "Size of internal queue in number of video frames. " + "Half of this is allocated as device buffers and equal to the " + "latency.", + 1, G_MAXINT, DEFAULT_QUEUE_SIZE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_AUDIO_SYSTEM, + g_param_spec_enum( + "audio-system", "Audio System", "Audio system to use", + GST_TYPE_AJA_AUDIO_SYSTEM, DEFAULT_AUDIO_SYSTEM, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_OUTPUT_DESTINATION, + g_param_spec_enum( + "output-destination", "Output Destination", + "Output destination to use", GST_TYPE_AJA_OUTPUT_DESTINATION, + DEFAULT_OUTPUT_DESTINATION, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_REFERENCE_SOURCE, + g_param_spec_enum( + "reference-source", "Reference Source", "Reference source to use", + GST_TYPE_AJA_REFERENCE_SOURCE, DEFAULT_REFERENCE_SOURCE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_OUTPUT_CPU_CORE, + g_param_spec_uint( + "output-cpu-core", "Output CPU Core", + "Sets the affinity of the output thread to this CPU core " + "(-1=disabled)", + 0, G_MAXUINT, DEFAULT_OUTPUT_CPU_CORE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + element_class->change_state = GST_DEBUG_FUNCPTR(gst_aja_sink_change_state); + + basesink_class->set_caps = GST_DEBUG_FUNCPTR(gst_aja_sink_set_caps); + basesink_class->get_caps = GST_DEBUG_FUNCPTR(gst_aja_sink_get_caps); + basesink_class->event = GST_DEBUG_FUNCPTR(gst_aja_sink_event); + basesink_class->propose_allocation = + GST_DEBUG_FUNCPTR(gst_aja_sink_propose_allocation); + basesink_class->render = GST_DEBUG_FUNCPTR(gst_aja_sink_render); + + templ_caps = gst_ntv2_supported_caps(DEVICE_ID_INVALID); + gst_element_class_add_pad_template( + element_class, + gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS, templ_caps)); + gst_caps_unref(templ_caps); + + gst_element_class_set_static_metadata( + element_class, "AJA audio/video sink", "Audio/Video/Sink", + "Outputs audio/video frames 
with AJA devices", + "Sebastian Dröge "); + + GST_DEBUG_CATEGORY_INIT(gst_aja_sink_debug, "ajasink", 0, "AJA sink"); +} + +static void gst_aja_sink_init(GstAjaSink *self) { + g_mutex_init(&self->queue_lock); + g_cond_init(&self->queue_cond); + g_cond_init(&self->drain_cond); + + self->device_identifier = g_strdup(DEFAULT_DEVICE_IDENTIFIER); + self->channel = DEFAULT_CHANNEL; + self->queue_size = DEFAULT_QUEUE_SIZE; + self->audio_system_setting = DEFAULT_AUDIO_SYSTEM; + self->output_destination = DEFAULT_OUTPUT_DESTINATION; + self->reference_source = DEFAULT_REFERENCE_SOURCE; + self->output_cpu_core = DEFAULT_OUTPUT_CPU_CORE; + + gst_base_sink_set_render_delay(GST_BASE_SINK(self), + (self->queue_size / 2) * GST_SECOND / 30); + self->queue = + gst_queue_array_new_for_struct(sizeof(QueueItem), self->queue_size); +} + +void gst_aja_sink_set_property(GObject *object, guint property_id, + const GValue *value, GParamSpec *pspec) { + GstAjaSink *self = GST_AJA_SINK(object); + + switch (property_id) { + case PROP_DEVICE_IDENTIFIER: + g_free(self->device_identifier); + self->device_identifier = g_value_dup_string(value); + break; + case PROP_CHANNEL: + self->channel = (NTV2Channel)g_value_get_uint(value); + break; + case PROP_QUEUE_SIZE: + self->queue_size = g_value_get_uint(value); + break; + case PROP_AUDIO_SYSTEM: + self->audio_system_setting = (GstAjaAudioSystem)g_value_get_enum(value); + break; + case PROP_OUTPUT_DESTINATION: + self->output_destination = + (GstAjaOutputDestination)g_value_get_enum(value); + break; + case PROP_REFERENCE_SOURCE: + self->reference_source = (GstAjaReferenceSource)g_value_get_enum(value); + break; + case PROP_OUTPUT_CPU_CORE: + self->output_cpu_core = g_value_get_uint(value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); + break; + } +} + +void gst_aja_sink_get_property(GObject *object, guint property_id, + GValue *value, GParamSpec *pspec) { + GstAjaSink *self = GST_AJA_SINK(object); + + switch (property_id) { + case PROP_DEVICE_IDENTIFIER: + g_value_set_string(value, self->device_identifier); + break; + case PROP_CHANNEL: + g_value_set_uint(value, self->channel); + break; + case PROP_QUEUE_SIZE: + g_value_set_uint(value, self->queue_size); + break; + case PROP_AUDIO_SYSTEM: + g_value_set_enum(value, self->audio_system_setting); + break; + case PROP_OUTPUT_DESTINATION: + g_value_set_enum(value, self->output_destination); + break; + case PROP_REFERENCE_SOURCE: + g_value_set_enum(value, self->reference_source); + break; + case PROP_OUTPUT_CPU_CORE: + g_value_set_uint(value, self->output_cpu_core); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); + break; + } +} + +void gst_aja_sink_finalize(GObject *object) { + GstAjaSink *self = GST_AJA_SINK(object); + + g_assert(self->device == NULL); + g_assert(gst_queue_array_get_length(self->queue) == 0); + g_clear_pointer(&self->queue, gst_queue_array_free); + + g_mutex_clear(&self->queue_lock); + g_cond_clear(&self->queue_cond); + g_cond_clear(&self->drain_cond); + + G_OBJECT_CLASS(parent_class)->finalize(object); +} + +static gboolean gst_aja_sink_open(GstAjaSink *self) { + GST_DEBUG_OBJECT(self, "Opening device"); + + g_assert(self->device == NULL); + + self->device = gst_aja_device_obtain(self->device_identifier); + if (!self->device) { + GST_ERROR_OBJECT(self, "Failed to open device"); + return FALSE; + } + + if (!self->device->device->IsDeviceReady(false)) { + g_clear_pointer(&self->device, gst_aja_device_unref); + return FALSE; + } + + 
self->device->device->SetEveryFrameServices(::NTV2_OEM_TASKS); + self->device_id = self->device->device->GetDeviceID(); + + std::string serial_number; + if (!self->device->device->GetSerialNumberString(serial_number)) + serial_number = "none"; + + GST_DEBUG_OBJECT(self, + "Opened device with ID %d at index %d (%s, version %s, " + "serial number %s, can do VANC %d)", + self->device_id, self->device->device->GetIndexNumber(), + self->device->device->GetDisplayName().c_str(), + self->device->device->GetDeviceVersionString().c_str(), + serial_number.c_str(), + ::NTV2DeviceCanDoCustomAnc(self->device_id)); + + GST_DEBUG_OBJECT(self, + "Using SDK version %d.%d.%d.%d (%s) and driver version %s", + AJA_NTV2_SDK_VERSION_MAJOR, AJA_NTV2_SDK_VERSION_MINOR, + AJA_NTV2_SDK_VERSION_POINT, AJA_NTV2_SDK_BUILD_NUMBER, + AJA_NTV2_SDK_BUILD_DATETIME, + self->device->device->GetDriverVersionString().c_str()); + + self->device->device->SetMultiFormatMode(true); + + self->allocator = gst_aja_allocator_new(self->device); + + GST_DEBUG_OBJECT(self, "Opened device"); + + return TRUE; +} + +static gboolean gst_aja_sink_close(GstAjaSink *self) { + gst_clear_object(&self->allocator); + g_clear_pointer(&self->device, gst_aja_device_unref); + self->device_id = DEVICE_ID_INVALID; + + GST_DEBUG_OBJECT(self, "Closed device"); + + return TRUE; +} + +static gboolean gst_aja_sink_start(GstAjaSink *self) { + GST_DEBUG_OBJECT(self, "Starting"); + self->output_thread = new AJAThread(); + self->output_thread->Attach(output_thread_func, self); + self->output_thread->SetPriority(AJA_ThreadPriority_High); + self->output_thread->Start(); + g_mutex_lock(&self->queue_lock); + self->shutdown = FALSE; + self->playing = FALSE; + self->eos = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + return TRUE; +} + +static gboolean gst_aja_sink_stop(GstAjaSink *self) { + QueueItem *item; + + GST_DEBUG_OBJECT(self, "Stopping"); + + g_mutex_lock(&self->queue_lock); + self->shutdown = TRUE; + self->playing = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + if (self->output_thread) { + self->output_thread->Stop(); + delete self->output_thread; + self->output_thread = NULL; + } + + GST_OBJECT_LOCK(self); + gst_clear_caps(&self->configured_caps); + self->configured_audio_channels = 0; + GST_OBJECT_UNLOCK(self); + + while ((item = (QueueItem *)gst_queue_array_pop_head_struct(self->queue))) { + if (item->type == QUEUE_ITEM_TYPE_FRAME) { + gst_video_frame_unmap(&item->frame); + if (item->audio_buffer) { + gst_buffer_unmap(item->audio_buffer, &item->audio_map); + gst_buffer_unref(item->audio_buffer); + } + if (item->anc_packet_list) { + delete item->anc_packet_list; + } + } + } + + if (self->buffer_pool) { + gst_buffer_pool_set_active(self->buffer_pool, FALSE); + gst_clear_object(&self->buffer_pool); + } + + if (self->audio_buffer_pool) { + gst_buffer_pool_set_active(self->audio_buffer_pool, FALSE); + gst_clear_object(&self->audio_buffer_pool); + } + + GST_DEBUG_OBJECT(self, "Stopped"); + + return TRUE; +} + +static GstStateChangeReturn gst_aja_sink_change_state( + GstElement *element, GstStateChange transition) { + GstAjaSink *self = GST_AJA_SINK(element); + GstStateChangeReturn ret; + + switch (transition) { + case GST_STATE_CHANGE_NULL_TO_READY: + if (!gst_aja_sink_open(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_READY_TO_PAUSED: + if (!gst_aja_sink_start(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + break; + 
default: + break; + } + + ret = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition); + if (ret == GST_STATE_CHANGE_FAILURE) return ret; + + switch (transition) { + case GST_STATE_CHANGE_PLAYING_TO_PAUSED: + g_mutex_lock(&self->queue_lock); + self->playing = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + g_mutex_lock(&self->queue_lock); + self->playing = TRUE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + if (!gst_aja_sink_stop(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_READY_TO_NULL: + if (!gst_aja_sink_close(self)) return GST_STATE_CHANGE_FAILURE; + break; + default: + break; + } + + return ret; +} + +static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { + GstAjaSink *self = GST_AJA_SINK(bsink); + const GstStructure *s; + NTV2VideoFormat video_format = ::NTV2_FORMAT_UNKNOWN; + + GST_DEBUG_OBJECT(self, "Configuring caps %" GST_PTR_FORMAT, caps); + + GST_OBJECT_LOCK(self); + if (self->configured_caps) { + if (!gst_caps_can_intersect(self->configured_caps, caps)) { + GST_DEBUG_OBJECT(self, "Need to reconfigure, waiting for draining"); + GST_OBJECT_UNLOCK(self); + g_mutex_lock(&self->queue_lock); + self->draining = TRUE; + g_cond_signal(&self->queue_cond); + while (self->draining && !self->flushing && !self->shutdown) { + g_cond_wait(&self->drain_cond, &self->queue_lock); + } + + if (self->flushing || self->shutdown) { + g_mutex_unlock(&self->queue_lock); + GST_DEBUG_OBJECT(self, "Flushing"); + return FALSE; + } + g_mutex_unlock(&self->queue_lock); + GST_OBJECT_LOCK(self); + } else { + GST_OBJECT_UNLOCK(self); + GST_DEBUG_OBJECT(self, + "Compatible caps with previous caps, not reconfiguring"); + return TRUE; + } + } + + if (!gst_video_info_from_caps(&self->configured_info, caps)) { + GST_OBJECT_UNLOCK(self); + GST_FIXME_OBJECT(self, "Failed to parse caps"); + return FALSE; + } + + self->configured_audio_channels = 0; + s = gst_caps_get_structure(caps, 0); + gst_structure_get_int(s, "audio-channels", &self->configured_audio_channels); + + gst_caps_replace(&self->configured_caps, caps); + GST_OBJECT_UNLOCK(self); + + video_format = gst_ntv2_video_format_from_caps(caps); + if (video_format == NTV2_FORMAT_UNKNOWN) { + GST_ERROR_OBJECT(self, "Unsupported caps %" GST_PTR_FORMAT, caps); + return FALSE; + } + + self->video_format = video_format; + + // Configure render delay based on the framerate and queue size + gst_base_sink_set_render_delay( + GST_BASE_SINK(self), + gst_util_uint64_scale(self->queue_size / 2, + self->configured_info.fps_d * GST_SECOND, + self->configured_info.fps_n)); + + g_assert(self->device != NULL); + + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + if (!::NTV2DeviceCanDoVideoFormat(self->device_id, video_format)) { + GST_ERROR_OBJECT(self, "Device does not support mode %d", + (int)video_format); + return FALSE; + } + + self->device->device->SetMode(self->channel, NTV2_MODE_DISPLAY, false); + + GST_DEBUG_OBJECT(self, "Configuring video format %d on channel %d", + (int)video_format, (int)self->channel); + self->device->device->SetVideoFormat(video_format, false, false, + self->channel); + + if (!::NTV2DeviceCanDoFrameBufferFormat(self->device_id, + ::NTV2_FBF_10BIT_YCBCR)) { + GST_ERROR_OBJECT(self, "Device does not support frame buffer format %d", + 
(int)::NTV2_FBF_10BIT_YCBCR); + return FALSE; + } + self->device->device->SetFrameBufferFormat(self->channel, + ::NTV2_FBF_10BIT_YCBCR); + + NTV2ReferenceSource reference_source; + switch (self->reference_source) { + case GST_AJA_REFERENCE_SOURCE_EXTERNAL: + reference_source = ::NTV2_REFERENCE_EXTERNAL; + break; + case GST_AJA_REFERENCE_SOURCE_FREERUN: + case GST_AJA_REFERENCE_SOURCE_AUTO: + reference_source = ::NTV2_REFERENCE_FREERUN; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_1: + reference_source = ::NTV2_REFERENCE_INPUT1; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_2: + reference_source = ::NTV2_REFERENCE_INPUT2; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_3: + reference_source = ::NTV2_REFERENCE_INPUT3; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_4: + reference_source = ::NTV2_REFERENCE_INPUT4; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_5: + reference_source = ::NTV2_REFERENCE_INPUT5; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_6: + reference_source = ::NTV2_REFERENCE_INPUT6; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_7: + reference_source = ::NTV2_REFERENCE_INPUT7; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_8: + reference_source = ::NTV2_REFERENCE_INPUT8; + break; + default: + g_assert_not_reached(); + break; + } + GST_DEBUG_OBJECT(self, "Configuring reference source %d", + (int)reference_source); + self->device->device->SetFramePulseReference(reference_source); + + if (!self->device->device->EnableChannel(self->channel)) { + GST_ERROR_OBJECT(self, "Failed to enable channel"); + return FALSE; + } + + self->device->device->DMABufferAutoLock(false, true, 0); + + if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) + self->device->device->SetSDITransmitEnable(self->channel, true); + + const NTV2Standard standard(::GetNTV2StandardFromVideoFormat(video_format)); + self->device->device->SetSDIOutputStandard(self->channel, standard); + const NTV2FrameGeometry geometry = + ::GetNTV2FrameGeometryFromVideoFormat(video_format); + self->device->device->SetVANCMode(::NTV2_VANCMODE_OFF, standard, geometry, + self->channel); + + NTV2SmpteLineNumber smpte_line_num_info = ::GetSmpteLineNumber(standard); + self->f2_start_line = + (smpte_line_num_info.GetLastLine( + smpte_line_num_info.firstFieldTop ? 
NTV2_FIELD0 : NTV2_FIELD1) + + 1); + + if (self->configured_audio_channels) { + switch (self->audio_system_setting) { + case GST_AJA_AUDIO_SYSTEM_1: + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + break; + case GST_AJA_AUDIO_SYSTEM_2: + self->audio_system = ::NTV2_AUDIOSYSTEM_2; + break; + case GST_AJA_AUDIO_SYSTEM_3: + self->audio_system = ::NTV2_AUDIOSYSTEM_3; + break; + case GST_AJA_AUDIO_SYSTEM_4: + self->audio_system = ::NTV2_AUDIOSYSTEM_4; + break; + case GST_AJA_AUDIO_SYSTEM_5: + self->audio_system = ::NTV2_AUDIOSYSTEM_5; + break; + case GST_AJA_AUDIO_SYSTEM_6: + self->audio_system = ::NTV2_AUDIOSYSTEM_6; + break; + case GST_AJA_AUDIO_SYSTEM_7: + self->audio_system = ::NTV2_AUDIOSYSTEM_7; + break; + case GST_AJA_AUDIO_SYSTEM_8: + self->audio_system = ::NTV2_AUDIOSYSTEM_8; + break; + case GST_AJA_AUDIO_SYSTEM_AUTO: + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + if (::NTV2DeviceGetNumAudioSystems(self->device_id) > 1) + self->audio_system = ::NTV2ChannelToAudioSystem(self->channel); + if (!::NTV2DeviceCanDoFrameStore1Display(self->device_id)) + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + break; + default: + g_assert_not_reached(); + break; + } + + GST_DEBUG_OBJECT(self, "Using audio system %d", self->audio_system); + + self->device->device->SetNumberAudioChannels( + self->configured_audio_channels, self->audio_system); + self->device->device->SetAudioRate(::NTV2_AUDIO_48K, self->audio_system); + self->device->device->SetAudioBufferSize(::NTV2_AUDIO_BUFFER_BIG, + self->audio_system); + self->device->device->SetSDIOutputAudioSystem(self->channel, + self->audio_system); + self->device->device->SetSDIOutputDS2AudioSystem(self->channel, + self->audio_system); + self->device->device->SetAudioLoopBack(::NTV2_AUDIO_LOOPBACK_OFF, + self->audio_system); + } else { + self->audio_system = ::NTV2_AUDIOSYSTEM_INVALID; + } + + CNTV2SignalRouter router; + + self->device->device->GetRouting(router); + + // Always use the framebuffer associated with the channel + NTV2OutputCrosspointID framebuffer_id = + ::GetFrameBufferOutputXptFromChannel(self->channel, false, false); + + NTV2InputCrosspointID output_destination_id; + switch (self->output_destination) { + case GST_AJA_OUTPUT_DESTINATION_AUTO: + output_destination_id = ::GetSDIOutputInputXpt(self->channel, false); + break; + case GST_AJA_OUTPUT_DESTINATION_SDI1: + output_destination_id = ::NTV2_XptSDIOut1Input; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI2: + output_destination_id = ::NTV2_XptSDIOut2Input; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI3: + output_destination_id = ::NTV2_XptSDIOut3Input; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI4: + output_destination_id = ::NTV2_XptSDIOut4Input; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI5: + output_destination_id = ::NTV2_XptSDIOut5Input; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI6: + output_destination_id = ::NTV2_XptSDIOut6Input; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI7: + output_destination_id = ::NTV2_XptSDIOut7Input; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI8: + output_destination_id = ::NTV2_XptSDIOut8Input; + break; + case GST_AJA_OUTPUT_DESTINATION_ANALOG: + output_destination_id = ::NTV2_XptAnalogOutInput; + break; + case GST_AJA_OUTPUT_DESTINATION_HDMI: + output_destination_id = ::NTV2_XptHDMIOutInput; + break; + default: + g_assert_not_reached(); + break; + } + + // Need to remove old routes for the output and framebuffer we're going to use + NTV2ActualConnections connections = router.GetConnections(); + + for (NTV2ActualConnectionsConstIter iter = 
connections.begin(); + iter != connections.end(); iter++) { + if (iter->first == output_destination_id || iter->second == framebuffer_id) + router.RemoveConnection(iter->first, iter->second); + } + + GST_DEBUG_OBJECT(self, "Creating connection %d - %d", output_destination_id, + framebuffer_id); + router.AddConnection(output_destination_id, framebuffer_id); + + { + std::stringstream os; + CNTV2SignalRouter oldRouter; + self->device->device->GetRouting(oldRouter); + oldRouter.Print(os); + GST_DEBUG_OBJECT(self, "Previous routing:\n%s", os.str().c_str()); + } + self->device->device->ApplySignalRoute(router, true); + { + std::stringstream os; + CNTV2SignalRouter currentRouter; + self->device->device->GetRouting(currentRouter); + currentRouter.Print(os); + GST_DEBUG_OBJECT(self, "New routing:\n%s", os.str().c_str()); + } + + return TRUE; +} + +static GstCaps *gst_aja_sink_get_caps(GstBaseSink *bsink, GstCaps *filter) { + GstAjaSink *self = GST_AJA_SINK(bsink); + GstCaps *caps; + + if (self->device) { + caps = gst_ntv2_supported_caps(self->device_id); + } else { + caps = gst_pad_get_pad_template_caps(GST_BASE_SINK_PAD(self)); + } + + if (filter) { + GstCaps *tmp = + gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST); + gst_caps_unref(caps); + caps = tmp; + } + + return caps; +} + +static gboolean gst_aja_sink_event(GstBaseSink *bsink, GstEvent *event) { + GstAjaSink *self = GST_AJA_SINK(bsink); + + switch (GST_EVENT_TYPE(event)) { + case GST_EVENT_EOS: { + GST_DEBUG_OBJECT(self, "Signalling EOS"); + + g_mutex_lock(&self->queue_lock); + self->eos = TRUE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + break; + } + case GST_EVENT_FLUSH_START: { + g_mutex_lock(&self->queue_lock); + self->flushing = TRUE; + self->draining = FALSE; + g_cond_signal(&self->drain_cond); + g_mutex_unlock(&self->queue_lock); + break; + } + case GST_EVENT_FLUSH_STOP: { + QueueItem *item; + + g_mutex_lock(&self->queue_lock); + while ( + (item = (QueueItem *)gst_queue_array_pop_head_struct(self->queue))) { + if (item->type == QUEUE_ITEM_TYPE_FRAME) { + gst_video_frame_unmap(&item->frame); + if (item->audio_buffer) { + gst_buffer_unmap(item->audio_buffer, &item->audio_map); + gst_buffer_unref(item->audio_buffer); + } + if (item->anc_packet_list) { + delete item->anc_packet_list; + } + } + } + g_cond_signal(&self->queue_cond); + + self->flushing = FALSE; + g_cond_signal(&self->drain_cond); + g_mutex_unlock(&self->queue_lock); + break; + } + default: + break; + } + + return GST_BASE_SINK_CLASS(parent_class)->event(bsink, event); +} + +static gboolean gst_aja_sink_propose_allocation(GstBaseSink *bsink, + GstQuery *query) { + GstAjaSink *self = GST_AJA_SINK(bsink); + + if (self->allocator) { + GstAllocationParams params; + + gst_allocation_params_init(¶ms); + params.prefix = 0; + params.padding = 0; + params.align = 4095; + + gst_query_add_allocation_param(query, self->allocator, ¶ms); + } + + return TRUE; +} + +static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, + GstBuffer *buffer) { + GstAjaSink *self = GST_AJA_SINK(bsink); + GstFlowReturn flow_ret = GST_FLOW_OK; + GstAjaAudioMeta *meta; + GstBuffer *item_buffer = NULL, *item_audio_buffer = NULL; + GstVideoTimeCodeMeta *tc_meta; + QueueItem item = { + .type = QUEUE_ITEM_TYPE_FRAME, + .frame = + { + {0}, + }, + .audio_buffer = NULL, + .audio_map = GST_MAP_INFO_INIT, + .tc = NTV2_RP188(), + .anc_packet_list = NULL, + }; + + guint video_buffer_size = ::GetVideoActiveSize( + self->video_format, ::NTV2_FBF_10BIT_YCBCR, 
::NTV2_VANCMODE_OFF); + + meta = gst_buffer_get_aja_audio_meta(buffer); + tc_meta = gst_buffer_get_video_time_code_meta(buffer); + + if (gst_buffer_n_memory(buffer) == 1) { + GstMemory *mem = gst_buffer_peek_memory(buffer, 0); + + if (gst_memory_get_sizes(mem, NULL, NULL) == video_buffer_size && + strcmp(mem->allocator->mem_type, GST_AJA_ALLOCATOR_MEMTYPE) == 0 && + GST_AJA_ALLOCATOR(mem->allocator)->device->device->GetIndexNumber() == + self->device->device->GetIndexNumber()) { + item_buffer = gst_buffer_ref(buffer); + } + } + + if (!item_buffer) { + GstVideoFrame in_frame; + + GST_DEBUG_OBJECT(self, "Allocating new video buffer"); + + if (!self->buffer_pool) { + self->buffer_pool = gst_buffer_pool_new(); + GstStructure *config = gst_buffer_pool_get_config(self->buffer_pool); + gst_buffer_pool_config_set_params(config, NULL, video_buffer_size, + self->queue_size, 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->buffer_pool, config); + gst_buffer_pool_set_active(self->buffer_pool, TRUE); + } + + if (!gst_video_frame_map(&in_frame, &self->configured_info, buffer, + GST_MAP_READ)) { + GST_ERROR_OBJECT(self, "Failed to map buffer"); + return GST_FLOW_ERROR; + } + + flow_ret = + gst_buffer_pool_acquire_buffer(self->buffer_pool, &item_buffer, NULL); + if (flow_ret != GST_FLOW_OK) { + gst_video_frame_unmap(&in_frame); + return flow_ret; + } + + item.type = QUEUE_ITEM_TYPE_FRAME; + + gst_video_frame_map(&item.frame, &self->configured_info, item_buffer, + GST_MAP_READWRITE); + gst_video_frame_copy(&item.frame, &in_frame); + gst_video_frame_unmap(&in_frame); + gst_buffer_unref(item_buffer); + } else { + item.type = QUEUE_ITEM_TYPE_FRAME; + + gst_video_frame_map(&item.frame, &self->configured_info, item_buffer, + GST_MAP_READ); + gst_buffer_unref(item_buffer); + } + + if (meta) { + if (gst_buffer_n_memory(meta->buffer) == 1) { + GstMemory *mem = gst_buffer_peek_memory(meta->buffer, 0); + + if (strcmp(mem->allocator->mem_type, GST_AJA_ALLOCATOR_MEMTYPE) == 0 && + GST_AJA_ALLOCATOR(mem->allocator)->device->device->GetIndexNumber() == + self->device->device->GetIndexNumber()) { + item_audio_buffer = gst_buffer_ref(meta->buffer); + } + } + + if (!item_audio_buffer) { + GstMapInfo audio_map; + + GST_DEBUG_OBJECT(self, "Allocating new audio buffer"); + + if (!self->audio_buffer_pool) { + guint audio_buffer_size = 1UL * 1024UL * 1024UL; + + self->audio_buffer_pool = gst_buffer_pool_new(); + GstStructure *config = + gst_buffer_pool_get_config(self->audio_buffer_pool); + gst_buffer_pool_config_set_params(config, NULL, audio_buffer_size, + self->queue_size, 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->audio_buffer_pool, config); + gst_buffer_pool_set_active(self->audio_buffer_pool, TRUE); + } + + flow_ret = gst_buffer_pool_acquire_buffer(self->audio_buffer_pool, + &item_audio_buffer, NULL); + if (flow_ret != GST_FLOW_OK) { + gst_video_frame_unmap(&item.frame); + return flow_ret; + } + + gst_buffer_set_size(item_audio_buffer, gst_buffer_get_size(meta->buffer)); + + gst_buffer_map(meta->buffer, &audio_map, GST_MAP_READ); + gst_buffer_map(item_audio_buffer, &item.audio_map, GST_MAP_READWRITE); + memcpy(item.audio_map.data, audio_map.data, audio_map.size); + gst_buffer_unmap(meta->buffer, &audio_map); + item.audio_buffer = item_audio_buffer; + } else { + gst_buffer_map(item_audio_buffer, &item.audio_map, GST_MAP_READ); + item.audio_buffer = item_audio_buffer; + } + } else { + 
item.audio_buffer = NULL; + } + + if (tc_meta) { + TimecodeFormat tc_format = ::kTCFormatUnknown; + + if (tc_meta->tc.config.fps_n == 24 && tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat24fps; + } else if (tc_meta->tc.config.fps_n == 25 && + tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat25fps; + } else if (tc_meta->tc.config.fps_n == 30 && + tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat30fps; + } else if (tc_meta->tc.config.fps_n == 30000 && + tc_meta->tc.config.fps_d == 1001) { + tc_format = kTCFormat30fpsDF; + } else if (tc_meta->tc.config.fps_n == 48 && + tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat48fps; + } else if (tc_meta->tc.config.fps_n == 50 && + tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat50fps; + } else if (tc_meta->tc.config.fps_n == 60 && + tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat60fps; + } else if (tc_meta->tc.config.fps_n == 60000 && + tc_meta->tc.config.fps_d == 1001) { + tc_format = kTCFormat60fpsDF; + } + + const CRP188 rp188(tc_meta->tc.frames, tc_meta->tc.seconds, + tc_meta->tc.minutes, tc_meta->tc.hours, tc_format); + rp188.GetRP188Reg(item.tc); + } else { + item.tc.fDBB = 0xffffffff; + } + + // TODO: Handle AFD/Bar meta +#if 0 + if (bar_meta || afd_meta) { + const uint16_t kF1PktLineNumAFDBAR(11); + const AJAAncillaryDataLocation kAFDBARLocF1( + AJAAncillaryDataLink_A, AJAAncillaryDataVideoStream_Y, + AJAAncillaryDataSpace_VANC, kF1PktLineNumAFDBAR, + AJAAncDataHorizOffset_AnyVanc); + const uint16_t kF2PktLineNumAFDBAR(self->f2_start_line + 11); + const AJAAncillaryDataLocation kAFDBARLocF2( + AJAAncillaryDataLink_A, AJAAncillaryDataVideoStream_Y, + AJAAncillaryDataSpace_VANC, kF2PktLineNumAFDBAR, + AJAAncDataHorizOffset_AnyVanc); + + AJAAncillaryData pkt; + pkt.SetFromSMPTE334(NULL, 0, kAFDBARLocF1); + item.anc_packet_list->AddAncillaryData(pkt); + + if (self->configured_info.interlace_mode != GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { + AJAAncillaryData pkt2; + pkt.SetFromSMPTE334(NULL, 0, kAFDBARLocF2); + item.anc_packet_list->AddAncillaryData(pkt); + } + } +#endif + + GstVideoCaptionMeta *caption_meta; + gpointer iter = NULL; + while ( + (caption_meta = (GstVideoCaptionMeta *)gst_buffer_iterate_meta_filtered( + buffer, &iter, GST_VIDEO_CAPTION_META_API_TYPE))) { + if (!item.anc_packet_list) item.anc_packet_list = new AJAAncillaryList; + + if (caption_meta->caption_type == GST_VIDEO_CAPTION_TYPE_CEA708_CDP) { + const uint16_t kF1PktLineNumCEA708(9); + const AJAAncillaryDataLocation kCEA708LocF1( + AJAAncillaryDataLink_A, AJAAncillaryDataVideoStream_Y, + AJAAncillaryDataSpace_VANC, kF1PktLineNumCEA708, + AJAAncDataHorizOffset_AnyVanc); + + AJAAncillaryData_Cea708 pkt; + + pkt.SetDID(GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8); + pkt.SetSID(GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff); + pkt.SetDataLocation(kCEA708LocF1); + pkt.SetDataCoding(AJAAncillaryDataCoding_Digital); + pkt.SetPayloadData(caption_meta->data, caption_meta->size); + + item.anc_packet_list->AddAncillaryData(pkt); + } else { + GST_WARNING_OBJECT(self, "Unhandled caption type %d", + caption_meta->caption_type); + } + } + + g_mutex_lock(&self->queue_lock); + while (gst_queue_array_get_length(self->queue) >= self->queue_size) { + QueueItem *tmp = (QueueItem *)gst_queue_array_pop_head_struct(self->queue); + + if (tmp->type == QUEUE_ITEM_TYPE_FRAME) { + GST_WARNING_OBJECT(self, "Element queue overrun, dropping old frame"); + + GstMessage *msg = gst_message_new_qos( + GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, 
GST_CLOCK_TIME_NONE, + GST_BUFFER_PTS(tmp->frame.buffer), + gst_util_uint64_scale(GST_SECOND, self->configured_info.fps_d, + self->configured_info.fps_n)); + gst_element_post_message(GST_ELEMENT_CAST(self), msg); + + gst_video_frame_unmap(&tmp->frame); + if (tmp->audio_buffer) { + gst_buffer_unmap(tmp->audio_buffer, &tmp->audio_map); + gst_buffer_unref(tmp->audio_buffer); + } + if (tmp->anc_packet_list) { + delete tmp->anc_packet_list; + } + } + } + + GST_TRACE_OBJECT(self, "Queuing frame video %p audio %p", + GST_VIDEO_FRAME_PLANE_DATA(&item.frame, 0), + item.audio_buffer ? item.audio_map.data : NULL); + gst_queue_array_push_tail_struct(self->queue, &item); + GST_TRACE_OBJECT(self, "%u frames queued", + gst_queue_array_get_length(self->queue)); + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + return flow_ret; +} + +static void output_thread_func(AJAThread *thread, void *data) { + GstAjaSink *self = GST_AJA_SINK(data); + GstClock *clock = NULL; + guint64 frames_renderded_start = G_MAXUINT64; + GstClockTime frames_renderded_start_time = GST_CLOCK_TIME_NONE; + guint64 frames_dropped_last = G_MAXUINT64; + AUTOCIRCULATE_TRANSFER transfer; + + if (self->output_cpu_core != G_MAXUINT) { + cpu_set_t mask; + pthread_t current_thread = pthread_self(); + + CPU_ZERO(&mask); + CPU_SET(self->output_cpu_core, &mask); + + if (pthread_setaffinity_np(current_thread, sizeof(mask), &mask) != 0) { + GST_ERROR_OBJECT(self, + "Failed to set affinity for current thread to core %u", + self->output_cpu_core); + } + } + + g_mutex_lock(&self->queue_lock); +restart: + if (self->draining && gst_queue_array_get_length(self->queue) == 0) { + GST_DEBUG_OBJECT(self, "Drained"); + self->draining = FALSE; + g_cond_signal(&self->drain_cond); + } + + GST_DEBUG_OBJECT(self, "Waiting for playing or shutdown"); + while ((!self->playing && !self->shutdown) || + (self->playing && + gst_queue_array_get_length(self->queue) < self->queue_size / 2 && + !self->eos)) + g_cond_wait(&self->queue_cond, &self->queue_lock); + if (self->shutdown) { + GST_DEBUG_OBJECT(self, "Shutting down"); + g_mutex_unlock(&self->queue_lock); + return; + } + + GST_DEBUG_OBJECT(self, "Starting playing"); + g_mutex_unlock(&self->queue_lock); + + { + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + self->device->device->AutoCirculateStop(self->channel); + + self->device->device->EnableOutputInterrupt(self->channel); + self->device->device->SubscribeOutputVerticalEvent(self->channel); + if (!self->device->device->AutoCirculateInitForOutput( + self->channel, self->queue_size / 2, self->audio_system, + AUTOCIRCULATE_WITH_RP188 | AUTOCIRCULATE_WITH_ANC, 1)) { + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to initialize autocirculate")); + goto out; + } + self->device->device->AutoCirculateStart(self->channel); + } + + gst_clear_object(&clock); + clock = gst_element_get_clock(GST_ELEMENT_CAST(self)); + frames_renderded_start = G_MAXUINT64; + frames_renderded_start_time = GST_CLOCK_TIME_NONE; + frames_dropped_last = G_MAXUINT64; + + transfer.acANCBuffer.Allocate(2048); + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_INTERLEAVED) + transfer.acANCField2Buffer.Allocate(2048); + + g_mutex_lock(&self->queue_lock); + while (self->playing && !self->shutdown && + !(self->draining && gst_queue_array_get_length(self->queue) == 0)) { + AUTOCIRCULATE_STATUS status; + + self->device->device->AutoCirculateGetStatus(self->channel, status); 
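+    // The AUTOCIRCULATE_STATUS fetched above feeds both the trace output and
+    // the drift estimate below. Worked example with illustrative numbers: at
+    // 25 fps, if 60s of pipeline clock time have elapsed since the start
+    // point and 1501 frames were processed+dropped in that span, then
+    // frames_produced is 60 * 25 = 1500 while fps_rendered is
+    // 1501 / 60 = ~25.02, i.e. the device output runs slightly fast relative
+    // to the pipeline clock.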
+ + GST_TRACE_OBJECT(self, + "Start frame %d " + "end frame %d " + "active frame %d " + "start time %" G_GUINT64_FORMAT + " " + "current time %" G_GUINT64_FORMAT + " " + "frames processed %u " + "frames dropped %u " + "buffer level %u", + status.acStartFrame, status.acEndFrame, + status.acActiveFrame, status.acRDTSCStartTime, + status.acRDTSCCurrentTime, status.acFramesProcessed, + status.acFramesDropped, status.acBufferLevel); + + // Trivial drift calculation + // + // TODO: Should probably take averages over a timespan (say 1 minute) into a + // ringbuffer and calculate a linear regression over them + // FIXME: Add some compensation by dropping/duplicating frames as needed + // but make this configurable + // FIXME: Should use transfer.acTransferStatus.acFrameStamp after + // AutoCirculateTransfer() + if (frames_renderded_start_time == GST_CLOCK_TIME_NONE && + status.acRDTSCStartTime != 0 && + status.acFramesProcessed + status.acFramesDropped > self->queue_size && + clock) { + frames_renderded_start = + status.acFramesProcessed + status.acFramesDropped; + frames_renderded_start_time = gst_clock_get_time(clock); + } + + if (clock && frames_renderded_start_time != GST_CLOCK_TIME_NONE) { + GstClockTime now = gst_clock_get_time(clock); + GstClockTime diff = now - frames_renderded_start_time; + guint64 frames_rendered = + (status.acFramesProcessed + status.acFramesDropped) - + frames_renderded_start; + guint64 frames_produced = + gst_util_uint64_scale(diff, self->configured_info.fps_n, + self->configured_info.fps_d * GST_SECOND); + gdouble fps_rendered = ((gdouble)frames_rendered * GST_SECOND) / diff; + + GST_TRACE_OBJECT(self, + "Frames rendered %" G_GUINT64_FORMAT + ", frames produced %" G_GUINT64_FORMAT + ", FPS rendered %lf", + frames_rendered, frames_produced, fps_rendered); + } + + // Detect if we were too slow with providing frames and report if that was + // the case together with the amount of frames dropped + if (frames_dropped_last == G_MAXUINT64) { + frames_dropped_last = status.acFramesDropped; + } else if (frames_dropped_last < status.acFramesDropped) { + GST_WARNING_OBJECT(self, "Dropped %" G_GUINT64_FORMAT " frames", + status.acFramesDropped - frames_dropped_last); + + GstClockTime timestamp = + gst_util_uint64_scale(status.acFramesProcessed + frames_dropped_last, + self->configured_info.fps_n, + self->configured_info.fps_d * GST_SECOND); + GstClockTime timestamp_end = gst_util_uint64_scale( + status.acFramesProcessed + status.acFramesDropped, + self->configured_info.fps_n, + self->configured_info.fps_d * GST_SECOND); + GstMessage *msg = gst_message_new_qos( + GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, + timestamp, timestamp_end - timestamp); + gst_element_post_message(GST_ELEMENT_CAST(self), msg); + + frames_dropped_last = status.acFramesDropped; + } + + if (status.GetNumAvailableOutputFrames() > 1) { + QueueItem item, *item_p; + + while ((item_p = (QueueItem *)gst_queue_array_pop_head_struct( + self->queue)) == NULL && + self->playing && !self->shutdown && !self->draining) { + GST_DEBUG_OBJECT( + self, + "Element queue underrun, waiting for more frames or shutdown"); + g_cond_wait(&self->queue_cond, &self->queue_lock); + } + + if (!self->playing || self->shutdown || (!item_p && self->draining)) { + if (item_p && item_p->type == QUEUE_ITEM_TYPE_FRAME) { + gst_video_frame_unmap(&item_p->frame); + if (item_p->audio_buffer) { + gst_buffer_unmap(item_p->audio_buffer, &item_p->audio_map); + gst_buffer_unref(item_p->audio_buffer); + } + if 
(item_p->anc_packet_list) { + delete item_p->anc_packet_list; + } + } + break; + } + + if (item_p && item_p->type != QUEUE_ITEM_TYPE_FRAME) { + continue; + } + + GST_TRACE_OBJECT(self, "%u frames queued", + gst_queue_array_get_length(self->queue)); + + item = *item_p; + g_mutex_unlock(&self->queue_lock); + + GST_TRACE_OBJECT(self, + "Transferring frame: " + "Video %p %" G_GSIZE_FORMAT + " " + "Audio %p %" G_GSIZE_FORMAT, + GST_VIDEO_FRAME_PLANE_DATA(&item.frame, 0), + GST_VIDEO_FRAME_SIZE(&item.frame), + item.audio_buffer ? item.audio_map.data : NULL, + item.audio_buffer ? item.audio_map.size : 0); + + // Set timecodes if provided by upstream + if (item.tc.IsValid() && item.tc.fDBB != 0xffffffff) { + NTV2TimeCodes timecodes; + + timecodes[::NTV2ChannelToTimecodeIndex(self->channel, false)] = item.tc; + timecodes[::NTV2ChannelToTimecodeIndex(self->channel, true)] = item.tc; + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) + timecodes[::NTV2ChannelToTimecodeIndex(self->channel, false, true)] = + item.tc; + transfer.SetOutputTimeCodes(timecodes); + } + + transfer.SetVideoBuffer( + (guint *)GST_VIDEO_FRAME_PLANE_DATA(&item.frame, 0), + GST_VIDEO_FRAME_SIZE(&item.frame)); + if (item.audio_buffer) { + transfer.SetAudioBuffer((guint *)item.audio_map.data, + item.audio_map.size); + } + + // Clear VANC and fill in captions as needed + transfer.acANCBuffer.Fill(ULWord(0)); + transfer.acANCField2Buffer.Fill(ULWord(0)); + + if (item.anc_packet_list) { + item.anc_packet_list->GetTransmitData( + transfer.acANCBuffer, transfer.acANCField2Buffer, + self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE, + self->f2_start_line); + } + + if (!self->device->device->AutoCirculateTransfer(self->channel, + transfer)) { + GST_WARNING_OBJECT(self, "Failed to transfer frame"); + } + + gst_video_frame_unmap(&item.frame); + + if (item.audio_buffer) { + gst_buffer_unmap(item.audio_buffer, &item.audio_map); + gst_buffer_unref(item.audio_buffer); + } + + if (item.anc_packet_list) { + delete item.anc_packet_list; + } + + g_mutex_lock(&self->queue_lock); + } else { + g_mutex_unlock(&self->queue_lock); + + self->device->device->WaitForOutputVerticalInterrupt(self->channel); + + g_mutex_lock(&self->queue_lock); + } + } + +out : { + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + self->device->device->AutoCirculateStop(self->channel); + self->device->device->UnsubscribeOutputVerticalEvent(self->channel); + self->device->device->DisableOutputInterrupt(self->channel); +} + + if ((!self->playing || self->draining) && !self->shutdown) goto restart; + g_mutex_unlock(&self->queue_lock); + + gst_clear_object(&clock); + + GST_DEBUG_OBJECT(self, "Stopped"); +} diff --git a/gstajasink.h b/gstajasink.h new file mode 100644 index 0000000000..270cbd9e4e --- /dev/null +++ b/gstajasink.h @@ -0,0 +1,95 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. 
+ * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#pragma once + +#include +#include +#include + +#include "gstajacommon.h" + +G_BEGIN_DECLS + +#define GST_TYPE_AJA_SINK (gst_aja_sink_get_type()) +#define GST_AJA_SINK(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_SINK, GstAjaSink)) +#define GST_AJA_SINK_CAST(obj) ((GstAjaSink *)obj) +#define GST_AJA_SINK_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AJA_SINK, GstAjaSinkClass)) +#define GST_IS_AJA_SINK(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AJA_SINK)) +#define GST_IS_AJA_SINK_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AJA_SINK)) + +typedef struct _GstAjaSink GstAjaSink; +typedef struct _GstAjaSinkClass GstAjaSinkClass; + +struct _GstAjaSink { + GstBaseSink parent; + + // Everything below protected by queue lock + GMutex queue_lock; + GCond queue_cond; + GstQueueArray *queue; + gboolean eos; + gboolean playing; + gboolean shutdown; + gboolean draining; + // Hold by set_caps() to wait until drained + GCond drain_cond; + gboolean flushing; + + GstAjaDevice *device; + NTV2DeviceID device_id; + GstAllocator *allocator; + + // Only allocated on demand + GstBufferPool *buffer_pool; + GstBufferPool *audio_buffer_pool; + + // Properties + gchar *device_identifier; + NTV2Channel channel; + guint queue_size; + guint output_cpu_core; + + GstAjaAudioSystem audio_system_setting; + GstAjaOutputDestination output_destination; + GstAjaReferenceSource reference_source; + + NTV2AudioSystem audio_system; + NTV2VideoFormat video_format; + guint32 f2_start_line; + + GstCaps *configured_caps; + GstVideoInfo configured_info; + gint configured_audio_channels; + + AJAThread *output_thread; +}; + +struct _GstAjaSinkClass { + GstBaseSinkClass parent_class; +}; + +G_GNUC_INTERNAL +GType gst_aja_sink_get_type(void); + +G_END_DECLS diff --git a/gstajasinkcombiner.cpp b/gstajasinkcombiner.cpp new file mode 100644 index 0000000000..122f895824 --- /dev/null +++ b/gstajasinkcombiner.cpp @@ -0,0 +1,294 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstajacommon.h" +#include "gstajasinkcombiner.h" + +GST_DEBUG_CATEGORY_STATIC(gst_aja_sink_combiner_debug); +#define GST_CAT_DEFAULT gst_aja_sink_combiner_debug + +static GstStaticPadTemplate video_sink_template = GST_STATIC_PAD_TEMPLATE( + "video", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS("video/x-raw")); + +static GstStaticPadTemplate audio_sink_template = + GST_STATIC_PAD_TEMPLATE("audio", GST_PAD_SINK, GST_PAD_REQUEST, + GST_STATIC_CAPS("audio/x-raw, " + "format = (string) S32LE, " + "rate = (int) 48000, " + "channels = (int) [ 1, 16 ], " + "layout = (string) interleaved")); + +static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE( + "src", GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS("video/x-raw")); + +G_DEFINE_TYPE(GstAjaSinkCombiner, gst_aja_sink_combiner, GST_TYPE_AGGREGATOR); +#define parent_class gst_aja_sink_combiner_parent_class + +static void gst_aja_sink_combiner_finalize(GObject *object) { + GstAjaSinkCombiner *self = GST_AJA_SINK_COMBINER(object); + + GST_OBJECT_LOCK(self); + gst_caps_replace(&self->audio_caps, NULL); + gst_caps_replace(&self->video_caps, NULL); + GST_OBJECT_UNLOCK(self); + + G_OBJECT_CLASS(parent_class)->finalize(object); +} + +static GstFlowReturn gst_aja_sink_combiner_aggregate(GstAggregator *aggregator, + gboolean timeout) { + GstAjaSinkCombiner *self = GST_AJA_SINK_COMBINER(aggregator); + GstBuffer *video_buffer, *audio_buffer; + + if (gst_aggregator_pad_is_eos(GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad)) && + gst_aggregator_pad_is_eos(GST_AGGREGATOR_PAD_CAST(self->video_sinkpad))) { + GST_DEBUG_OBJECT(self, "All pads EOS"); + return GST_FLOW_EOS; + } + + // FIXME: We currently assume that upstream provides + // - properly chunked buffers (1 buffer = 1 video frame) + // - properly synchronized buffers (audio/video starting at the same time) + // - no gaps + // + // This can be achieved externally with elements like audiobuffersplit and + // videorate. 
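+  //
+  // Illustrative sketch only (not part of this element's API): such input
+  // could be produced by a pipeline roughly like the one below, where the
+  // element names, caps and the output-buffer-duration value are assumptions
+  // for a 25 fps stream and would need to match the connected AJA device:
+  //
+  //   gst-launch-1.0 ajasinkcombiner name=c ! ajasink \
+  //     videotestsrc ! video/x-raw,format=v210,width=1920,height=1080,framerate=25/1 ! videorate ! c.video \
+  //     audiotestsrc ! audio/x-raw,format=S32LE,rate=48000,channels=2,layout=interleaved ! \
+  //       audiobuffersplit output-buffer-duration=1/25 ! c.audio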
+ + video_buffer = gst_aggregator_pad_peek_buffer( + GST_AGGREGATOR_PAD_CAST(self->video_sinkpad)); + if (!video_buffer) return GST_AGGREGATOR_FLOW_NEED_DATA; + + audio_buffer = gst_aggregator_pad_peek_buffer( + GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad)); + if (!audio_buffer && !gst_aggregator_pad_is_eos( + GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad))) { + gst_buffer_unref(video_buffer); + GST_TRACE_OBJECT(self, "Audio not ready yet, waiting"); + return GST_AGGREGATOR_FLOW_NEED_DATA; + } + + gst_aggregator_pad_drop_buffer(GST_AGGREGATOR_PAD_CAST(self->video_sinkpad)); + video_buffer = gst_buffer_make_writable(video_buffer); + GST_TRACE_OBJECT(self, + "Outputting buffer with video %" GST_PTR_FORMAT + " and audio %" GST_PTR_FORMAT, + video_buffer, audio_buffer); + if (audio_buffer) { + gst_buffer_add_aja_audio_meta(video_buffer, audio_buffer); + gst_buffer_unref(audio_buffer); + gst_aggregator_pad_drop_buffer( + GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad)); + } + + if (!gst_pad_has_current_caps(GST_AGGREGATOR_SRC_PAD(self)) || + self->caps_changed) { + GstCaps *caps = gst_caps_copy(self->video_caps); + GstStructure *s; + + s = gst_caps_get_structure(caps, 0); + if (self->audio_caps) { + const GstStructure *s2; + gint audio_channels; + + s2 = gst_caps_get_structure(self->audio_caps, 0); + + gst_structure_get_int(s2, "channels", &audio_channels); + gst_structure_set(s, "audio-channels", G_TYPE_INT, audio_channels, NULL); + } else { + gst_structure_set(s, "audio-channels", G_TYPE_INT, 0, NULL); + } + + GST_DEBUG_OBJECT(self, "Configuring caps %" GST_PTR_FORMAT, caps); + + gst_aggregator_set_src_caps(GST_AGGREGATOR(self), caps); + gst_caps_unref(caps); + self->caps_changed = FALSE; + } + + // Update the position for synchronization purposes + GST_AGGREGATOR_PAD_CAST(GST_AGGREGATOR_SRC_PAD(self))->segment.position = + GST_BUFFER_PTS(video_buffer); + if (GST_BUFFER_DURATION_IS_VALID(video_buffer)) + GST_AGGREGATOR_PAD_CAST(GST_AGGREGATOR_SRC_PAD(self))->segment.position += + GST_BUFFER_DURATION(video_buffer); + + return gst_aggregator_finish_buffer(GST_AGGREGATOR_CAST(self), video_buffer); +} + +static gboolean gst_aja_sink_combiner_sink_event(GstAggregator *aggregator, + GstAggregatorPad *agg_pad, + GstEvent *event) { + GstAjaSinkCombiner *self = GST_AJA_SINK_COMBINER(aggregator); + + switch (GST_EVENT_TYPE(event)) { + case GST_EVENT_SEGMENT: { + const GstSegment *segment; + + gst_event_parse_segment(event, &segment); + gst_aggregator_update_segment(GST_AGGREGATOR(self), segment); + break; + } + case GST_EVENT_CAPS: { + GstCaps *caps; + + gst_event_parse_caps(event, &caps); + + if (agg_pad == GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad)) { + GST_OBJECT_LOCK(self); + gst_caps_replace(&self->audio_caps, caps); + self->caps_changed = TRUE; + GST_OBJECT_UNLOCK(self); + } else if (agg_pad == GST_AGGREGATOR_PAD_CAST(self->video_sinkpad)) { + GST_OBJECT_LOCK(self); + gst_caps_replace(&self->video_caps, caps); + self->caps_changed = TRUE; + GST_OBJECT_UNLOCK(self); + } + + break; + } + default: + break; + } + + return GST_AGGREGATOR_CLASS(parent_class) + ->sink_event(aggregator, agg_pad, event); +} + +static gboolean gst_aja_sink_combiner_sink_query(GstAggregator *aggregator, + GstAggregatorPad *agg_pad, + GstQuery *query) { + GstAjaSinkCombiner *self = GST_AJA_SINK_COMBINER(aggregator); + + switch (GST_QUERY_TYPE(query)) { + case GST_QUERY_CAPS: { + GstCaps *filter, *caps; + + gst_query_parse_caps(query, &filter); + + if (agg_pad == GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad)) { + caps = 
gst_pad_get_pad_template_caps(GST_PAD(agg_pad)); + } else if (agg_pad == GST_AGGREGATOR_PAD_CAST(self->video_sinkpad)) { + caps = gst_pad_peer_query_caps(GST_AGGREGATOR_SRC_PAD(self), NULL); + caps = gst_caps_make_writable(caps); + guint caps_size = gst_caps_get_size(caps); + for (guint i = 0; i < caps_size; i++) { + GstStructure *s = gst_caps_get_structure(caps, i); + gst_structure_remove_field(s, "audio-channels"); + } + } else { + g_assert_not_reached(); + } + + if (filter) { + GstCaps *tmp = gst_caps_intersect(filter, caps); + gst_caps_unref(caps); + caps = tmp; + } + + gst_query_set_caps_result(query, caps); + + return TRUE; + } + case GST_QUERY_ALLOCATION: { + // Proxy to the sink for both pads so that the AJA allocator can be + // used upstream as needed. + return gst_pad_peer_query(GST_AGGREGATOR_SRC_PAD(self), query); + } + default: + break; + } + + return GST_AGGREGATOR_CLASS(parent_class) + ->sink_query(aggregator, agg_pad, query); +} + +static gboolean gst_aja_sink_combiner_negotiate(GstAggregator *aggregator) { + return TRUE; +} + +static gboolean gst_aja_sink_combiner_stop(GstAggregator *aggregator) { + GstAjaSinkCombiner *self = GST_AJA_SINK_COMBINER(aggregator); + + GST_OBJECT_LOCK(self); + gst_caps_replace(&self->audio_caps, NULL); + gst_caps_replace(&self->video_caps, NULL); + GST_OBJECT_UNLOCK(self); + + return TRUE; +} + +static void gst_aja_sink_combiner_class_init(GstAjaSinkCombinerClass *klass) { + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + GstAggregatorClass *aggregator_class; + + gobject_class = (GObjectClass *)klass; + gstelement_class = (GstElementClass *)klass; + aggregator_class = (GstAggregatorClass *)klass; + + gobject_class->finalize = gst_aja_sink_combiner_finalize; + + gst_element_class_set_static_metadata( + gstelement_class, "AJA sink audio/video combiner", "Audio/Video/Combiner", + "Combines corresponding audio/video frames", + "Sebastian Dröge "); + + gst_element_class_add_static_pad_template_with_gtype( + gstelement_class, &video_sink_template, GST_TYPE_AGGREGATOR_PAD); + gst_element_class_add_static_pad_template_with_gtype( + gstelement_class, &audio_sink_template, GST_TYPE_AGGREGATOR_PAD); + gst_element_class_add_static_pad_template_with_gtype( + gstelement_class, &src_template, GST_TYPE_AGGREGATOR_PAD); + + aggregator_class->aggregate = gst_aja_sink_combiner_aggregate; + aggregator_class->stop = gst_aja_sink_combiner_stop; + aggregator_class->sink_event = gst_aja_sink_combiner_sink_event; + aggregator_class->sink_query = gst_aja_sink_combiner_sink_query; + aggregator_class->negotiate = gst_aja_sink_combiner_negotiate; + aggregator_class->get_next_time = gst_aggregator_simple_get_next_time; + + // We don't support requesting new pads + gstelement_class->request_new_pad = NULL; + + GST_DEBUG_CATEGORY_INIT(gst_aja_sink_combiner_debug, "ajasinkcombiner", 0, + "AJA sink combiner"); +} + +static void gst_aja_sink_combiner_init(GstAjaSinkCombiner *self) { + GstPadTemplate *templ; + + templ = gst_static_pad_template_get(&video_sink_template); + self->video_sinkpad = + GST_PAD(g_object_new(GST_TYPE_AGGREGATOR_PAD, "name", "video", + "direction", GST_PAD_SINK, "template", templ, NULL)); + gst_object_unref(templ); + gst_element_add_pad(GST_ELEMENT_CAST(self), self->video_sinkpad); + + templ = gst_static_pad_template_get(&audio_sink_template); + self->audio_sinkpad = + GST_PAD(g_object_new(GST_TYPE_AGGREGATOR_PAD, "name", "audio", + "direction", GST_PAD_SINK, "template", templ, NULL)); + gst_object_unref(templ); + 
gst_element_add_pad(GST_ELEMENT_CAST(self), self->audio_sinkpad); +} diff --git a/gstajasinkcombiner.h b/gstajasinkcombiner.h new file mode 100644 index 0000000000..32d3a89e9f --- /dev/null +++ b/gstajasinkcombiner.h @@ -0,0 +1,60 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#pragma once + +#include +#include +#include + +#include "gstajacommon.h" + +G_BEGIN_DECLS + +#define GST_TYPE_AJA_SINK_COMBINER (gst_aja_sink_combiner_get_type()) +#define GST_AJA_SINK_COMBINER(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_SINK_COMBINER, \ + GstAjaSinkCombiner)) +#define GST_AJA_SINK_COMBINER_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AJA_SINK_COMBINER, \ + GstAjaSinkCombinerClass)) +#define IS_GST_AJA_SINK_COMBINER(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AJA_SINK_COMBINER)) +#define IS_GST_AJA_SINK_COMBINER_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AJA_SINK_COMBINER)) + +typedef struct _GstAjaSinkCombiner GstAjaSinkCombiner; +typedef struct _GstAjaSinkCombinerClass GstAjaSinkCombinerClass; + +struct _GstAjaSinkCombiner { + GstAggregator parent; + + GstPad *audio_sinkpad, *video_sinkpad; + GstCaps *audio_caps, *video_caps; + gboolean caps_changed; +}; + +struct _GstAjaSinkCombinerClass { + GstAggregatorClass parent_class; +}; + +G_GNUC_INTERNAL +GType gst_aja_sink_combiner_get_type(void); + +G_END_DECLS diff --git a/meson.build b/meson.build new file mode 100644 index 0000000000..d1d9ce33c1 --- /dev/null +++ b/meson.build @@ -0,0 +1,97 @@ +project('gst-aja', 'cpp', + version : '0.1.0', + meson_version : '>= 0.54.0', + default_options : [ 'warning_level=1', + 'buildtype=debugoptimized', + 'cpp_std=c++11', + 'cpp_eh=none', + 'cpp_rtti=false', + ] +) + +plugins_install_dir = '@0@/gstreamer-1.0'.format(get_option('libdir')) + +cxx = meson.get_compiler('cpp') + +if cxx.has_argument('-fvisibility=hidden') + add_project_arguments('-fvisibility=hidden', language: 'cpp') +endif + +if cxx.get_id() == 'msvc' + # Ignore several spurious warnings for things gstreamer does very commonly + # If a warning is completely useless and spammy, use '/wdXXXX' to suppress it + # If a warning is harmless but hard to fix, use '/woXXXX' so it's shown once + # NOTE: Only add warnings here if you are sure they're spurious + test_cppflags = [] + msvc_args = [ + '/wd4018', # implicit signed/unsigned conversion + '/wd4146', # unary minus on unsigned (beware INT_MIN) + '/wd4244', # lossy type conversion (e.g. double -> int) + '/wd4305', # truncating type conversion (e.g. 
double -> float) + ] + add_project_arguments(msvc_args, language : 'cpp') + # Disable SAFESEH with MSVC for plugins and libs that use external deps that + # are built with MinGW + noseh_link_args = ['/SAFESEH:NO'] +else + test_cppflags = ['-Wno-non-virtual-dtor'] + noseh_link_args = [] +endif + +common_flags = [ + '-DAJALinux=1', + '-DAJA_LINUX=1', +] +foreach cxxflag: test_cppflags + if cxx.has_argument(cxxflag) + common_flags += [ cxxflag ] + endif +endforeach + +gst_dep = dependency('gstreamer-1.0', version : '>= 1.18', required : true) +gstbase_dep = dependency('gstreamer-base-1.0', version : '>= 1.18', required : true) +gstaudio_dep = dependency('gstreamer-audio-1.0', version : '>= 1.18', required : true) +gstvideo_dep = dependency('gstreamer-video-1.0', version : '>= 1.18', required : true) + +thread_dep = dependency('threads') +rt_dep = cxx.find_library('rt', required : false) + +aja_sdk_dir = get_option('aja-sdk-dir') +aja_includedirs = [ + '-I@0@/ajalibraries'.format(aja_sdk_dir), + '-I@0@/ajalibraries/ajantv2/includes'.format(aja_sdk_dir), + '-I@0@/ajalibraries/ajantv2/src/lin'.format(aja_sdk_dir), +] + +message('Looking for AJA SDK in directory ' + aja_sdk_dir) +if not cxx.has_header('ajabase/common/videotypes.h', + args : aja_includedirs, + ) + error('Cannot find AJA SDK') +endif + +aja_libdir = '@0@/lib'.format(aja_sdk_dir) + +ajantv2_dep = cxx.find_library('ajantv2', + required : true, + dirs : [aja_libdir], +) + +gstaja = library('gstaja', + ['plugin.cpp', + 'gstajacommon.cpp', + 'gstajasink.cpp', + 'gstajasinkcombiner.cpp', + ], + cpp_args : [ + aja_includedirs, + '-DPACKAGE="gst-aja"', + '-DGST_PACKAGE_NAME="gstreamer-aja"', + '-DGST_PACKAGE_ORIGIN="https://github.com/centricular/gstreamer-aja"', + '-DVERSION="@0@"'.format(meson.project_version())] + common_flags, + link_args : noseh_link_args, + dependencies : [gstvideo_dep, gstaudio_dep, gstbase_dep, gst_dep, ajantv2_dep, thread_dep, rt_dep], + install : true, + install_dir : plugins_install_dir, +) + diff --git a/meson_options.txt b/meson_options.txt new file mode 100644 index 0000000000..db37bb9ef4 --- /dev/null +++ b/meson_options.txt @@ -0,0 +1,2 @@ +option('aja-sdk-dir', type : 'string', value : 'ntv2sdklinux_16.0.0.4', + description : 'Directory with AJA SDK, e.g. ntv2sdklinux_16.0.0.4') diff --git a/plugin.cpp b/plugin.cpp new file mode 100644 index 0000000000..d6367c4877 --- /dev/null +++ b/plugin.cpp @@ -0,0 +1,38 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. 
+ */ + +#include + +#include "gstajacommon.h" +#include "gstajasink.h" +#include "gstajasinkcombiner.h" + +static gboolean plugin_init(GstPlugin* plugin) { + gst_aja_common_init(); + + gst_element_register(plugin, "ajasink", GST_RANK_NONE, GST_TYPE_AJA_SINK); + gst_element_register(plugin, "ajasinkcombiner", GST_RANK_NONE, + GST_TYPE_AJA_SINK_COMBINER); + + return TRUE; +} + +GST_PLUGIN_DEFINE(GST_VERSION_MAJOR, GST_VERSION_MINOR, aja, + "GStreamer AJA plugin", plugin_init, VERSION, "LGPL", + GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) From 453f482c0b632efdf7830f274fe18b32d15258ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Mon, 8 Feb 2021 20:31:22 +0200 Subject: [PATCH 02/73] Add AJA source --- gstajacommon.cpp | 61 +++ gstajacommon.h | 49 ++ gstajasrc.cpp | 1280 ++++++++++++++++++++++++++++++++++++++++++++ gstajasrc.h | 90 ++++ gstajasrcdemux.cpp | 162 ++++++ gstajasrcdemux.h | 59 ++ meson.build | 2 + plugin.cpp | 8 + 8 files changed, 1711 insertions(+) create mode 100644 gstajasrc.cpp create mode 100644 gstajasrc.h create mode 100644 gstajasrcdemux.cpp create mode 100644 gstajasrcdemux.h diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 2019f7295e..90474a61e0 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -471,6 +471,67 @@ GType gst_aja_reference_source_get_type(void) { return (GType)id; } +GType gst_aja_input_source_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_INPUT_SOURCE_AUTO, "auto", "Auto (based on selected channel)"}, + {GST_AJA_INPUT_SOURCE_ANALOG1, "analog-1", "Analog Input 1"}, + {GST_AJA_INPUT_SOURCE_SDI1, "sdi-1", "SDI Input 1"}, + {GST_AJA_INPUT_SOURCE_SDI2, "sdi-2", "SDI Input 2"}, + {GST_AJA_INPUT_SOURCE_SDI3, "sdi-3", "SDI Input 3"}, + {GST_AJA_INPUT_SOURCE_SDI4, "sdi-4", "SDI Input 4"}, + {GST_AJA_INPUT_SOURCE_SDI5, "sdi-5", "SDI Input 5"}, + {GST_AJA_INPUT_SOURCE_SDI6, "sdi-6", "SDI Input 6"}, + {GST_AJA_INPUT_SOURCE_SDI7, "sdi-7", "SDI Input 7"}, + {GST_AJA_INPUT_SOURCE_SDI8, "sdi-8", "SDI Input 8"}, + {GST_AJA_INPUT_SOURCE_HDMI1, "hdmi-1", "HDMI Input 1"}, + {GST_AJA_INPUT_SOURCE_HDMI2, "hdmi-2", "HDMI Input 2"}, + {GST_AJA_INPUT_SOURCE_HDMI3, "hdmi-3", "HDMI Input 3"}, + {GST_AJA_INPUT_SOURCE_HDMI4, "hdmi-4", "HDMI Input 4"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaInputSource", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_video_format_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + // TODO: Implement: {GST_AJA_VIDEO_FORMAT_AUTO, "auto", "Autodetect"}, + {GST_AJA_VIDEO_FORMAT_1080i_5000, "1080i-5000", "1080i 5000"}, + {GST_AJA_VIDEO_FORMAT_1080i_5994, "1080i-5994", "1080i 5994"}, + {GST_AJA_VIDEO_FORMAT_1080i_6000, "1080i-6000", "1080i 6000"}, + {GST_AJA_VIDEO_FORMAT_720p_5994, "720p-5994", "720p 5994"}, + {GST_AJA_VIDEO_FORMAT_720p_6000, "720p-6000", "720p 6000"}, + {GST_AJA_VIDEO_FORMAT_1080p_2997, "1080p-2997", "1080p 2997"}, + {GST_AJA_VIDEO_FORMAT_1080p_3000, "1080p-3000", "1080p 3000"}, + {GST_AJA_VIDEO_FORMAT_1080p_2500, "1080p-2500", "1080p 2500"}, + {GST_AJA_VIDEO_FORMAT_1080p_2398, "1080p-2398", "1080p 2398"}, + {GST_AJA_VIDEO_FORMAT_1080p_2400, "1080p-2400", "1080p 2400"}, + {GST_AJA_VIDEO_FORMAT_720p_5000, "720p-5000", "720p 5000"}, + {GST_AJA_VIDEO_FORMAT_720p_2398, "720p-2398", "720p 2398"}, + {GST_AJA_VIDEO_FORMAT_720p_2500, "720p-2500", "720p 2500"}, + {GST_AJA_VIDEO_FORMAT_1080p_5000_A, "1080p-5000-a", "1080p 5000 A"}, + 
{GST_AJA_VIDEO_FORMAT_1080p_5994_A, "1080p-5994-a", "1080p 5994 A"}, + {GST_AJA_VIDEO_FORMAT_1080p_6000_A, "1080p-6000-a", "1080p 6000 A"}, + {GST_AJA_VIDEO_FORMAT_625_5000, "625-5000", "625 5000"}, + {GST_AJA_VIDEO_FORMAT_525_5994, "525-5994", "525 5994"}, + {GST_AJA_VIDEO_FORMAT_525_2398, "525-2398", "525 2398"}, + {GST_AJA_VIDEO_FORMAT_525_2400, "525-2400", "525 2400"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaVideoFormat", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + void gst_aja_common_init(void) { GST_DEBUG_CATEGORY_INIT(gst_aja_debug, "aja", 0, "Debug category for AJA plugin"); diff --git a/gstajacommon.h b/gstajacommon.h index 965229d505..33f37622f2 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -156,6 +156,55 @@ typedef enum { G_GNUC_INTERNAL GType gst_aja_reference_source_get_type(void); +typedef enum { + GST_AJA_INPUT_SOURCE_AUTO, + GST_AJA_INPUT_SOURCE_ANALOG1, + GST_AJA_INPUT_SOURCE_HDMI1, + GST_AJA_INPUT_SOURCE_HDMI2, + GST_AJA_INPUT_SOURCE_HDMI3, + GST_AJA_INPUT_SOURCE_HDMI4, + GST_AJA_INPUT_SOURCE_SDI1, + GST_AJA_INPUT_SOURCE_SDI2, + GST_AJA_INPUT_SOURCE_SDI3, + GST_AJA_INPUT_SOURCE_SDI4, + GST_AJA_INPUT_SOURCE_SDI5, + GST_AJA_INPUT_SOURCE_SDI6, + GST_AJA_INPUT_SOURCE_SDI7, + GST_AJA_INPUT_SOURCE_SDI8, +} GstAjaInputSource; + +#define GST_TYPE_AJA_INPUT_SOURCE (gst_aja_input_source_get_type()) +G_GNUC_INTERNAL +GType gst_aja_input_source_get_type(void); + +typedef enum { + // TODO: Implement: GST_AJA_VIDEO_FORMAT_AUTO, + GST_AJA_VIDEO_FORMAT_1080i_5000, + GST_AJA_VIDEO_FORMAT_1080i_5994, + GST_AJA_VIDEO_FORMAT_1080i_6000, + GST_AJA_VIDEO_FORMAT_720p_5994, + GST_AJA_VIDEO_FORMAT_720p_6000, + GST_AJA_VIDEO_FORMAT_1080p_2997, + GST_AJA_VIDEO_FORMAT_1080p_3000, + GST_AJA_VIDEO_FORMAT_1080p_2500, + GST_AJA_VIDEO_FORMAT_1080p_2398, + GST_AJA_VIDEO_FORMAT_1080p_2400, + GST_AJA_VIDEO_FORMAT_720p_5000, + GST_AJA_VIDEO_FORMAT_720p_2398, + GST_AJA_VIDEO_FORMAT_720p_2500, + GST_AJA_VIDEO_FORMAT_1080p_5000_A, + GST_AJA_VIDEO_FORMAT_1080p_5994_A, + GST_AJA_VIDEO_FORMAT_1080p_6000_A, + GST_AJA_VIDEO_FORMAT_625_5000, + GST_AJA_VIDEO_FORMAT_525_5994, + GST_AJA_VIDEO_FORMAT_525_2398, + GST_AJA_VIDEO_FORMAT_525_2400, +} GstAjaVideoFormat; + +#define GST_TYPE_AJA_VIDEO_FORMAT (gst_aja_video_format_get_type()) +G_GNUC_INTERNAL +GType gst_aja_video_format_get_type(void); + G_GNUC_INTERNAL void gst_aja_common_init(void); diff --git a/gstajasrc.cpp b/gstajasrc.cpp new file mode 100644 index 0000000000..6c4e4e50cb --- /dev/null +++ b/gstajasrc.cpp @@ -0,0 +1,1280 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include + +#include "gstajacommon.h" +#include "gstajasrc.h" + +GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); +#define GST_CAT_DEFAULT gst_aja_src_debug + +#define DEFAULT_DEVICE_IDENTIFIER ("0") +#define DEFAULT_CHANNEL (::NTV2_CHANNEL1) +// TODO: GST_AJA_VIDEO_FORMAT_AUTO +#define DEFAULT_VIDEO_FORMAT (GST_AJA_VIDEO_FORMAT_1080i_5000) +#define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) +#define DEFAULT_INPUT_SOURCE (GST_AJA_INPUT_SOURCE_AUTO) +#define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_FREERUN) +#define DEFAULT_QUEUE_SIZE (16) +#define DEFAULT_CAPTURE_CPU_CORE (G_MAXUINT) + +enum { + PROP_0, + PROP_DEVICE_IDENTIFIER, + PROP_CHANNEL, + PROP_VIDEO_FORMAT, + PROP_AUDIO_SYSTEM, + PROP_INPUT_SOURCE, + PROP_REFERENCE_SOURCE, + PROP_QUEUE_SIZE, + PROP_CAPTURE_CPU_CORE, +}; + +typedef enum { + QUEUE_ITEM_TYPE_FRAME, +} QueueItemType; + +typedef struct { + QueueItemType type; + + // For FRAME + GstClockTime capture_time; + GstBuffer *video_buffer; + GstBuffer *audio_buffer; + GstBuffer *anc_buffer, *anc_buffer2; + NTV2_RP188 tc; +} QueueItem; + +static void gst_aja_src_set_property(GObject *object, guint property_id, + const GValue *value, GParamSpec *pspec); +static void gst_aja_src_get_property(GObject *object, guint property_id, + GValue *value, GParamSpec *pspec); +static void gst_aja_src_finalize(GObject *object); + +static GstCaps *gst_aja_src_get_caps(GstBaseSrc *bsrc, GstCaps *filter); +static gboolean gst_aja_src_query(GstBaseSrc *bsrc, GstQuery *query); +static gboolean gst_aja_src_unlock(GstBaseSrc *bsrc); +static gboolean gst_aja_src_unlock_stop(GstBaseSrc *bsrc); + +static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer); + +static gboolean gst_aja_src_open(GstAjaSrc *src); +static gboolean gst_aja_src_close(GstAjaSrc *src); +static gboolean gst_aja_src_start(GstAjaSrc *src); +static gboolean gst_aja_src_stop(GstAjaSrc *src); + +static GstStateChangeReturn gst_aja_src_change_state(GstElement *element, + GstStateChange transition); + +static void capture_thread_func(AJAThread *thread, void *data); + +#define parent_class gst_aja_src_parent_class +G_DEFINE_TYPE(GstAjaSrc, gst_aja_src, GST_TYPE_PUSH_SRC); + +static void gst_aja_src_class_init(GstAjaSrcClass *klass) { + GObjectClass *gobject_class = G_OBJECT_CLASS(klass); + GstElementClass *element_class = GST_ELEMENT_CLASS(klass); + GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS(klass); + GstPushSrcClass *pushsrc_class = GST_PUSH_SRC_CLASS(klass); + GstCaps *templ_caps; + + gobject_class->set_property = gst_aja_src_set_property; + gobject_class->get_property = gst_aja_src_get_property; + gobject_class->finalize = gst_aja_src_finalize; + + g_object_class_install_property( + gobject_class, PROP_DEVICE_IDENTIFIER, + g_param_spec_string( + "device-identifier", "Device identifier", + "Input device instance to use", DEFAULT_DEVICE_IDENTIFIER, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_CHANNEL, + g_param_spec_uint( + "channel", "Channel", "Channel to use", 0, NTV2_MAX_NUM_CHANNELS - 1, + DEFAULT_CHANNEL, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_VIDEO_FORMAT, + g_param_spec_enum( + "video-format", "Video Format", "Video format to use", + GST_TYPE_AJA_VIDEO_FORMAT, DEFAULT_VIDEO_FORMAT, + 
(GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_QUEUE_SIZE, + g_param_spec_uint( + "queue-size", "Queue Size", + "Size of internal queue in number of video frames. " + "Half of this is allocated as device buffers and equal to the " + "latency.", + 1, G_MAXINT, DEFAULT_QUEUE_SIZE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_AUDIO_SYSTEM, + g_param_spec_enum( + "audio-system", "Audio System", "Audio system to use", + GST_TYPE_AJA_AUDIO_SYSTEM, DEFAULT_AUDIO_SYSTEM, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_INPUT_SOURCE, + g_param_spec_enum( + "input-source", "Input Source", "Input source to use", + GST_TYPE_AJA_INPUT_SOURCE, DEFAULT_INPUT_SOURCE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_REFERENCE_SOURCE, + g_param_spec_enum( + "reference-source", "Reference Source", "Reference source to use", + GST_TYPE_AJA_REFERENCE_SOURCE, DEFAULT_REFERENCE_SOURCE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_CAPTURE_CPU_CORE, + g_param_spec_uint( + "capture-cpu-core", "Capture CPU Core", + "Sets the affinity of the capture thread to this CPU core " + "(-1=disabled)", + 0, G_MAXUINT, DEFAULT_CAPTURE_CPU_CORE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + element_class->change_state = GST_DEBUG_FUNCPTR(gst_aja_src_change_state); + + basesrc_class->get_caps = GST_DEBUG_FUNCPTR(gst_aja_src_get_caps); + basesrc_class->negotiate = NULL; + basesrc_class->query = GST_DEBUG_FUNCPTR(gst_aja_src_query); + basesrc_class->unlock = GST_DEBUG_FUNCPTR(gst_aja_src_unlock); + basesrc_class->unlock_stop = GST_DEBUG_FUNCPTR(gst_aja_src_unlock_stop); + + pushsrc_class->create = GST_DEBUG_FUNCPTR(gst_aja_src_create); + + templ_caps = gst_ntv2_supported_caps(DEVICE_ID_INVALID); + gst_element_class_add_pad_template( + element_class, + gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS, templ_caps)); + gst_caps_unref(templ_caps); + + gst_element_class_set_static_metadata( + element_class, "AJA audio/video src", "Audio/Video/Src", + "Captures audio/video frames with AJA devices", + "Sebastian Dröge "); + + GST_DEBUG_CATEGORY_INIT(gst_aja_src_debug, "ajasrc", 0, "AJA src"); +} + +static void gst_aja_src_init(GstAjaSrc *self) { + g_mutex_init(&self->queue_lock); + g_cond_init(&self->queue_cond); + + self->device_identifier = g_strdup(DEFAULT_DEVICE_IDENTIFIER); + self->channel = DEFAULT_CHANNEL; + self->queue_size = DEFAULT_QUEUE_SIZE; + self->video_format_setting = DEFAULT_VIDEO_FORMAT; + self->audio_system_setting = DEFAULT_AUDIO_SYSTEM; + self->input_source = DEFAULT_INPUT_SOURCE; + self->reference_source = DEFAULT_REFERENCE_SOURCE; + self->capture_cpu_core = DEFAULT_CAPTURE_CPU_CORE; + + self->queue = + gst_queue_array_new_for_struct(sizeof(QueueItem), self->queue_size); + gst_base_src_set_live(GST_BASE_SRC_CAST(self), TRUE); + gst_base_src_set_format(GST_BASE_SRC_CAST(self), GST_FORMAT_TIME); +} + +void gst_aja_src_set_property(GObject *object, guint property_id, + const GValue *value, GParamSpec *pspec) { + GstAjaSrc *self = GST_AJA_SRC(object); + + switch (property_id) { + case PROP_DEVICE_IDENTIFIER: + 
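      // The new identifier (like the other properties) is only stored here;
      // it is read again on the next NULL->READY transition in
      // gst_aja_src_open(), so changing it on a running element only takes
      // effect after a restart.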
g_free(self->device_identifier); + self->device_identifier = g_value_dup_string(value); + break; + case PROP_CHANNEL: + self->channel = (NTV2Channel)g_value_get_uint(value); + break; + case PROP_QUEUE_SIZE: + self->queue_size = g_value_get_uint(value); + break; + case PROP_VIDEO_FORMAT: + self->video_format_setting = (GstAjaVideoFormat)g_value_get_enum(value); + break; + case PROP_AUDIO_SYSTEM: + self->audio_system_setting = (GstAjaAudioSystem)g_value_get_enum(value); + break; + case PROP_INPUT_SOURCE: + self->input_source = (GstAjaInputSource)g_value_get_enum(value); + break; + case PROP_REFERENCE_SOURCE: + self->reference_source = (GstAjaReferenceSource)g_value_get_enum(value); + break; + case PROP_CAPTURE_CPU_CORE: + self->capture_cpu_core = g_value_get_uint(value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); + break; + } +} + +void gst_aja_src_get_property(GObject *object, guint property_id, GValue *value, + GParamSpec *pspec) { + GstAjaSrc *self = GST_AJA_SRC(object); + + switch (property_id) { + case PROP_DEVICE_IDENTIFIER: + g_value_set_string(value, self->device_identifier); + break; + case PROP_CHANNEL: + g_value_set_uint(value, self->channel); + break; + case PROP_QUEUE_SIZE: + g_value_set_uint(value, self->queue_size); + break; + case PROP_VIDEO_FORMAT: + g_value_set_enum(value, self->video_format_setting); + break; + case PROP_AUDIO_SYSTEM: + g_value_set_enum(value, self->audio_system_setting); + break; + case PROP_INPUT_SOURCE: + g_value_set_enum(value, self->input_source); + break; + case PROP_REFERENCE_SOURCE: + g_value_set_enum(value, self->reference_source); + break; + case PROP_CAPTURE_CPU_CORE: + g_value_set_uint(value, self->capture_cpu_core); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); + break; + } +} + +void gst_aja_src_finalize(GObject *object) { + GstAjaSrc *self = GST_AJA_SRC(object); + + g_assert(self->device == NULL); + g_assert(gst_queue_array_get_length(self->queue) == 0); + g_clear_pointer(&self->queue, gst_queue_array_free); + + g_mutex_clear(&self->queue_lock); + g_cond_clear(&self->queue_cond); + + G_OBJECT_CLASS(parent_class)->finalize(object); +} + +static gboolean gst_aja_src_open(GstAjaSrc *self) { + GST_DEBUG_OBJECT(self, "Opening device"); + + g_assert(self->device == NULL); + + self->device = gst_aja_device_obtain(self->device_identifier); + if (!self->device) { + GST_ERROR_OBJECT(self, "Failed to open device"); + return FALSE; + } + + if (!self->device->device->IsDeviceReady(false)) { + g_clear_pointer(&self->device, gst_aja_device_unref); + return FALSE; + } + + self->device->device->SetEveryFrameServices(::NTV2_OEM_TASKS); + self->device_id = self->device->device->GetDeviceID(); + + std::string serial_number; + if (!self->device->device->GetSerialNumberString(serial_number)) + serial_number = "none"; + + GST_DEBUG_OBJECT(self, + "Opened device with ID %d at index %d (%s, version %s, " + "serial number %s, can do VANC %d)", + self->device_id, self->device->device->GetIndexNumber(), + self->device->device->GetDisplayName().c_str(), + self->device->device->GetDeviceVersionString().c_str(), + serial_number.c_str(), + ::NTV2DeviceCanDoCustomAnc(self->device_id)); + + GST_DEBUG_OBJECT(self, + "Using SDK version %d.%d.%d.%d (%s) and driver version %s", + AJA_NTV2_SDK_VERSION_MAJOR, AJA_NTV2_SDK_VERSION_MINOR, + AJA_NTV2_SDK_VERSION_POINT, AJA_NTV2_SDK_BUILD_NUMBER, + AJA_NTV2_SDK_BUILD_DATETIME, + self->device->device->GetDriverVersionString().c_str()); + + 
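  // Multi-format ("independent channel") mode lets each channel run its own
  // video format on devices that support it; without it the whole board is
  // locked to a single format. It is enabled unconditionally here.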
self->device->device->SetMultiFormatMode(true); + + self->allocator = gst_aja_allocator_new(self->device); + + GST_DEBUG_OBJECT(self, "Opened device"); + + return TRUE; +} + +static gboolean gst_aja_src_close(GstAjaSrc *self) { + gst_clear_object(&self->allocator); + g_clear_pointer(&self->device, gst_aja_device_unref); + self->device_id = DEVICE_ID_INVALID; + + GST_DEBUG_OBJECT(self, "Closed device"); + + return TRUE; +} + +static gboolean gst_aja_src_start(GstAjaSrc *self) { + GST_DEBUG_OBJECT(self, "Starting"); + + { + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + switch (self->video_format_setting) { + // TODO: GST_AJA_VIDEO_FORMAT_AUTO + case GST_AJA_VIDEO_FORMAT_1080i_5000: + self->video_format = ::NTV2_FORMAT_1080i_5000; + break; + case GST_AJA_VIDEO_FORMAT_1080i_5994: + self->video_format = ::NTV2_FORMAT_1080i_5994; + break; + case GST_AJA_VIDEO_FORMAT_1080i_6000: + self->video_format = ::NTV2_FORMAT_1080i_6000; + break; + case GST_AJA_VIDEO_FORMAT_720p_5994: + self->video_format = ::NTV2_FORMAT_720p_5994; + break; + case GST_AJA_VIDEO_FORMAT_720p_6000: + self->video_format = ::NTV2_FORMAT_720p_6000; + break; + case GST_AJA_VIDEO_FORMAT_1080p_2997: + self->video_format = ::NTV2_FORMAT_1080p_2997; + break; + case GST_AJA_VIDEO_FORMAT_1080p_3000: + self->video_format = ::NTV2_FORMAT_1080p_3000; + break; + case GST_AJA_VIDEO_FORMAT_1080p_2500: + self->video_format = ::NTV2_FORMAT_1080p_2500; + break; + case GST_AJA_VIDEO_FORMAT_1080p_2398: + self->video_format = ::NTV2_FORMAT_1080p_2398; + break; + case GST_AJA_VIDEO_FORMAT_1080p_2400: + self->video_format = ::NTV2_FORMAT_1080p_2400; + break; + case GST_AJA_VIDEO_FORMAT_720p_5000: + self->video_format = ::NTV2_FORMAT_720p_5000; + break; + case GST_AJA_VIDEO_FORMAT_720p_2398: + self->video_format = ::NTV2_FORMAT_720p_2398; + break; + case GST_AJA_VIDEO_FORMAT_720p_2500: + self->video_format = ::NTV2_FORMAT_720p_2500; + break; + case GST_AJA_VIDEO_FORMAT_1080p_5000_A: + self->video_format = ::NTV2_FORMAT_1080p_5000_A; + break; + case GST_AJA_VIDEO_FORMAT_1080p_5994_A: + self->video_format = ::NTV2_FORMAT_1080p_5994_A; + break; + case GST_AJA_VIDEO_FORMAT_1080p_6000_A: + self->video_format = ::NTV2_FORMAT_1080p_6000_A; + break; + case GST_AJA_VIDEO_FORMAT_625_5000: + self->video_format = ::NTV2_FORMAT_625_5000; + break; + case GST_AJA_VIDEO_FORMAT_525_5994: + self->video_format = ::NTV2_FORMAT_525_5994; + break; + case GST_AJA_VIDEO_FORMAT_525_2398: + self->video_format = ::NTV2_FORMAT_525_2398; + break; + case GST_AJA_VIDEO_FORMAT_525_2400: + self->video_format = ::NTV2_FORMAT_525_2400; + break; + default: + g_assert_not_reached(); + break; + } + + if (!::NTV2DeviceCanDoVideoFormat(self->device_id, self->video_format)) { + GST_ERROR_OBJECT(self, "Device does not support mode %d", + (int)self->video_format); + return FALSE; + } + + gst_clear_caps(&self->configured_caps); + self->configured_caps = gst_ntv2_video_format_to_caps(self->video_format); + gst_video_info_from_caps(&self->configured_info, self->configured_caps); + + self->device->device->SetMode(self->channel, NTV2_MODE_CAPTURE, false); + + GST_DEBUG_OBJECT(self, "Configuring video format %d on channel %d", + (int)self->video_format, (int)self->channel); + self->device->device->SetVideoFormat(self->video_format, false, false, + self->channel); + + if (!::NTV2DeviceCanDoFrameBufferFormat(self->device_id, + ::NTV2_FBF_10BIT_YCBCR)) { + GST_ERROR_OBJECT(self, "Device does not support frame buffer 
format %d", + (int)::NTV2_FBF_10BIT_YCBCR); + return FALSE; + } + self->device->device->SetFrameBufferFormat(self->channel, + ::NTV2_FBF_10BIT_YCBCR); + + self->device->device->DMABufferAutoLock(false, true, 0); + + if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) + self->device->device->SetSDITransmitEnable(self->channel, false); + + self->device->device->SetEnableVANCData(false, false, self->channel); + + CNTV2SignalRouter router; + + self->device->device->GetRouting(router); + + // Always use the framebuffer associated with the channel + NTV2InputCrosspointID framebuffer_id = + ::GetFrameBufferInputXptFromChannel(self->channel, false); + + NTV2InputSource input_source; + NTV2OutputCrosspointID input_source_id; + switch (self->input_source) { + case GST_AJA_INPUT_SOURCE_AUTO: + input_source = ::NTV2ChannelToInputSource(self->channel); + input_source_id = + ::GetSDIInputOutputXptFromChannel(self->channel, false); + break; + case GST_AJA_INPUT_SOURCE_ANALOG1: + input_source = ::NTV2_INPUTSOURCE_ANALOG1; + input_source_id = ::NTV2_XptAnalogIn; + break; + case GST_AJA_INPUT_SOURCE_HDMI1: + input_source = ::NTV2_INPUTSOURCE_HDMI1; + input_source_id = ::NTV2_XptHDMIIn1; + break; + case GST_AJA_INPUT_SOURCE_HDMI2: + input_source = ::NTV2_INPUTSOURCE_HDMI2; + input_source_id = ::NTV2_XptHDMIIn2; + break; + case GST_AJA_INPUT_SOURCE_HDMI3: + input_source = ::NTV2_INPUTSOURCE_HDMI3; + input_source_id = ::NTV2_XptHDMIIn3; + break; + case GST_AJA_INPUT_SOURCE_HDMI4: + input_source = ::NTV2_INPUTSOURCE_HDMI4; + input_source_id = ::NTV2_XptHDMIIn4; + break; + case GST_AJA_INPUT_SOURCE_SDI1: + input_source = ::NTV2_INPUTSOURCE_SDI1; + input_source_id = ::NTV2_XptSDIIn1; + break; + case GST_AJA_INPUT_SOURCE_SDI2: + input_source = ::NTV2_INPUTSOURCE_SDI2; + input_source_id = ::NTV2_XptSDIIn2; + break; + case GST_AJA_INPUT_SOURCE_SDI3: + input_source = ::NTV2_INPUTSOURCE_SDI3; + input_source_id = ::NTV2_XptSDIIn3; + break; + case GST_AJA_INPUT_SOURCE_SDI4: + input_source = ::NTV2_INPUTSOURCE_SDI4; + input_source_id = ::NTV2_XptSDIIn4; + break; + case GST_AJA_INPUT_SOURCE_SDI5: + input_source = ::NTV2_INPUTSOURCE_SDI5; + input_source_id = ::NTV2_XptSDIIn5; + break; + case GST_AJA_INPUT_SOURCE_SDI6: + input_source = ::NTV2_INPUTSOURCE_SDI6; + input_source_id = ::NTV2_XptSDIIn6; + break; + case GST_AJA_INPUT_SOURCE_SDI7: + input_source = ::NTV2_INPUTSOURCE_SDI7; + input_source_id = ::NTV2_XptSDIIn7; + break; + case GST_AJA_INPUT_SOURCE_SDI8: + input_source = ::NTV2_INPUTSOURCE_SDI8; + input_source_id = ::NTV2_XptSDIIn8; + break; + default: + g_assert_not_reached(); + break; + } + + // Need to remove old routes for the output and framebuffer we're going to + // use + NTV2ActualConnections connections = router.GetConnections(); + + for (NTV2ActualConnectionsConstIter iter = connections.begin(); + iter != connections.end(); iter++) { + if (iter->first == framebuffer_id || iter->second == input_source_id) + router.RemoveConnection(iter->first, iter->second); + } + + GST_DEBUG_OBJECT(self, "Creating connection %d - %d", framebuffer_id, + input_source_id); + router.AddConnection(framebuffer_id, input_source_id); + + { + std::stringstream os; + CNTV2SignalRouter oldRouter; + self->device->device->GetRouting(oldRouter); + oldRouter.Print(os); + GST_DEBUG_OBJECT(self, "Previous routing:\n%s", os.str().c_str()); + } + self->device->device->ApplySignalRoute(router, true); + { + std::stringstream os; + CNTV2SignalRouter currentRouter; + self->device->device->GetRouting(currentRouter); + currentRouter.Print(os); + 
GST_DEBUG_OBJECT(self, "New routing:\n%s", os.str().c_str()); + } + + switch (self->audio_system_setting) { + case GST_AJA_AUDIO_SYSTEM_1: + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + break; + case GST_AJA_AUDIO_SYSTEM_2: + self->audio_system = ::NTV2_AUDIOSYSTEM_2; + break; + case GST_AJA_AUDIO_SYSTEM_3: + self->audio_system = ::NTV2_AUDIOSYSTEM_3; + break; + case GST_AJA_AUDIO_SYSTEM_4: + self->audio_system = ::NTV2_AUDIOSYSTEM_4; + break; + case GST_AJA_AUDIO_SYSTEM_5: + self->audio_system = ::NTV2_AUDIOSYSTEM_5; + break; + case GST_AJA_AUDIO_SYSTEM_6: + self->audio_system = ::NTV2_AUDIOSYSTEM_6; + break; + case GST_AJA_AUDIO_SYSTEM_7: + self->audio_system = ::NTV2_AUDIOSYSTEM_7; + break; + case GST_AJA_AUDIO_SYSTEM_8: + self->audio_system = ::NTV2_AUDIOSYSTEM_8; + break; + case GST_AJA_AUDIO_SYSTEM_AUTO: + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + if (::NTV2DeviceGetNumAudioSystems(self->device_id) > 1) + self->audio_system = ::NTV2ChannelToAudioSystem(self->channel); + if (!::NTV2DeviceCanDoFrameStore1Display(self->device_id)) + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + break; + default: + g_assert_not_reached(); + break; + } + + GST_DEBUG_OBJECT(self, "Using audio system %d", self->audio_system); + + // TODO: make configurable + self->device->device->SetAudioSystemInputSource( + self->audio_system, NTV2_AUDIO_EMBEDDED, + ::NTV2InputSourceToEmbeddedAudioInput(input_source)); + self->device->device->SetEmbeddedAudioInput( + ::NTV2ChannelToEmbeddedAudioInput(self->channel), self->audio_system); + self->configured_audio_channels = + ::NTV2DeviceGetMaxAudioChannels(self->device_id); + self->device->device->SetNumberAudioChannels( + self->configured_audio_channels, self->audio_system); + self->device->device->SetAudioRate(::NTV2_AUDIO_48K, self->audio_system); + self->device->device->SetAudioBufferSize(::NTV2_AUDIO_BUFFER_BIG, + self->audio_system); + self->device->device->SetAudioLoopBack(::NTV2_AUDIO_LOOPBACK_OFF, + self->audio_system); + self->device->device->SetEmbeddedAudioClock( + ::NTV2_EMBEDDED_AUDIO_CLOCK_VIDEO_INPUT, self->audio_system); + + gst_caps_set_simple(self->configured_caps, "audio-channels", G_TYPE_INT, + self->configured_audio_channels, NULL); + + NTV2ReferenceSource reference_source; + switch (self->reference_source) { + case GST_AJA_REFERENCE_SOURCE_AUTO: + reference_source = ::NTV2InputSourceToReferenceSource(input_source); + break; + case GST_AJA_REFERENCE_SOURCE_EXTERNAL: + reference_source = ::NTV2_REFERENCE_EXTERNAL; + break; + case GST_AJA_REFERENCE_SOURCE_FREERUN: + reference_source = ::NTV2_REFERENCE_FREERUN; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_1: + reference_source = ::NTV2_REFERENCE_INPUT1; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_2: + reference_source = ::NTV2_REFERENCE_INPUT2; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_3: + reference_source = ::NTV2_REFERENCE_INPUT3; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_4: + reference_source = ::NTV2_REFERENCE_INPUT4; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_5: + reference_source = ::NTV2_REFERENCE_INPUT5; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_6: + reference_source = ::NTV2_REFERENCE_INPUT6; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_7: + reference_source = ::NTV2_REFERENCE_INPUT7; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_8: + reference_source = ::NTV2_REFERENCE_INPUT8; + break; + default: + g_assert_not_reached(); + break; + } + GST_DEBUG_OBJECT(self, "Configuring reference source %d", + (int)reference_source); + + 
self->device->device->SetReference(reference_source); + } + + guint video_buffer_size = ::GetVideoActiveSize( + self->video_format, ::NTV2_FBF_10BIT_YCBCR, ::NTV2_VANCMODE_OFF); + + self->buffer_pool = gst_buffer_pool_new(); + GstStructure *config = gst_buffer_pool_get_config(self->buffer_pool); + gst_buffer_pool_config_set_params(config, NULL, video_buffer_size, + 2 * self->queue_size, 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->buffer_pool, config); + gst_buffer_pool_set_active(self->buffer_pool, TRUE); + + guint audio_buffer_size = 401 * 1024; + + self->audio_buffer_pool = gst_buffer_pool_new(); + config = gst_buffer_pool_get_config(self->audio_buffer_pool); + gst_buffer_pool_config_set_params(config, NULL, audio_buffer_size, + 2 * self->queue_size, 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->audio_buffer_pool, config); + gst_buffer_pool_set_active(self->audio_buffer_pool, TRUE); + + guint anc_buffer_size = 8 * 1024; + + self->anc_buffer_pool = gst_buffer_pool_new(); + config = gst_buffer_pool_get_config(self->anc_buffer_pool); + gst_buffer_pool_config_set_params(config, NULL, anc_buffer_size, + self->queue_size, 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->anc_buffer_pool, config); + gst_buffer_pool_set_active(self->anc_buffer_pool, TRUE); + + self->capture_thread = new AJAThread(); + self->capture_thread->Attach(capture_thread_func, self); + self->capture_thread->SetPriority(AJA_ThreadPriority_High); + self->capture_thread->Start(); + g_mutex_lock(&self->queue_lock); + self->shutdown = FALSE; + self->playing = FALSE; + self->flushing = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + gst_element_post_message(GST_ELEMENT_CAST(self), + gst_message_new_latency(GST_OBJECT_CAST(self))); + + return TRUE; +} + +static gboolean gst_aja_src_stop(GstAjaSrc *self) { + QueueItem *item; + + GST_DEBUG_OBJECT(self, "Stopping"); + + g_mutex_lock(&self->queue_lock); + self->shutdown = TRUE; + self->flushing = TRUE; + self->playing = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + if (self->capture_thread) { + self->capture_thread->Stop(); + delete self->capture_thread; + self->capture_thread = NULL; + } + + GST_OBJECT_LOCK(self); + gst_clear_caps(&self->configured_caps); + self->configured_audio_channels = 0; + GST_OBJECT_UNLOCK(self); + + while ((item = (QueueItem *)gst_queue_array_pop_head_struct(self->queue))) { + if (item->type == QUEUE_ITEM_TYPE_FRAME) { + gst_clear_buffer(&item->video_buffer); + gst_clear_buffer(&item->audio_buffer); + gst_clear_buffer(&item->anc_buffer); + gst_clear_buffer(&item->anc_buffer2); + } + } + + if (self->buffer_pool) { + gst_buffer_pool_set_active(self->buffer_pool, FALSE); + gst_clear_object(&self->buffer_pool); + } + + if (self->audio_buffer_pool) { + gst_buffer_pool_set_active(self->audio_buffer_pool, FALSE); + gst_clear_object(&self->audio_buffer_pool); + } + + if (self->anc_buffer_pool) { + gst_buffer_pool_set_active(self->anc_buffer_pool, FALSE); + gst_clear_object(&self->anc_buffer_pool); + } + + GST_DEBUG_OBJECT(self, "Stopped"); + + return TRUE; +} + +static GstStateChangeReturn gst_aja_src_change_state( + GstElement *element, GstStateChange transition) { + GstAjaSrc *self = GST_AJA_SRC(element); + GstStateChangeReturn ret; + + switch (transition) { + case GST_STATE_CHANGE_NULL_TO_READY: + if 
(!gst_aja_src_open(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_READY_TO_PAUSED: + if (!gst_aja_src_start(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + break; + default: + break; + } + + ret = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition); + if (ret == GST_STATE_CHANGE_FAILURE) return ret; + + switch (transition) { + case GST_STATE_CHANGE_PLAYING_TO_PAUSED: + g_mutex_lock(&self->queue_lock); + self->playing = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + g_mutex_lock(&self->queue_lock); + self->playing = TRUE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + if (!gst_aja_src_stop(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_READY_TO_NULL: + if (!gst_aja_src_close(self)) return GST_STATE_CHANGE_FAILURE; + break; + default: + break; + } + + return ret; +} + +static GstCaps *gst_aja_src_get_caps(GstBaseSrc *bsrc, GstCaps *filter) { + GstAjaSrc *self = GST_AJA_SRC(bsrc); + GstCaps *caps; + + if (self->device) { + caps = gst_ntv2_supported_caps(self->device_id); + } else { + caps = gst_pad_get_pad_template_caps(GST_BASE_SRC_PAD(self)); + } + + if (filter) { + GstCaps *tmp = + gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST); + gst_caps_unref(caps); + caps = tmp; + } + + return caps; +} + +static gboolean gst_aja_src_query(GstBaseSrc *bsrc, GstQuery *query) { + GstAjaSrc *self = GST_AJA_SRC(bsrc); + gboolean ret = TRUE; + + switch (GST_QUERY_TYPE(query)) { + case GST_QUERY_LATENCY: { + if (self->configured_caps) { + GstClockTime min, max; + + min = gst_util_uint64_scale_ceil(GST_SECOND, + 3 * self->configured_info.fps_d, + self->configured_info.fps_n); + max = self->queue_size * min; + + gst_query_set_latency(query, TRUE, min, max); + ret = TRUE; + } else { + ret = FALSE; + } + + return ret; + } + + default: + return GST_BASE_SRC_CLASS(parent_class)->query(bsrc, query); + break; + } +} + +static gboolean gst_aja_src_unlock(GstBaseSrc *bsrc) { + GstAjaSrc *self = GST_AJA_SRC(bsrc); + + g_mutex_lock(&self->queue_lock); + self->flushing = TRUE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + return TRUE; +} + +static gboolean gst_aja_src_unlock_stop(GstBaseSrc *bsrc) { + GstAjaSrc *self = GST_AJA_SRC(bsrc); + + g_mutex_lock(&self->queue_lock); + self->flushing = FALSE; + g_mutex_unlock(&self->queue_lock); + + return TRUE; +} + +static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { + GstAjaSrc *self = GST_AJA_SRC(psrc); + GstFlowReturn flow_ret = GST_FLOW_OK; + QueueItem item; + + g_mutex_lock(&self->queue_lock); + while (gst_queue_array_is_empty(self->queue) && !self->flushing) { + g_cond_wait(&self->queue_cond, &self->queue_lock); + } + + if (self->flushing) { + g_mutex_unlock(&self->queue_lock); + GST_DEBUG_OBJECT(self, "Flushing"); + return GST_FLOW_FLUSHING; + } + + item = *(QueueItem *)gst_queue_array_pop_head_struct(self->queue); + + *buffer = item.video_buffer; + gst_buffer_add_aja_audio_meta(*buffer, item.audio_buffer); + gst_buffer_unref(item.audio_buffer); + + if (item.tc.IsValid()) { + TimecodeFormat tc_format = ::kTCFormatUnknown; + GstVideoTimeCodeFlags flags = GST_VIDEO_TIME_CODE_FLAGS_NONE; + + if (self->configured_info.fps_n == 24 && self->configured_info.fps_d == 1) { + tc_format = kTCFormat24fps; + } else if 
(self->configured_info.fps_n == 25 && + self->configured_info.fps_d == 1) { + tc_format = kTCFormat25fps; + } else if (self->configured_info.fps_n == 30 && + self->configured_info.fps_d == 1) { + tc_format = kTCFormat30fps; + } else if (self->configured_info.fps_n == 30000 && + self->configured_info.fps_d == 1001) { + tc_format = kTCFormat30fpsDF; + flags = + (GstVideoTimeCodeFlags)(flags | GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME); + } else if (self->configured_info.fps_n == 48 && + self->configured_info.fps_d == 1) { + tc_format = kTCFormat48fps; + } else if (self->configured_info.fps_n == 50 && + self->configured_info.fps_d == 1) { + tc_format = kTCFormat50fps; + } else if (self->configured_info.fps_n == 60 && + self->configured_info.fps_d == 1) { + tc_format = kTCFormat60fps; + } else if (self->configured_info.fps_n == 60000 && + self->configured_info.fps_d == 1001) { + tc_format = kTCFormat60fpsDF; + flags = + (GstVideoTimeCodeFlags)(flags | GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME); + } + + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) + flags = + (GstVideoTimeCodeFlags)(flags | GST_VIDEO_TIME_CODE_FLAGS_INTERLACED); + + CRP188 rp188(item.tc, tc_format); + guint hours, minutes, seconds, frames; + rp188.GetRP188Hrs(hours); + rp188.GetRP188Mins(minutes); + rp188.GetRP188Secs(seconds); + rp188.GetRP188Frms(frames); + + GstVideoTimeCode tc; + gst_video_time_code_init(&tc, self->configured_info.fps_n, + self->configured_info.fps_d, NULL, flags, hours, + minutes, seconds, frames, 0); + gst_buffer_add_video_time_code_meta(*buffer, &tc); + } + + if (item.anc_buffer) { + AJAAncillaryList anc_packets; + GstMapInfo map = GST_MAP_INFO_INIT; + GstMapInfo map2 = GST_MAP_INFO_INIT; + + gst_buffer_map(item.anc_buffer, &map, GST_MAP_READ); + if (item.anc_buffer2) gst_buffer_map(item.anc_buffer2, &map2, GST_MAP_READ); + + NTV2_POINTER ptr1(map.data, map.size); + NTV2_POINTER ptr2(map2.data, map2.size); + + AJAAncillaryList::SetFromDeviceAncBuffers(ptr1, ptr2, anc_packets); + // anc_packets.ParseAllAncillaryData(); + // std::stringstream os; + // anc_packets.Print(os); + // GST_ERROR_OBJECT(self, "meh %u %lu\n%s", + // anc_packets.CountAncillaryData(), + // map.size, os.str().c_str()); + + if (anc_packets.CountAncillaryDataWithType(AJAAncillaryDataType_Cea708)) { + AJAAncillaryData packet = + anc_packets.GetAncillaryDataWithType(AJAAncillaryDataType_Cea708); + + if (packet.GetPayloadData() && packet.GetPayloadByteCount() && + AJA_SUCCESS(packet.ParsePayloadData())) { + gst_buffer_add_video_caption_meta( + *buffer, GST_VIDEO_CAPTION_TYPE_CEA708_CDP, packet.GetPayloadData(), + packet.GetPayloadByteCount()); + } + } + + // TODO: Add AFD/Bar meta + + if (item.anc_buffer2) gst_buffer_unmap(item.anc_buffer2, &map2); + gst_buffer_unmap(item.anc_buffer, &map); + } + + gst_clear_buffer(&item.anc_buffer); + gst_clear_buffer(&item.anc_buffer2); + g_mutex_unlock(&self->queue_lock); + + if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self))) { + gst_base_src_set_caps(GST_BASE_SRC_CAST(self), self->configured_caps); + } + + return flow_ret; +} + +static void capture_thread_func(AJAThread *thread, void *data) { + GstAjaSrc *self = GST_AJA_SRC(data); + GstClock *clock = NULL; + AUTOCIRCULATE_TRANSFER transfer; + + if (self->capture_cpu_core != G_MAXUINT) { + cpu_set_t mask; + pthread_t current_thread = pthread_self(); + + CPU_ZERO(&mask); + CPU_SET(self->capture_cpu_core, &mask); + + if (pthread_setaffinity_np(current_thread, sizeof(mask), &mask) != 0) { + GST_ERROR_OBJECT(self, + "Failed 
to set affinity for current thread to core %u", + self->capture_cpu_core); + } + } + + g_mutex_lock(&self->queue_lock); +restart: + GST_DEBUG_OBJECT(self, "Waiting for playing or shutdown"); + while (!self->playing && !self->shutdown) + g_cond_wait(&self->queue_cond, &self->queue_lock); + if (self->shutdown) { + GST_DEBUG_OBJECT(self, "Shutting down"); + g_mutex_unlock(&self->queue_lock); + return; + } + + GST_DEBUG_OBJECT(self, "Starting capture"); + g_mutex_unlock(&self->queue_lock); + + // TODO: Wait for stable input signal + + if (!self->device->device->EnableChannel(self->channel)) { + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to enable channel")); + goto out; + } + + { + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + self->device->device->AutoCirculateStop(self->channel); + + self->device->device->EnableInputInterrupt(self->channel); + self->device->device->SubscribeInputVerticalEvent(self->channel); + if (!self->device->device->AutoCirculateInitForInput( + self->channel, self->queue_size / 2, self->audio_system, + AUTOCIRCULATE_WITH_RP188 | AUTOCIRCULATE_WITH_ANC, 1)) { + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to initialize autocirculate")); + goto out; + } + self->device->device->AutoCirculateStart(self->channel); + } + + gst_clear_object(&clock); + clock = gst_element_get_clock(GST_ELEMENT_CAST(self)); + + g_mutex_lock(&self->queue_lock); + while (self->playing && !self->shutdown) { + AUTOCIRCULATE_STATUS status; + + self->device->device->AutoCirculateGetStatus(self->channel, status); + + GST_TRACE_OBJECT(self, + "Start frame %d " + "end frame %d " + "active frame %d " + "start time %" G_GUINT64_FORMAT + " " + "current time %" G_GUINT64_FORMAT + " " + "frames processed %u " + "frames dropped %u " + "buffer level %u", + status.acStartFrame, status.acEndFrame, + status.acActiveFrame, status.acRDTSCStartTime, + status.acRDTSCCurrentTime, status.acFramesProcessed, + status.acFramesDropped, status.acBufferLevel); + + // TODO: Drop detection + // TODO: Signal loss detection + + if (status.IsRunning() && status.acBufferLevel > 1) { + GstBuffer *video_buffer = NULL; + GstBuffer *audio_buffer = NULL; + GstBuffer *anc_buffer = NULL, *anc_buffer2 = NULL; + GstMapInfo video_map = GST_MAP_INFO_INIT; + GstMapInfo audio_map = GST_MAP_INFO_INIT; + GstMapInfo anc_map = GST_MAP_INFO_INIT; + GstMapInfo anc_map2 = GST_MAP_INFO_INIT; + AUTOCIRCULATE_TRANSFER transfer; + + if (gst_buffer_pool_acquire_buffer(self->buffer_pool, &video_buffer, + NULL) != GST_FLOW_OK) { + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire video buffer")); + break; + } + + if (gst_buffer_pool_acquire_buffer(self->audio_buffer_pool, &audio_buffer, + NULL) != GST_FLOW_OK) { + gst_buffer_unref(video_buffer); + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire audio buffer")); + break; + } + + if (gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, &anc_buffer, + NULL) != GST_FLOW_OK) { + gst_buffer_unref(audio_buffer); + gst_buffer_unref(video_buffer); + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire anc buffer")); + break; + } + + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { + if (gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, &anc_buffer2, + NULL) != GST_FLOW_OK) { + gst_buffer_unref(anc_buffer); + gst_buffer_unref(audio_buffer); + gst_buffer_unref(video_buffer); + GST_ELEMENT_ERROR(self, STREAM, 
FAILED, (NULL), + ("Failed to acquire anc buffer")); + break; + } + } + + gst_buffer_map(video_buffer, &video_map, GST_MAP_READWRITE); + gst_buffer_map(audio_buffer, &audio_map, GST_MAP_READWRITE); + gst_buffer_map(anc_buffer, &anc_map, GST_MAP_READWRITE); + if (anc_buffer2) + gst_buffer_map(anc_buffer2, &anc_map2, GST_MAP_READWRITE); + + transfer.acFrameBufferFormat = ::NTV2_FBF_10BIT_YCBCR; + + transfer.SetVideoBuffer((ULWord *)video_map.data, video_map.size); + transfer.SetAudioBuffer((ULWord *)audio_map.data, audio_map.size); + transfer.SetAncBuffers((ULWord *)anc_map.data, anc_map.size, + (ULWord *)anc_map2.data, anc_map2.size); + + g_mutex_unlock(&self->queue_lock); + + bool transfered = true; + if (!self->device->device->AutoCirculateTransfer(self->channel, + transfer)) { + GST_WARNING_OBJECT(self, "Failed to transfer frame"); + transfered = false; + } + + if (anc_buffer2) gst_buffer_unmap(anc_buffer2, &anc_map2); + gst_buffer_unmap(anc_buffer, &anc_map); + gst_buffer_unmap(audio_buffer, &audio_map); + gst_buffer_unmap(video_buffer, &video_map); + + g_mutex_lock(&self->queue_lock); + + if (!transfered) { + gst_clear_buffer(&anc_buffer2); + gst_clear_buffer(&anc_buffer); + gst_clear_buffer(&audio_buffer); + gst_clear_buffer(&video_buffer); + continue; + } + + gst_buffer_set_size(audio_buffer, transfer.GetCapturedAudioByteCount()); + gst_buffer_set_size(anc_buffer, transfer.GetCapturedAncByteCount(false)); + if (anc_buffer2) + gst_buffer_set_size(anc_buffer2, + transfer.GetCapturedAncByteCount(true)); + + NTV2_RP188 time_code; + transfer.acTransferStatus.acFrameStamp.GetInputTimeCode( + time_code, ::NTV2ChannelToTimecodeIndex(self->channel, false)); + + gint64 frame_time = transfer.acTransferStatus.acFrameStamp.acFrameTime; + gint64 now_sys = g_get_real_time(); + GstClockTime now_gst = gst_clock_get_time(clock); + if (now_sys * 10 > frame_time) { + GstClockTime diff = now_sys * 1000 - frame_time * 100; + if (now_gst > diff) + now_gst -= diff; + else + now_gst = 0; + } + + GstClockTime base_time = + gst_element_get_base_time(GST_ELEMENT_CAST(self)); + if (now_gst > base_time) + now_gst -= base_time; + else + now_gst = 0; + + GST_BUFFER_PTS(video_buffer) = now_gst; + GST_BUFFER_PTS(audio_buffer) = now_gst; + + // TODO: Drift detection and compensation + + QueueItem item = {.type = QUEUE_ITEM_TYPE_FRAME, + .capture_time = now_gst, + .video_buffer = video_buffer, + .audio_buffer = audio_buffer, + .anc_buffer = anc_buffer, + .anc_buffer2 = anc_buffer2, + .tc = time_code}; + + while (gst_queue_array_get_length(self->queue) >= self->queue_size) { + QueueItem *tmp = + (QueueItem *)gst_queue_array_pop_head_struct(self->queue); + + if (tmp->type == QUEUE_ITEM_TYPE_FRAME) { + GST_WARNING_OBJECT(self, "Element queue overrun, dropping old frame"); + + GstMessage *msg = gst_message_new_qos( + GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, + GST_CLOCK_TIME_NONE, tmp->capture_time, + gst_util_uint64_scale(GST_SECOND, self->configured_info.fps_d, + self->configured_info.fps_n)); + gst_element_post_message(GST_ELEMENT_CAST(self), msg); + + gst_clear_buffer(&tmp->video_buffer); + gst_clear_buffer(&tmp->audio_buffer); + gst_clear_buffer(&tmp->anc_buffer); + gst_clear_buffer(&tmp->anc_buffer2); + } + } + + GST_TRACE_OBJECT(self, "Queuing frame %" GST_TIME_FORMAT, + GST_TIME_ARGS(now_gst)); + gst_queue_array_push_tail_struct(self->queue, &item); + GST_TRACE_OBJECT(self, "%u frames queued", + gst_queue_array_get_length(self->queue)); + g_cond_signal(&self->queue_cond); + + } else { + 
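      // No complete frame available yet: drop the queue lock while blocking
      // on the next input vertical interrupt, then loop and re-check the
      // AutoCirculate status.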
g_mutex_unlock(&self->queue_lock); + self->device->device->WaitForInputVerticalInterrupt(self->channel); + g_mutex_lock(&self->queue_lock); + } + } + +out : { + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + self->device->device->AutoCirculateStop(self->channel); + self->device->device->UnsubscribeInputVerticalEvent(self->channel); + self->device->device->DisableInputInterrupt(self->channel); +} + + if (!self->playing && !self->shutdown) goto restart; + g_mutex_unlock(&self->queue_lock); + + gst_clear_object(&clock); + + GST_DEBUG_OBJECT(self, "Stopped"); +} diff --git a/gstajasrc.h b/gstajasrc.h new file mode 100644 index 0000000000..000b77f7bd --- /dev/null +++ b/gstajasrc.h @@ -0,0 +1,90 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#pragma once + +#include +#include +#include + +#include "gstajacommon.h" + +G_BEGIN_DECLS + +#define GST_TYPE_AJA_SRC (gst_aja_src_get_type()) +#define GST_AJA_SRC(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_SRC, GstAjaSrc)) +#define GST_AJA_SRC_CAST(obj) ((GstAjaSrc *)obj) +#define GST_AJA_SRC_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AJA_SRC, GstAjaSrcClass)) +#define GST_IS_AJA_SRC(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AJA_SRC)) +#define GST_IS_AJA_SRC_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AJA_SRC)) + +typedef struct _GstAjaSrc GstAjaSrc; +typedef struct _GstAjaSrcClass GstAjaSrcClass; + +struct _GstAjaSrc { + GstPushSrc parent; + + // Everything below protected by queue lock + GMutex queue_lock; + GCond queue_cond; + GstQueueArray *queue; + gboolean playing; + gboolean shutdown; + gboolean flushing; + + GstAjaDevice *device; + NTV2DeviceID device_id; + GstAllocator *allocator; + GstBufferPool *buffer_pool; + GstBufferPool *audio_buffer_pool; + GstBufferPool *anc_buffer_pool; + + // Properties + gchar *device_identifier; + NTV2Channel channel; + GstAjaAudioSystem audio_system_setting; + GstAjaVideoFormat video_format_setting; + GstAjaInputSource input_source; + GstAjaReferenceSource reference_source; + guint queue_size; + guint capture_cpu_core; + + NTV2AudioSystem audio_system; + NTV2VideoFormat video_format; + guint32 f2_start_line; + + GstCaps *configured_caps; + GstVideoInfo configured_info; + gint configured_audio_channels; + + AJAThread *capture_thread; +}; + +struct _GstAjaSrcClass { + GstPushSrcClass parent_class; +}; + +G_GNUC_INTERNAL +GType gst_aja_src_get_type(void); + +G_END_DECLS diff --git a/gstajasrcdemux.cpp b/gstajasrcdemux.cpp new file mode 100644 index 0000000000..67edc6dd52 --- /dev/null +++ b/gstajasrcdemux.cpp @@ -0,0 +1,162 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can 
redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include + +#include "gstajacommon.h" +#include "gstajasrcdemux.h" + +GST_DEBUG_CATEGORY_STATIC(gst_aja_src_demux_debug); +#define GST_CAT_DEFAULT gst_aja_src_demux_debug + +static GstStaticPadTemplate video_src_template = GST_STATIC_PAD_TEMPLATE( + "video", GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS("video/x-raw")); + +static GstStaticPadTemplate audio_src_template = GST_STATIC_PAD_TEMPLATE( + "audio", GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS("audio/x-raw")); + +static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE( + "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS("video/x-raw")); + +static GstFlowReturn gst_aja_src_demux_sink_chain(GstPad *pad, + GstObject *parent, + GstBuffer *buffer); +static gboolean gst_aja_src_demux_sink_event(GstPad *pad, GstObject *parent, + GstEvent *event); + +#define parent_class gst_aja_src_demux_parent_class +G_DEFINE_TYPE(GstAjaSrcDemux, gst_aja_src_demux, GST_TYPE_ELEMENT); + +static void gst_aja_src_demux_class_init(GstAjaSrcDemuxClass *klass) { + GstElementClass *element_class = GST_ELEMENT_CLASS(klass); + + gst_element_class_add_static_pad_template(element_class, &sink_template); + gst_element_class_add_static_pad_template(element_class, &video_src_template); + gst_element_class_add_static_pad_template(element_class, &audio_src_template); + + gst_element_class_set_static_metadata( + element_class, "AJA audio/video source demuxer", "Audio/Video/Demux", + "Demuxes audio/video from video buffers", + "Sebastian Dröge "); + + GST_DEBUG_CATEGORY_INIT(gst_aja_src_demux_debug, "ajasrcdemux", 0, + "AJA source demuxer"); +} + +static void gst_aja_src_demux_init(GstAjaSrcDemux *self) { + self->sink = gst_pad_new_from_static_template(&sink_template, "sink"); + gst_pad_set_chain_function(self->sink, + GST_DEBUG_FUNCPTR(gst_aja_src_demux_sink_chain)); + gst_pad_set_event_function(self->sink, + GST_DEBUG_FUNCPTR(gst_aja_src_demux_sink_event)); + gst_element_add_pad(GST_ELEMENT(self), self->sink); + + self->audio_src = + gst_pad_new_from_static_template(&audio_src_template, "audio"); + gst_pad_use_fixed_caps(self->audio_src); + gst_element_add_pad(GST_ELEMENT(self), self->audio_src); + + self->video_src = + gst_pad_new_from_static_template(&video_src_template, "video"); + gst_pad_use_fixed_caps(self->video_src); + gst_element_add_pad(GST_ELEMENT(self), self->video_src); +} + +static GstFlowReturn gst_aja_src_demux_sink_chain(GstPad *pad, + GstObject *parent, + GstBuffer *buffer) { + GstAjaSrcDemux *self = GST_AJA_SRC_DEMUX(parent); + GstAjaAudioMeta *meta = gst_buffer_get_aja_audio_meta(buffer); + GstFlowReturn audio_flow_ret = GST_FLOW_OK; + GstFlowReturn video_flow_ret = GST_FLOW_OK; + + if (meta) { + GstBuffer *audio_buffer; + buffer = gst_buffer_make_writable(buffer); + meta = 
gst_buffer_get_aja_audio_meta(buffer); + audio_buffer = gst_buffer_ref(meta->buffer); + gst_buffer_remove_meta(buffer, GST_META_CAST(meta)); + + audio_flow_ret = gst_pad_push(self->audio_src, audio_buffer); + } else { + GstEvent *event = + gst_event_new_gap(GST_BUFFER_PTS(buffer), GST_BUFFER_DURATION(buffer)); + gst_pad_push_event(self->audio_src, event); + } + + video_flow_ret = gst_pad_push(self->video_src, buffer); + + // Combine flows the way it makes sense + if (video_flow_ret == GST_FLOW_NOT_LINKED && + audio_flow_ret == GST_FLOW_NOT_LINKED) + return GST_FLOW_NOT_LINKED; + if (video_flow_ret == GST_FLOW_EOS && audio_flow_ret == GST_FLOW_EOS) + return GST_FLOW_EOS; + if (video_flow_ret == GST_FLOW_FLUSHING || + video_flow_ret <= GST_FLOW_NOT_NEGOTIATED) + return video_flow_ret; + if (audio_flow_ret == GST_FLOW_FLUSHING || + audio_flow_ret <= GST_FLOW_NOT_NEGOTIATED) + return audio_flow_ret; + return GST_FLOW_OK; +} + +static gboolean gst_aja_src_demux_sink_event(GstPad *pad, GstObject *parent, + GstEvent *event) { + GstAjaSrcDemux *self = GST_AJA_SRC_DEMUX(parent); + + switch (GST_EVENT_TYPE(event)) { + case GST_EVENT_CAPS: { + GstCaps *caps; + GstStructure *s; + GstAudioInfo audio_info; + gint audio_channels = 0; + + gst_event_parse_caps(event, &caps); + s = gst_caps_get_structure(caps, 0); + + gst_structure_get_int(s, "audio-channels", &audio_channels); + + GstCaps *audio_caps, *video_caps; + + gst_audio_info_init(&audio_info); + gst_audio_info_set_format(&audio_info, GST_AUDIO_FORMAT_S32LE, 48000, + audio_channels ? audio_channels : 1, NULL); + audio_caps = gst_audio_info_to_caps(&audio_info); + gst_pad_set_caps(self->audio_src, audio_caps); + gst_caps_unref(audio_caps); + + video_caps = gst_caps_ref(caps); + gst_event_unref(event); + video_caps = gst_caps_make_writable(video_caps); + s = gst_caps_get_structure(video_caps, 0); + gst_structure_remove_field(s, "audio-channels"); + gst_pad_set_caps(self->video_src, video_caps); + gst_caps_unref(video_caps); + + return TRUE; + } + default: + return gst_pad_event_default(pad, parent, event); + } +} diff --git a/gstajasrcdemux.h b/gstajasrcdemux.h new file mode 100644 index 0000000000..fe1b791729 --- /dev/null +++ b/gstajasrcdemux.h @@ -0,0 +1,59 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#pragma once + +#include +#include +#include + +#include "gstajacommon.h" + +G_BEGIN_DECLS + +#define GST_TYPE_AJA_SRC_DEMUX (gst_aja_src_demux_get_type()) +#define GST_AJA_SRC_DEMUX(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_SRC_DEMUX, GstAjaSrcDemux)) +#define GST_AJA_SRC_DEMUX_CAST(obj) ((GstAjaSrcDemux *)obj) +#define GST_AJA_SRC_DEMUX_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AJA_SRC_DEMUX, \ + GstAjaSrcDemuxClass)) +#define GST_IS_AJA_SRC_DEMUX(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AJA_SRC_DEMUX)) +#define GST_IS_AJA_SRC_DEMUX_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AJA_SRC_DEMUX)) + +typedef struct _GstAjaSrcDemux GstAjaSrcDemux; +typedef struct _GstAjaSrcDemuxClass GstAjaSrcDemuxClass; + +struct _GstAjaSrcDemux { + GstElement parent; + + GstPad *sink; + GstPad *video_src, *audio_src; +}; + +struct _GstAjaSrcDemuxClass { + GstElementClass parent_class; +}; + +G_GNUC_INTERNAL +GType gst_aja_src_demux_get_type(void); + +G_END_DECLS diff --git a/meson.build b/meson.build index d1d9ce33c1..66cc7c2858 100644 --- a/meson.build +++ b/meson.build @@ -82,6 +82,8 @@ gstaja = library('gstaja', 'gstajacommon.cpp', 'gstajasink.cpp', 'gstajasinkcombiner.cpp', + 'gstajasrc.cpp', + 'gstajasrcdemux.cpp', ], cpp_args : [ aja_includedirs, diff --git a/plugin.cpp b/plugin.cpp index d6367c4877..1423c69c7e 100644 --- a/plugin.cpp +++ b/plugin.cpp @@ -17,15 +17,23 @@ * Boston, MA 02110-1335, USA. */ +#include #include #include "gstajacommon.h" #include "gstajasink.h" #include "gstajasinkcombiner.h" +#include "gstajasrc.h" +#include "gstajasrcdemux.h" static gboolean plugin_init(GstPlugin* plugin) { + AJADebug::Open(); + gst_aja_common_init(); + gst_element_register(plugin, "ajasrc", GST_RANK_NONE, GST_TYPE_AJA_SRC); + gst_element_register(plugin, "ajasrcdemux", GST_RANK_NONE, + GST_TYPE_AJA_SRC_DEMUX); gst_element_register(plugin, "ajasink", GST_RANK_NONE, GST_TYPE_AJA_SINK); gst_element_register(plugin, "ajasinkcombiner", GST_RANK_NONE, GST_TYPE_AJA_SINK_COMBINER); From 7be26718b8b6014c112f054ddd4235eaa7dba244 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 9 Feb 2021 13:24:20 +0200 Subject: [PATCH 03/73] Add configuration for audio source, embedded audio input and timecode index --- gstajacommon.cpp | 72 +++++++++++++++++++ gstajacommon.h | 55 ++++++++++++++ gstajasink.cpp | 95 ++++++++++++++++++++++-- gstajasink.h | 1 + gstajasrc.cpp | 184 +++++++++++++++++++++++++++++++++++++++++++++-- gstajasrc.h | 3 + 6 files changed, 397 insertions(+), 13 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 90474a61e0..bafdb6f8fe 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -532,6 +532,78 @@ GType gst_aja_video_format_get_type(void) { return (GType)id; } +GType gst_aja_audio_source_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_AUDIO_SOURCE_EMBEDDED, "embedded", "Embedded"}, + {GST_AJA_AUDIO_SOURCE_AES, "aes", "AES"}, + {GST_AJA_AUDIO_SOURCE_ANALOG, "analog", "Analog"}, + {GST_AJA_AUDIO_SOURCE_HDMI, "hdmi", "HDMI"}, + {GST_AJA_AUDIO_SOURCE_MIC, "mic", "Microphone"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaAudioSource", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_embedded_audio_input_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO, "auto", "Auto"}, + 
{GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_1, "video-1", "Video 1"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_2, "video-2", "Video 2"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_3, "video-3", "Video 3"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_4, "video-4", "Video 4"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_5, "video-5", "Video 5"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_6, "video-6", "Video 6"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_7, "video-7", "Video 7"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_8, "video-8", "Video 8"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaEmbeddedAudioInput", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_timecode_index_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_TIMECODE_INDEX_AUTO, "auto", "Auto"}, + {GST_AJA_TIMECODE_INDEX_SDI1, "sdi-1", "SDI 1"}, + {GST_AJA_TIMECODE_INDEX_SDI2, "sdi-2", "SDI 2"}, + {GST_AJA_TIMECODE_INDEX_SDI3, "sdi-3", "SDI 3"}, + {GST_AJA_TIMECODE_INDEX_SDI4, "sdi-4", "SDI 4"}, + {GST_AJA_TIMECODE_INDEX_SDI5, "sdi-5", "SDI 5"}, + {GST_AJA_TIMECODE_INDEX_SDI6, "sdi-6", "SDI 6"}, + {GST_AJA_TIMECODE_INDEX_SDI7, "sdi-7", "SDI 7"}, + {GST_AJA_TIMECODE_INDEX_SDI8, "sdi-8", "SDI 8"}, + {GST_AJA_TIMECODE_INDEX_SDI1_LTC, "sdi-1-ltc", "SDI 1 LTC"}, + {GST_AJA_TIMECODE_INDEX_SDI2_LTC, "sdi-2-ltc", "SDI 2 LTC"}, + {GST_AJA_TIMECODE_INDEX_SDI3_LTC, "sdi-3-ltc", "SDI 3 LTC"}, + {GST_AJA_TIMECODE_INDEX_SDI4_LTC, "sdi-4-ltc", "SDI 4 LTC"}, + {GST_AJA_TIMECODE_INDEX_SDI5_LTC, "sdi-5-ltc", "SDI 5 LTC"}, + {GST_AJA_TIMECODE_INDEX_SDI6_LTC, "sdi-6-ltc", "SDI 6 LTC"}, + {GST_AJA_TIMECODE_INDEX_SDI7_LTC, "sdi-7-ltc", "SDI 7 LTC"}, + {GST_AJA_TIMECODE_INDEX_SDI8_LTC, "sdi-8-ltc", "SDI 8 LTC"}, + {GST_AJA_TIMECODE_INDEX_LTC1, "ltc-1", "LTC 1"}, + {GST_AJA_TIMECODE_INDEX_LTC2, "ltc-2", "LTC 2"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaTimecodeIndex", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + void gst_aja_common_init(void) { GST_DEBUG_CATEGORY_INIT(gst_aja_debug, "aja", 0, "Debug category for AJA plugin"); diff --git a/gstajacommon.h b/gstajacommon.h index 33f37622f2..e32fe09a46 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -205,6 +205,61 @@ typedef enum { G_GNUC_INTERNAL GType gst_aja_video_format_get_type(void); +typedef enum { + GST_AJA_AUDIO_SOURCE_EMBEDDED, + GST_AJA_AUDIO_SOURCE_AES, + GST_AJA_AUDIO_SOURCE_ANALOG, + GST_AJA_AUDIO_SOURCE_HDMI, + GST_AJA_AUDIO_SOURCE_MIC, +} GstAjaAudioSource; + +#define GST_TYPE_AJA_AUDIO_SOURCE (gst_aja_audio_source_get_type()) +G_GNUC_INTERNAL +GType gst_aja_audio_source_get_type(void); + +typedef enum { + GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_1, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_2, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_3, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_4, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_5, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_6, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_7, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_8, +} GstAjaEmbeddedAudioInput; + +#define GST_TYPE_AJA_EMBEDDED_AUDIO_INPUT \ + (gst_aja_embedded_audio_input_get_type()) +G_GNUC_INTERNAL +GType gst_aja_embedded_audio_input_get_type(void); + +typedef enum { + GST_AJA_TIMECODE_INDEX_AUTO, + GST_AJA_TIMECODE_INDEX_SDI1, + GST_AJA_TIMECODE_INDEX_SDI2, + GST_AJA_TIMECODE_INDEX_SDI3, + GST_AJA_TIMECODE_INDEX_SDI4, + GST_AJA_TIMECODE_INDEX_SDI5, + GST_AJA_TIMECODE_INDEX_SDI6, + GST_AJA_TIMECODE_INDEX_SDI7, + 
GST_AJA_TIMECODE_INDEX_SDI8, + GST_AJA_TIMECODE_INDEX_SDI1_LTC, + GST_AJA_TIMECODE_INDEX_SDI2_LTC, + GST_AJA_TIMECODE_INDEX_SDI3_LTC, + GST_AJA_TIMECODE_INDEX_SDI4_LTC, + GST_AJA_TIMECODE_INDEX_SDI5_LTC, + GST_AJA_TIMECODE_INDEX_SDI6_LTC, + GST_AJA_TIMECODE_INDEX_SDI7_LTC, + GST_AJA_TIMECODE_INDEX_SDI8_LTC, + GST_AJA_TIMECODE_INDEX_LTC1, + GST_AJA_TIMECODE_INDEX_LTC2, +} GstAjaTimecodeIndex; + +#define GST_TYPE_AJA_TIMECODE_INDEX (gst_aja_timecode_index_get_type()) +G_GNUC_INTERNAL +GType gst_aja_timecode_index_get_type(void); + G_GNUC_INTERNAL void gst_aja_common_init(void); diff --git a/gstajasink.cpp b/gstajasink.cpp index 5645d00274..54a229942c 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -35,6 +35,7 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_sink_debug); #define DEFAULT_CHANNEL (::NTV2_CHANNEL1) #define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) #define DEFAULT_OUTPUT_DESTINATION (GST_AJA_OUTPUT_DESTINATION_AUTO) +#define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_AUTO) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_AUTO) #define DEFAULT_QUEUE_SIZE (16) #define DEFAULT_OUTPUT_CPU_CORE (G_MAXUINT) @@ -45,6 +46,7 @@ enum { PROP_CHANNEL, PROP_AUDIO_SYSTEM, PROP_OUTPUT_DESTINATION, + PROP_TIMECODE_INDEX, PROP_REFERENCE_SOURCE, PROP_QUEUE_SIZE, PROP_OUTPUT_CPU_CORE, @@ -144,6 +146,14 @@ static void gst_aja_sink_class_init(GstAjaSinkClass *klass) { (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT))); + g_object_class_install_property( + gobject_class, PROP_TIMECODE_INDEX, + g_param_spec_enum( + "timecode-index", "Timecode Index", "Timecode index to use", + GST_TYPE_AJA_TIMECODE_INDEX, DEFAULT_TIMECODE_INDEX, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + g_object_class_install_property( gobject_class, PROP_REFERENCE_SOURCE, g_param_spec_enum( @@ -195,6 +205,7 @@ static void gst_aja_sink_init(GstAjaSink *self) { self->queue_size = DEFAULT_QUEUE_SIZE; self->audio_system_setting = DEFAULT_AUDIO_SYSTEM; self->output_destination = DEFAULT_OUTPUT_DESTINATION; + self->timecode_index = DEFAULT_TIMECODE_INDEX; self->reference_source = DEFAULT_REFERENCE_SOURCE; self->output_cpu_core = DEFAULT_OUTPUT_CPU_CORE; @@ -226,6 +237,9 @@ void gst_aja_sink_set_property(GObject *object, guint property_id, self->output_destination = (GstAjaOutputDestination)g_value_get_enum(value); break; + case PROP_TIMECODE_INDEX: + self->timecode_index = (GstAjaTimecodeIndex)g_value_get_enum(value); + break; case PROP_REFERENCE_SOURCE: self->reference_source = (GstAjaReferenceSource)g_value_get_enum(value); break; @@ -258,6 +272,9 @@ void gst_aja_sink_get_property(GObject *object, guint property_id, case PROP_OUTPUT_DESTINATION: g_value_set_enum(value, self->output_destination); break; + case PROP_TIMECODE_INDEX: + g_value_set_enum(value, self->timecode_index); + break; case PROP_REFERENCE_SOURCE: g_value_set_enum(value, self->reference_source); break; @@ -1137,6 +1154,75 @@ restart: GST_DEBUG_OBJECT(self, "Starting playing"); g_mutex_unlock(&self->queue_lock); + NTV2TCIndexes tc_indexes; + switch (self->timecode_index) { + case GST_AJA_TIMECODE_INDEX_AUTO: + tc_indexes.insert(::NTV2ChannelToTimecodeIndex(self->channel, false)); + tc_indexes.insert(::NTV2ChannelToTimecodeIndex(self->channel, true)); + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) + tc_indexes.insert( + ::NTV2ChannelToTimecodeIndex(self->channel, false, true)); + break; + case GST_AJA_TIMECODE_INDEX_SDI1: + 
tc_indexes.insert(::NTV2_TCINDEX_SDI1); + break; + case GST_AJA_TIMECODE_INDEX_SDI2: + tc_indexes.insert(::NTV2_TCINDEX_SDI2); + break; + case GST_AJA_TIMECODE_INDEX_SDI3: + tc_indexes.insert(::NTV2_TCINDEX_SDI3); + break; + case GST_AJA_TIMECODE_INDEX_SDI4: + tc_indexes.insert(::NTV2_TCINDEX_SDI4); + break; + case GST_AJA_TIMECODE_INDEX_SDI5: + tc_indexes.insert(::NTV2_TCINDEX_SDI5); + break; + case GST_AJA_TIMECODE_INDEX_SDI6: + tc_indexes.insert(::NTV2_TCINDEX_SDI6); + break; + case GST_AJA_TIMECODE_INDEX_SDI7: + tc_indexes.insert(::NTV2_TCINDEX_SDI7); + break; + case GST_AJA_TIMECODE_INDEX_SDI8: + tc_indexes.insert(::NTV2_TCINDEX_SDI8); + break; + case GST_AJA_TIMECODE_INDEX_SDI1_LTC: + tc_indexes.insert(::NTV2_TCINDEX_SDI1_LTC); + break; + case GST_AJA_TIMECODE_INDEX_SDI2_LTC: + tc_indexes.insert(::NTV2_TCINDEX_SDI2_LTC); + break; + case GST_AJA_TIMECODE_INDEX_SDI3_LTC: + tc_indexes.insert(::NTV2_TCINDEX_SDI3_LTC); + break; + case GST_AJA_TIMECODE_INDEX_SDI4_LTC: + tc_indexes.insert(::NTV2_TCINDEX_SDI4_LTC); + break; + case GST_AJA_TIMECODE_INDEX_SDI5_LTC: + tc_indexes.insert(::NTV2_TCINDEX_SDI5_LTC); + break; + case GST_AJA_TIMECODE_INDEX_SDI6_LTC: + tc_indexes.insert(::NTV2_TCINDEX_SDI6_LTC); + break; + case GST_AJA_TIMECODE_INDEX_SDI7_LTC: + tc_indexes.insert(::NTV2_TCINDEX_SDI7_LTC); + break; + case GST_AJA_TIMECODE_INDEX_SDI8_LTC: + tc_indexes.insert(::NTV2_TCINDEX_SDI8_LTC); + break; + case GST_AJA_TIMECODE_INDEX_LTC1: + tc_indexes.insert(::NTV2_TCINDEX_LTC1); + break; + case GST_AJA_TIMECODE_INDEX_LTC2: + tc_indexes.insert(::NTV2_TCINDEX_LTC2); + break; + default: + g_assert_not_reached(); + break; + } + { // Make sure to globally lock here as the routing settings and others are // global shared state @@ -1299,12 +1385,9 @@ restart: if (item.tc.IsValid() && item.tc.fDBB != 0xffffffff) { NTV2TimeCodes timecodes; - timecodes[::NTV2ChannelToTimecodeIndex(self->channel, false)] = item.tc; - timecodes[::NTV2ChannelToTimecodeIndex(self->channel, true)] = item.tc; - if (self->configured_info.interlace_mode != - GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) - timecodes[::NTV2ChannelToTimecodeIndex(self->channel, false, true)] = - item.tc; + for (const auto &tc_index : tc_indexes) { + timecodes[tc_index] = item.tc; + } transfer.SetOutputTimeCodes(timecodes); } diff --git a/gstajasink.h b/gstajasink.h index 270cbd9e4e..95c7f3aca6 100644 --- a/gstajasink.h +++ b/gstajasink.h @@ -72,6 +72,7 @@ struct _GstAjaSink { GstAjaAudioSystem audio_system_setting; GstAjaOutputDestination output_destination; + GstAjaTimecodeIndex timecode_index; GstAjaReferenceSource reference_source; NTV2AudioSystem audio_system; diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 6c4e4e50cb..6e64b9b660 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -37,6 +37,9 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); #define DEFAULT_VIDEO_FORMAT (GST_AJA_VIDEO_FORMAT_1080i_5000) #define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) #define DEFAULT_INPUT_SOURCE (GST_AJA_INPUT_SOURCE_AUTO) +#define DEFAULT_AUDIO_SOURCE (GST_AJA_AUDIO_SOURCE_EMBEDDED) +#define DEFAULT_EMBEDDED_AUDIO_INPUT (GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO) +#define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_AUTO) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_FREERUN) #define DEFAULT_QUEUE_SIZE (16) #define DEFAULT_CAPTURE_CPU_CORE (G_MAXUINT) @@ -48,6 +51,9 @@ enum { PROP_VIDEO_FORMAT, PROP_AUDIO_SYSTEM, PROP_INPUT_SOURCE, + PROP_AUDIO_SOURCE, + PROP_EMBEDDED_AUDIO_INPUT, + PROP_TIMECODE_INDEX, PROP_REFERENCE_SOURCE, PROP_QUEUE_SIZE, 
PROP_CAPTURE_CPU_CORE, @@ -155,6 +161,31 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) { (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT))); + g_object_class_install_property( + gobject_class, PROP_AUDIO_SOURCE, + g_param_spec_enum( + "audio-source", "Audio Source", "Audio source to use", + GST_TYPE_AJA_AUDIO_SOURCE, DEFAULT_AUDIO_SOURCE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_EMBEDDED_AUDIO_INPUT, + g_param_spec_enum( + "embedded-audio-input", "Embedded Audio Input", + "Embedded audio input to use", GST_TYPE_AJA_EMBEDDED_AUDIO_INPUT, + DEFAULT_EMBEDDED_AUDIO_INPUT, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_TIMECODE_INDEX, + g_param_spec_enum( + "timecode-index", "Timecode Index", "Timecode index to use", + GST_TYPE_AJA_TIMECODE_INDEX, DEFAULT_TIMECODE_INDEX, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + g_object_class_install_property( gobject_class, PROP_REFERENCE_SOURCE, g_param_spec_enum( @@ -207,6 +238,9 @@ static void gst_aja_src_init(GstAjaSrc *self) { self->video_format_setting = DEFAULT_VIDEO_FORMAT; self->audio_system_setting = DEFAULT_AUDIO_SYSTEM; self->input_source = DEFAULT_INPUT_SOURCE; + self->audio_source = DEFAULT_AUDIO_SOURCE; + self->embedded_audio_input = DEFAULT_EMBEDDED_AUDIO_INPUT; + self->timecode_index = DEFAULT_TIMECODE_INDEX; self->reference_source = DEFAULT_REFERENCE_SOURCE; self->capture_cpu_core = DEFAULT_CAPTURE_CPU_CORE; @@ -240,6 +274,16 @@ void gst_aja_src_set_property(GObject *object, guint property_id, case PROP_INPUT_SOURCE: self->input_source = (GstAjaInputSource)g_value_get_enum(value); break; + case PROP_AUDIO_SOURCE: + self->audio_source = (GstAjaAudioSource)g_value_get_enum(value); + break; + case PROP_EMBEDDED_AUDIO_INPUT: + self->embedded_audio_input = + (GstAjaEmbeddedAudioInput)g_value_get_enum(value); + break; + case PROP_TIMECODE_INDEX: + self->timecode_index = (GstAjaTimecodeIndex)g_value_get_enum(value); + break; case PROP_REFERENCE_SOURCE: self->reference_source = (GstAjaReferenceSource)g_value_get_enum(value); break; @@ -275,6 +319,15 @@ void gst_aja_src_get_property(GObject *object, guint property_id, GValue *value, case PROP_INPUT_SOURCE: g_value_set_enum(value, self->input_source); break; + case PROP_AUDIO_SOURCE: + g_value_set_enum(value, self->audio_source); + break; + case PROP_EMBEDDED_AUDIO_INPUT: + g_value_set_enum(value, self->embedded_audio_input); + break; + case PROP_TIMECODE_INDEX: + g_value_set_enum(value, self->timecode_index); + break; case PROP_REFERENCE_SOURCE: g_value_set_enum(value, self->reference_source); break; @@ -608,12 +661,65 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { GST_DEBUG_OBJECT(self, "Using audio system %d", self->audio_system); - // TODO: make configurable + NTV2AudioSource audio_source; + switch (self->audio_source) { + case GST_AJA_AUDIO_SOURCE_EMBEDDED: + audio_source = ::NTV2_AUDIO_EMBEDDED; + break; + case GST_AJA_AUDIO_SOURCE_AES: + audio_source = ::NTV2_AUDIO_AES; + break; + case GST_AJA_AUDIO_SOURCE_ANALOG: + audio_source = ::NTV2_AUDIO_ANALOG; + break; + case GST_AJA_AUDIO_SOURCE_HDMI: + audio_source = ::NTV2_AUDIO_HDMI; + break; + case GST_AJA_AUDIO_SOURCE_MIC: + audio_source = ::NTV2_AUDIO_MIC; + break; + default: + g_assert_not_reached(); + break; + } + + NTV2EmbeddedAudioInput 
embedded_audio_input; + switch (self->embedded_audio_input) { + case GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO: + embedded_audio_input = + ::NTV2InputSourceToEmbeddedAudioInput(input_source); + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_1: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_1; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_2: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_2; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_3: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_3; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_4: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_4; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_5: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_5; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_6: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_6; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_7: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_7; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_8: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_8; + break; + default: + g_assert_not_reached(); + break; + } + self->device->device->SetAudioSystemInputSource( - self->audio_system, NTV2_AUDIO_EMBEDDED, - ::NTV2InputSourceToEmbeddedAudioInput(input_source)); - self->device->device->SetEmbeddedAudioInput( - ::NTV2ChannelToEmbeddedAudioInput(self->channel), self->audio_system); + self->audio_system, audio_source, embedded_audio_input); self->configured_audio_channels = ::NTV2DeviceGetMaxAudioChannels(self->device_id); self->device->device->SetNumberAudioChannels( @@ -1076,6 +1182,70 @@ restart: gst_clear_object(&clock); clock = gst_element_get_clock(GST_ELEMENT_CAST(self)); + NTV2TCIndex tc_index; + switch (self->timecode_index) { + case GST_AJA_TIMECODE_INDEX_AUTO: + tc_index = ::NTV2ChannelToTimecodeIndex(self->channel, false); + break; + case GST_AJA_TIMECODE_INDEX_SDI1: + tc_index = ::NTV2_TCINDEX_SDI1; + break; + case GST_AJA_TIMECODE_INDEX_SDI2: + tc_index = ::NTV2_TCINDEX_SDI2; + break; + case GST_AJA_TIMECODE_INDEX_SDI3: + tc_index = ::NTV2_TCINDEX_SDI3; + break; + case GST_AJA_TIMECODE_INDEX_SDI4: + tc_index = ::NTV2_TCINDEX_SDI4; + break; + case GST_AJA_TIMECODE_INDEX_SDI5: + tc_index = ::NTV2_TCINDEX_SDI5; + break; + case GST_AJA_TIMECODE_INDEX_SDI6: + tc_index = ::NTV2_TCINDEX_SDI6; + break; + case GST_AJA_TIMECODE_INDEX_SDI7: + tc_index = ::NTV2_TCINDEX_SDI7; + break; + case GST_AJA_TIMECODE_INDEX_SDI8: + tc_index = ::NTV2_TCINDEX_SDI8; + break; + case GST_AJA_TIMECODE_INDEX_SDI1_LTC: + tc_index = ::NTV2_TCINDEX_SDI1_LTC; + break; + case GST_AJA_TIMECODE_INDEX_SDI2_LTC: + tc_index = ::NTV2_TCINDEX_SDI2_LTC; + break; + case GST_AJA_TIMECODE_INDEX_SDI3_LTC: + tc_index = ::NTV2_TCINDEX_SDI3_LTC; + break; + case GST_AJA_TIMECODE_INDEX_SDI4_LTC: + tc_index = ::NTV2_TCINDEX_SDI4_LTC; + break; + case GST_AJA_TIMECODE_INDEX_SDI5_LTC: + tc_index = ::NTV2_TCINDEX_SDI5_LTC; + break; + case GST_AJA_TIMECODE_INDEX_SDI6_LTC: + tc_index = ::NTV2_TCINDEX_SDI6_LTC; + break; + case GST_AJA_TIMECODE_INDEX_SDI7_LTC: + tc_index = ::NTV2_TCINDEX_SDI7_LTC; + break; + case GST_AJA_TIMECODE_INDEX_SDI8_LTC: + tc_index = ::NTV2_TCINDEX_SDI8_LTC; + break; + case GST_AJA_TIMECODE_INDEX_LTC1: + tc_index = ::NTV2_TCINDEX_LTC1; + break; + case GST_AJA_TIMECODE_INDEX_LTC2: + tc_index = ::NTV2_TCINDEX_LTC2; + break; + default: + g_assert_not_reached(); + break; + } + g_mutex_lock(&self->queue_lock); while (self->playing && !self->shutdown) { AUTOCIRCULATE_STATUS status; 
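For context, the three ajasrc properties introduced by this patch are plain GObject enums, so they can be set straight from gst-launch-1.0. A minimal capture sketch (illustrative only: the video format, the choice of SDI 1 LTC as timecode index and the converter/sink elements are placeholder assumptions, not part of the patch):

```sh
gst-launch-1.0 ajasrc video-format=1080p-3000 audio-source=embedded \
    embedded-audio-input=video-1 timecode-index=sdi-1-ltc ! ajasrcdemux name=d \
    d.video ! queue ! videoconvert ! autovideosink \
    d.audio ! queue ! audioconvert ! audioresample ! autoaudiosink
```

Leaving timecode-index at its default of auto derives the index from the selected channel via NTV2ChannelToTimecodeIndex(), as the switch in the hunk above shows.
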
@@ -1192,8 +1362,8 @@ restart: transfer.GetCapturedAncByteCount(true)); NTV2_RP188 time_code; - transfer.acTransferStatus.acFrameStamp.GetInputTimeCode( - time_code, ::NTV2ChannelToTimecodeIndex(self->channel, false)); + transfer.acTransferStatus.acFrameStamp.GetInputTimeCode(time_code, + tc_index); gint64 frame_time = transfer.acTransferStatus.acFrameStamp.acFrameTime; gint64 now_sys = g_get_real_time(); diff --git a/gstajasrc.h b/gstajasrc.h index 000b77f7bd..cd37a060db 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -65,6 +65,9 @@ struct _GstAjaSrc { GstAjaAudioSystem audio_system_setting; GstAjaVideoFormat video_format_setting; GstAjaInputSource input_source; + GstAjaAudioSource audio_source; + GstAjaEmbeddedAudioInput embedded_audio_input; + GstAjaTimecodeIndex timecode_index; GstAjaReferenceSource reference_source; guint queue_size; guint capture_cpu_core; From 00c2fcfc8e143b98436a0ec2770ce5ac79cba386 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 9 Feb 2021 13:50:13 +0200 Subject: [PATCH 04/73] Improve sink drift calculation by working with the transfer times --- gstajasink.cpp | 111 +++++++++++++++++++++++++++++++++---------------- 1 file changed, 76 insertions(+), 35 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 54a229942c..fdd598fb4e 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -1276,41 +1276,6 @@ restart: status.acRDTSCCurrentTime, status.acFramesProcessed, status.acFramesDropped, status.acBufferLevel); - // Trivial drift calculation - // - // TODO: Should probably take averages over a timespan (say 1 minute) into a - // ringbuffer and calculate a linear regression over them - // FIXME: Add some compensation by dropping/duplicating frames as needed - // but make this configurable - // FIXME: Should use transfer.acTransferStatus.acFrameStamp after - // AutoCirculateTransfer() - if (frames_renderded_start_time == GST_CLOCK_TIME_NONE && - status.acRDTSCStartTime != 0 && - status.acFramesProcessed + status.acFramesDropped > self->queue_size && - clock) { - frames_renderded_start = - status.acFramesProcessed + status.acFramesDropped; - frames_renderded_start_time = gst_clock_get_time(clock); - } - - if (clock && frames_renderded_start_time != GST_CLOCK_TIME_NONE) { - GstClockTime now = gst_clock_get_time(clock); - GstClockTime diff = now - frames_renderded_start_time; - guint64 frames_rendered = - (status.acFramesProcessed + status.acFramesDropped) - - frames_renderded_start; - guint64 frames_produced = - gst_util_uint64_scale(diff, self->configured_info.fps_n, - self->configured_info.fps_d * GST_SECOND); - gdouble fps_rendered = ((gdouble)frames_rendered * GST_SECOND) / diff; - - GST_TRACE_OBJECT(self, - "Frames rendered %" G_GUINT64_FORMAT - ", frames produced %" G_GUINT64_FORMAT - ", FPS rendered %lf", - frames_rendered, frames_produced, fps_rendered); - } - // Detect if we were too slow with providing frames and report if that was // the case together with the amount of frames dropped if (frames_dropped_last == G_MAXUINT64) { @@ -1427,6 +1392,82 @@ restart: delete item.anc_packet_list; } + GST_TRACE_OBJECT( + self, + "Transferred frame. 
" + "frame time %" GST_TIME_FORMAT + " " + "current frame %u " + "current frame time %" GST_TIME_FORMAT + " " + "frames processed %u " + "frames dropped %u " + "buffer level %u", + GST_TIME_ARGS(transfer.acTransferStatus.acFrameStamp.acFrameTime * + 100), + transfer.acTransferStatus.acFrameStamp.acCurrentFrame, + GST_TIME_ARGS( + transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime * 100), + transfer.acTransferStatus.acFramesProcessed, + transfer.acTransferStatus.acFramesDropped, + transfer.acTransferStatus.acBufferLevel); + + // Trivial drift calculation + // + // TODO: Should probably take averages over a timespan (say 1 minute) into + // a ringbuffer and calculate a linear regression over them + // FIXME: Add some compensation by dropping/duplicating frames as needed + // but make this configurable + if (frames_renderded_start_time == GST_CLOCK_TIME_NONE && + transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime != 0 && + transfer.acTransferStatus.acFramesProcessed + + transfer.acTransferStatus.acFramesDropped > + self->queue_size && + clock) { + frames_renderded_start = transfer.acTransferStatus.acFramesProcessed + + transfer.acTransferStatus.acFramesDropped; + + GstClockTime now_gst = gst_clock_get_time(clock); + GstClockTime now_sys = g_get_real_time() * 1000; + GstClockTime render_time = + transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime * 100; + + if (render_time < now_sys) { + frames_renderded_start_time = now_gst - (now_sys - render_time); + } + } + + if (clock && frames_renderded_start_time != GST_CLOCK_TIME_NONE) { + GstClockTime now_gst = gst_clock_get_time(clock); + GstClockTime now_sys = g_get_real_time() * 1000; + GstClockTime render_time = + transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime * 100; + + GstClockTime sys_diff; + if (now_sys > render_time) { + sys_diff = now_sys - render_time; + } else { + sys_diff = 0; + } + + GstClockTime diff = now_gst - frames_renderded_start_time; + if (sys_diff < diff) diff -= sys_diff; + + guint64 frames_rendered = (transfer.acTransferStatus.acFramesProcessed + + transfer.acTransferStatus.acFramesDropped) - + frames_renderded_start; + guint64 frames_produced = + gst_util_uint64_scale(diff, self->configured_info.fps_n, + self->configured_info.fps_d * GST_SECOND); + gdouble fps_rendered = ((gdouble)frames_rendered * GST_SECOND) / diff; + + GST_TRACE_OBJECT(self, + "Frames rendered %" G_GUINT64_FORMAT + ", frames produced %" G_GUINT64_FORMAT + ", FPS rendered %lf", + frames_rendered, frames_produced, fps_rendered); + } + g_mutex_lock(&self->queue_lock); } else { g_mutex_unlock(&self->queue_lock); From 12380026e6ba4cf600d33ffb90d535d298b315b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 9 Feb 2021 15:08:26 +0200 Subject: [PATCH 05/73] Simplify configuration of timecode index and embedded audio source --- gstajacommon.cpp | 45 ++------------- gstajacommon.h | 36 +----------- gstajasink.cpp | 136 +++++++++++++++++++++----------------------- gstajasink.h | 1 + gstajasrc.cpp | 143 ++++++++--------------------------------------- gstajasrc.h | 2 +- 6 files changed, 94 insertions(+), 269 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index bafdb6f8fe..99b064ca28 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -550,50 +550,13 @@ GType gst_aja_audio_source_get_type(void) { return (GType)id; } -GType gst_aja_embedded_audio_input_get_type(void) { - static gsize id = 0; - static const GEnumValue modes[] = { - {GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO, "auto", "Auto"}, - 
{GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_1, "video-1", "Video 1"}, - {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_2, "video-2", "Video 2"}, - {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_3, "video-3", "Video 3"}, - {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_4, "video-4", "Video 4"}, - {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_5, "video-5", "Video 5"}, - {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_6, "video-6", "Video 6"}, - {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_7, "video-7", "Video 7"}, - {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_8, "video-8", "Video 8"}, - {0, NULL, NULL}}; - - if (g_once_init_enter(&id)) { - GType tmp = g_enum_register_static("GstAjaEmbeddedAudioInput", modes); - g_once_init_leave(&id, tmp); - } - - return (GType)id; -} - GType gst_aja_timecode_index_get_type(void) { static gsize id = 0; static const GEnumValue modes[] = { - {GST_AJA_TIMECODE_INDEX_AUTO, "auto", "Auto"}, - {GST_AJA_TIMECODE_INDEX_SDI1, "sdi-1", "SDI 1"}, - {GST_AJA_TIMECODE_INDEX_SDI2, "sdi-2", "SDI 2"}, - {GST_AJA_TIMECODE_INDEX_SDI3, "sdi-3", "SDI 3"}, - {GST_AJA_TIMECODE_INDEX_SDI4, "sdi-4", "SDI 4"}, - {GST_AJA_TIMECODE_INDEX_SDI5, "sdi-5", "SDI 5"}, - {GST_AJA_TIMECODE_INDEX_SDI6, "sdi-6", "SDI 6"}, - {GST_AJA_TIMECODE_INDEX_SDI7, "sdi-7", "SDI 7"}, - {GST_AJA_TIMECODE_INDEX_SDI8, "sdi-8", "SDI 8"}, - {GST_AJA_TIMECODE_INDEX_SDI1_LTC, "sdi-1-ltc", "SDI 1 LTC"}, - {GST_AJA_TIMECODE_INDEX_SDI2_LTC, "sdi-2-ltc", "SDI 2 LTC"}, - {GST_AJA_TIMECODE_INDEX_SDI3_LTC, "sdi-3-ltc", "SDI 3 LTC"}, - {GST_AJA_TIMECODE_INDEX_SDI4_LTC, "sdi-4-ltc", "SDI 4 LTC"}, - {GST_AJA_TIMECODE_INDEX_SDI5_LTC, "sdi-5-ltc", "SDI 5 LTC"}, - {GST_AJA_TIMECODE_INDEX_SDI6_LTC, "sdi-6-ltc", "SDI 6 LTC"}, - {GST_AJA_TIMECODE_INDEX_SDI7_LTC, "sdi-7-ltc", "SDI 7 LTC"}, - {GST_AJA_TIMECODE_INDEX_SDI8_LTC, "sdi-8-ltc", "SDI 8 LTC"}, - {GST_AJA_TIMECODE_INDEX_LTC1, "ltc-1", "LTC 1"}, - {GST_AJA_TIMECODE_INDEX_LTC2, "ltc-2", "LTC 2"}, + {GST_AJA_TIMECODE_INDEX_VITC, "vitc", "Embedded SDI VITC"}, + {GST_AJA_TIMECODE_INDEX_VITC, "atc-ltc", "Embedded SDI ATC LTC"}, + {GST_AJA_TIMECODE_INDEX_LTC1, "ltc-1", "Analog LTC 1"}, + {GST_AJA_TIMECODE_INDEX_LTC2, "ltc-2", "Analog LTC 2"}, {0, NULL, NULL}}; if (g_once_init_enter(&id)) { diff --git a/gstajacommon.h b/gstajacommon.h index e32fe09a46..af646e4d27 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -218,40 +218,8 @@ G_GNUC_INTERNAL GType gst_aja_audio_source_get_type(void); typedef enum { - GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO, - GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_1, - GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_2, - GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_3, - GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_4, - GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_5, - GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_6, - GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_7, - GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_8, -} GstAjaEmbeddedAudioInput; - -#define GST_TYPE_AJA_EMBEDDED_AUDIO_INPUT \ - (gst_aja_embedded_audio_input_get_type()) -G_GNUC_INTERNAL -GType gst_aja_embedded_audio_input_get_type(void); - -typedef enum { - GST_AJA_TIMECODE_INDEX_AUTO, - GST_AJA_TIMECODE_INDEX_SDI1, - GST_AJA_TIMECODE_INDEX_SDI2, - GST_AJA_TIMECODE_INDEX_SDI3, - GST_AJA_TIMECODE_INDEX_SDI4, - GST_AJA_TIMECODE_INDEX_SDI5, - GST_AJA_TIMECODE_INDEX_SDI6, - GST_AJA_TIMECODE_INDEX_SDI7, - GST_AJA_TIMECODE_INDEX_SDI8, - GST_AJA_TIMECODE_INDEX_SDI1_LTC, - GST_AJA_TIMECODE_INDEX_SDI2_LTC, - GST_AJA_TIMECODE_INDEX_SDI3_LTC, - GST_AJA_TIMECODE_INDEX_SDI4_LTC, - GST_AJA_TIMECODE_INDEX_SDI5_LTC, - GST_AJA_TIMECODE_INDEX_SDI6_LTC, - GST_AJA_TIMECODE_INDEX_SDI7_LTC, - GST_AJA_TIMECODE_INDEX_SDI8_LTC, + GST_AJA_TIMECODE_INDEX_VITC, + 
GST_AJA_TIMECODE_INDEX_ATC_LTC, GST_AJA_TIMECODE_INDEX_LTC1, GST_AJA_TIMECODE_INDEX_LTC2, } GstAjaTimecodeIndex; diff --git a/gstajasink.cpp b/gstajasink.cpp index fdd598fb4e..ad2275ba5a 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -35,7 +35,7 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_sink_debug); #define DEFAULT_CHANNEL (::NTV2_CHANNEL1) #define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) #define DEFAULT_OUTPUT_DESTINATION (GST_AJA_OUTPUT_DESTINATION_AUTO) -#define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_AUTO) +#define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_AUTO) #define DEFAULT_QUEUE_SIZE (16) #define DEFAULT_OUTPUT_CPU_CORE (G_MAXUINT) @@ -420,6 +420,11 @@ static gboolean gst_aja_sink_stop(GstAjaSink *self) { gst_clear_object(&self->audio_buffer_pool); } + if (self->tc_indexes) { + delete self->tc_indexes; + self->tc_indexes = NULL; + } + GST_DEBUG_OBJECT(self, "Stopped"); return TRUE; @@ -689,33 +694,66 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { NTV2OutputCrosspointID framebuffer_id = ::GetFrameBufferOutputXptFromChannel(self->channel, false, false); + NTV2TCIndex tc_indexes_vitc[2] = {::NTV2_TCINDEX_INVALID, + ::NTV2_TCINDEX_INVALID}; + NTV2TCIndex tc_index_atc_ltc = ::NTV2_TCINDEX_INVALID; NTV2InputCrosspointID output_destination_id; switch (self->output_destination) { case GST_AJA_OUTPUT_DESTINATION_AUTO: + tc_indexes_vitc[0] = + ::NTV2ChannelToTimecodeIndex(self->channel, false, false); + tc_indexes_vitc[1] = + ::NTV2ChannelToTimecodeIndex(self->channel, false, true); + tc_index_atc_ltc = + ::NTV2ChannelToTimecodeIndex(self->channel, false, true); output_destination_id = ::GetSDIOutputInputXpt(self->channel, false); break; case GST_AJA_OUTPUT_DESTINATION_SDI1: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI1; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI1_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI1_LTC; output_destination_id = ::NTV2_XptSDIOut1Input; break; case GST_AJA_OUTPUT_DESTINATION_SDI2: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI2; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI2_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI2_LTC; output_destination_id = ::NTV2_XptSDIOut2Input; break; case GST_AJA_OUTPUT_DESTINATION_SDI3: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI3; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI3_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI3_LTC; output_destination_id = ::NTV2_XptSDIOut3Input; break; case GST_AJA_OUTPUT_DESTINATION_SDI4: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI4; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI4_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI4_LTC; output_destination_id = ::NTV2_XptSDIOut4Input; break; case GST_AJA_OUTPUT_DESTINATION_SDI5: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI5; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI5_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI5_LTC; output_destination_id = ::NTV2_XptSDIOut5Input; break; case GST_AJA_OUTPUT_DESTINATION_SDI6: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI6; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI6_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI6_LTC; output_destination_id = ::NTV2_XptSDIOut6Input; break; case GST_AJA_OUTPUT_DESTINATION_SDI7: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI7; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI7_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI7_LTC; output_destination_id = ::NTV2_XptSDIOut7Input; break; case GST_AJA_OUTPUT_DESTINATION_SDI8: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI8; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI8_2; + tc_index_atc_ltc = 
::NTV2_TCINDEX_SDI8_LTC; output_destination_id = ::NTV2_XptSDIOut8Input; break; case GST_AJA_OUTPUT_DESTINATION_ANALOG: @@ -729,6 +767,29 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { break; } + if (!self->tc_indexes) self->tc_indexes = new NTV2TCIndexes; + + switch (self->timecode_index) { + case GST_AJA_TIMECODE_INDEX_VITC: + self->tc_indexes->insert(tc_indexes_vitc[0]); + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) + self->tc_indexes->insert(tc_indexes_vitc[1]); + break; + case GST_AJA_TIMECODE_INDEX_ATC_LTC: + self->tc_indexes->insert(tc_index_atc_ltc); + break; + case GST_AJA_TIMECODE_INDEX_LTC1: + self->tc_indexes->insert(::NTV2_TCINDEX_LTC1); + break; + case GST_AJA_TIMECODE_INDEX_LTC2: + self->tc_indexes->insert(::NTV2_TCINDEX_LTC2); + break; + default: + g_assert_not_reached(); + break; + } + // Need to remove old routes for the output and framebuffer we're going to use NTV2ActualConnections connections = router.GetConnections(); @@ -1154,75 +1215,6 @@ restart: GST_DEBUG_OBJECT(self, "Starting playing"); g_mutex_unlock(&self->queue_lock); - NTV2TCIndexes tc_indexes; - switch (self->timecode_index) { - case GST_AJA_TIMECODE_INDEX_AUTO: - tc_indexes.insert(::NTV2ChannelToTimecodeIndex(self->channel, false)); - tc_indexes.insert(::NTV2ChannelToTimecodeIndex(self->channel, true)); - if (self->configured_info.interlace_mode != - GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) - tc_indexes.insert( - ::NTV2ChannelToTimecodeIndex(self->channel, false, true)); - break; - case GST_AJA_TIMECODE_INDEX_SDI1: - tc_indexes.insert(::NTV2_TCINDEX_SDI1); - break; - case GST_AJA_TIMECODE_INDEX_SDI2: - tc_indexes.insert(::NTV2_TCINDEX_SDI2); - break; - case GST_AJA_TIMECODE_INDEX_SDI3: - tc_indexes.insert(::NTV2_TCINDEX_SDI3); - break; - case GST_AJA_TIMECODE_INDEX_SDI4: - tc_indexes.insert(::NTV2_TCINDEX_SDI4); - break; - case GST_AJA_TIMECODE_INDEX_SDI5: - tc_indexes.insert(::NTV2_TCINDEX_SDI5); - break; - case GST_AJA_TIMECODE_INDEX_SDI6: - tc_indexes.insert(::NTV2_TCINDEX_SDI6); - break; - case GST_AJA_TIMECODE_INDEX_SDI7: - tc_indexes.insert(::NTV2_TCINDEX_SDI7); - break; - case GST_AJA_TIMECODE_INDEX_SDI8: - tc_indexes.insert(::NTV2_TCINDEX_SDI8); - break; - case GST_AJA_TIMECODE_INDEX_SDI1_LTC: - tc_indexes.insert(::NTV2_TCINDEX_SDI1_LTC); - break; - case GST_AJA_TIMECODE_INDEX_SDI2_LTC: - tc_indexes.insert(::NTV2_TCINDEX_SDI2_LTC); - break; - case GST_AJA_TIMECODE_INDEX_SDI3_LTC: - tc_indexes.insert(::NTV2_TCINDEX_SDI3_LTC); - break; - case GST_AJA_TIMECODE_INDEX_SDI4_LTC: - tc_indexes.insert(::NTV2_TCINDEX_SDI4_LTC); - break; - case GST_AJA_TIMECODE_INDEX_SDI5_LTC: - tc_indexes.insert(::NTV2_TCINDEX_SDI5_LTC); - break; - case GST_AJA_TIMECODE_INDEX_SDI6_LTC: - tc_indexes.insert(::NTV2_TCINDEX_SDI6_LTC); - break; - case GST_AJA_TIMECODE_INDEX_SDI7_LTC: - tc_indexes.insert(::NTV2_TCINDEX_SDI7_LTC); - break; - case GST_AJA_TIMECODE_INDEX_SDI8_LTC: - tc_indexes.insert(::NTV2_TCINDEX_SDI8_LTC); - break; - case GST_AJA_TIMECODE_INDEX_LTC1: - tc_indexes.insert(::NTV2_TCINDEX_LTC1); - break; - case GST_AJA_TIMECODE_INDEX_LTC2: - tc_indexes.insert(::NTV2_TCINDEX_LTC2); - break; - default: - g_assert_not_reached(); - break; - } - { // Make sure to globally lock here as the routing settings and others are // global shared state @@ -1347,10 +1339,10 @@ restart: item.audio_buffer ? 
item.audio_map.size : 0); // Set timecodes if provided by upstream - if (item.tc.IsValid() && item.tc.fDBB != 0xffffffff) { + if (item.tc.IsValid() && item.tc.fDBB != 0xffffffff && self->tc_indexes) { NTV2TimeCodes timecodes; - for (const auto &tc_index : tc_indexes) { + for (const auto &tc_index : *self->tc_indexes) { timecodes[tc_index] = item.tc; } transfer.SetOutputTimeCodes(timecodes); diff --git a/gstajasink.h b/gstajasink.h index 95c7f3aca6..b490f4e58e 100644 --- a/gstajasink.h +++ b/gstajasink.h @@ -78,6 +78,7 @@ struct _GstAjaSink { NTV2AudioSystem audio_system; NTV2VideoFormat video_format; guint32 f2_start_line; + NTV2TCIndexes *tc_indexes; GstCaps *configured_caps; GstVideoInfo configured_info; diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 6e64b9b660..39f71ca6a7 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -38,8 +38,7 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); #define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) #define DEFAULT_INPUT_SOURCE (GST_AJA_INPUT_SOURCE_AUTO) #define DEFAULT_AUDIO_SOURCE (GST_AJA_AUDIO_SOURCE_EMBEDDED) -#define DEFAULT_EMBEDDED_AUDIO_INPUT (GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO) -#define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_AUTO) +#define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_FREERUN) #define DEFAULT_QUEUE_SIZE (16) #define DEFAULT_CAPTURE_CPU_CORE (G_MAXUINT) @@ -52,7 +51,6 @@ enum { PROP_AUDIO_SYSTEM, PROP_INPUT_SOURCE, PROP_AUDIO_SOURCE, - PROP_EMBEDDED_AUDIO_INPUT, PROP_TIMECODE_INDEX, PROP_REFERENCE_SOURCE, PROP_QUEUE_SIZE, @@ -169,15 +167,6 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) { (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT))); - g_object_class_install_property( - gobject_class, PROP_EMBEDDED_AUDIO_INPUT, - g_param_spec_enum( - "embedded-audio-input", "Embedded Audio Input", - "Embedded audio input to use", GST_TYPE_AJA_EMBEDDED_AUDIO_INPUT, - DEFAULT_EMBEDDED_AUDIO_INPUT, - (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | - G_PARAM_CONSTRUCT))); - g_object_class_install_property( gobject_class, PROP_TIMECODE_INDEX, g_param_spec_enum( @@ -239,7 +228,6 @@ static void gst_aja_src_init(GstAjaSrc *self) { self->audio_system_setting = DEFAULT_AUDIO_SYSTEM; self->input_source = DEFAULT_INPUT_SOURCE; self->audio_source = DEFAULT_AUDIO_SOURCE; - self->embedded_audio_input = DEFAULT_EMBEDDED_AUDIO_INPUT; self->timecode_index = DEFAULT_TIMECODE_INDEX; self->reference_source = DEFAULT_REFERENCE_SOURCE; self->capture_cpu_core = DEFAULT_CAPTURE_CPU_CORE; @@ -277,10 +265,6 @@ void gst_aja_src_set_property(GObject *object, guint property_id, case PROP_AUDIO_SOURCE: self->audio_source = (GstAjaAudioSource)g_value_get_enum(value); break; - case PROP_EMBEDDED_AUDIO_INPUT: - self->embedded_audio_input = - (GstAjaEmbeddedAudioInput)g_value_get_enum(value); - break; case PROP_TIMECODE_INDEX: self->timecode_index = (GstAjaTimecodeIndex)g_value_get_enum(value); break; @@ -322,9 +306,6 @@ void gst_aja_src_get_property(GObject *object, guint property_id, GValue *value, case PROP_AUDIO_SOURCE: g_value_set_enum(value, self->audio_source); break; - case PROP_EMBEDDED_AUDIO_INPUT: - g_value_set_enum(value, self->embedded_audio_input); - break; case PROP_TIMECODE_INDEX: g_value_set_enum(value, self->timecode_index); break; @@ -683,43 +664,9 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { break; } - NTV2EmbeddedAudioInput embedded_audio_input; - switch (self->embedded_audio_input) { - case 
GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO: - embedded_audio_input = - ::NTV2InputSourceToEmbeddedAudioInput(input_source); - break; - case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_1: - embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_1; - break; - case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_2: - embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_2; - break; - case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_3: - embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_3; - break; - case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_4: - embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_4; - break; - case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_5: - embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_5; - break; - case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_6: - embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_6; - break; - case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_7: - embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_7; - break; - case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO_8: - embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_8; - break; - default: - g_assert_not_reached(); - break; - } - self->device->device->SetAudioSystemInputSource( - self->audio_system, audio_source, embedded_audio_input); + self->audio_system, audio_source, + ::NTV2InputSourceToEmbeddedAudioInput(input_source)); self->configured_audio_channels = ::NTV2DeviceGetMaxAudioChannels(self->device_id); self->device->device->SetNumberAudioChannels( @@ -778,6 +725,24 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { (int)reference_source); self->device->device->SetReference(reference_source); + + switch (self->timecode_index) { + case GST_AJA_TIMECODE_INDEX_VITC: + self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, false); + break; + case GST_AJA_TIMECODE_INDEX_ATC_LTC: + self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, true); + break; + case GST_AJA_TIMECODE_INDEX_LTC1: + self->tc_index = ::NTV2_TCINDEX_LTC1; + break; + case GST_AJA_TIMECODE_INDEX_LTC2: + self->tc_index = ::NTV2_TCINDEX_LTC2; + break; + default: + g_assert_not_reached(); + break; + } } guint video_buffer_size = ::GetVideoActiveSize( @@ -1182,70 +1147,6 @@ restart: gst_clear_object(&clock); clock = gst_element_get_clock(GST_ELEMENT_CAST(self)); - NTV2TCIndex tc_index; - switch (self->timecode_index) { - case GST_AJA_TIMECODE_INDEX_AUTO: - tc_index = ::NTV2ChannelToTimecodeIndex(self->channel, false); - break; - case GST_AJA_TIMECODE_INDEX_SDI1: - tc_index = ::NTV2_TCINDEX_SDI1; - break; - case GST_AJA_TIMECODE_INDEX_SDI2: - tc_index = ::NTV2_TCINDEX_SDI2; - break; - case GST_AJA_TIMECODE_INDEX_SDI3: - tc_index = ::NTV2_TCINDEX_SDI3; - break; - case GST_AJA_TIMECODE_INDEX_SDI4: - tc_index = ::NTV2_TCINDEX_SDI4; - break; - case GST_AJA_TIMECODE_INDEX_SDI5: - tc_index = ::NTV2_TCINDEX_SDI5; - break; - case GST_AJA_TIMECODE_INDEX_SDI6: - tc_index = ::NTV2_TCINDEX_SDI6; - break; - case GST_AJA_TIMECODE_INDEX_SDI7: - tc_index = ::NTV2_TCINDEX_SDI7; - break; - case GST_AJA_TIMECODE_INDEX_SDI8: - tc_index = ::NTV2_TCINDEX_SDI8; - break; - case GST_AJA_TIMECODE_INDEX_SDI1_LTC: - tc_index = ::NTV2_TCINDEX_SDI1_LTC; - break; - case GST_AJA_TIMECODE_INDEX_SDI2_LTC: - tc_index = ::NTV2_TCINDEX_SDI2_LTC; - break; - case GST_AJA_TIMECODE_INDEX_SDI3_LTC: - tc_index = ::NTV2_TCINDEX_SDI3_LTC; - break; - case GST_AJA_TIMECODE_INDEX_SDI4_LTC: - tc_index = ::NTV2_TCINDEX_SDI4_LTC; - break; - case GST_AJA_TIMECODE_INDEX_SDI5_LTC: - tc_index = ::NTV2_TCINDEX_SDI5_LTC; - break; - case GST_AJA_TIMECODE_INDEX_SDI6_LTC: - tc_index = 
::NTV2_TCINDEX_SDI6_LTC; - break; - case GST_AJA_TIMECODE_INDEX_SDI7_LTC: - tc_index = ::NTV2_TCINDEX_SDI7_LTC; - break; - case GST_AJA_TIMECODE_INDEX_SDI8_LTC: - tc_index = ::NTV2_TCINDEX_SDI8_LTC; - break; - case GST_AJA_TIMECODE_INDEX_LTC1: - tc_index = ::NTV2_TCINDEX_LTC1; - break; - case GST_AJA_TIMECODE_INDEX_LTC2: - tc_index = ::NTV2_TCINDEX_LTC2; - break; - default: - g_assert_not_reached(); - break; - } - g_mutex_lock(&self->queue_lock); while (self->playing && !self->shutdown) { AUTOCIRCULATE_STATUS status; @@ -1363,7 +1264,7 @@ restart: NTV2_RP188 time_code; transfer.acTransferStatus.acFrameStamp.GetInputTimeCode(time_code, - tc_index); + self->tc_index); gint64 frame_time = transfer.acTransferStatus.acFrameStamp.acFrameTime; gint64 now_sys = g_get_real_time(); diff --git a/gstajasrc.h b/gstajasrc.h index cd37a060db..4baf959426 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -66,7 +66,6 @@ struct _GstAjaSrc { GstAjaVideoFormat video_format_setting; GstAjaInputSource input_source; GstAjaAudioSource audio_source; - GstAjaEmbeddedAudioInput embedded_audio_input; GstAjaTimecodeIndex timecode_index; GstAjaReferenceSource reference_source; guint queue_size; @@ -75,6 +74,7 @@ struct _GstAjaSrc { NTV2AudioSystem audio_system; NTV2VideoFormat video_format; guint32 f2_start_line; + NTV2TCIndex tc_index; GstCaps *configured_caps; GstVideoInfo configured_info; From 7ce3f2f400e88c3524af0d59894229b6ecf2b008 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 9 Feb 2021 15:46:48 +0200 Subject: [PATCH 06/73] Implement signal loss and frame drop detection in the source --- gstajasink.cpp | 22 ++++++++-------- gstajasrc.cpp | 69 ++++++++++++++++++++++++++++++++++++++++++++++++-- gstajasrc.h | 1 + 3 files changed, 79 insertions(+), 13 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index ad2275ba5a..679e7ac305 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -1173,8 +1173,8 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, static void output_thread_func(AJAThread *thread, void *data) { GstAjaSink *self = GST_AJA_SINK(data); GstClock *clock = NULL; - guint64 frames_renderded_start = G_MAXUINT64; - GstClockTime frames_renderded_start_time = GST_CLOCK_TIME_NONE; + guint64 frames_rendered_start = G_MAXUINT64; + GstClockTime frames_rendered_start_time = GST_CLOCK_TIME_NONE; guint64 frames_dropped_last = G_MAXUINT64; AUTOCIRCULATE_TRANSFER transfer; @@ -1236,8 +1236,8 @@ restart: gst_clear_object(&clock); clock = gst_element_get_clock(GST_ELEMENT_CAST(self)); - frames_renderded_start = G_MAXUINT64; - frames_renderded_start_time = GST_CLOCK_TIME_NONE; + frames_rendered_start = G_MAXUINT64; + frames_rendered_start_time = GST_CLOCK_TIME_NONE; frames_dropped_last = G_MAXUINT64; transfer.acANCBuffer.Allocate(2048); @@ -1410,14 +1410,14 @@ restart: // a ringbuffer and calculate a linear regression over them // FIXME: Add some compensation by dropping/duplicating frames as needed // but make this configurable - if (frames_renderded_start_time == GST_CLOCK_TIME_NONE && + if (frames_rendered_start_time == GST_CLOCK_TIME_NONE && transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime != 0 && transfer.acTransferStatus.acFramesProcessed + transfer.acTransferStatus.acFramesDropped > self->queue_size && clock) { - frames_renderded_start = transfer.acTransferStatus.acFramesProcessed + - transfer.acTransferStatus.acFramesDropped; + frames_rendered_start = transfer.acTransferStatus.acFramesProcessed + + transfer.acTransferStatus.acFramesDropped; GstClockTime now_gst = 
gst_clock_get_time(clock); GstClockTime now_sys = g_get_real_time() * 1000; @@ -1425,11 +1425,11 @@ restart: transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime * 100; if (render_time < now_sys) { - frames_renderded_start_time = now_gst - (now_sys - render_time); + frames_rendered_start_time = now_gst - (now_sys - render_time); } } - if (clock && frames_renderded_start_time != GST_CLOCK_TIME_NONE) { + if (clock && frames_rendered_start_time != GST_CLOCK_TIME_NONE) { GstClockTime now_gst = gst_clock_get_time(clock); GstClockTime now_sys = g_get_real_time() * 1000; GstClockTime render_time = @@ -1442,12 +1442,12 @@ restart: sys_diff = 0; } - GstClockTime diff = now_gst - frames_renderded_start_time; + GstClockTime diff = now_gst - frames_rendered_start_time; if (sys_diff < diff) diff -= sys_diff; guint64 frames_rendered = (transfer.acTransferStatus.acFramesProcessed + transfer.acTransferStatus.acFramesDropped) - - frames_renderded_start; + frames_rendered_start; guint64 frames_produced = gst_util_uint64_scale(diff, self->configured_info.fps_n, self->configured_info.fps_d * GST_SECOND); diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 39f71ca6a7..3eb8e7e6bf 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -573,6 +573,8 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { break; } + self->configured_input_source = input_source; + // Need to remove old routes for the output and framebuffer we're going to // use NTV2ActualConnections connections = router.GetConnections(); @@ -1088,6 +1090,8 @@ static void capture_thread_func(AJAThread *thread, void *data) { GstAjaSrc *self = GST_AJA_SRC(data); GstClock *clock = NULL; AUTOCIRCULATE_TRANSFER transfer; + guint64 frames_dropped_last = G_MAXUINT64; + gboolean have_signal = TRUE; if (self->capture_cpu_core != G_MAXUINT) { cpu_set_t mask; @@ -1147,8 +1151,50 @@ restart: gst_clear_object(&clock); clock = gst_element_get_clock(GST_ELEMENT_CAST(self)); + frames_dropped_last = G_MAXUINT64; + have_signal = TRUE; + g_mutex_lock(&self->queue_lock); while (self->playing && !self->shutdown) { + // Check for valid signal first + NTV2VideoFormat current_video_format = + self->device->device->GetInputVideoFormat( + self->configured_input_source); + if (current_video_format == ::NTV2_FORMAT_UNKNOWN) { + GST_DEBUG_OBJECT(self, "No signal, waiting"); + g_mutex_unlock(&self->queue_lock); + self->device->device->WaitForInputVerticalInterrupt(self->channel); + frames_dropped_last = G_MAXUINT64; + if (have_signal) { + GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, ("Signal lost"), + ("No input source was detected")); + have_signal = FALSE; + } + g_mutex_lock(&self->queue_lock); + continue; + } else if (current_video_format != self->video_format) { + // TODO: Handle GST_AJA_VIDEO_FORMAT_AUTO here + GST_DEBUG_OBJECT(self, + "Different input format %u than configured %u, waiting", + current_video_format, self->video_format); + g_mutex_unlock(&self->queue_lock); + self->device->device->WaitForInputVerticalInterrupt(self->channel); + frames_dropped_last = G_MAXUINT64; + if (have_signal) { + GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, ("Signal lost"), + ("Different input source was detected")); + have_signal = FALSE; + } + g_mutex_lock(&self->queue_lock); + continue; + } + + if (!have_signal) { + GST_ELEMENT_INFO(GST_ELEMENT(self), RESOURCE, READ, ("Signal recovered"), + ("Input source detected")); + have_signal = TRUE; + } + AUTOCIRCULATE_STATUS status; self->device->device->AutoCirculateGetStatus(self->channel, status); @@ -1169,8 +1215,27 @@ 
restart: status.acRDTSCCurrentTime, status.acFramesProcessed, status.acFramesDropped, status.acBufferLevel); - // TODO: Drop detection - // TODO: Signal loss detection + if (frames_dropped_last == G_MAXUINT64) { + frames_dropped_last = status.acFramesDropped; + } else if (frames_dropped_last < status.acFramesDropped) { + GST_WARNING_OBJECT(self, "Dropped %" G_GUINT64_FORMAT " frames", + status.acFramesDropped - frames_dropped_last); + + GstClockTime timestamp = + gst_util_uint64_scale(status.acFramesProcessed + frames_dropped_last, + self->configured_info.fps_n, + self->configured_info.fps_d * GST_SECOND); + GstClockTime timestamp_end = gst_util_uint64_scale( + status.acFramesProcessed + status.acFramesDropped, + self->configured_info.fps_n, + self->configured_info.fps_d * GST_SECOND); + GstMessage *msg = gst_message_new_qos( + GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, + timestamp, timestamp_end - timestamp); + gst_element_post_message(GST_ELEMENT_CAST(self), msg); + + frames_dropped_last = status.acFramesDropped; + } if (status.IsRunning() && status.acBufferLevel > 1) { GstBuffer *video_buffer = NULL; diff --git a/gstajasrc.h b/gstajasrc.h index 4baf959426..85e1243d65 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -73,6 +73,7 @@ struct _GstAjaSrc { NTV2AudioSystem audio_system; NTV2VideoFormat video_format; + NTV2InputSource configured_input_source; guint32 f2_start_line; NTV2TCIndex tc_index; From e8584abd4ecfbdc01c83edc4de9fa5b226434b0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 9 Feb 2021 16:22:18 +0200 Subject: [PATCH 07/73] Add README.md --- README.md | 197 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 197 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000000..41d737d93e --- /dev/null +++ b/README.md @@ -0,0 +1,197 @@ +# GStreamer AJA source/sink plugin + +[GStreamer](https://gstreamer.freedesktop.org/) plugin for +[AJA](https://www.aja.com) capture and output cards. + +This plugin requires the NTV2 SDK version 16 or newer. + +## Example usage + +Capture 1080p30 audio/video and display it locally + +```sh +gst-launch-1.0 ajasrc video-format=1080p-3000 ! ajasrcdemux name=d \ + d.video ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=1000000000 ! videoconvert ! autovideosink \ + d.audio ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=1000000000 ! audioconvert ! audioresample ! autoaudiosink +``` + +Output a 1080p2997 test audio/video stream + +```sh +gst-launch-1.0 videotestsrc pattern=ball ! video/x-raw,format=v210,width=1920,height=1080,framerate=30000/1001,interlace-mode=progressive ! timeoverlay ! timecodestamper ! combiner.video \ + audiotestsrc freq=440 ! audio/x-raw,format=S32LE,rate=48000,channels=16 ! audiobuffersplit output-buffer-duration=1/30 ! combiner.audio \ + ajasinkcombiner name=combiner ! ajasink channel=0 +``` + +Capture 1080p30 audio/video and directly output it again on the same card + +```sh +gst-launch-1.0 ajasrc video-format=1080p-3000 channel=1 input-source=sdi-1 audio-system=2 ! ajasrcdemux name=d \ + d.video ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=1000000000 ! c.video \ + d.audio ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=1000000000 ! c.audio \ + ajasinkcombiner name=c ! 
ajasink channel=0 reference-source=input-1 +``` + +## Configuration + +### Source + +``` + audio-source : Audio source to use + flags: readable, writable + Enum "GstAjaAudioSource" Default: 0, "Embedded" + (0): Embedded - embedded + (1): AES - aes + (2): Analog - analog + (3): HDMI - hdmi + (4): Microphone - mic + audio-system : Audio system to use + flags: readable, writable + Enum "GstAjaAudioSystem" Default: 0, "Auto (based on selected channel)" + (0): Auto (based on selected channel) - auto + (1): Audio system 1 - 1 + (2): Audio system 2 - 2 + (3): Audio system 3 - 3 + (4): Audio system 4 - 4 + (5): Audio system 5 - 5 + (6): Audio system 6 - 6 + (7): Audio system 7 - 7 + (8): Audio system 8 - 8 + capture-cpu-core : Sets the affinity of the capture thread to this CPU core (-1=disabled) + flags: readable, writable + Unsigned Integer. Range: 0 - 4294967295 Default: 4294967295 + channel : Channel to use + flags: readable, writable + Unsigned Integer. Range: 0 - 7 Default: 0 + device-identifier : Input device instance to use + flags: readable, writable + String. Default: "0" + input-source : Input source to use + flags: readable, writable + Enum "GstAjaInputSource" Default: 0, "Auto (based on selected channel)" + (0): Auto (based on selected channel) - auto + (1): Analog Input 1 - analog-1 + (6): SDI Input 1 - sdi-1 + (7): SDI Input 2 - sdi-2 + (8): SDI Input 3 - sdi-3 + (9): SDI Input 4 - sdi-4 + (10): SDI Input 5 - sdi-5 + (11): SDI Input 6 - sdi-6 + (12): SDI Input 7 - sdi-7 + (13): SDI Input 8 - sdi-8 + (2): HDMI Input 1 - hdmi-1 + (3): HDMI Input 2 - hdmi-2 + (4): HDMI Input 3 - hdmi-3 + (5): HDMI Input 4 - hdmi-4 + queue-size : Size of internal queue in number of video frames. Half of this is allocated as device buffers and equal to the latency. + flags: readable, writable + Unsigned Integer. 
Range: 1 - 2147483647 Default: 16 + reference-source : Reference source to use + flags: readable, writable + Enum "GstAjaReferenceSource" Default: 1, "Freerun" + (0): Auto - auto + (1): Freerun - freerun + (2): External - external + (3): SDI Input 1 - input-1 + (4): SDI Input 2 - input-2 + (5): SDI Input 3 - input-3 + (6): SDI Input 4 - input-4 + (7): SDI Input 5 - input-5 + (8): SDI Input 6 - input-6 + (9): SDI Input 7 - input-7 + (10): SDI Input 8 - input-8 + timecode-index : Timecode index to use + flags: readable, writable + Enum "GstAjaTimecodeIndex" Default: 0, "Embedded SDI ATC LTC" + (0): Embedded SDI VITC - vitc + (0): Embedded SDI ATC LTC - atc-ltc + (2): Analog LTC 1 - ltc-1 + (3): Analog LTC 2 - ltc-2 + video-format : Video format to use + flags: readable, writable + Enum "GstAjaVideoFormat" Default: 0, "1080i 5000" + (0): 1080i 5000 - 1080i-5000 + (1): 1080i 5994 - 1080i-5994 + (2): 1080i 6000 - 1080i-6000 + (3): 720p 5994 - 720p-5994 + (4): 720p 6000 - 720p-6000 + (5): 1080p 2997 - 1080p-2997 + (6): 1080p 3000 - 1080p-3000 + (7): 1080p 2500 - 1080p-2500 + (8): 1080p 2398 - 1080p-2398 + (9): 1080p 2400 - 1080p-2400 + (10): 720p 5000 - 720p-5000 + (11): 720p 2398 - 720p-2398 + (12): 720p 2500 - 720p-2500 + (13): 1080p 5000 A - 1080p-5000-a + (14): 1080p 5994 A - 1080p-5994-a + (15): 1080p 6000 A - 1080p-6000-a + (16): 625 5000 - 625-5000 + (17): 525 5994 - 525-5994 + (18): 525 2398 - 525-2398 + (19): 525 2400 - 525-2400 +``` + +### Sink + +``` + audio-system : Audio system to use + flags: readable, writable + Enum "GstAjaAudioSystem" Default: 0, "Auto (based on selected channel)" + (0): Auto (based on selected channel) - auto + (1): Audio system 1 - 1 + (2): Audio system 2 - 2 + (3): Audio system 3 - 3 + (4): Audio system 4 - 4 + (5): Audio system 5 - 5 + (6): Audio system 6 - 6 + (7): Audio system 7 - 7 + (8): Audio system 8 - 8 + channel : Channel to use + flags: readable, writable + Unsigned Integer. Range: 0 - 7 Default: 0 + device-identifier : Input device instance to use + flags: readable, writable + String. Default: "0" + output-cpu-core : Sets the affinity of the output thread to this CPU core (-1=disabled) + flags: readable, writable + Unsigned Integer. Range: 0 - 4294967295 Default: 4294967295 + output-destination : Output destination to use + flags: readable, writable + Enum "GstAjaOutputDestination" Default: 0, "Auto (based on selected channel)" + (0): Auto (based on selected channel) - auto + (1): Analog Output - analog + (2): SDI Output 1 - sdi-1 + (3): SDI Output 2 - sdi-2 + (4): SDI Output 3 - sdi-3 + (5): SDI Output 4 - sdi-4 + (6): SDI Output 5 - sdi-5 + (7): SDI Output 6 - sdi-6 + (8): SDI Output 7 - sdi-7 + (9): SDI Output 8 - sdi-8 + (10): HDMI Output - hdmi + queue-size : Size of internal queue in number of video frames. Half of this is allocated as device buffers and equal to the latency. + flags: readable, writable + Unsigned Integer. 
Range: 1 - 2147483647 Default: 16 + reference-source : Reference source to use + flags: readable, writable + Enum "GstAjaReferenceSource" Default: 0, "Auto" + (0): Auto - auto + (1): Freerun - freerun + (2): External - external + (3): SDI Input 1 - input-1 + (4): SDI Input 2 - input-2 + (5): SDI Input 3 - input-3 + (6): SDI Input 4 - input-4 + (7): SDI Input 5 - input-5 + (8): SDI Input 6 - input-6 + (9): SDI Input 7 - input-7 + (10): SDI Input 8 - input-8 + timecode-index : Timecode index to use + flags: readable, writable + Enum "GstAjaTimecodeIndex" Default: 0, "Embedded SDI ATC LTC" + (0): Embedded SDI VITC - vitc + (0): Embedded SDI ATC LTC - atc-ltc + (2): Analog LTC 1 - ltc-1 + (3): Analog LTC 2 - ltc-2 +``` From 18baa2a761d3ea6b8ff0934f295e044814a6e8c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 19 Feb 2021 14:45:25 +0200 Subject: [PATCH 08/73] Allocate twice the queue length for the ANC buffers in interlace modes --- gstajasrc.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 3eb8e7e6bf..92e34b0e85 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -772,8 +772,14 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { self->anc_buffer_pool = gst_buffer_pool_new(); config = gst_buffer_pool_get_config(self->anc_buffer_pool); - gst_buffer_pool_config_set_params(config, NULL, anc_buffer_size, - self->queue_size, 0); + gst_buffer_pool_config_set_params( + config, NULL, anc_buffer_size, + (self->configured_info.interlace_mode == + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE + ? 1 + : 2) * + self->queue_size, + 0); gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); gst_buffer_pool_set_config(self->anc_buffer_pool, config); gst_buffer_pool_set_active(self->anc_buffer_pool, TRUE); From fe248a23e43264bebf841bc32943f102877b6fb7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 19 Feb 2021 14:45:39 +0200 Subject: [PATCH 09/73] Fill ANC buffers from the GStreamer streaming thread and not the rendering thread --- gstajasink.cpp | 164 +++++++++++++++++++++++++++++++++++++------------ gstajasink.h | 1 + 2 files changed, 126 insertions(+), 39 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 679e7ac305..06df999fc8 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -64,7 +64,10 @@ typedef struct { GstBuffer *audio_buffer; GstMapInfo audio_map; NTV2_RP188 tc; - AJAAncillaryList *anc_packet_list; + GstBuffer *anc_buffer; + GstMapInfo anc_map; + GstBuffer *anc_buffer2; + GstMapInfo anc_map2; } QueueItem; static void gst_aja_sink_set_property(GObject *object, guint property_id, @@ -404,8 +407,13 @@ static gboolean gst_aja_sink_stop(GstAjaSink *self) { gst_buffer_unmap(item->audio_buffer, &item->audio_map); gst_buffer_unref(item->audio_buffer); } - if (item->anc_packet_list) { - delete item->anc_packet_list; + if (item->anc_buffer) { + gst_buffer_unmap(item->anc_buffer, &item->anc_map); + gst_buffer_unref(item->anc_buffer); + } + if (item->anc_buffer2) { + gst_buffer_unmap(item->anc_buffer2, &item->anc_map2); + gst_buffer_unref(item->anc_buffer2); } } } @@ -420,6 +428,11 @@ static gboolean gst_aja_sink_stop(GstAjaSink *self) { gst_clear_object(&self->audio_buffer_pool); } + if (self->anc_buffer_pool) { + gst_buffer_pool_set_active(self->anc_buffer_pool, FALSE); + gst_clear_object(&self->anc_buffer_pool); + } + if (self->tc_indexes) { delete self->tc_indexes; self->tc_indexes = NULL; @@ -876,8 +889,14 @@ static gboolean gst_aja_sink_event(GstBaseSink 
*bsink, GstEvent *event) { gst_buffer_unmap(item->audio_buffer, &item->audio_map); gst_buffer_unref(item->audio_buffer); } - if (item->anc_packet_list) { - delete item->anc_packet_list; + + if (item->anc_buffer) { + gst_buffer_unmap(item->anc_buffer, &item->anc_map); + gst_buffer_unref(item->anc_buffer); + } + if (item->anc_buffer2) { + gst_buffer_unmap(item->anc_buffer2, &item->anc_map2); + gst_buffer_unref(item->anc_buffer2); } } } @@ -929,7 +948,10 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, .audio_buffer = NULL, .audio_map = GST_MAP_INFO_INIT, .tc = NTV2_RP188(), - .anc_packet_list = NULL, + .anc_buffer = NULL, + .anc_map = GST_MAP_INFO_INIT, + .anc_buffer2 = NULL, + .anc_map2 = GST_MAP_INFO_INIT, }; guint video_buffer_size = ::GetVideoActiveSize( @@ -1078,6 +1100,8 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, item.tc.fDBB = 0xffffffff; } + AJAAncillaryList anc_packet_list; + // TODO: Handle AFD/Bar meta #if 0 if (bar_meta || afd_meta) { @@ -1094,12 +1118,12 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, AJAAncillaryData pkt; pkt.SetFromSMPTE334(NULL, 0, kAFDBARLocF1); - item.anc_packet_list->AddAncillaryData(pkt); + anc_packet_list.AddAncillaryData(pkt); if (self->configured_info.interlace_mode != GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { AJAAncillaryData pkt2; pkt.SetFromSMPTE334(NULL, 0, kAFDBARLocF2); - item.anc_packet_list->AddAncillaryData(pkt); + anc_packet_list.AddAncillaryData(pkt); } } #endif @@ -1109,8 +1133,6 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, while ( (caption_meta = (GstVideoCaptionMeta *)gst_buffer_iterate_meta_filtered( buffer, &iter, GST_VIDEO_CAPTION_META_API_TYPE))) { - if (!item.anc_packet_list) item.anc_packet_list = new AJAAncillaryList; - if (caption_meta->caption_type == GST_VIDEO_CAPTION_TYPE_CEA708_CDP) { const uint16_t kF1PktLineNumCEA708(9); const AJAAncillaryDataLocation kCEA708LocF1( @@ -1126,13 +1148,77 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, pkt.SetDataCoding(AJAAncillaryDataCoding_Digital); pkt.SetPayloadData(caption_meta->data, caption_meta->size); - item.anc_packet_list->AddAncillaryData(pkt); + anc_packet_list.AddAncillaryData(pkt); } else { GST_WARNING_OBJECT(self, "Unhandled caption type %d", caption_meta->caption_type); } } + if (!anc_packet_list.IsEmpty()) { + if (!self->anc_buffer_pool) { + self->anc_buffer_pool = gst_buffer_pool_new(); + GstStructure *config = gst_buffer_pool_get_config(self->anc_buffer_pool); + gst_buffer_pool_config_set_params( + config, NULL, 8 * 1024, + (self->configured_info.interlace_mode == + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE + ? 
1 + : 2) * + self->queue_size, + 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->anc_buffer_pool, config); + gst_buffer_pool_set_active(self->anc_buffer_pool, TRUE); + } + + flow_ret = gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, + &item.anc_buffer, NULL); + if (flow_ret != GST_FLOW_OK) { + gst_video_frame_unmap(&item.frame); + + if (item.audio_buffer) { + gst_buffer_unmap(item.audio_buffer, &item.audio_map); + gst_buffer_unref(item.audio_buffer); + } + + return flow_ret; + } + gst_buffer_map(item.anc_buffer, &item.anc_map, GST_MAP_READWRITE); + + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { + flow_ret = gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, + &item.anc_buffer2, NULL); + if (flow_ret != GST_FLOW_OK) { + gst_video_frame_unmap(&item.frame); + + if (item.audio_buffer) { + gst_buffer_unmap(item.audio_buffer, &item.audio_map); + gst_buffer_unref(item.audio_buffer); + } + + if (item.anc_buffer) { + gst_buffer_unmap(item.anc_buffer, &item.anc_map); + gst_buffer_unref(item.anc_buffer); + } + + return flow_ret; + } + gst_buffer_map(item.anc_buffer2, &item.anc_map2, GST_MAP_READWRITE); + } + + NTV2_POINTER anc_ptr1(item.anc_map.data, item.anc_map.size); + NTV2_POINTER anc_ptr2(item.anc_map2.data, item.anc_map2.size); + + anc_ptr1.Fill(ULWord(0)); + anc_ptr2.Fill(ULWord(0)); + anc_packet_list.GetTransmitData(anc_ptr1, anc_ptr2, + self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE, + self->f2_start_line); + } + g_mutex_lock(&self->queue_lock); while (gst_queue_array_get_length(self->queue) >= self->queue_size) { QueueItem *tmp = (QueueItem *)gst_queue_array_pop_head_struct(self->queue); @@ -1152,8 +1238,13 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, gst_buffer_unmap(tmp->audio_buffer, &tmp->audio_map); gst_buffer_unref(tmp->audio_buffer); } - if (tmp->anc_packet_list) { - delete tmp->anc_packet_list; + if (tmp->anc_buffer) { + gst_buffer_unmap(tmp->anc_buffer, &tmp->anc_map); + gst_buffer_unref(tmp->anc_buffer); + } + if (tmp->anc_buffer2) { + gst_buffer_unmap(tmp->anc_buffer2, &tmp->anc_map2); + gst_buffer_unref(tmp->anc_buffer2); } } } @@ -1240,11 +1331,6 @@ restart: frames_rendered_start_time = GST_CLOCK_TIME_NONE; frames_dropped_last = G_MAXUINT64; - transfer.acANCBuffer.Allocate(2048); - if (self->configured_info.interlace_mode != - GST_VIDEO_INTERLACE_MODE_INTERLEAVED) - transfer.acANCField2Buffer.Allocate(2048); - g_mutex_lock(&self->queue_lock); while (self->playing && !self->shutdown && !(self->draining && gst_queue_array_get_length(self->queue) == 0)) { @@ -1311,8 +1397,14 @@ restart: gst_buffer_unmap(item_p->audio_buffer, &item_p->audio_map); gst_buffer_unref(item_p->audio_buffer); } - if (item_p->anc_packet_list) { - delete item_p->anc_packet_list; + + if (item_p->anc_buffer) { + gst_buffer_unmap(item_p->anc_buffer, &item_p->anc_map); + gst_buffer_unref(item_p->anc_buffer); + } + if (item_p->anc_buffer2) { + gst_buffer_unmap(item_p->anc_buffer2, &item_p->anc_map2); + gst_buffer_unref(item_p->anc_buffer2); } } break; @@ -1349,24 +1441,12 @@ restart: } transfer.SetVideoBuffer( - (guint *)GST_VIDEO_FRAME_PLANE_DATA(&item.frame, 0), + (ULWord *)GST_VIDEO_FRAME_PLANE_DATA(&item.frame, 0), GST_VIDEO_FRAME_SIZE(&item.frame)); - if (item.audio_buffer) { - transfer.SetAudioBuffer((guint *)item.audio_map.data, - item.audio_map.size); - } - - // Clear VANC and fill in captions as needed - transfer.acANCBuffer.Fill(ULWord(0)); - 
transfer.acANCField2Buffer.Fill(ULWord(0)); - - if (item.anc_packet_list) { - item.anc_packet_list->GetTransmitData( - transfer.acANCBuffer, transfer.acANCField2Buffer, - self->configured_info.interlace_mode != - GST_VIDEO_INTERLACE_MODE_PROGRESSIVE, - self->f2_start_line); - } + transfer.SetAudioBuffer((ULWord *)item.audio_map.data, + item.audio_map.size); + transfer.SetAncBuffers((ULWord *)item.anc_map.data, item.anc_map.size, + (ULWord *)item.anc_map2.data, item.anc_map2.size); if (!self->device->device->AutoCirculateTransfer(self->channel, transfer)) { @@ -1380,8 +1460,14 @@ restart: gst_buffer_unref(item.audio_buffer); } - if (item.anc_packet_list) { - delete item.anc_packet_list; + if (item.anc_buffer) { + gst_buffer_unmap(item.anc_buffer, &item.anc_map); + gst_buffer_unref(item.anc_buffer); + } + + if (item.anc_buffer2) { + gst_buffer_unmap(item.anc_buffer2, &item.anc_map2); + gst_buffer_unref(item.anc_buffer2); } GST_TRACE_OBJECT( diff --git a/gstajasink.h b/gstajasink.h index b490f4e58e..705d7e894d 100644 --- a/gstajasink.h +++ b/gstajasink.h @@ -63,6 +63,7 @@ struct _GstAjaSink { // Only allocated on demand GstBufferPool *buffer_pool; GstBufferPool *audio_buffer_pool; + GstBufferPool *anc_buffer_pool; // Properties gchar *device_identifier; From e2e54c363179bb34fe716d4ba7aaca3d79337c95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 19 Feb 2021 15:57:52 +0200 Subject: [PATCH 10/73] Capture VANC from "TALL" video frames if firmware VANC extraction can't be used This is the case if the device does not support it, or if the input channel and SDI input are not the same. --- gstajasrc.cpp | 158 +++++++++++++++++++++++++++++++------------------- gstajasrc.h | 2 +- 2 files changed, 100 insertions(+), 60 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 92e34b0e85..5fcc1b5bef 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -498,16 +498,11 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) self->device->device->SetSDITransmitEnable(self->channel, false); - self->device->device->SetEnableVANCData(false, false, self->channel); - - CNTV2SignalRouter router; - - self->device->device->GetRouting(router); - // Always use the framebuffer associated with the channel NTV2InputCrosspointID framebuffer_id = ::GetFrameBufferInputXptFromChannel(self->channel, false); + NTV2VANCMode vanc_mode; NTV2InputSource input_source; NTV2OutputCrosspointID input_source_id; switch (self->input_source) { @@ -515,58 +510,74 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { input_source = ::NTV2ChannelToInputSource(self->channel); input_source_id = ::GetSDIInputOutputXptFromChannel(self->channel, false); + vanc_mode = ::NTV2DeviceCanDoCustomAnc(self->device_id) + ? 
::NTV2_VANCMODE_OFF + : ::NTV2_VANCMODE_TALL; break; case GST_AJA_INPUT_SOURCE_ANALOG1: input_source = ::NTV2_INPUTSOURCE_ANALOG1; input_source_id = ::NTV2_XptAnalogIn; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_INPUT_SOURCE_HDMI1: input_source = ::NTV2_INPUTSOURCE_HDMI1; input_source_id = ::NTV2_XptHDMIIn1; + vanc_mode = ::NTV2_VANCMODE_OFF; break; case GST_AJA_INPUT_SOURCE_HDMI2: input_source = ::NTV2_INPUTSOURCE_HDMI2; input_source_id = ::NTV2_XptHDMIIn2; + vanc_mode = ::NTV2_VANCMODE_OFF; break; case GST_AJA_INPUT_SOURCE_HDMI3: input_source = ::NTV2_INPUTSOURCE_HDMI3; input_source_id = ::NTV2_XptHDMIIn3; + vanc_mode = ::NTV2_VANCMODE_OFF; break; case GST_AJA_INPUT_SOURCE_HDMI4: input_source = ::NTV2_INPUTSOURCE_HDMI4; input_source_id = ::NTV2_XptHDMIIn4; + vanc_mode = ::NTV2_VANCMODE_OFF; break; case GST_AJA_INPUT_SOURCE_SDI1: input_source = ::NTV2_INPUTSOURCE_SDI1; input_source_id = ::NTV2_XptSDIIn1; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_INPUT_SOURCE_SDI2: input_source = ::NTV2_INPUTSOURCE_SDI2; input_source_id = ::NTV2_XptSDIIn2; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_INPUT_SOURCE_SDI3: input_source = ::NTV2_INPUTSOURCE_SDI3; input_source_id = ::NTV2_XptSDIIn3; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_INPUT_SOURCE_SDI4: input_source = ::NTV2_INPUTSOURCE_SDI4; input_source_id = ::NTV2_XptSDIIn4; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_INPUT_SOURCE_SDI5: input_source = ::NTV2_INPUTSOURCE_SDI5; input_source_id = ::NTV2_XptSDIIn5; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_INPUT_SOURCE_SDI6: input_source = ::NTV2_INPUTSOURCE_SDI6; input_source_id = ::NTV2_XptSDIIn6; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_INPUT_SOURCE_SDI7: input_source = ::NTV2_INPUTSOURCE_SDI7; input_source_id = ::NTV2_XptSDIIn7; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_INPUT_SOURCE_SDI8: input_source = ::NTV2_INPUTSOURCE_SDI8; input_source_id = ::NTV2_XptSDIIn8; + vanc_mode = ::NTV2_VANCMODE_TALL; break; default: g_assert_not_reached(); @@ -575,6 +586,15 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { self->configured_input_source = input_source; + self->vanc_mode = vanc_mode; + self->device->device->SetEnableVANCData(NTV2_IS_VANCMODE_TALL(vanc_mode), + NTV2_IS_VANCMODE_TALLER(vanc_mode), + self->channel); + + CNTV2SignalRouter router; + + self->device->device->GetRouting(router); + // Need to remove old routes for the output and framebuffer we're going to // use NTV2ActualConnections connections = router.GetConnections(); @@ -748,7 +768,7 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { } guint video_buffer_size = ::GetVideoActiveSize( - self->video_format, ::NTV2_FBF_10BIT_YCBCR, ::NTV2_VANCMODE_OFF); + self->video_format, ::NTV2_FBF_10BIT_YCBCR, self->vanc_mode); self->buffer_pool = gst_buffer_pool_new(); GstStructure *config = gst_buffer_pool_get_config(self->buffer_pool); @@ -770,19 +790,21 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { guint anc_buffer_size = 8 * 1024; - self->anc_buffer_pool = gst_buffer_pool_new(); - config = gst_buffer_pool_get_config(self->anc_buffer_pool); - gst_buffer_pool_config_set_params( - config, NULL, anc_buffer_size, - (self->configured_info.interlace_mode == - GST_VIDEO_INTERLACE_MODE_PROGRESSIVE - ? 
1 - : 2) * - self->queue_size, - 0); - gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); - gst_buffer_pool_set_config(self->anc_buffer_pool, config); - gst_buffer_pool_set_active(self->anc_buffer_pool, TRUE); + if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { + self->anc_buffer_pool = gst_buffer_pool_new(); + config = gst_buffer_pool_get_config(self->anc_buffer_pool); + gst_buffer_pool_config_set_params( + config, NULL, anc_buffer_size, + (self->configured_info.interlace_mode == + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE + ? 1 + : 2) * + self->queue_size, + 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->anc_buffer_pool, config); + gst_buffer_pool_set_active(self->anc_buffer_pool, TRUE); + } self->capture_thread = new AJAThread(); self->capture_thread->Attach(capture_thread_func, self); @@ -987,6 +1009,7 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { } item = *(QueueItem *)gst_queue_array_pop_head_struct(self->queue); + g_mutex_unlock(&self->queue_lock); *buffer = item.video_buffer; gst_buffer_add_aja_audio_meta(*buffer, item.audio_buffer); @@ -1044,8 +1067,9 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { gst_buffer_add_video_time_code_meta(*buffer, &tc); } + AJAAncillaryList anc_packets; + if (item.anc_buffer) { - AJAAncillaryList anc_packets; GstMapInfo map = GST_MAP_INFO_INIT; GstMapInfo map2 = GST_MAP_INFO_INIT; @@ -1056,34 +1080,43 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { NTV2_POINTER ptr2(map2.data, map2.size); AJAAncillaryList::SetFromDeviceAncBuffers(ptr1, ptr2, anc_packets); - // anc_packets.ParseAllAncillaryData(); - // std::stringstream os; - // anc_packets.Print(os); - // GST_ERROR_OBJECT(self, "meh %u %lu\n%s", - // anc_packets.CountAncillaryData(), - // map.size, os.str().c_str()); - - if (anc_packets.CountAncillaryDataWithType(AJAAncillaryDataType_Cea708)) { - AJAAncillaryData packet = - anc_packets.GetAncillaryDataWithType(AJAAncillaryDataType_Cea708); - - if (packet.GetPayloadData() && packet.GetPayloadByteCount() && - AJA_SUCCESS(packet.ParsePayloadData())) { - gst_buffer_add_video_caption_meta( - *buffer, GST_VIDEO_CAPTION_TYPE_CEA708_CDP, packet.GetPayloadData(), - packet.GetPayloadByteCount()); - } - } - - // TODO: Add AFD/Bar meta if (item.anc_buffer2) gst_buffer_unmap(item.anc_buffer2, &map2); gst_buffer_unmap(item.anc_buffer, &map); + } else if (self->vanc_mode != ::NTV2_VANCMODE_OFF) { + GstMapInfo map; + + NTV2FormatDescriptor format_desc(self->video_format, ::NTV2_FBF_10BIT_YCBCR, + self->vanc_mode); + + gst_buffer_map(item.video_buffer, &map, GST_MAP_READ); + NTV2_POINTER ptr(map.data, map.size); + AJAAncillaryList::SetFromVANCData(ptr, format_desc, anc_packets); + gst_buffer_unmap(item.video_buffer, &map); + + guint offset = + format_desc.RasterLineToByteOffset(format_desc.GetFirstActiveLine()); + guint size = format_desc.GetVisibleRasterBytes(); + + gst_buffer_resize(item.video_buffer, offset, size); } gst_clear_buffer(&item.anc_buffer); gst_clear_buffer(&item.anc_buffer2); - g_mutex_unlock(&self->queue_lock); + + if (anc_packets.CountAncillaryDataWithType(AJAAncillaryDataType_Cea708)) { + AJAAncillaryData packet = + anc_packets.GetAncillaryDataWithType(AJAAncillaryDataType_Cea708); + + if (packet.GetPayloadData() && packet.GetPayloadByteCount() && + AJA_SUCCESS(packet.ParsePayloadData())) { + gst_buffer_add_video_caption_meta( + *buffer, GST_VIDEO_CAPTION_TYPE_CEA708_CDP, 
packet.GetPayloadData(), + packet.GetPayloadByteCount()); + } + } + + // TODO: Add AFD/Bar meta if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self))) { gst_base_src_set_caps(GST_BASE_SRC_CAST(self), self->configured_caps); @@ -1146,7 +1179,10 @@ restart: self->device->device->SubscribeInputVerticalEvent(self->channel); if (!self->device->device->AutoCirculateInitForInput( self->channel, self->queue_size / 2, self->audio_system, - AUTOCIRCULATE_WITH_RP188 | AUTOCIRCULATE_WITH_ANC, 1)) { + AUTOCIRCULATE_WITH_RP188 | + (self->vanc_mode == ::NTV2_VANCMODE_OFF ? AUTOCIRCULATE_WITH_ANC + : 0), + 1)) { GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), ("Failed to initialize autocirculate")); goto out; @@ -1268,31 +1304,33 @@ restart: break; } - if (gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, &anc_buffer, - NULL) != GST_FLOW_OK) { - gst_buffer_unref(audio_buffer); - gst_buffer_unref(video_buffer); - GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), - ("Failed to acquire anc buffer")); - break; - } - - if (self->configured_info.interlace_mode != - GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { - if (gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, &anc_buffer2, + if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { + if (gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, &anc_buffer, NULL) != GST_FLOW_OK) { - gst_buffer_unref(anc_buffer); gst_buffer_unref(audio_buffer); gst_buffer_unref(video_buffer); GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), ("Failed to acquire anc buffer")); break; } + + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { + if (gst_buffer_pool_acquire_buffer( + self->anc_buffer_pool, &anc_buffer2, NULL) != GST_FLOW_OK) { + gst_buffer_unref(anc_buffer); + gst_buffer_unref(audio_buffer); + gst_buffer_unref(video_buffer); + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire anc buffer")); + break; + } + } } gst_buffer_map(video_buffer, &video_map, GST_MAP_READWRITE); gst_buffer_map(audio_buffer, &audio_map, GST_MAP_READWRITE); - gst_buffer_map(anc_buffer, &anc_map, GST_MAP_READWRITE); + if (anc_buffer) gst_buffer_map(anc_buffer, &anc_map, GST_MAP_READWRITE); if (anc_buffer2) gst_buffer_map(anc_buffer2, &anc_map2, GST_MAP_READWRITE); @@ -1313,7 +1351,7 @@ restart: } if (anc_buffer2) gst_buffer_unmap(anc_buffer2, &anc_map2); - gst_buffer_unmap(anc_buffer, &anc_map); + if (anc_buffer) gst_buffer_unmap(anc_buffer, &anc_map); gst_buffer_unmap(audio_buffer, &audio_map); gst_buffer_unmap(video_buffer, &video_map); @@ -1328,7 +1366,9 @@ restart: } gst_buffer_set_size(audio_buffer, transfer.GetCapturedAudioByteCount()); - gst_buffer_set_size(anc_buffer, transfer.GetCapturedAncByteCount(false)); + if (anc_buffer) + gst_buffer_set_size(anc_buffer, + transfer.GetCapturedAncByteCount(false)); if (anc_buffer2) gst_buffer_set_size(anc_buffer2, transfer.GetCapturedAncByteCount(true)); diff --git a/gstajasrc.h b/gstajasrc.h index 85e1243d65..a8ae0e7111 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -73,8 +73,8 @@ struct _GstAjaSrc { NTV2AudioSystem audio_system; NTV2VideoFormat video_format; + NTV2VANCMode vanc_mode; NTV2InputSource configured_input_source; - guint32 f2_start_line; NTV2TCIndex tc_index; GstCaps *configured_caps; From 2b4e0f7eced7607967dd16d61d3ebc6f8844678c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 19 Feb 2021 16:01:30 +0200 Subject: [PATCH 11/73] Only allocate VANC buffer pool if the device supports extraction --- gstajasrc.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) 
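Together with patch 10 above, the effect of this patch is that the source only sets up the custom-ANC path when it will actually be used. Below is a minimal sketch assembled from the surrounding diffs, not the literal hunk; all calls and fields (`::NTV2DeviceCanDoCustomAnc`, `self->vanc_mode`, `anc_buffer_size`, the pool setup) are taken from those diffs, and the local `n_buffers` is introduced here purely for readability.

```
// Sketch: only create the ANC buffer pool when firmware ANC extraction is in
// use, i.e. VANC is not embedded in TALL frames and the device supports
// custom ANC.
if (self->vanc_mode == ::NTV2_VANCMODE_OFF &&
    ::NTV2DeviceCanDoCustomAnc(self->device_id)) {
  self->anc_buffer_pool = gst_buffer_pool_new();
  GstStructure *config = gst_buffer_pool_get_config(self->anc_buffer_pool);

  // Interlaced formats use one ANC buffer per field, hence twice the queue
  // size (see patch 8). n_buffers is a local introduced only for this sketch.
  guint n_buffers = (self->configured_info.interlace_mode ==
                             GST_VIDEO_INTERLACE_MODE_PROGRESSIVE
                         ? 1
                         : 2) *
                    self->queue_size;

  gst_buffer_pool_config_set_params(config, NULL, anc_buffer_size, n_buffers,
                                    0);
  gst_buffer_pool_config_set_allocator(config, self->allocator, NULL);
  gst_buffer_pool_set_config(self->anc_buffer_pool, config);
  gst_buffer_pool_set_active(self->anc_buffer_pool, TRUE);
}
```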
diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 5fcc1b5bef..752c90b61d 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -790,7 +790,8 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { guint anc_buffer_size = 8 * 1024; - if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { + if (self->vanc_mode == ::NTV2_VANCMODE_OFF && + ::NTV2DeviceCanDoCustomAnc(self->device_id)) { self->anc_buffer_pool = gst_buffer_pool_new(); config = gst_buffer_pool_get_config(self->anc_buffer_pool); gst_buffer_pool_config_set_params( @@ -1304,7 +1305,8 @@ restart: break; } - if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { + if (self->vanc_mode == ::NTV2_VANCMODE_OFF && + ::NTV2DeviceCanDoCustomAnc(self->device_id)) { if (gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, &anc_buffer, NULL) != GST_FLOW_OK) { gst_buffer_unref(audio_buffer); From 19f752f8d6afd5402f421d6b0285a130da2bdf44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 19 Feb 2021 19:41:09 +0200 Subject: [PATCH 12/73] Output VANC via "TALL" video frames if firmware VANC extraction can't be used This is the case if the device does not support it, or if the output channel and SDI output are not the same. --- gstajasink.cpp | 244 ++++++++++++++++++++++++++++++------------------- gstajasink.h | 1 + 2 files changed, 150 insertions(+), 95 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 06df999fc8..10677d7a03 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -60,7 +60,8 @@ typedef struct { QueueItemType type; // For FRAME - GstVideoFrame frame; + GstBuffer *video_buffer; + GstMapInfo video_map; GstBuffer *audio_buffer; GstMapInfo audio_map; NTV2_RP188 tc; @@ -402,7 +403,9 @@ static gboolean gst_aja_sink_stop(GstAjaSink *self) { while ((item = (QueueItem *)gst_queue_array_pop_head_struct(self->queue))) { if (item->type == QUEUE_ITEM_TYPE_FRAME) { - gst_video_frame_unmap(&item->frame); + gst_buffer_unmap(item->video_buffer, &item->video_map); + gst_buffer_unref(item->video_buffer); + if (item->audio_buffer) { gst_buffer_unmap(item->audio_buffer, &item->audio_map); gst_buffer_unref(item->audio_buffer); @@ -631,19 +634,6 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) self->device->device->SetSDITransmitEnable(self->channel, true); - const NTV2Standard standard(::GetNTV2StandardFromVideoFormat(video_format)); - self->device->device->SetSDIOutputStandard(self->channel, standard); - const NTV2FrameGeometry geometry = - ::GetNTV2FrameGeometryFromVideoFormat(video_format); - self->device->device->SetVANCMode(::NTV2_VANCMODE_OFF, standard, geometry, - self->channel); - - NTV2SmpteLineNumber smpte_line_num_info = ::GetSmpteLineNumber(standard); - self->f2_start_line = - (smpte_line_num_info.GetLastLine( - smpte_line_num_info.firstFieldTop ? 
NTV2_FIELD0 : NTV2_FIELD1) + - 1); - if (self->configured_audio_channels) { switch (self->audio_system_setting) { case GST_AJA_AUDIO_SYSTEM_1: @@ -699,14 +689,11 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { self->audio_system = ::NTV2_AUDIOSYSTEM_INVALID; } - CNTV2SignalRouter router; - - self->device->device->GetRouting(router); - // Always use the framebuffer associated with the channel NTV2OutputCrosspointID framebuffer_id = ::GetFrameBufferOutputXptFromChannel(self->channel, false, false); + NTV2VANCMode vanc_mode; NTV2TCIndex tc_indexes_vitc[2] = {::NTV2_TCINDEX_INVALID, ::NTV2_TCINDEX_INVALID}; NTV2TCIndex tc_index_atc_ltc = ::NTV2_TCINDEX_INVALID; @@ -720,60 +707,73 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { tc_index_atc_ltc = ::NTV2ChannelToTimecodeIndex(self->channel, false, true); output_destination_id = ::GetSDIOutputInputXpt(self->channel, false); + vanc_mode = ::NTV2DeviceCanDoCustomAnc(self->device_id) + ? ::NTV2_VANCMODE_OFF + : ::NTV2_VANCMODE_TALL; break; case GST_AJA_OUTPUT_DESTINATION_SDI1: tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI1; tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI1_2; tc_index_atc_ltc = ::NTV2_TCINDEX_SDI1_LTC; output_destination_id = ::NTV2_XptSDIOut1Input; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_OUTPUT_DESTINATION_SDI2: tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI2; tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI2_2; tc_index_atc_ltc = ::NTV2_TCINDEX_SDI2_LTC; output_destination_id = ::NTV2_XptSDIOut2Input; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_OUTPUT_DESTINATION_SDI3: tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI3; tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI3_2; tc_index_atc_ltc = ::NTV2_TCINDEX_SDI3_LTC; output_destination_id = ::NTV2_XptSDIOut3Input; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_OUTPUT_DESTINATION_SDI4: tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI4; tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI4_2; tc_index_atc_ltc = ::NTV2_TCINDEX_SDI4_LTC; output_destination_id = ::NTV2_XptSDIOut4Input; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_OUTPUT_DESTINATION_SDI5: tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI5; tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI5_2; tc_index_atc_ltc = ::NTV2_TCINDEX_SDI5_LTC; output_destination_id = ::NTV2_XptSDIOut5Input; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_OUTPUT_DESTINATION_SDI6: tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI6; tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI6_2; tc_index_atc_ltc = ::NTV2_TCINDEX_SDI6_LTC; output_destination_id = ::NTV2_XptSDIOut6Input; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_OUTPUT_DESTINATION_SDI7: tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI7; tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI7_2; tc_index_atc_ltc = ::NTV2_TCINDEX_SDI7_LTC; output_destination_id = ::NTV2_XptSDIOut7Input; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_OUTPUT_DESTINATION_SDI8: tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI8; tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI8_2; tc_index_atc_ltc = ::NTV2_TCINDEX_SDI8_LTC; output_destination_id = ::NTV2_XptSDIOut8Input; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_OUTPUT_DESTINATION_ANALOG: output_destination_id = ::NTV2_XptAnalogOutInput; + vanc_mode = ::NTV2_VANCMODE_TALL; break; case GST_AJA_OUTPUT_DESTINATION_HDMI: output_destination_id = ::NTV2_XptHDMIOutInput; + vanc_mode = ::NTV2_VANCMODE_OFF; break; default: g_assert_not_reached(); @@ -803,6 +803,34 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { break; } + const NTV2Standard 
standard(::GetNTV2StandardFromVideoFormat(video_format)); + self->device->device->SetSDIOutputStandard(self->channel, standard); + const NTV2FrameGeometry geometry = + ::GetNTV2FrameGeometryFromVideoFormat(video_format); + + self->vanc_mode = + ::HasVANCGeometries(geometry) ? vanc_mode : ::NTV2_VANCMODE_OFF; + if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { + self->device->device->SetVANCMode(self->vanc_mode, standard, geometry, + self->channel); + } else { + const NTV2FrameGeometry vanc_geometry = + ::GetVANCFrameGeometry(geometry, self->vanc_mode); + + self->device->device->SetVANCMode(self->vanc_mode, standard, vanc_geometry, + self->channel); + } + + NTV2SmpteLineNumber smpte_line_num_info = ::GetSmpteLineNumber(standard); + self->f2_start_line = + (smpte_line_num_info.GetLastLine( + smpte_line_num_info.firstFieldTop ? NTV2_FIELD0 : NTV2_FIELD1) + + 1); + + CNTV2SignalRouter router; + + self->device->device->GetRouting(router); + // Need to remove old routes for the output and framebuffer we're going to use NTV2ActualConnections connections = router.GetConnections(); @@ -884,7 +912,9 @@ static gboolean gst_aja_sink_event(GstBaseSink *bsink, GstEvent *event) { while ( (item = (QueueItem *)gst_queue_array_pop_head_struct(self->queue))) { if (item->type == QUEUE_ITEM_TYPE_FRAME) { - gst_video_frame_unmap(&item->frame); + gst_buffer_unmap(item->video_buffer, &item->video_map); + gst_buffer_unref(item->video_buffer); + if (item->audio_buffer) { gst_buffer_unmap(item->audio_buffer, &item->audio_map); gst_buffer_unref(item->audio_buffer); @@ -941,10 +971,8 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, GstVideoTimeCodeMeta *tc_meta; QueueItem item = { .type = QUEUE_ITEM_TYPE_FRAME, - .frame = - { - {0}, - }, + .video_buffer = NULL, + .video_map = GST_MAP_INFO_INIT, .audio_buffer = NULL, .audio_map = GST_MAP_INFO_INIT, .tc = NTV2_RP188(), @@ -955,15 +983,20 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, }; guint video_buffer_size = ::GetVideoActiveSize( - self->video_format, ::NTV2_FBF_10BIT_YCBCR, ::NTV2_VANCMODE_OFF); + self->video_format, ::NTV2_FBF_10BIT_YCBCR, self->vanc_mode); + NTV2FormatDescriptor format_desc(self->video_format, ::NTV2_FBF_10BIT_YCBCR, + self->vanc_mode); meta = gst_buffer_get_aja_audio_meta(buffer); tc_meta = gst_buffer_get_video_time_code_meta(buffer); - if (gst_buffer_n_memory(buffer) == 1) { + if (self->vanc_mode == ::NTV2_VANCMODE_OFF && + gst_buffer_n_memory(buffer) == 1) { GstMemory *mem = gst_buffer_peek_memory(buffer, 0); + gsize offset; - if (gst_memory_get_sizes(mem, NULL, NULL) == video_buffer_size && + if (gst_memory_get_sizes(mem, &offset, NULL) == video_buffer_size && + offset == 0 && strcmp(mem->allocator->mem_type, GST_AJA_ALLOCATOR_MEMTYPE) == 0 && GST_AJA_ALLOCATOR(mem->allocator)->device->device->GetIndexNumber() == self->device->device->GetIndexNumber()) { @@ -1001,17 +1034,23 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, item.type = QUEUE_ITEM_TYPE_FRAME; - gst_video_frame_map(&item.frame, &self->configured_info, item_buffer, - GST_MAP_READWRITE); - gst_video_frame_copy(&item.frame, &in_frame); + item.video_buffer = item_buffer; + gst_buffer_map(item.video_buffer, &item.video_map, GST_MAP_WRITE); + + guint offset = + format_desc.RasterLineToByteOffset(format_desc.GetFirstActiveLine()); + guint size = format_desc.GetVisibleRasterBytes(); + + if (offset != 0) memset(item.video_map.data, 0, offset); + memcpy(item.video_map.data + offset, + GST_VIDEO_FRAME_PLANE_DATA(&in_frame, 0), size); + 
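+      // item.video_buffer now holds the VANC area at the top of the raster
+      // (zeroed above), followed by the visible picture copied in after it.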
gst_video_frame_unmap(&in_frame); - gst_buffer_unref(item_buffer); } else { item.type = QUEUE_ITEM_TYPE_FRAME; - gst_video_frame_map(&item.frame, &self->configured_info, item_buffer, - GST_MAP_READ); - gst_buffer_unref(item_buffer); + item.video_buffer = item_buffer; + gst_buffer_map(item.video_buffer, &item.video_map, GST_MAP_READ); } if (meta) { @@ -1046,7 +1085,8 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, flow_ret = gst_buffer_pool_acquire_buffer(self->audio_buffer_pool, &item_audio_buffer, NULL); if (flow_ret != GST_FLOW_OK) { - gst_video_frame_unmap(&item.frame); + gst_buffer_unmap(item.video_buffer, &item.video_map); + gst_buffer_unref(item.video_buffer); return flow_ret; } @@ -1156,67 +1196,77 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, } if (!anc_packet_list.IsEmpty()) { - if (!self->anc_buffer_pool) { - self->anc_buffer_pool = gst_buffer_pool_new(); - GstStructure *config = gst_buffer_pool_get_config(self->anc_buffer_pool); - gst_buffer_pool_config_set_params( - config, NULL, 8 * 1024, - (self->configured_info.interlace_mode == - GST_VIDEO_INTERLACE_MODE_PROGRESSIVE - ? 1 - : 2) * - self->queue_size, - 0); - gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); - gst_buffer_pool_set_config(self->anc_buffer_pool, config); - gst_buffer_pool_set_active(self->anc_buffer_pool, TRUE); - } - - flow_ret = gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, - &item.anc_buffer, NULL); - if (flow_ret != GST_FLOW_OK) { - gst_video_frame_unmap(&item.frame); - - if (item.audio_buffer) { - gst_buffer_unmap(item.audio_buffer, &item.audio_map); - gst_buffer_unref(item.audio_buffer); + if (self->vanc_mode == ::NTV2_VANCMODE_OFF && + ::NTV2DeviceCanDoCustomAnc(self->device_id)) { + if (!self->anc_buffer_pool) { + self->anc_buffer_pool = gst_buffer_pool_new(); + GstStructure *config = + gst_buffer_pool_get_config(self->anc_buffer_pool); + gst_buffer_pool_config_set_params( + config, NULL, 8 * 1024, + (self->configured_info.interlace_mode == + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE + ? 
1 + : 2) * + self->queue_size, + 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->anc_buffer_pool, config); + gst_buffer_pool_set_active(self->anc_buffer_pool, TRUE); } - return flow_ret; - } - gst_buffer_map(item.anc_buffer, &item.anc_map, GST_MAP_READWRITE); - - if (self->configured_info.interlace_mode != - GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { flow_ret = gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, - &item.anc_buffer2, NULL); + &item.anc_buffer, NULL); if (flow_ret != GST_FLOW_OK) { - gst_video_frame_unmap(&item.frame); + gst_buffer_unmap(item.video_buffer, &item.video_map); + gst_buffer_unref(item.video_buffer); if (item.audio_buffer) { gst_buffer_unmap(item.audio_buffer, &item.audio_map); gst_buffer_unref(item.audio_buffer); } - if (item.anc_buffer) { - gst_buffer_unmap(item.anc_buffer, &item.anc_map); - gst_buffer_unref(item.anc_buffer); - } - return flow_ret; } - gst_buffer_map(item.anc_buffer2, &item.anc_map2, GST_MAP_READWRITE); + gst_buffer_map(item.anc_buffer, &item.anc_map, GST_MAP_READWRITE); + + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { + flow_ret = gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, + &item.anc_buffer2, NULL); + if (flow_ret != GST_FLOW_OK) { + gst_buffer_unmap(item.video_buffer, &item.video_map); + gst_buffer_unref(item.video_buffer); + + if (item.audio_buffer) { + gst_buffer_unmap(item.audio_buffer, &item.audio_map); + gst_buffer_unref(item.audio_buffer); + } + + if (item.anc_buffer) { + gst_buffer_unmap(item.anc_buffer, &item.anc_map); + gst_buffer_unref(item.anc_buffer); + } + + return flow_ret; + } + gst_buffer_map(item.anc_buffer2, &item.anc_map2, GST_MAP_READWRITE); + } + + NTV2_POINTER anc_ptr1(item.anc_map.data, item.anc_map.size); + NTV2_POINTER anc_ptr2(item.anc_map2.data, item.anc_map2.size); + + anc_ptr1.Fill(ULWord(0)); + anc_ptr2.Fill(ULWord(0)); + anc_packet_list.GetTransmitData(anc_ptr1, anc_ptr2, + self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE, + self->f2_start_line); + } else { + NTV2_POINTER ptr(item.video_map.data, item.video_map.size); + + anc_packet_list.GetVANCTransmitData(ptr, format_desc); } - - NTV2_POINTER anc_ptr1(item.anc_map.data, item.anc_map.size); - NTV2_POINTER anc_ptr2(item.anc_map2.data, item.anc_map2.size); - - anc_ptr1.Fill(ULWord(0)); - anc_ptr2.Fill(ULWord(0)); - anc_packet_list.GetTransmitData(anc_ptr1, anc_ptr2, - self->configured_info.interlace_mode != - GST_VIDEO_INTERLACE_MODE_PROGRESSIVE, - self->f2_start_line); } g_mutex_lock(&self->queue_lock); @@ -1228,12 +1278,13 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, GstMessage *msg = gst_message_new_qos( GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, - GST_BUFFER_PTS(tmp->frame.buffer), + GST_BUFFER_PTS(tmp->video_buffer), gst_util_uint64_scale(GST_SECOND, self->configured_info.fps_d, self->configured_info.fps_n)); gst_element_post_message(GST_ELEMENT_CAST(self), msg); - gst_video_frame_unmap(&tmp->frame); + gst_buffer_unmap(tmp->video_buffer, &tmp->video_map); + gst_buffer_unref(tmp->video_buffer); if (tmp->audio_buffer) { gst_buffer_unmap(tmp->audio_buffer, &tmp->audio_map); gst_buffer_unref(tmp->audio_buffer); @@ -1249,8 +1300,7 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, } } - GST_TRACE_OBJECT(self, "Queuing frame video %p audio %p", - GST_VIDEO_FRAME_PLANE_DATA(&item.frame, 0), + GST_TRACE_OBJECT(self, "Queuing frame video %p audio %p", 
item.video_map.data, item.audio_buffer ? item.audio_map.data : NULL); gst_queue_array_push_tail_struct(self->queue, &item); GST_TRACE_OBJECT(self, "%u frames queued", @@ -1317,7 +1367,10 @@ restart: self->device->device->SubscribeOutputVerticalEvent(self->channel); if (!self->device->device->AutoCirculateInitForOutput( self->channel, self->queue_size / 2, self->audio_system, - AUTOCIRCULATE_WITH_RP188 | AUTOCIRCULATE_WITH_ANC, 1)) { + AUTOCIRCULATE_WITH_RP188 | + (self->vanc_mode == ::NTV2_VANCMODE_OFF ? AUTOCIRCULATE_WITH_ANC + : 0), + 1)) { GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), ("Failed to initialize autocirculate")); goto out; @@ -1392,7 +1445,9 @@ restart: if (!self->playing || self->shutdown || (!item_p && self->draining)) { if (item_p && item_p->type == QUEUE_ITEM_TYPE_FRAME) { - gst_video_frame_unmap(&item_p->frame); + gst_buffer_unmap(item_p->video_buffer, &item_p->video_map); + gst_buffer_unref(item_p->video_buffer); + if (item_p->audio_buffer) { gst_buffer_unmap(item_p->audio_buffer, &item_p->audio_map); gst_buffer_unref(item_p->audio_buffer); @@ -1425,8 +1480,7 @@ restart: "Video %p %" G_GSIZE_FORMAT " " "Audio %p %" G_GSIZE_FORMAT, - GST_VIDEO_FRAME_PLANE_DATA(&item.frame, 0), - GST_VIDEO_FRAME_SIZE(&item.frame), + item.video_map.data, item.video_map.size, item.audio_buffer ? item.audio_map.data : NULL, item.audio_buffer ? item.audio_map.size : 0); @@ -1440,9 +1494,8 @@ restart: transfer.SetOutputTimeCodes(timecodes); } - transfer.SetVideoBuffer( - (ULWord *)GST_VIDEO_FRAME_PLANE_DATA(&item.frame, 0), - GST_VIDEO_FRAME_SIZE(&item.frame)); + transfer.SetVideoBuffer((ULWord *)item.video_map.data, + item.video_map.size); transfer.SetAudioBuffer((ULWord *)item.audio_map.data, item.audio_map.size); transfer.SetAncBuffers((ULWord *)item.anc_map.data, item.anc_map.size, @@ -1453,7 +1506,8 @@ restart: GST_WARNING_OBJECT(self, "Failed to transfer frame"); } - gst_video_frame_unmap(&item.frame); + gst_buffer_unmap(item.video_buffer, &item.video_map); + gst_buffer_unref(item.video_buffer); if (item.audio_buffer) { gst_buffer_unmap(item.audio_buffer, &item.audio_map); diff --git a/gstajasink.h b/gstajasink.h index 705d7e894d..9106e267c3 100644 --- a/gstajasink.h +++ b/gstajasink.h @@ -78,6 +78,7 @@ struct _GstAjaSink { NTV2AudioSystem audio_system; NTV2VideoFormat video_format; + NTV2VANCMode vanc_mode; guint32 f2_start_line; NTV2TCIndexes *tc_indexes; From b64c7f3fdc841eafb67407664be7caf8365d1581 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 19 Feb 2021 20:39:58 +0200 Subject: [PATCH 13/73] Only propose the AJA allocator from the sink if no TALL frames are allocated --- gstajasink.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 10677d7a03..eeaef60a4c 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -948,7 +948,7 @@ static gboolean gst_aja_sink_propose_allocation(GstBaseSink *bsink, GstQuery *query) { GstAjaSink *self = GST_AJA_SINK(bsink); - if (self->allocator) { + if (self->allocator && self->vanc_mode == ::NTV2_VANCMODE_OFF) { GstAllocationParams params; gst_allocation_params_init(¶ms); From bdddb634f79d6f415629780269e7616e15b9d0e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 5 Mar 2021 13:41:22 +0200 Subject: [PATCH 14/73] Fill VANC area with black instead of zeroes when outputting TALL frames --- gstajasink.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index eeaef60a4c..0ccc2df6e8 100644 --- 
a/gstajasink.cpp +++ b/gstajasink.cpp @@ -1041,7 +1041,10 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, format_desc.RasterLineToByteOffset(format_desc.GetFirstActiveLine()); guint size = format_desc.GetVisibleRasterBytes(); - if (offset != 0) memset(item.video_map.data, 0, offset); + if (offset != 0) + ::SetRasterLinesBlack(::NTV2_FBF_10BIT_YCBCR, item.video_map.data, + format_desc.GetBytesPerRow(), + format_desc.GetFirstActiveLine()); memcpy(item.video_map.data + offset, GST_VIDEO_FRAME_PLANE_DATA(&in_frame, 0), size); From e70dcfa588c4d54840bc7142241f3c9a5728a22c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 5 Mar 2021 13:41:56 +0200 Subject: [PATCH 15/73] Output captions on line 12 instead of line 9 1080p2997 for example does not allow line 9 in TALL frames. --- gstajasink.cpp | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 0ccc2df6e8..64fad29cce 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -1177,11 +1177,9 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, (caption_meta = (GstVideoCaptionMeta *)gst_buffer_iterate_meta_filtered( buffer, &iter, GST_VIDEO_CAPTION_META_API_TYPE))) { if (caption_meta->caption_type == GST_VIDEO_CAPTION_TYPE_CEA708_CDP) { - const uint16_t kF1PktLineNumCEA708(9); const AJAAncillaryDataLocation kCEA708LocF1( AJAAncillaryDataLink_A, AJAAncillaryDataVideoStream_Y, - AJAAncillaryDataSpace_VANC, kF1PktLineNumCEA708, - AJAAncDataHorizOffset_AnyVanc); + AJAAncillaryDataSpace_VANC, 12, AJAAncDataHorizOffset_AnyVanc); AJAAncillaryData_Cea708 pkt; From 6deeb2389ea000e7d626ca377b2cc2e26eba6772 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 5 Mar 2021 14:35:34 +0200 Subject: [PATCH 16/73] Work around AJA SDK not recogizing CEA708 packets when capturing TALL frames in SD modes Stop using CountAncillaryDataWithType(AJAAncillaryDataType_Cea708) etc because for SD it doesn't recognize the packets. It assumes they would only be received on AJAAncillaryDataChannel_Y but for SD it is actually AJAAncillaryDataChannel_Both. --- gstajasrc.cpp | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 752c90b61d..99031674f9 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1105,15 +1105,23 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { gst_clear_buffer(&item.anc_buffer); gst_clear_buffer(&item.anc_buffer2); - if (anc_packets.CountAncillaryDataWithType(AJAAncillaryDataType_Cea708)) { - AJAAncillaryData packet = - anc_packets.GetAncillaryDataWithType(AJAAncillaryDataType_Cea708); + // Not using CountAncillaryDataWithType(AJAAncillaryDataType_Cea708) etc + // here because for SD it doesn't recognize the packets. It assumes they + // would only be received on AJAAncillaryDataChannel_Y but for SD it is + // actually AJAAncillaryDataChannel_Both. + // + // See AJA SDK support ticket #4844. 
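+  // Instead, match CEA-708 CDP packets directly by their DID and SID below.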
+ guint32 n_vanc_packets = anc_packets.CountAncillaryData(); + for (guint32 i = 0; i < n_vanc_packets; i++) { + AJAAncillaryData *packet = anc_packets.GetAncillaryDataAtIndex(i); - if (packet.GetPayloadData() && packet.GetPayloadByteCount() && - AJA_SUCCESS(packet.ParsePayloadData())) { + if (packet->GetDID() == AJAAncillaryData_CEA708_DID && + packet->GetSID() == AJAAncillaryData_CEA708_SID && + packet->GetPayloadData() && packet->GetPayloadByteCount() && + AJA_SUCCESS(packet->ParsePayloadData())) { gst_buffer_add_video_caption_meta( - *buffer, GST_VIDEO_CAPTION_TYPE_CEA708_CDP, packet.GetPayloadData(), - packet.GetPayloadByteCount()); + *buffer, GST_VIDEO_CAPTION_TYPE_CEA708_CDP, packet->GetPayloadData(), + packet->GetPayloadByteCount()); } } From 195ffb2101c0ade5cd4d6f08df09dc3f5a78a4b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 5 Mar 2021 17:56:27 +0200 Subject: [PATCH 17/73] Work around AJA SDK truncating ADF packets in SD modes when writing to TALL frames ADF packets that are not a multiple of 12 large are truncated at the end in SD modes. Instead of directly converting all the packets at once like for non-SD modes, do the conversion (partially) manually. --- gstajasink.cpp | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 64fad29cce..5483511cdd 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -1266,7 +1266,33 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, } else { NTV2_POINTER ptr(item.video_map.data, item.video_map.size); - anc_packet_list.GetVANCTransmitData(ptr, format_desc); + // Work around bug in GetVANCTransmitData() for SD formats that + // truncates ADF packets that are not a multiple of 12 words long. + // + // See AJA SDK support ticket #4845. + if (format_desc.IsSDFormat()) { + guint32 n_vanc_packets = anc_packet_list.CountAncillaryData(); + for (guint32 i = 0; i < n_vanc_packets; i++) { + AJAAncillaryData *packet = anc_packet_list.GetAncillaryDataAtIndex(i); + + ULWord line_offset = 0; + if (!format_desc.GetLineOffsetFromSMPTELine( + packet->GetLocationLineNumber(), line_offset)) + continue; + + UWordSequence data; + if (packet->GenerateTransmitData(data) != AJA_STATUS_SUCCESS) + continue; + + // Pad to a multiple of 12 words + while (data.size() < 12 || data.size() % 12 != 0) + data.push_back(0x040); + ::YUVComponentsTo10BitYUVPackedBuffer(data, ptr, format_desc, + line_offset); + } + } else { + anc_packet_list.GetVANCTransmitData(ptr, format_desc); + } } } From 8075fe577c982b3bc2fc46b6f7135d1e4727cc20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 5 Mar 2021 21:50:22 +0200 Subject: [PATCH 18/73] Streamline VANC/geometry/standard configuration between source and sink This shouldn't have any effect but makes the configuration the same on both sides. --- gstajasink.cpp | 8 ++++---- gstajasrc.cpp | 23 ++++++++++++++++++++--- 2 files changed, 24 insertions(+), 7 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 5483511cdd..84f786123b 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -811,14 +811,14 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { self->vanc_mode = ::HasVANCGeometries(geometry) ? 
vanc_mode : ::NTV2_VANCMODE_OFF; if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { - self->device->device->SetVANCMode(self->vanc_mode, standard, geometry, - self->channel); + self->device->device->SetFrameGeometry(geometry, false, self->channel); + self->device->device->SetVANCMode(self->vanc_mode, self->channel); } else { const NTV2FrameGeometry vanc_geometry = ::GetVANCFrameGeometry(geometry, self->vanc_mode); - self->device->device->SetVANCMode(self->vanc_mode, standard, vanc_geometry, - self->channel); + self->device->device->SetFrameGeometry(vanc_geometry, false, self->channel); + self->device->device->SetVANCMode(self->vanc_mode, self->channel); } NTV2SmpteLineNumber smpte_line_num_info = ::GetSmpteLineNumber(standard); diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 99031674f9..8021cc2a9c 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -587,9 +587,26 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { self->configured_input_source = input_source; self->vanc_mode = vanc_mode; - self->device->device->SetEnableVANCData(NTV2_IS_VANCMODE_TALL(vanc_mode), - NTV2_IS_VANCMODE_TALLER(vanc_mode), - self->channel); + + const NTV2Standard standard( + ::GetNTV2StandardFromVideoFormat(self->video_format)); + self->device->device->SetStandard(standard, self->channel); + const NTV2FrameGeometry geometry = + ::GetNTV2FrameGeometryFromVideoFormat(self->video_format); + + self->vanc_mode = + ::HasVANCGeometries(geometry) ? vanc_mode : ::NTV2_VANCMODE_OFF; + if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { + self->device->device->SetFrameGeometry(geometry, false, self->channel); + self->device->device->SetVANCMode(self->vanc_mode, self->channel); + } else { + const NTV2FrameGeometry vanc_geometry = + ::GetVANCFrameGeometry(geometry, self->vanc_mode); + + self->device->device->SetFrameGeometry(vanc_geometry, false, + self->channel); + self->device->device->SetVANCMode(self->vanc_mode, self->channel); + } CNTV2SignalRouter router; From fa5385bc8e36788e8180886c14cf727f52c66b8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 6 Jul 2021 11:47:42 +0300 Subject: [PATCH 19/73] ajasrc: Set output buffer duration based on the framerate as an estimate --- gstajasrc.cpp | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 8021cc2a9c..7bbf797db0 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1423,7 +1423,11 @@ restart: now_gst = 0; GST_BUFFER_PTS(video_buffer) = now_gst; + GST_BUFFER_DURATION(video_buffer) = gst_util_uint64_scale( + GST_SECOND, self->configured_info.fps_d, self->configured_info.fps_n); GST_BUFFER_PTS(audio_buffer) = now_gst; + GST_BUFFER_DURATION(audio_buffer) = gst_util_uint64_scale( + GST_SECOND, self->configured_info.fps_d, self->configured_info.fps_n); // TODO: Drift detection and compensation From 735768b905b40cf497a20007996f79f809406881 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 2 Jul 2021 11:03:00 +0300 Subject: [PATCH 20/73] Add support for UHD/UHD2 modes and SDI quad-link modes --- gstajacommon.cpp | 305 ++++++++++++++++++++++++++++---- gstajacommon.h | 62 ++++++- gstajasink.cpp | 322 +++++++++++++++++++++++++++++++++- gstajasink.h | 2 + gstajasrc.cpp | 448 +++++++++++++++++++++++++++++++++++++++-------- gstajasrc.h | 2 + 6 files changed, 1016 insertions(+), 125 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 99b064ca28..fc71862b47 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -27,87 +27,288 @@ #include "gstajacommon.h" 
GST_DEBUG_CATEGORY_STATIC(gst_aja_debug); + #define GST_CAT_DEFAULT gst_aja_debug -static const NTV2VideoFormat supported_video_formats[] = { - NTV2_FORMAT_1080i_5000, NTV2_FORMAT_1080i_5994, - NTV2_FORMAT_1080i_6000, NTV2_FORMAT_720p_5994, - NTV2_FORMAT_720p_6000, NTV2_FORMAT_1080p_2997, - NTV2_FORMAT_1080p_3000, NTV2_FORMAT_1080p_2500, - NTV2_FORMAT_1080p_2398, NTV2_FORMAT_1080p_2400, - NTV2_FORMAT_720p_5000, NTV2_FORMAT_720p_2398, - NTV2_FORMAT_720p_2500, NTV2_FORMAT_1080p_5000_A, - NTV2_FORMAT_1080p_5994_A, NTV2_FORMAT_1080p_6000_A, - NTV2_FORMAT_625_5000, NTV2_FORMAT_525_5994, - NTV2_FORMAT_525_2398, NTV2_FORMAT_525_2400}; +typedef struct { + GstAjaVideoFormat gst_format; + NTV2VideoFormat aja_format; + NTV2VideoFormat quad_format; +} FormatMapEntry; + +static const FormatMapEntry format_map[] = { + {GST_AJA_VIDEO_FORMAT_1080i_5000, NTV2_FORMAT_1080i_5000, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080i_5994, NTV2_FORMAT_1080i_5994, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080i_6000, NTV2_FORMAT_1080i_6000, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_720p_5994, NTV2_FORMAT_720p_5994, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_720p_6000, NTV2_FORMAT_720p_6000, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_2997, NTV2_FORMAT_1080p_2997, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_3000, NTV2_FORMAT_1080p_3000, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_2500, NTV2_FORMAT_1080p_2500, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_2398, NTV2_FORMAT_1080p_2398, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_2400, NTV2_FORMAT_1080p_2400, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_720p_5000, NTV2_FORMAT_720p_5000, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_720p_2398, NTV2_FORMAT_720p_2398, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_720p_5000, NTV2_FORMAT_720p_2500, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_3000, NTV2_FORMAT_1080p_5000_A, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_5994_A, NTV2_FORMAT_1080p_5994_A, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_6000_A, NTV2_FORMAT_1080p_6000_A, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_625_5000, NTV2_FORMAT_625_5000, NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_525_5994, NTV2_FORMAT_525_5994, NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_525_2398, NTV2_FORMAT_525_2398, NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_525_2400, NTV2_FORMAT_525_2400, NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_2160p_2398, NTV2_FORMAT_3840x2160p_2398, + NTV2_FORMAT_4x1920x1080p_2398}, + {GST_AJA_VIDEO_FORMAT_2160p_2400, NTV2_FORMAT_3840x2160p_2400, + NTV2_FORMAT_4x1920x1080p_2400}, + {GST_AJA_VIDEO_FORMAT_2160p_2500, NTV2_FORMAT_3840x2160p_2500, + NTV2_FORMAT_4x1920x1080p_2500}, + {GST_AJA_VIDEO_FORMAT_2160p_2997, NTV2_FORMAT_3840x2160p_2997, + NTV2_FORMAT_4x1920x1080p_2997}, + {GST_AJA_VIDEO_FORMAT_2160p_3000, NTV2_FORMAT_3840x2160p_3000, + NTV2_FORMAT_4x1920x1080p_3000}, + {GST_AJA_VIDEO_FORMAT_2160p_5000, NTV2_FORMAT_3840x2160p_5000, + NTV2_FORMAT_4x1920x1080p_5000}, + {GST_AJA_VIDEO_FORMAT_2160p_5994, NTV2_FORMAT_3840x2160p_5994, + NTV2_FORMAT_4x1920x1080p_5994}, + {GST_AJA_VIDEO_FORMAT_2160p_6000, NTV2_FORMAT_3840x2160p_6000, + NTV2_FORMAT_4x1920x1080p_6000}, + {GST_AJA_VIDEO_FORMAT_4320p_2398, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_2398}, + {GST_AJA_VIDEO_FORMAT_4320p_2400, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_2400}, + {GST_AJA_VIDEO_FORMAT_4320p_2500, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_2500}, + 
{GST_AJA_VIDEO_FORMAT_4320p_2997, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_2997}, + {GST_AJA_VIDEO_FORMAT_4320p_3000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_3000}, + {GST_AJA_VIDEO_FORMAT_4320p_5000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_5000}, + {GST_AJA_VIDEO_FORMAT_4320p_5994, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_5994}, + {GST_AJA_VIDEO_FORMAT_4320p_6000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_6000}, +}; GstCaps *gst_ntv2_supported_caps(NTV2DeviceID device_id) { GstCaps *caps = gst_caps_new_empty(); - for (gsize i = 0; i < G_N_ELEMENTS(supported_video_formats); i++) { - NTV2VideoFormat format = supported_video_formats[i]; + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &format = format_map[i]; - if (device_id == DEVICE_ID_INVALID || - ::NTV2DeviceCanDoVideoFormat(device_id, format)) { - gst_caps_append(caps, gst_ntv2_video_format_to_caps(format)); + if (device_id == DEVICE_ID_INVALID) { + gst_caps_append(caps, gst_aja_video_format_to_caps(format.gst_format)); + } else { + if ((format.aja_format != NTV2_FORMAT_UNKNOWN && + ::NTV2DeviceCanDoVideoFormat(device_id, format.aja_format)) || + (format.quad_format != NTV2_FORMAT_UNKNOWN && + ::NTV2DeviceCanDoVideoFormat(device_id, format.quad_format))) { + gst_caps_append(caps, gst_aja_video_format_to_caps(format.gst_format)); + } } } return caps; } +GstCaps *gst_aja_video_format_to_caps(GstAjaVideoFormat format) { + const FormatMapEntry *entry = NULL; + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &tmp = format_map[i]; + + if (tmp.gst_format == format) { + entry = &tmp; + break; + } + } + g_assert(entry != NULL); + + if (entry->aja_format != NTV2_FORMAT_UNKNOWN) + return gst_ntv2_video_format_to_caps(entry->aja_format); + if (entry->quad_format != NTV2_FORMAT_UNKNOWN) + return gst_ntv2_video_format_to_caps(entry->quad_format); + + g_assert_not_reached(); +} + +bool gst_video_info_from_aja_video_format(GstVideoInfo *info, + GstAjaVideoFormat format) { + const FormatMapEntry *entry = NULL; + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &tmp = format_map[i]; + + if (tmp.gst_format == format) { + entry = &tmp; + break; + } + } + g_assert(entry != NULL); + + if (entry->aja_format != NTV2_FORMAT_UNKNOWN) + return gst_video_info_from_ntv2_video_format(info, entry->aja_format); + if (entry->quad_format != NTV2_FORMAT_UNKNOWN) + return gst_video_info_from_ntv2_video_format(info, entry->quad_format); + + g_assert_not_reached(); +} + GstCaps *gst_ntv2_video_format_to_caps(NTV2VideoFormat format) { GstVideoInfo info; + if (!gst_video_info_from_ntv2_video_format(&info, format)) return NULL; + + return gst_video_info_to_caps(&info); +} + +bool gst_video_info_from_ntv2_video_format(GstVideoInfo *info, + NTV2VideoFormat format) { + if (format == NTV2_FORMAT_UNKNOWN) return false; + guint width = ::GetDisplayWidth(format); guint height = ::GetDisplayHeight(format); NTV2FrameRate fps = ::GetNTV2FrameRateFromVideoFormat(format); guint fps_n, fps_d; ::GetFramesPerSecond(fps, fps_n, fps_d); - gst_video_info_set_format(&info, GST_VIDEO_FORMAT_v210, width, height); - info.fps_n = fps_n; - info.fps_d = fps_d; + gst_video_info_set_format(info, GST_VIDEO_FORMAT_v210, width, height); + info->fps_n = fps_n; + info->fps_d = fps_d; if (NTV2_IS_525_FORMAT(format)) { - info.par_n = 10; - info.par_d = 11; + info->par_n = 10; + info->par_d = 11; } else if (NTV2_IS_625_FORMAT(format)) { - info.par_n = 12; - info.par_d = 11; + 
info->par_n = 12; + info->par_d = 11; } - info.interlace_mode = !::IsProgressiveTransport(format) - ? GST_VIDEO_INTERLACE_MODE_INTERLEAVED - : GST_VIDEO_INTERLACE_MODE_PROGRESSIVE; + info->interlace_mode = !::IsProgressiveTransport(format) + ? GST_VIDEO_INTERLACE_MODE_INTERLEAVED + : GST_VIDEO_INTERLACE_MODE_PROGRESSIVE; - return gst_video_info_to_caps(&info); + return true; } -NTV2VideoFormat gst_ntv2_video_format_from_caps(GstCaps *caps) { +NTV2VideoFormat gst_ntv2_video_format_from_caps(const GstCaps *caps, + bool quad) { GstVideoInfo info; if (!gst_video_info_from_caps(&info, caps)) return NTV2_FORMAT_UNKNOWN; - for (gsize i = 0; i < G_N_ELEMENTS(supported_video_formats); i++) { - NTV2VideoFormat format = supported_video_formats[i]; + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &format = format_map[i]; + NTV2VideoFormat f = !quad ? format.aja_format : format.quad_format; - guint width = ::GetDisplayWidth(format); - guint height = ::GetDisplayHeight(format); - NTV2FrameRate fps = ::GetNTV2FrameRateFromVideoFormat(format); + if (f == NTV2_FORMAT_UNKNOWN) continue; + + guint width = ::GetDisplayWidth(f); + guint height = ::GetDisplayHeight(f); + NTV2FrameRate fps = ::GetNTV2FrameRateFromVideoFormat(f); guint fps_n, fps_d; ::GetFramesPerSecond(fps, fps_n, fps_d); if (width == (guint)info.width && height == (guint)info.height && (guint)info.fps_n == fps_n && (guint)info.fps_d == fps_d && - ((!::IsProgressiveTransport(format) && + ((!::IsProgressiveTransport(f) && info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED) || - (::IsProgressiveTransport(format) && + (::IsProgressiveTransport(f) && info.interlace_mode == GST_VIDEO_INTERLACE_MODE_PROGRESSIVE))) - return format; + return f; } return NTV2_FORMAT_UNKNOWN; } +GstAjaVideoFormat gst_aja_video_format_from_caps(const GstCaps *caps) { + GstVideoInfo info; + + if (!gst_video_info_from_caps(&info, caps)) + return GST_AJA_VIDEO_FORMAT_INVALID; + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &format = format_map[i]; + NTV2VideoFormat f = (format.aja_format != NTV2_FORMAT_UNKNOWN) + ? 
format.aja_format + : format.quad_format; + + if (f == NTV2_FORMAT_UNKNOWN) continue; + + guint width = ::GetDisplayWidth(f); + guint height = ::GetDisplayHeight(f); + NTV2FrameRate fps = ::GetNTV2FrameRateFromVideoFormat(f); + guint fps_n, fps_d; + ::GetFramesPerSecond(fps, fps_n, fps_d); + + if (width == (guint)info.width && height == (guint)info.height && + (guint)info.fps_n == fps_n && (guint)info.fps_d == fps_d && + ((!::IsProgressiveTransport(f) && + info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED) || + (::IsProgressiveTransport(f) && + info.interlace_mode == GST_VIDEO_INTERLACE_MODE_PROGRESSIVE))) + return format.gst_format; + } + + return GST_AJA_VIDEO_FORMAT_INVALID; +} + +GstAjaVideoFormat gst_aja_video_format_from_ntv2_format( + NTV2VideoFormat format) { + if (format == NTV2_FORMAT_UNKNOWN) return GST_AJA_VIDEO_FORMAT_INVALID; + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &entry = format_map[i]; + if (entry.aja_format == format || entry.quad_format == format) + return entry.gst_format; + } + + return GST_AJA_VIDEO_FORMAT_INVALID; +} + +NTV2VideoFormat gst_ntv2_video_format_from_aja_format(GstAjaVideoFormat format, + bool quad) { + if (format == GST_AJA_VIDEO_FORMAT_INVALID) return NTV2_FORMAT_UNKNOWN; + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &entry = format_map[i]; + if (entry.gst_format == format) { + if (!quad && entry.aja_format != NTV2_FORMAT_UNKNOWN) + return entry.aja_format; + if (quad && entry.quad_format != NTV2_FORMAT_UNKNOWN) + return entry.quad_format; + } + } + + return NTV2_FORMAT_UNKNOWN; +} + +bool gst_ntv2_video_format_is_quad(NTV2VideoFormat format) { + return (format >= NTV2_FORMAT_FIRST_4K_DEF_FORMAT && + format < NTV2_FORMAT_END_4K_DEF_FORMATS) || + (format >= NTV2_FORMAT_FIRST_4K_DEF_FORMAT2 && + format < NTV2_FORMAT_END_4K_DEF_FORMATS2) || + (format >= NTV2_FORMAT_FIRST_UHD2_DEF_FORMAT && + format < NTV2_FORMAT_END_UHD2_DEF_FORMATS) || + (format >= NTV2_FORMAT_FIRST_UHD2_FULL_DEF_FORMAT && + format < NTV2_FORMAT_END_UHD2_FULL_DEF_FORMATS); +} + GType gst_aja_audio_meta_api_get_type(void) { static volatile GType type; @@ -498,6 +699,22 @@ GType gst_aja_input_source_get_type(void) { return (GType)id; } +GType gst_aja_sdi_mode_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_SDI_MODE_SINGLE_LINK, "single-link", "Single Link"}, + {GST_AJA_SDI_MODE_QUAD_LINK_SQD, "quad-link-sqd", "Quad Link SQD"}, + {GST_AJA_SDI_MODE_QUAD_LINK_TSI, "quad-link-tsi", "Quad Link TSI"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaSdiMode", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + GType gst_aja_video_format_get_type(void) { static gsize id = 0; static const GEnumValue modes[] = { @@ -522,6 +739,22 @@ GType gst_aja_video_format_get_type(void) { {GST_AJA_VIDEO_FORMAT_525_5994, "525-5994", "525 5994"}, {GST_AJA_VIDEO_FORMAT_525_2398, "525-2398", "525 2398"}, {GST_AJA_VIDEO_FORMAT_525_2400, "525-2400", "525 2400"}, + {GST_AJA_VIDEO_FORMAT_2160p_2398, "2160p-2398", "2160p 2398"}, + {GST_AJA_VIDEO_FORMAT_2160p_2400, "2160p-2400", "2160p 2400"}, + {GST_AJA_VIDEO_FORMAT_2160p_2500, "2160p-2500", "2160p 2500"}, + {GST_AJA_VIDEO_FORMAT_2160p_2997, "2160p-2997", "2160p 2997"}, + {GST_AJA_VIDEO_FORMAT_2160p_3000, "2160p-3000", "2160p 3000"}, + {GST_AJA_VIDEO_FORMAT_2160p_5000, "2160p-5000", "2160p 5000"}, + {GST_AJA_VIDEO_FORMAT_2160p_5994, "2160p-5994", "2160p 5994"}, + 
{GST_AJA_VIDEO_FORMAT_2160p_6000, "2160p-6000", "2160p 6000"}, + {GST_AJA_VIDEO_FORMAT_4320p_2398, "4320p-2398", "4320p 2398"}, + {GST_AJA_VIDEO_FORMAT_4320p_2400, "4320p-2400", "4320p 2400"}, + {GST_AJA_VIDEO_FORMAT_4320p_2500, "4320p-2500", "4320p 2500"}, + {GST_AJA_VIDEO_FORMAT_4320p_2997, "4320p-2997", "4320p 2997"}, + {GST_AJA_VIDEO_FORMAT_4320p_3000, "4320p-3000", "4320p 3000"}, + {GST_AJA_VIDEO_FORMAT_4320p_5000, "4320p-5000", "4320p 5000"}, + {GST_AJA_VIDEO_FORMAT_4320p_5994, "4320p-5994", "4320p 5994"}, + {GST_AJA_VIDEO_FORMAT_4320p_6000, "4320p-6000", "4320p 6000"}, {0, NULL, NULL}}; if (g_once_init_enter(&id)) { diff --git a/gstajacommon.h b/gstajacommon.h index af646e4d27..f64103a9b6 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -54,13 +54,6 @@ G_GNUC_INTERNAL GstAjaAudioMeta *gst_buffer_add_aja_audio_meta(GstBuffer *buffer, GstBuffer *audio_buffer); -G_GNUC_INTERNAL -GstCaps *gst_ntv2_supported_caps(NTV2DeviceID device_id); -G_GNUC_INTERNAL -GstCaps *gst_ntv2_video_format_to_caps(NTV2VideoFormat format); -G_GNUC_INTERNAL -NTV2VideoFormat gst_ntv2_video_format_from_caps(GstCaps *caps); - typedef struct { CNTV2Card *device; } GstAjaDevice; @@ -178,6 +171,17 @@ G_GNUC_INTERNAL GType gst_aja_input_source_get_type(void); typedef enum { + GST_AJA_SDI_MODE_SINGLE_LINK, + GST_AJA_SDI_MODE_QUAD_LINK_SQD, + GST_AJA_SDI_MODE_QUAD_LINK_TSI, +} GstAjaSdiMode; + +#define GST_TYPE_AJA_SDI_MODE (gst_aja_sdi_mode_get_type()) +G_GNUC_INTERNAL +GType gst_aja_sdi_mode_get_type(void); + +typedef enum { + GST_AJA_VIDEO_FORMAT_INVALID = -1, // TODO: Implement: GST_AJA_VIDEO_FORMAT_AUTO, GST_AJA_VIDEO_FORMAT_1080i_5000, GST_AJA_VIDEO_FORMAT_1080i_5994, @@ -199,6 +203,22 @@ typedef enum { GST_AJA_VIDEO_FORMAT_525_5994, GST_AJA_VIDEO_FORMAT_525_2398, GST_AJA_VIDEO_FORMAT_525_2400, + GST_AJA_VIDEO_FORMAT_2160p_2398, + GST_AJA_VIDEO_FORMAT_2160p_2400, + GST_AJA_VIDEO_FORMAT_2160p_2500, + GST_AJA_VIDEO_FORMAT_2160p_2997, + GST_AJA_VIDEO_FORMAT_2160p_3000, + GST_AJA_VIDEO_FORMAT_2160p_5000, + GST_AJA_VIDEO_FORMAT_2160p_5994, + GST_AJA_VIDEO_FORMAT_2160p_6000, + GST_AJA_VIDEO_FORMAT_4320p_2398, + GST_AJA_VIDEO_FORMAT_4320p_2400, + GST_AJA_VIDEO_FORMAT_4320p_2500, + GST_AJA_VIDEO_FORMAT_4320p_2997, + GST_AJA_VIDEO_FORMAT_4320p_3000, + GST_AJA_VIDEO_FORMAT_4320p_5000, + GST_AJA_VIDEO_FORMAT_4320p_5994, + GST_AJA_VIDEO_FORMAT_4320p_6000, } GstAjaVideoFormat; #define GST_TYPE_AJA_VIDEO_FORMAT (gst_aja_video_format_get_type()) @@ -238,3 +258,31 @@ class ShmMutexLocker { ShmMutexLocker(); ~ShmMutexLocker(); }; + +G_GNUC_INTERNAL +GstCaps *gst_ntv2_supported_caps(NTV2DeviceID device_id); + +G_GNUC_INTERNAL +GstCaps *gst_ntv2_video_format_to_caps(NTV2VideoFormat format); +G_GNUC_INTERNAL +bool gst_video_info_from_ntv2_video_format(GstVideoInfo *info, + NTV2VideoFormat format); +G_GNUC_INTERNAL +NTV2VideoFormat gst_ntv2_video_format_from_caps(const GstCaps *caps, bool quad); + +G_GNUC_INTERNAL +GstCaps *gst_aja_video_format_to_caps(GstAjaVideoFormat format); +G_GNUC_INTERNAL +bool gst_video_info_from_aja_video_format(GstVideoInfo *info, + GstAjaVideoFormat format); +G_GNUC_INTERNAL +GstAjaVideoFormat gst_aja_video_format_from_caps(const GstCaps *caps); + +G_GNUC_INTERNAL +GstAjaVideoFormat gst_aja_video_format_from_ntv2_format(NTV2VideoFormat format); +G_GNUC_INTERNAL +NTV2VideoFormat gst_ntv2_video_format_from_aja_format(GstAjaVideoFormat format, + bool quad); + +G_GNUC_INTERNAL +bool gst_ntv2_video_format_is_quad(NTV2VideoFormat format); diff --git a/gstajasink.cpp b/gstajasink.cpp index 
84f786123b..d4008a8fb7 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -35,6 +35,7 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_sink_debug); #define DEFAULT_CHANNEL (::NTV2_CHANNEL1) #define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) #define DEFAULT_OUTPUT_DESTINATION (GST_AJA_OUTPUT_DESTINATION_AUTO) +#define DEFAULT_SDI_MODE (GST_AJA_SDI_MODE_SINGLE_LINK) #define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_AUTO) #define DEFAULT_QUEUE_SIZE (16) @@ -46,6 +47,7 @@ enum { PROP_CHANNEL, PROP_AUDIO_SYSTEM, PROP_OUTPUT_DESTINATION, + PROP_SDI_MODE, PROP_TIMECODE_INDEX, PROP_REFERENCE_SOURCE, PROP_QUEUE_SIZE, @@ -150,6 +152,14 @@ static void gst_aja_sink_class_init(GstAjaSinkClass *klass) { (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT))); + g_object_class_install_property( + gobject_class, PROP_SDI_MODE, + g_param_spec_enum( + "sdi-mode", "SDI Mode", "SDI mode to use", GST_TYPE_AJA_SDI_MODE, + DEFAULT_SDI_MODE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + g_object_class_install_property( gobject_class, PROP_TIMECODE_INDEX, g_param_spec_enum( @@ -241,6 +251,9 @@ void gst_aja_sink_set_property(GObject *object, guint property_id, self->output_destination = (GstAjaOutputDestination)g_value_get_enum(value); break; + case PROP_SDI_MODE: + self->sdi_mode = (GstAjaSdiMode)g_value_get_enum(value); + break; case PROP_TIMECODE_INDEX: self->timecode_index = (GstAjaTimecodeIndex)g_value_get_enum(value); break; @@ -276,6 +289,9 @@ void gst_aja_sink_get_property(GObject *object, guint property_id, case PROP_OUTPUT_DESTINATION: g_value_set_enum(value, self->output_destination); break; + case PROP_SDI_MODE: + g_value_set_enum(value, self->sdi_mode); + break; case PROP_TIMECODE_INDEX: g_value_set_enum(value, self->timecode_index); break; @@ -540,12 +556,14 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { gst_caps_replace(&self->configured_caps, caps); GST_OBJECT_UNLOCK(self); - video_format = gst_ntv2_video_format_from_caps(caps); + bool quad_mode = (self->sdi_mode != GST_AJA_SDI_MODE_SINGLE_LINK); + video_format = gst_ntv2_video_format_from_caps(caps, quad_mode); if (video_format == NTV2_FORMAT_UNKNOWN) { GST_ERROR_OBJECT(self, "Unsupported caps %" GST_PTR_FORMAT, caps); return FALSE; } + self->quad_mode = quad_mode; self->video_format = video_format; // Configure render delay based on the framerate and queue size @@ -568,11 +586,21 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { } self->device->device->SetMode(self->channel, NTV2_MODE_DISPLAY, false); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetMode((NTV2Channel)(self->channel + i), + NTV2_MODE_DISPLAY, false); + } GST_DEBUG_OBJECT(self, "Configuring video format %d on channel %d", (int)video_format, (int)self->channel); self->device->device->SetVideoFormat(video_format, false, false, self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetVideoFormat(video_format, false, false, + (NTV2Channel)(self->channel + i)); + } if (!::NTV2DeviceCanDoFrameBufferFormat(self->device_id, ::NTV2_FBF_10BIT_YCBCR)) { @@ -582,6 +610,11 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { } self->device->device->SetFrameBufferFormat(self->channel, ::NTV2_FBF_10BIT_YCBCR); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetFrameBufferFormat( + 
(NTV2Channel)(self->channel + i), ::NTV2_FBF_10BIT_YCBCR); + } NTV2ReferenceSource reference_source; switch (self->reference_source) { @@ -628,11 +661,25 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { GST_ERROR_OBJECT(self, "Failed to enable channel"); return FALSE; } + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + if (!self->device->device->EnableChannel( + (NTV2Channel)(self->channel + i))) { + GST_ERROR_OBJECT(self, "Failed to enable channel"); + return FALSE; + } + } + } self->device->device->DMABufferAutoLock(false, true, 0); if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) self->device->device->SetSDITransmitEnable(self->channel, true); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetSDITransmitEnable( + (NTV2Channel)(self->channel + i), true); + } if (self->configured_audio_channels) { switch (self->audio_system_setting) { @@ -683,6 +730,14 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { self->audio_system); self->device->device->SetSDIOutputDS2AudioSystem(self->channel, self->audio_system); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->SetSDIOutputAudioSystem( + (NTV2Channel)(self->channel + i), self->audio_system); + self->device->device->SetSDIOutputDS2AudioSystem( + (NTV2Channel)(self->channel + i), self->audio_system); + } + } self->device->device->SetAudioLoopBack(::NTV2_AUDIO_LOOPBACK_OFF, self->audio_system); } else { @@ -805,6 +860,11 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { const NTV2Standard standard(::GetNTV2StandardFromVideoFormat(video_format)); self->device->device->SetSDIOutputStandard(self->channel, standard); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetSDIOutputStandard( + (NTV2Channel)(self->channel + i), standard); + } const NTV2FrameGeometry geometry = ::GetNTV2FrameGeometryFromVideoFormat(video_format); @@ -813,12 +873,67 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { self->device->device->SetFrameGeometry(geometry, false, self->channel); self->device->device->SetVANCMode(self->vanc_mode, self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->SetFrameGeometry( + geometry, false, (NTV2Channel)(self->channel + i)); + self->device->device->SetVANCMode(self->vanc_mode, + (NTV2Channel)(self->channel + i)); + } + } } else { const NTV2FrameGeometry vanc_geometry = ::GetVANCFrameGeometry(geometry, self->vanc_mode); self->device->device->SetFrameGeometry(vanc_geometry, false, self->channel); self->device->device->SetVANCMode(self->vanc_mode, self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->SetFrameGeometry( + vanc_geometry, false, (NTV2Channel)(self->channel + i)); + self->device->device->SetVANCMode(self->vanc_mode, + (NTV2Channel)(self->channel + i)); + } + } + } + + if (self->quad_mode) { + switch (self->sdi_mode) { + case GST_AJA_SDI_MODE_SINGLE_LINK: + g_assert_not_reached(); + break; + case GST_AJA_SDI_MODE_QUAD_LINK_SQD: + if (self->configured_info.height > 2160) { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(true, self->channel); + self->device->device->SetQuadQuadSquaresEnable(true, self->channel); + } else { + 
self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + self->device->device->Set4kSquaresEnable(true, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + } + break; + case GST_AJA_SDI_MODE_QUAD_LINK_TSI: + if (self->configured_info.height > 2160) { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(true, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + } else { + self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(true, self->channel); + } + break; + } + } else { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); } NTV2SmpteLineNumber smpte_line_num_info = ::GetSmpteLineNumber(standard); @@ -834,16 +949,209 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { // Need to remove old routes for the output and framebuffer we're going to use NTV2ActualConnections connections = router.GetConnections(); - for (NTV2ActualConnectionsConstIter iter = connections.begin(); - iter != connections.end(); iter++) { - if (iter->first == output_destination_id || iter->second == framebuffer_id) - router.RemoveConnection(iter->first, iter->second); + if (self->quad_mode) { + if (self->channel == NTV2_CHANNEL1) { + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == NTV2_XptSDIOut1Input || + iter->first == NTV2_XptSDIOut1InputDS2 || + iter->first == NTV2_XptSDIOut2Input || + iter->first == NTV2_XptSDIOut2InputDS2 || + iter->first == NTV2_XptSDIOut3Input || + iter->first == NTV2_XptSDIOut4Input || + iter->second == NTV2_Xpt425Mux1AYUV || + iter->second == NTV2_Xpt425Mux1BYUV || + iter->second == NTV2_Xpt425Mux2AYUV || + iter->second == NTV2_Xpt425Mux2BYUV || + iter->first == NTV2_Xpt425Mux1AInput || + iter->first == NTV2_Xpt425Mux1BInput || + iter->first == NTV2_Xpt425Mux2AInput || + iter->first == NTV2_Xpt425Mux2BInput || + iter->second == NTV2_XptFrameBuffer1YUV || + iter->second == NTV2_XptFrameBuffer2YUV || + iter->second == NTV2_XptFrameBuffer3YUV || + iter->second == NTV2_XptFrameBuffer4YUV || + iter->second == NTV2_XptFrameBuffer1_DS2YUV || + iter->second == NTV2_XptFrameBuffer2_DS2YUV || + iter->first == NTV2_XptSDIOut1Input || + iter->first == NTV2_XptSDIOut2Input || + iter->first == NTV2_XptSDIOut3Input || + iter->first == NTV2_XptSDIOut4Input) + router.RemoveConnection(iter->first, iter->second); + } + } else if (self->channel == NTV2_CHANNEL5) { + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == NTV2_XptSDIOut5Input || + iter->first == NTV2_XptSDIOut5InputDS2 || + iter->first == NTV2_XptSDIOut6Input || + iter->first == NTV2_XptSDIOut6InputDS2 || + iter->first == NTV2_XptSDIOut7Input || + iter->first == NTV2_XptSDIOut8Input || + iter->second == NTV2_Xpt425Mux3AYUV || + iter->second == NTV2_Xpt425Mux3BYUV || + iter->second == NTV2_Xpt425Mux4AYUV || + iter->second == NTV2_Xpt425Mux4BYUV || + 
iter->first == NTV2_Xpt425Mux3AInput || + iter->first == NTV2_Xpt425Mux3BInput || + iter->first == NTV2_Xpt425Mux4AInput || + iter->first == NTV2_Xpt425Mux4BInput || + iter->second == NTV2_XptFrameBuffer5YUV || + iter->second == NTV2_XptFrameBuffer6YUV || + iter->second == NTV2_XptFrameBuffer7YUV || + iter->second == NTV2_XptFrameBuffer8YUV || + iter->second == NTV2_XptFrameBuffer3_DS2YUV || + iter->second == NTV2_XptFrameBuffer4_DS2YUV || + iter->second == NTV2_XptFrameBuffer5_DS2YUV || + iter->second == NTV2_XptFrameBuffer6_DS2YUV || + iter->first == NTV2_XptSDIOut5Input || + iter->first == NTV2_XptSDIOut6Input || + iter->first == NTV2_XptSDIOut7Input || + iter->first == NTV2_XptSDIOut8Input) + router.RemoveConnection(iter->first, iter->second); + } + } else { + g_assert_not_reached(); + } + } else { + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == output_destination_id || + iter->second == framebuffer_id) + router.RemoveConnection(iter->first, iter->second); + + if (((output_destination_id == NTV2_XptSDIOut6Input || + output_destination_id == NTV2_XptSDIOut8Input) && + iter->second == NTV2_XptFrameBuffer6_DS2YUV) || + ((output_destination_id == NTV2_XptSDIOut5Input || + output_destination_id == NTV2_XptSDIOut6Input) && + iter->second == NTV2_XptFrameBuffer5_DS2YUV) || + ((output_destination_id == NTV2_XptSDIOut2Input || + output_destination_id == NTV2_XptSDIOut4Input) && + iter->second == NTV2_XptFrameBuffer2_DS2YUV) || + ((output_destination_id == NTV2_XptSDIOut1Input || + output_destination_id == NTV2_XptSDIOut2Input) && + iter->second == NTV2_XptFrameBuffer1_DS2YUV)) + router.RemoveConnection(iter->first, iter->second); + } + } + + if (self->quad_mode) { + if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI && + !NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format) && + !NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) + framebuffer_id = NTV2_Xpt425Mux1AYUV; + else if (self->channel == NTV2_CHANNEL5) + framebuffer_id = NTV2_Xpt425Mux3AYUV; + else + g_assert_not_reached(); + } } GST_DEBUG_OBJECT(self, "Creating connection %d - %d", output_destination_id, framebuffer_id); router.AddConnection(output_destination_id, framebuffer_id); + if (self->quad_mode) { + if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI) { + if (NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptSDIOut2Input, + NTV2_XptFrameBuffer1_DS2YUV); + router.AddConnection(NTV2_XptSDIOut3Input, NTV2_XptFrameBuffer2YUV); + router.AddConnection(NTV2_XptSDIOut4Input, + NTV2_XptFrameBuffer2_DS2YUV); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptSDIOut6Input, + NTV2_XptFrameBuffer3_DS2YUV); + router.AddConnection(NTV2_XptSDIOut7Input, NTV2_XptFrameBuffer4YUV); + router.AddConnection(NTV2_XptSDIOut8Input, + NTV2_XptFrameBuffer4_DS2YUV); + } else { + g_assert_not_reached(); + } + } else if (NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptSDIOut1InputDS2, + NTV2_XptFrameBuffer1_DS2YUV); + router.AddConnection(NTV2_XptSDIOut2Input, NTV2_XptFrameBuffer2YUV); + router.AddConnection(NTV2_XptSDIOut2InputDS2, + NTV2_XptFrameBuffer2_DS2YUV); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptSDIOut5InputDS2, + NTV2_XptFrameBuffer3_DS2YUV); + router.AddConnection(NTV2_XptSDIOut6Input, NTV2_XptFrameBuffer4YUV); + 
router.AddConnection(NTV2_XptSDIOut6InputDS2, + NTV2_XptFrameBuffer4_DS2YUV); + } else { + g_assert_not_reached(); + } + } else if (NTV2_IS_4K_HFR_VIDEO_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptSDIOut2Input, NTV2_Xpt425Mux1BYUV); + router.AddConnection(NTV2_XptSDIOut3Input, NTV2_Xpt425Mux2AYUV); + router.AddConnection(NTV2_XptSDIOut4Input, NTV2_Xpt425Mux2BYUV); + + router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptFrameBuffer1YUV); + router.AddConnection(NTV2_Xpt425Mux1BInput, + NTV2_XptFrameBuffer1_DS2YUV); + router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptFrameBuffer2YUV); + router.AddConnection(NTV2_Xpt425Mux2BInput, + NTV2_XptFrameBuffer2_DS2YUV); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptSDIOut6Input, NTV2_Xpt425Mux3BYUV); + router.AddConnection(NTV2_XptSDIOut7Input, NTV2_Xpt425Mux4AYUV); + router.AddConnection(NTV2_XptSDIOut8Input, NTV2_Xpt425Mux4BYUV); + + router.AddConnection(NTV2_Xpt425Mux3AInput, NTV2_XptFrameBuffer5YUV); + router.AddConnection(NTV2_Xpt425Mux3BInput, + NTV2_XptFrameBuffer5_DS2YUV); + router.AddConnection(NTV2_Xpt425Mux4AInput, NTV2_XptFrameBuffer6YUV); + router.AddConnection(NTV2_Xpt425Mux4BInput, + NTV2_XptFrameBuffer6_DS2YUV); + } else { + g_assert_not_reached(); + } + } else { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptSDIOut1InputDS2, NTV2_Xpt425Mux1BYUV); + router.AddConnection(NTV2_XptSDIOut2Input, NTV2_Xpt425Mux2AYUV); + router.AddConnection(NTV2_XptSDIOut2InputDS2, NTV2_Xpt425Mux2BYUV); + + router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptFrameBuffer1YUV); + router.AddConnection(NTV2_Xpt425Mux1BInput, + NTV2_XptFrameBuffer1_DS2YUV); + router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptFrameBuffer2YUV); + router.AddConnection(NTV2_Xpt425Mux2BInput, + NTV2_XptFrameBuffer2_DS2YUV); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptSDIOut5InputDS2, NTV2_Xpt425Mux3BYUV); + router.AddConnection(NTV2_XptSDIOut6Input, NTV2_Xpt425Mux4AYUV); + router.AddConnection(NTV2_XptSDIOut6InputDS2, NTV2_Xpt425Mux4BYUV); + + router.AddConnection(NTV2_Xpt425Mux3AInput, NTV2_XptFrameBuffer5YUV); + router.AddConnection(NTV2_Xpt425Mux3BInput, + NTV2_XptFrameBuffer5_DS2YUV); + router.AddConnection(NTV2_Xpt425Mux4AInput, NTV2_XptFrameBuffer6YUV); + router.AddConnection(NTV2_Xpt425Mux4BInput, + NTV2_XptFrameBuffer6_DS2YUV); + } else { + g_assert_not_reached(); + } + } + } else if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_SQD) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptSDIOut2Input, NTV2_XptFrameBuffer2YUV); + router.AddConnection(NTV2_XptSDIOut3Input, NTV2_XptFrameBuffer3YUV); + router.AddConnection(NTV2_XptSDIOut4Input, NTV2_XptFrameBuffer4YUV); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptSDIOut6Input, NTV2_XptFrameBuffer6YUV); + router.AddConnection(NTV2_XptSDIOut7Input, NTV2_XptFrameBuffer7YUV); + router.AddConnection(NTV2_XptSDIOut8Input, NTV2_XptFrameBuffer8YUV); + } else { + g_assert_not_reached(); + } + } + } + { std::stringstream os; CNTV2SignalRouter oldRouter; @@ -1573,8 +1881,8 @@ restart: // Trivial drift calculation // - // TODO: Should probably take averages over a timespan (say 1 minute) into - // a ringbuffer and calculate a linear regression over them + // TODO: Should probably take averages over a timespan (say 1 minute) + // into a ringbuffer and calculate a linear regression over them // FIXME: Add some compensation by 
dropping/duplicating frames as needed // but make this configurable if (frames_rendered_start_time == GST_CLOCK_TIME_NONE && diff --git a/gstajasink.h b/gstajasink.h index 9106e267c3..1cc8e237b2 100644 --- a/gstajasink.h +++ b/gstajasink.h @@ -73,11 +73,13 @@ struct _GstAjaSink { GstAjaAudioSystem audio_system_setting; GstAjaOutputDestination output_destination; + GstAjaSdiMode sdi_mode; GstAjaTimecodeIndex timecode_index; GstAjaReferenceSource reference_source; NTV2AudioSystem audio_system; NTV2VideoFormat video_format; + bool quad_mode; NTV2VANCMode vanc_mode; guint32 f2_start_line; NTV2TCIndexes *tc_indexes; diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 7bbf797db0..02cafd64fc 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -37,6 +37,7 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); #define DEFAULT_VIDEO_FORMAT (GST_AJA_VIDEO_FORMAT_1080i_5000) #define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) #define DEFAULT_INPUT_SOURCE (GST_AJA_INPUT_SOURCE_AUTO) +#define DEFAULT_SDI_MODE (GST_AJA_SDI_MODE_SINGLE_LINK) #define DEFAULT_AUDIO_SOURCE (GST_AJA_AUDIO_SOURCE_EMBEDDED) #define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_FREERUN) @@ -50,6 +51,7 @@ enum { PROP_VIDEO_FORMAT, PROP_AUDIO_SYSTEM, PROP_INPUT_SOURCE, + PROP_SDI_MODE, PROP_AUDIO_SOURCE, PROP_TIMECODE_INDEX, PROP_REFERENCE_SOURCE, @@ -159,6 +161,14 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) { (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT))); + g_object_class_install_property( + gobject_class, PROP_SDI_MODE, + g_param_spec_enum( + "sdi-input-mode", "SDI Input Mode", "SDI input mode to use", + GST_TYPE_AJA_SDI_MODE, DEFAULT_SDI_MODE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + g_object_class_install_property( gobject_class, PROP_AUDIO_SOURCE, g_param_spec_enum( @@ -262,6 +272,9 @@ void gst_aja_src_set_property(GObject *object, guint property_id, case PROP_INPUT_SOURCE: self->input_source = (GstAjaInputSource)g_value_get_enum(value); break; + case PROP_SDI_MODE: + self->sdi_mode = (GstAjaSdiMode)g_value_get_enum(value); + break; case PROP_AUDIO_SOURCE: self->audio_source = (GstAjaAudioSource)g_value_get_enum(value); break; @@ -303,6 +316,9 @@ void gst_aja_src_get_property(GObject *object, guint property_id, GValue *value, case PROP_INPUT_SOURCE: g_value_set_enum(value, self->input_source); break; + case PROP_SDI_MODE: + g_value_set_enum(value, self->sdi_mode); + break; case PROP_AUDIO_SOURCE: g_value_set_enum(value, self->audio_source); break; @@ -400,71 +416,21 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { // global shared state ShmMutexLocker locker; - switch (self->video_format_setting) { - // TODO: GST_AJA_VIDEO_FORMAT_AUTO - case GST_AJA_VIDEO_FORMAT_1080i_5000: - self->video_format = ::NTV2_FORMAT_1080i_5000; - break; - case GST_AJA_VIDEO_FORMAT_1080i_5994: - self->video_format = ::NTV2_FORMAT_1080i_5994; - break; - case GST_AJA_VIDEO_FORMAT_1080i_6000: - self->video_format = ::NTV2_FORMAT_1080i_6000; - break; - case GST_AJA_VIDEO_FORMAT_720p_5994: - self->video_format = ::NTV2_FORMAT_720p_5994; - break; - case GST_AJA_VIDEO_FORMAT_720p_6000: - self->video_format = ::NTV2_FORMAT_720p_6000; - break; - case GST_AJA_VIDEO_FORMAT_1080p_2997: - self->video_format = ::NTV2_FORMAT_1080p_2997; - break; - case GST_AJA_VIDEO_FORMAT_1080p_3000: - self->video_format = ::NTV2_FORMAT_1080p_3000; - break; - case GST_AJA_VIDEO_FORMAT_1080p_2500: - self->video_format = 
::NTV2_FORMAT_1080p_2500; - break; - case GST_AJA_VIDEO_FORMAT_1080p_2398: - self->video_format = ::NTV2_FORMAT_1080p_2398; - break; - case GST_AJA_VIDEO_FORMAT_1080p_2400: - self->video_format = ::NTV2_FORMAT_1080p_2400; - break; - case GST_AJA_VIDEO_FORMAT_720p_5000: - self->video_format = ::NTV2_FORMAT_720p_5000; - break; - case GST_AJA_VIDEO_FORMAT_720p_2398: - self->video_format = ::NTV2_FORMAT_720p_2398; - break; - case GST_AJA_VIDEO_FORMAT_720p_2500: - self->video_format = ::NTV2_FORMAT_720p_2500; - break; - case GST_AJA_VIDEO_FORMAT_1080p_5000_A: - self->video_format = ::NTV2_FORMAT_1080p_5000_A; - break; - case GST_AJA_VIDEO_FORMAT_1080p_5994_A: - self->video_format = ::NTV2_FORMAT_1080p_5994_A; - break; - case GST_AJA_VIDEO_FORMAT_1080p_6000_A: - self->video_format = ::NTV2_FORMAT_1080p_6000_A; - break; - case GST_AJA_VIDEO_FORMAT_625_5000: - self->video_format = ::NTV2_FORMAT_625_5000; - break; - case GST_AJA_VIDEO_FORMAT_525_5994: - self->video_format = ::NTV2_FORMAT_525_5994; - break; - case GST_AJA_VIDEO_FORMAT_525_2398: - self->video_format = ::NTV2_FORMAT_525_2398; - break; - case GST_AJA_VIDEO_FORMAT_525_2400: - self->video_format = ::NTV2_FORMAT_525_2400; - break; - default: - g_assert_not_reached(); - break; +#define NEEDS_QUAD_MODE(self) \ + (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_SQD || \ + self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI || \ + (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && \ + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4)) + + self->quad_mode = NEEDS_QUAD_MODE(self); + self->video_format = gst_ntv2_video_format_from_aja_format( + self->video_format_setting, self->quad_mode); + +#undef NEEDS_QUAD_MODE + + if (self->video_format == NTV2_FORMAT_UNKNOWN) { + GST_ERROR_OBJECT(self, "Unsupported mode"); + return FALSE; } if (!::NTV2DeviceCanDoVideoFormat(self->device_id, self->video_format)) { @@ -473,11 +439,78 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { return FALSE; } + if (self->quad_mode) { + if (self->channel != ::NTV2_CHANNEL1 && + self->channel != ::NTV2_CHANNEL5) { + GST_ERROR_OBJECT(self, "Quad modes require channels 1 or 5"); + return FALSE; + } + } + gst_clear_caps(&self->configured_caps); - self->configured_caps = gst_ntv2_video_format_to_caps(self->video_format); - gst_video_info_from_caps(&self->configured_info, self->configured_caps); + gst_video_info_from_ntv2_video_format(&self->configured_info, + self->video_format); + self->configured_caps = gst_video_info_to_caps(&self->configured_info); + + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + self->device->device->Set4kSquaresEnable(true, self->channel); + self->device->device->SetTsiFrameEnable(true, self->channel); + } else { + switch (self->sdi_mode) { + case GST_AJA_SDI_MODE_SINGLE_LINK: + g_assert_not_reached(); + break; + case GST_AJA_SDI_MODE_QUAD_LINK_SQD: + if (self->configured_info.height > 2160) { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(true, self->channel); + self->device->device->SetQuadQuadSquaresEnable(true, + self->channel); + } else { + self->device->device->SetQuadQuadFrameEnable(false, + self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, + self->channel); + 
self->device->device->Set4kSquaresEnable(true, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + } + break; + case GST_AJA_SDI_MODE_QUAD_LINK_TSI: + if (self->configured_info.height > 2160) { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(true, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, + self->channel); + } else { + self->device->device->SetQuadQuadFrameEnable(false, + self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, + self->channel); + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(true, self->channel); + } + break; + } + } + } else { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + } self->device->device->SetMode(self->channel, NTV2_MODE_CAPTURE, false); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetMode((NTV2Channel)(self->channel + i), + NTV2_MODE_CAPTURE, false); + } GST_DEBUG_OBJECT(self, "Configuring video format %d on channel %d", (int)self->video_format, (int)self->channel); @@ -492,11 +525,22 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { } self->device->device->SetFrameBufferFormat(self->channel, ::NTV2_FBF_10BIT_YCBCR); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetFrameBufferFormat( + (NTV2Channel)(self->channel + i), ::NTV2_FBF_10BIT_YCBCR); + } self->device->device->DMABufferAutoLock(false, true, 0); - if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) + if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) { self->device->device->SetSDITransmitEnable(self->channel, false); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetSDITransmitEnable( + (NTV2Channel)(self->channel + i), false); + } + } // Always use the framebuffer associated with the channel NTV2InputCrosspointID framebuffer_id = @@ -591,6 +635,12 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { const NTV2Standard standard( ::GetNTV2StandardFromVideoFormat(self->video_format)); self->device->device->SetStandard(standard, self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetStandard(standard, + (NTV2Channel)(self->channel + i)); + } + const NTV2FrameGeometry geometry = ::GetNTV2FrameGeometryFromVideoFormat(self->video_format); @@ -599,6 +649,15 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { self->device->device->SetFrameGeometry(geometry, false, self->channel); self->device->device->SetVANCMode(self->vanc_mode, self->channel); + + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->SetFrameGeometry( + geometry, false, (NTV2Channel)(self->channel + i)); + self->device->device->SetVANCMode(self->vanc_mode, + (NTV2Channel)(self->channel + i)); + } + } } else { const NTV2FrameGeometry vanc_geometry = ::GetVANCFrameGeometry(geometry, self->vanc_mode); @@ -606,6 +665,15 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { self->device->device->SetFrameGeometry(vanc_geometry, false, self->channel); self->device->device->SetVANCMode(self->vanc_mode, self->channel); + + if 
(self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->SetFrameGeometry( + vanc_geometry, false, (NTV2Channel)(self->channel + i)); + self->device->device->SetVANCMode(self->vanc_mode, + (NTV2Channel)(self->channel + i)); + } + } } CNTV2SignalRouter router; @@ -616,16 +684,225 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { // use NTV2ActualConnections connections = router.GetConnections(); - for (NTV2ActualConnectionsConstIter iter = connections.begin(); - iter != connections.end(); iter++) { - if (iter->first == framebuffer_id || iter->second == input_source_id) - router.RemoveConnection(iter->first, iter->second); + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + // Need to disconnect the 4 inputs corresponding to this channel from + // their framebuffers/muxers, and muxers from their framebuffers + for (auto iter = connections.begin(); iter != connections.end(); + iter++) { + if (iter->first == NTV2_XptFrameBuffer1Input || + iter->first == NTV2_XptFrameBuffer1BInput || + iter->first == NTV2_XptFrameBuffer2Input || + iter->first == NTV2_XptFrameBuffer2BInput || + iter->second == NTV2_Xpt425Mux1AYUV || + iter->second == NTV2_Xpt425Mux1BYUV || + iter->second == NTV2_Xpt425Mux2AYUV || + iter->second == NTV2_Xpt425Mux2BYUV || + iter->first == NTV2_Xpt425Mux1AInput || + iter->first == NTV2_Xpt425Mux1BInput || + iter->first == NTV2_Xpt425Mux2AInput || + iter->first == NTV2_Xpt425Mux2BInput || + iter->second == NTV2_XptHDMIIn1 || + iter->second == NTV2_XptHDMIIn1Q2 || + iter->second == NTV2_XptHDMIIn1Q3 || + iter->second == NTV2_XptHDMIIn1Q4) + router.RemoveConnection(iter->first, iter->second); + } + } else if (self->channel == NTV2_CHANNEL1) { + for (auto iter = connections.begin(); iter != connections.end(); + iter++) { + if (iter->first == NTV2_XptFrameBuffer1Input || + iter->first == NTV2_XptFrameBuffer1BInput || + iter->first == NTV2_XptFrameBuffer1DS2Input || + iter->first == NTV2_XptFrameBuffer2Input || + iter->first == NTV2_XptFrameBuffer2BInput || + iter->first == NTV2_XptFrameBuffer2DS2Input || + iter->second == NTV2_Xpt425Mux1AYUV || + iter->second == NTV2_Xpt425Mux1BYUV || + iter->second == NTV2_Xpt425Mux2AYUV || + iter->second == NTV2_Xpt425Mux2BYUV || + iter->first == NTV2_Xpt425Mux1AInput || + iter->first == NTV2_Xpt425Mux1BInput || + iter->first == NTV2_Xpt425Mux2AInput || + iter->first == NTV2_Xpt425Mux2BInput || + iter->second == NTV2_XptSDIIn1 || + iter->second == NTV2_XptSDIIn2 || + iter->second == NTV2_XptSDIIn3 || + iter->second == NTV2_XptSDIIn4 || + iter->second == NTV2_XptSDIIn1DS2 || + iter->second == NTV2_XptSDIIn2DS2 || + iter->first == NTV2_XptFrameBuffer1Input || + iter->first == NTV2_XptFrameBuffer2Input || + iter->first == NTV2_XptFrameBuffer3Input || + iter->first == NTV2_XptFrameBuffer4Input) + router.RemoveConnection(iter->first, iter->second); + } + } else if (self->channel == NTV2_CHANNEL5) { + for (auto iter = connections.begin(); iter != connections.end(); + iter++) { + if (iter->first == NTV2_XptFrameBuffer5Input || + iter->first == NTV2_XptFrameBuffer5BInput || + iter->first == NTV2_XptFrameBuffer5DS2Input || + iter->first == NTV2_XptFrameBuffer6Input || + iter->first == NTV2_XptFrameBuffer6BInput || + iter->first == NTV2_XptFrameBuffer6DS2Input || + iter->second == NTV2_Xpt425Mux3AYUV || + iter->second == NTV2_Xpt425Mux3BYUV || + iter->second == NTV2_Xpt425Mux4AYUV || + iter->second == NTV2_Xpt425Mux4BYUV || + iter->first == 
NTV2_Xpt425Mux3AInput || + iter->first == NTV2_Xpt425Mux3BInput || + iter->first == NTV2_Xpt425Mux4AInput || + iter->first == NTV2_Xpt425Mux4BInput || + iter->second == NTV2_XptSDIIn5 || + iter->second == NTV2_XptSDIIn6 || + iter->second == NTV2_XptSDIIn7 || + iter->second == NTV2_XptSDIIn8 || + iter->second == NTV2_XptSDIIn5DS2 || + iter->second == NTV2_XptSDIIn6DS2 || + iter->first == NTV2_XptFrameBuffer5Input || + iter->first == NTV2_XptFrameBuffer6Input || + iter->first == NTV2_XptFrameBuffer7Input || + iter->first == NTV2_XptFrameBuffer8Input) + router.RemoveConnection(iter->first, iter->second); + } + } else { + g_assert_not_reached(); + } + } else { + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == framebuffer_id || iter->second == input_source_id) + router.RemoveConnection(iter->first, iter->second); + + if (((input_source_id == NTV2_XptSDIIn6 || + input_source_id == NTV2_XptSDIIn8) && + iter->first == NTV2_XptFrameBuffer6BInput) || + ((input_source_id == NTV2_XptSDIIn5 || + input_source_id == NTV2_XptSDIIn6) && + iter->first == NTV2_XptFrameBuffer5BInput) || + ((input_source_id == NTV2_XptSDIIn4 || + input_source_id == NTV2_XptSDIIn2) && + iter->first == NTV2_XptFrameBuffer2BInput) || + ((input_source_id == NTV2_XptSDIIn1 || + input_source_id == NTV2_XptSDIIn2) && + iter->first == NTV2_XptFrameBuffer1BInput)) + router.RemoveConnection(iter->first, iter->second); + } + } + + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + input_source_id = NTV2_Xpt425Mux1AYUV; + } else if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI && + !NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format) && + !NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) + input_source_id = NTV2_Xpt425Mux1AYUV; + else if (self->channel == NTV2_CHANNEL5) + input_source_id = NTV2_Xpt425Mux3AYUV; + else + g_assert_not_reached(); + } } GST_DEBUG_OBJECT(self, "Creating connection %d - %d", framebuffer_id, input_source_id); router.AddConnection(framebuffer_id, input_source_id); + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + router.AddConnection(NTV2_XptFrameBuffer1BInput, NTV2_Xpt425Mux1BYUV); + router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_Xpt425Mux2AYUV); + router.AddConnection(NTV2_XptFrameBuffer2BInput, NTV2_Xpt425Mux2BYUV); + + router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptHDMIIn1); + router.AddConnection(NTV2_Xpt425Mux1BInput, NTV2_XptHDMIIn1Q2); + router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptHDMIIn1Q3); + router.AddConnection(NTV2_Xpt425Mux2BInput, NTV2_XptHDMIIn1Q4); + } else { + if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI) { + if (NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptFrameBuffer1DS2Input, + NTV2_XptSDIIn2); + router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn3); + router.AddConnection(NTV2_XptFrameBuffer2DS2Input, + NTV2_XptSDIIn4); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptFrameBuffer5DS2Input, + NTV2_XptSDIIn6); + router.AddConnection(NTV2_XptFrameBuffer5Input, NTV2_XptSDIIn7); + router.AddConnection(NTV2_XptFrameBuffer6DS2Input, + NTV2_XptSDIIn8); + } else { + g_assert_not_reached(); + } + } else if (NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { 
+ router.AddConnection(NTV2_XptFrameBuffer1DS2Input, + NTV2_XptSDIIn1DS2); + router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn2); + router.AddConnection(NTV2_XptFrameBuffer2DS2Input, + NTV2_XptSDIIn2DS2); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptFrameBuffer5DS2Input, + NTV2_XptSDIIn5DS2); + router.AddConnection(NTV2_XptFrameBuffer5Input, NTV2_XptSDIIn6); + router.AddConnection(NTV2_XptFrameBuffer6DS2Input, + NTV2_XptSDIIn6DS2); + } else { + g_assert_not_reached(); + } + // FIXME: Need special handling of NTV2_IS_4K_HFR_VIDEO_FORMAT for + // TSI? + } else { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptFrameBuffer1BInput, + NTV2_Xpt425Mux1BYUV); + router.AddConnection(NTV2_XptFrameBuffer2Input, + NTV2_Xpt425Mux2AYUV); + router.AddConnection(NTV2_XptFrameBuffer2BInput, + NTV2_Xpt425Mux2BYUV); + + router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptSDIIn1); + router.AddConnection(NTV2_Xpt425Mux1BInput, NTV2_XptSDIIn2); + router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptSDIIn3); + router.AddConnection(NTV2_Xpt425Mux2BInput, NTV2_XptSDIIn4); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptFrameBuffer5BInput, + NTV2_Xpt425Mux3BYUV); + router.AddConnection(NTV2_XptFrameBuffer6Input, + NTV2_Xpt425Mux4AYUV); + router.AddConnection(NTV2_XptFrameBuffer6BInput, + NTV2_Xpt425Mux4BYUV); + + router.AddConnection(NTV2_Xpt425Mux3AInput, NTV2_XptSDIIn5); + router.AddConnection(NTV2_Xpt425Mux3BInput, NTV2_XptSDIIn6); + router.AddConnection(NTV2_Xpt425Mux4AInput, NTV2_XptSDIIn7); + router.AddConnection(NTV2_Xpt425Mux4BInput, NTV2_XptSDIIn8); + } else { + g_assert_not_reached(); + } + } + } else { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn2); + router.AddConnection(NTV2_XptFrameBuffer3Input, NTV2_XptSDIIn3); + router.AddConnection(NTV2_XptFrameBuffer4Input, NTV2_XptSDIIn4); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptFrameBuffer6Input, NTV2_XptSDIIn6); + router.AddConnection(NTV2_XptFrameBuffer7Input, NTV2_XptSDIIn7); + router.AddConnection(NTV2_XptFrameBuffer8Input, NTV2_XptSDIIn8); + } else { + g_assert_not_reached(); + } + } + } + } + { std::stringstream os; CNTV2SignalRouter oldRouter; @@ -1194,6 +1471,17 @@ restart: goto out; } + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + if (!self->device->device->EnableChannel( + (NTV2Channel)(self->channel + i))) { + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to enable channel")); + goto out; + } + } + } + { // Make sure to globally lock here as the routing settings and others are // global shared state @@ -1228,6 +1516,13 @@ restart: NTV2VideoFormat current_video_format = self->device->device->GetInputVideoFormat( self->configured_input_source); + NTV2VideoFormat effective_video_format = self->video_format; + // Can't call this unconditionally as it also maps e.g. 
3840x2160p to 1080p + if (self->quad_mode) { + effective_video_format = + ::GetQuarterSizedVideoFormat(effective_video_format); + } + if (current_video_format == ::NTV2_FORMAT_UNKNOWN) { GST_DEBUG_OBJECT(self, "No signal, waiting"); g_mutex_unlock(&self->queue_lock); @@ -1240,11 +1535,14 @@ restart: } g_mutex_lock(&self->queue_lock); continue; - } else if (current_video_format != self->video_format) { + } else if (current_video_format != effective_video_format && + current_video_format != self->video_format) { // TODO: Handle GST_AJA_VIDEO_FORMAT_AUTO here GST_DEBUG_OBJECT(self, - "Different input format %u than configured %u, waiting", - current_video_format, self->video_format); + "Different input format %u than configured %u " + "(effective %u), waiting", + current_video_format, self->video_format, + effective_video_format); g_mutex_unlock(&self->queue_lock); self->device->device->WaitForInputVerticalInterrupt(self->channel); frames_dropped_last = G_MAXUINT64; diff --git a/gstajasrc.h b/gstajasrc.h index a8ae0e7111..cfd661b4da 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -64,6 +64,7 @@ struct _GstAjaSrc { NTV2Channel channel; GstAjaAudioSystem audio_system_setting; GstAjaVideoFormat video_format_setting; + GstAjaSdiMode sdi_mode; GstAjaInputSource input_source; GstAjaAudioSource audio_source; GstAjaTimecodeIndex timecode_index; @@ -73,6 +74,7 @@ struct _GstAjaSrc { NTV2AudioSystem audio_system; NTV2VideoFormat video_format; + bool quad_mode; NTV2VANCMode vanc_mode; NTV2InputSource configured_input_source; NTV2TCIndex tc_index; From 86920489f8b258d2272d204ce03cf7556e98f3aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 6 Jul 2021 14:22:32 +0300 Subject: [PATCH 21/73] Add support for 2/4/8k DCI modes --- gstajacommon.cpp | 88 ++++++++++++++++++++++++++++++++++++++++++++++++ gstajacommon.h | 24 +++++++++++++ 2 files changed, 112 insertions(+) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index fc71862b47..aebda17516 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -69,10 +69,29 @@ static const FormatMapEntry format_map[] = { NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_1080p_6000_A, NTV2_FORMAT_1080p_6000_A, NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_625_5000, NTV2_FORMAT_625_5000, NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_525_5994, NTV2_FORMAT_525_5994, NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_525_2398, NTV2_FORMAT_525_2398, NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_525_2400, NTV2_FORMAT_525_2400, NTV2_FORMAT_UNKNOWN}, + + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2398, NTV2_FORMAT_1080p_2K_2398, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2400, NTV2_FORMAT_1080p_2K_2400, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2500, NTV2_FORMAT_1080p_2K_2500, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2997, NTV2_FORMAT_1080p_2K_2997, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_3000, NTV2_FORMAT_1080p_2K_3000, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_5000_A, NTV2_FORMAT_1080p_2K_5000_A, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_5994_A, NTV2_FORMAT_1080p_2K_5994_A, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_6000_A, NTV2_FORMAT_1080p_2K_6000_A, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_2160p_2398, NTV2_FORMAT_3840x2160p_2398, NTV2_FORMAT_4x1920x1080p_2398}, {GST_AJA_VIDEO_FORMAT_2160p_2400, NTV2_FORMAT_3840x2160p_2400, @@ -89,6 +108,24 @@ static const FormatMapEntry format_map[] = { NTV2_FORMAT_4x1920x1080p_5994}, {GST_AJA_VIDEO_FORMAT_2160p_6000, 
NTV2_FORMAT_3840x2160p_6000, NTV2_FORMAT_4x1920x1080p_6000}, + + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2398, NTV2_FORMAT_4096x2160p_2398, + NTV2_FORMAT_4x2048x1080p_2398}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2400, NTV2_FORMAT_4096x2160p_2400, + NTV2_FORMAT_4x2048x1080p_2400}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2500, NTV2_FORMAT_4096x2160p_2500, + NTV2_FORMAT_4x2048x1080p_2500}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2997, NTV2_FORMAT_4096x2160p_2997, + NTV2_FORMAT_4x2048x1080p_2997}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_3000, NTV2_FORMAT_4096x2160p_3000, + NTV2_FORMAT_4x2048x1080p_3000}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_5000, NTV2_FORMAT_4096x2160p_5000, + NTV2_FORMAT_4x2048x1080p_5000}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_5994, NTV2_FORMAT_4096x2160p_5994, + NTV2_FORMAT_4x2048x1080p_5994}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_6000, NTV2_FORMAT_4096x2160p_6000, + NTV2_FORMAT_4x2048x1080p_6000}, + {GST_AJA_VIDEO_FORMAT_4320p_2398, NTV2_FORMAT_UNKNOWN, NTV2_FORMAT_4x3840x2160p_2398}, {GST_AJA_VIDEO_FORMAT_4320p_2400, NTV2_FORMAT_UNKNOWN, @@ -105,6 +142,23 @@ static const FormatMapEntry format_map[] = { NTV2_FORMAT_4x3840x2160p_5994}, {GST_AJA_VIDEO_FORMAT_4320p_6000, NTV2_FORMAT_UNKNOWN, NTV2_FORMAT_4x3840x2160p_6000}, + + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2398, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_2398}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2400, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_2400}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2500, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_2500}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2997, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_2997}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_3000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_3000}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_5000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_5000}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_5994, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_5994}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_6000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_6000}, }; GstCaps *gst_ntv2_supported_caps(NTV2DeviceID device_id) { @@ -735,10 +789,24 @@ GType gst_aja_video_format_get_type(void) { {GST_AJA_VIDEO_FORMAT_1080p_5000_A, "1080p-5000-a", "1080p 5000 A"}, {GST_AJA_VIDEO_FORMAT_1080p_5994_A, "1080p-5994-a", "1080p 5994 A"}, {GST_AJA_VIDEO_FORMAT_1080p_6000_A, "1080p-6000-a", "1080p 6000 A"}, + {GST_AJA_VIDEO_FORMAT_625_5000, "625-5000", "625 5000"}, {GST_AJA_VIDEO_FORMAT_525_5994, "525-5994", "525 5994"}, {GST_AJA_VIDEO_FORMAT_525_2398, "525-2398", "525 2398"}, {GST_AJA_VIDEO_FORMAT_525_2400, "525-2400", "525 2400"}, + + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2398, "1080p-dci-2398", "1080p DCI 2398"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2400, "1080p-dci-2400", "1080p DCI 2400"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2500, "1080p-dci-2500", "1080p DCI 2500"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2997, "1080p-dci-2997", "1080p DCI 2997"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_3000, "1080p-dci-3000", "1080p DCI 3000"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_5000_A, "1080p-dci-5000-a", + "1080p DCI 5000 A"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_5994_A, "1080p-dci-5994-a", + "1080p DCI 5994 A"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_6000_A, "1080p-dci-6000-a", + "1080p DCI 6000 A"}, + {GST_AJA_VIDEO_FORMAT_2160p_2398, "2160p-2398", "2160p 2398"}, {GST_AJA_VIDEO_FORMAT_2160p_2400, "2160p-2400", "2160p 2400"}, {GST_AJA_VIDEO_FORMAT_2160p_2500, "2160p-2500", "2160p 2500"}, @@ -747,6 +815,16 @@ GType gst_aja_video_format_get_type(void) { {GST_AJA_VIDEO_FORMAT_2160p_5000, "2160p-5000", "2160p 5000"}, {GST_AJA_VIDEO_FORMAT_2160p_5994, "2160p-5994", 
"2160p 5994"}, {GST_AJA_VIDEO_FORMAT_2160p_6000, "2160p-6000", "2160p 6000"}, + + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2398, "2160p-dci-2398", "2160p DCI 2398"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2400, "2160p-dci-2400", "2160p DCI 2400"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2500, "2160p-dci-2500", "2160p DCI 2500"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2997, "2160p-dci-2997", "2160p DCI 2997"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_3000, "2160p-dci-3000", "2160p DCI 3000"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_5000, "2160p-dci-5000", "2160p DCI 5000"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_5994, "2160p-dci-5994", "2160p DCI 5994"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_6000, "2160p-dci-6000", "2160p DCI 6000"}, + {GST_AJA_VIDEO_FORMAT_4320p_2398, "4320p-2398", "4320p 2398"}, {GST_AJA_VIDEO_FORMAT_4320p_2400, "4320p-2400", "4320p 2400"}, {GST_AJA_VIDEO_FORMAT_4320p_2500, "4320p-2500", "4320p 2500"}, @@ -755,6 +833,16 @@ GType gst_aja_video_format_get_type(void) { {GST_AJA_VIDEO_FORMAT_4320p_5000, "4320p-5000", "4320p 5000"}, {GST_AJA_VIDEO_FORMAT_4320p_5994, "4320p-5994", "4320p 5994"}, {GST_AJA_VIDEO_FORMAT_4320p_6000, "4320p-6000", "4320p 6000"}, + + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2398, "4320p-dci-2398", "4320p DCI 2398"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2400, "4320p-dci-2400", "4320p DCI 2400"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2500, "4320p-dci-2500", "4320p DCI 2500"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2997, "4320p-dci-2997", "4320p DCI 2997"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_3000, "4320p-dci-3000", "4320p DCI 3000"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_5000, "4320p-dci-5000", "4320p DCI 5000"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_5994, "4320p-dci-5994", "4320p DCI 5994"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_6000, "4320p-dci-6000", "4320p DCI 6000"}, + {0, NULL, NULL}}; if (g_once_init_enter(&id)) { diff --git a/gstajacommon.h b/gstajacommon.h index f64103a9b6..41fb588596 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -203,6 +203,14 @@ typedef enum { GST_AJA_VIDEO_FORMAT_525_5994, GST_AJA_VIDEO_FORMAT_525_2398, GST_AJA_VIDEO_FORMAT_525_2400, + GST_AJA_VIDEO_FORMAT_1080p_DCI_2398, + GST_AJA_VIDEO_FORMAT_1080p_DCI_2400, + GST_AJA_VIDEO_FORMAT_1080p_DCI_2500, + GST_AJA_VIDEO_FORMAT_1080p_DCI_2997, + GST_AJA_VIDEO_FORMAT_1080p_DCI_3000, + GST_AJA_VIDEO_FORMAT_1080p_DCI_5000_A, + GST_AJA_VIDEO_FORMAT_1080p_DCI_5994_A, + GST_AJA_VIDEO_FORMAT_1080p_DCI_6000_A, GST_AJA_VIDEO_FORMAT_2160p_2398, GST_AJA_VIDEO_FORMAT_2160p_2400, GST_AJA_VIDEO_FORMAT_2160p_2500, @@ -211,6 +219,14 @@ typedef enum { GST_AJA_VIDEO_FORMAT_2160p_5000, GST_AJA_VIDEO_FORMAT_2160p_5994, GST_AJA_VIDEO_FORMAT_2160p_6000, + GST_AJA_VIDEO_FORMAT_2160p_DCI_2398, + GST_AJA_VIDEO_FORMAT_2160p_DCI_2400, + GST_AJA_VIDEO_FORMAT_2160p_DCI_2500, + GST_AJA_VIDEO_FORMAT_2160p_DCI_2997, + GST_AJA_VIDEO_FORMAT_2160p_DCI_3000, + GST_AJA_VIDEO_FORMAT_2160p_DCI_5000, + GST_AJA_VIDEO_FORMAT_2160p_DCI_5994, + GST_AJA_VIDEO_FORMAT_2160p_DCI_6000, GST_AJA_VIDEO_FORMAT_4320p_2398, GST_AJA_VIDEO_FORMAT_4320p_2400, GST_AJA_VIDEO_FORMAT_4320p_2500, @@ -219,6 +235,14 @@ typedef enum { GST_AJA_VIDEO_FORMAT_4320p_5000, GST_AJA_VIDEO_FORMAT_4320p_5994, GST_AJA_VIDEO_FORMAT_4320p_6000, + GST_AJA_VIDEO_FORMAT_4320p_DCI_2398, + GST_AJA_VIDEO_FORMAT_4320p_DCI_2400, + GST_AJA_VIDEO_FORMAT_4320p_DCI_2500, + GST_AJA_VIDEO_FORMAT_4320p_DCI_2997, + GST_AJA_VIDEO_FORMAT_4320p_DCI_3000, + GST_AJA_VIDEO_FORMAT_4320p_DCI_5000, + GST_AJA_VIDEO_FORMAT_4320p_DCI_5994, + GST_AJA_VIDEO_FORMAT_4320p_DCI_6000, } GstAjaVideoFormat; #define GST_TYPE_AJA_VIDEO_FORMAT 
(gst_aja_video_format_get_type()) From 99c86891a46c2fcb6ed548aac6f3b3d2c0a2aa28 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 7 Jul 2021 10:31:38 +0300 Subject: [PATCH 22/73] Read colorimetry information from VPID and create caps from detected input format This does not implement automatic mode selection yet, for which it is necessary to change the routing at runtime based on the detected format. It is a first step into that direction though. --- gstajasrc.cpp | 121 ++++++++++++++++++++++++++++++++++++++++++-------- gstajasrc.h | 5 ++- 2 files changed, 105 insertions(+), 21 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 02cafd64fc..7b22fd4374 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -24,6 +24,7 @@ #include #include #include +#include #include "gstajacommon.h" #include "gstajasrc.h" @@ -72,6 +73,9 @@ typedef struct { GstBuffer *audio_buffer; GstBuffer *anc_buffer, *anc_buffer2; NTV2_RP188 tc; + + NTV2VideoFormat detected_format; + guint32 vpid; } QueueItem; static void gst_aja_src_set_property(GObject *object, guint property_id, @@ -447,10 +451,8 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { } } - gst_clear_caps(&self->configured_caps); gst_video_info_from_ntv2_video_format(&self->configured_info, self->video_format); - self->configured_caps = gst_video_info_to_caps(&self->configured_info); if (self->quad_mode) { if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && @@ -995,9 +997,6 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { self->device->device->SetEmbeddedAudioClock( ::NTV2_EMBEDDED_AUDIO_CLOCK_VIDEO_INPUT, self->audio_system); - gst_caps_set_simple(self->configured_caps, "audio-channels", G_TYPE_INT, - self->configured_audio_channels, NULL); - NTV2ReferenceSource reference_source; switch (self->reference_source) { case GST_AJA_REFERENCE_SOURCE_AUTO: @@ -1137,7 +1136,8 @@ static gboolean gst_aja_src_stop(GstAjaSrc *self) { } GST_OBJECT_LOCK(self); - gst_clear_caps(&self->configured_caps); + memset(&self->current_info, 0, sizeof(self->current_info)); + memset(&self->configured_info, 0, sizeof(self->configured_info)); self->configured_audio_channels = 0; GST_OBJECT_UNLOCK(self); @@ -1243,12 +1243,12 @@ static gboolean gst_aja_src_query(GstBaseSrc *bsrc, GstQuery *query) { switch (GST_QUERY_TYPE(query)) { case GST_QUERY_LATENCY: { - if (self->configured_caps) { + if (self->current_info.finfo && + self->current_info.finfo->format != GST_VIDEO_FORMAT_UNKNOWN) { GstClockTime min, max; - min = gst_util_uint64_scale_ceil(GST_SECOND, - 3 * self->configured_info.fps_d, - self->configured_info.fps_n); + min = gst_util_uint64_scale_ceil( + GST_SECOND, 3 * self->current_info.fps_d, self->current_info.fps_n); max = self->queue_size * min; gst_query_set_latency(query, TRUE, min, max); @@ -1421,8 +1421,77 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { // TODO: Add AFD/Bar meta - if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self))) { - gst_base_src_set_caps(GST_BASE_SRC_CAST(self), self->configured_caps); + bool caps_changed = false; + + CNTV2VPID vpid(item.vpid); + if (vpid.IsValid()) { + GstVideoInfo info; + + if (gst_video_info_from_ntv2_video_format(&info, item.detected_format)) { + switch (vpid.GetTransferCharacteristics()) { + default: + case NTV2_VPID_TC_SDR_TV: + // SDR is the default, do nothing here. 
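// Note (assumption about GStreamer defaults, not part of the patch):
// gst_video_info_from_ntv2_video_format() above leaves the colorimetry at
// the defaults chosen by gst_video_info_set_format() (typically BT.601 for
// SD, BT.709 for HD and BT.2020 for UHD), all of which imply an SDR
// transfer function, so only the HLG and PQ cases below need an explicit
// transfer override.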
+ break; + case NTV2_VPID_TC_HLG: + info.colorimetry.transfer = GST_VIDEO_TRANSFER_ARIB_STD_B67; + break; + case NTV2_VPID_TC_PQ: + info.colorimetry.transfer = GST_VIDEO_TRANSFER_SMPTE2084; + break; + } + + switch (vpid.GetColorimetry()) { + case NTV2_VPID_Color_Rec709: + info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT709; + info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT709; + break; + case NTV2_VPID_Color_UHDTV: + info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT2020; + info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020; + break; + default: + // Default handling + break; + } + + switch (vpid.GetRGBRange()) { + case NTV2_VPID_Range_Full: + info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255; + break; + case NTV2_VPID_Range_Narrow: + info.colorimetry.range = GST_VIDEO_COLOR_RANGE_16_235; + break; + } + + if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self)) || + !gst_video_info_is_equal(&info, &self->current_info)) { + self->current_info = info; + caps_changed = true; + } + } + } else { + GstVideoInfo info; + + if (gst_video_info_from_ntv2_video_format(&info, item.detected_format)) { + if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self)) || + !gst_video_info_is_equal(&info, &self->current_info)) { + self->current_info = info; + caps_changed = true; + } + } else if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self))) { + self->current_info = self->configured_info; + caps_changed = true; + } + } + + if (caps_changed) { + GstCaps *caps = gst_video_info_to_caps(&self->current_info); + gst_caps_set_simple(caps, "audio-channels", G_TYPE_INT, + self->configured_audio_channels, NULL); + GST_DEBUG_OBJECT(self, "Configuring caps %" GST_PTR_FORMAT, caps); + gst_base_src_set_caps(GST_BASE_SRC_CAST(self), caps); + gst_caps_unref(caps); } return flow_ret; @@ -1516,6 +1585,15 @@ restart: NTV2VideoFormat current_video_format = self->device->device->GetInputVideoFormat( self->configured_input_source); + + ULWord vpid_a = 0; + ULWord vpid_b = 0; + self->device->device->ReadSDIInVPID(self->channel, vpid_a, vpid_b); + + GST_DEBUG_OBJECT(self, + "Detected input video format %u with VPID %08x / %08x", + current_video_format, vpid_a, vpid_b); + NTV2VideoFormat effective_video_format = self->video_format; // Can't call this unconditionally as it also maps e.g. 3840x2160p to 1080p if (self->quad_mode) { @@ -1729,13 +1807,18 @@ restart: // TODO: Drift detection and compensation - QueueItem item = {.type = QUEUE_ITEM_TYPE_FRAME, - .capture_time = now_gst, - .video_buffer = video_buffer, - .audio_buffer = audio_buffer, - .anc_buffer = anc_buffer, - .anc_buffer2 = anc_buffer2, - .tc = time_code}; + QueueItem item = { + .type = QUEUE_ITEM_TYPE_FRAME, + .capture_time = now_gst, + .video_buffer = video_buffer, + .audio_buffer = audio_buffer, + .anc_buffer = anc_buffer, + .anc_buffer2 = anc_buffer2, + .tc = time_code, + .detected_format = + (self->quad_mode ? 
::GetQuadSizedVideoFormat(current_video_format) + : current_video_format), + .vpid = vpid_a}; while (gst_queue_array_get_length(self->queue) >= self->queue_size) { QueueItem *tmp = diff --git a/gstajasrc.h b/gstajasrc.h index cfd661b4da..dcfeeff1b1 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -79,8 +79,9 @@ struct _GstAjaSrc { NTV2InputSource configured_input_source; NTV2TCIndex tc_index; - GstCaps *configured_caps; - GstVideoInfo configured_info; + GstVideoInfo configured_info; // Based on properties + GstVideoInfo current_info; // Based on properties + stream metadata + gint configured_audio_channels; AJAThread *capture_thread; From fe13e5f211d2e5d01e65d0e45dc628f172f299fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 7 Jul 2021 12:28:38 +0300 Subject: [PATCH 23/73] Add support for AFD/Bar VANC in the source element and widescreen NTSC/PAL --- gstajacommon.cpp | 19 +++++++++++++- gstajasrc.cpp | 68 ++++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 84 insertions(+), 3 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index aebda17516..dadcbb7a50 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -174,7 +174,24 @@ GstCaps *gst_ntv2_supported_caps(NTV2DeviceID device_id) { ::NTV2DeviceCanDoVideoFormat(device_id, format.aja_format)) || (format.quad_format != NTV2_FORMAT_UNKNOWN && ::NTV2DeviceCanDoVideoFormat(device_id, format.quad_format))) { - gst_caps_append(caps, gst_aja_video_format_to_caps(format.gst_format)); + GstCaps *tmp = gst_aja_video_format_to_caps(format.gst_format); + + // Widescreen PAL/NTSC + if (format.gst_format == GST_AJA_VIDEO_FORMAT_525_2398 || + format.gst_format == GST_AJA_VIDEO_FORMAT_525_2400 || + format.gst_format == GST_AJA_VIDEO_FORMAT_525_5994) { + GstCaps *tmp2 = gst_caps_copy(tmp); + gst_caps_set_simple(tmp2, "pixel-aspect-ratio", GST_TYPE_FRACTION, 40, + 33, NULL); + gst_caps_append(tmp, tmp2); + } else if (format.gst_format == GST_AJA_VIDEO_FORMAT_625_5000) { + GstCaps *tmp2 = gst_caps_copy(tmp); + gst_caps_set_simple(tmp2, "pixel-aspect-ratio", GST_TYPE_FRACTION, 16, + 11, NULL); + gst_caps_append(tmp, tmp2); + } + + gst_caps_append(caps, tmp); } } } diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 7b22fd4374..91c1e1295e 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1406,6 +1406,8 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { // // See AJA SDK support ticket #4844. 
guint32 n_vanc_packets = anc_packets.CountAncillaryData(); + bool aspect_ratio_flag = false; + bool have_afd_bar = false; for (guint32 i = 0; i < n_vanc_packets; i++) { AJAAncillaryData *packet = anc_packets.GetAncillaryDataAtIndex(i); @@ -1413,14 +1415,46 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { packet->GetSID() == AJAAncillaryData_CEA708_SID && packet->GetPayloadData() && packet->GetPayloadByteCount() && AJA_SUCCESS(packet->ParsePayloadData())) { + GST_TRACE_OBJECT( + self, "Found CEA708 CDP VANC of %" G_GSIZE_FORMAT " bytes at line %u", + packet->GetPayloadByteCount(), packet->GetLocationLineNumber()); gst_buffer_add_video_caption_meta( *buffer, GST_VIDEO_CAPTION_TYPE_CEA708_CDP, packet->GetPayloadData(), packet->GetPayloadByteCount()); + } else if (packet->GetDID() == 0x41 && packet->GetSID() == 0x05 && + packet->GetPayloadData() && packet->GetPayloadByteCount() == 8) { + const guint8 *data = packet->GetPayloadData(); + + have_afd_bar = true; + aspect_ratio_flag = (data[0] >> 2) & 0x1; + + GstVideoAFDValue afd = (GstVideoAFDValue)((data[0] >> 3) & 0xf); + gboolean is_letterbox = ((data[3] >> 4) & 0x3) == 0; + guint16 bar1 = GST_READ_UINT16_BE(&data[4]); + guint16 bar2 = GST_READ_UINT16_BE(&data[6]); + + GST_TRACE_OBJECT(self, + "Found AFD/Bar VANC at line %u: AR %u, AFD %u, " + "letterbox %u, bar1 %u, bar2 %u", + packet->GetLocationLineNumber(), aspect_ratio_flag, afd, + is_letterbox, bar1, bar2); + + const NTV2Standard standard( + ::GetNTV2StandardFromVideoFormat(item.detected_format)); + const NTV2SmpteLineNumber smpte_line_num_info = + ::GetSmpteLineNumber(standard); + bool field2 = + packet->GetLocationLineNumber() > + smpte_line_num_info.GetLastLine( + smpte_line_num_info.firstFieldTop ? NTV2_FIELD0 : NTV2_FIELD1); + + gst_buffer_add_video_afd_meta(*buffer, field2 ? 1 : 0, + GST_VIDEO_AFD_SPEC_SMPTE_ST2016_1, afd); + gst_buffer_add_video_bar_meta(*buffer, field2 ? 
1 : 0, is_letterbox, bar1, + bar2); } } - // TODO: Add AFD/Bar meta - bool caps_changed = false; CNTV2VPID vpid(item.vpid); @@ -1464,6 +1498,17 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { break; } + if (!have_afd_bar && vpid.GetImageAspect16x9()) aspect_ratio_flag = true; + + // Widescreen PAL/NTSC + if (aspect_ratio_flag && info.height == 486) { + info.par_n = 40; + info.par_d = 33; + } else if (aspect_ratio_flag && info.height == 576) { + info.par_n = 16; + info.par_d = 11; + } + if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self)) || !gst_video_info_is_equal(&info, &self->current_info)) { self->current_info = info; @@ -1474,6 +1519,15 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { GstVideoInfo info; if (gst_video_info_from_ntv2_video_format(&info, item.detected_format)) { + // Widescreen PAL/NTSC + if (aspect_ratio_flag && info.height == 486) { + info.par_n = 40; + info.par_d = 33; + } else if (aspect_ratio_flag && info.height == 576) { + info.par_n = 16; + info.par_d = 11; + } + if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self)) || !gst_video_info_is_equal(&info, &self->current_info)) { self->current_info = info; @@ -1481,6 +1535,16 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { } } else if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self))) { self->current_info = self->configured_info; + + // Widescreen PAL/NTSC + if (aspect_ratio_flag && self->current_info.height == 486) { + self->current_info.par_n = 40; + self->current_info.par_d = 33; + } else if (aspect_ratio_flag && self->current_info.height == 576) { + self->current_info.par_n = 16; + self->current_info.par_d = 11; + } + caps_changed = true; } } From 83eb812f063b24c2648652e877674d7eb9c7e47b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 18 Aug 2021 10:16:19 +0300 Subject: [PATCH 24/73] Wait for vsync after reporting signal loss --- gstajasrc.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 91c1e1295e..7ed1689609 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1668,13 +1668,13 @@ restart: if (current_video_format == ::NTV2_FORMAT_UNKNOWN) { GST_DEBUG_OBJECT(self, "No signal, waiting"); g_mutex_unlock(&self->queue_lock); - self->device->device->WaitForInputVerticalInterrupt(self->channel); frames_dropped_last = G_MAXUINT64; if (have_signal) { GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, ("Signal lost"), ("No input source was detected")); have_signal = FALSE; } + self->device->device->WaitForInputVerticalInterrupt(self->channel); g_mutex_lock(&self->queue_lock); continue; } else if (current_video_format != effective_video_format && @@ -1686,13 +1686,13 @@ restart: current_video_format, self->video_format, effective_video_format); g_mutex_unlock(&self->queue_lock); - self->device->device->WaitForInputVerticalInterrupt(self->channel); frames_dropped_last = G_MAXUINT64; if (have_signal) { GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, ("Signal lost"), ("Different input source was detected")); have_signal = FALSE; } + self->device->device->WaitForInputVerticalInterrupt(self->channel); g_mutex_lock(&self->queue_lock); continue; } From e116719597f452a95bea15a36d3bc625f017fccc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 18 Aug 2021 10:16:40 +0300 Subject: [PATCH 25/73] Report signal loss if the expected mode is reported but no frame was captured for more than 32 iterations --- 
gstajasrc.cpp | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 7ed1689609..fb27f7fa5e 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1567,6 +1567,7 @@ static void capture_thread_func(AJAThread *thread, void *data) { AUTOCIRCULATE_TRANSFER transfer; guint64 frames_dropped_last = G_MAXUINT64; gboolean have_signal = TRUE; + guint iterations_without_frame = 0; if (self->capture_cpu_core != G_MAXUINT) { cpu_set_t mask; @@ -1755,6 +1756,8 @@ restart: GstMapInfo anc_map2 = GST_MAP_INFO_INIT; AUTOCIRCULATE_TRANSFER transfer; + iterations_without_frame = 0; + if (gst_buffer_pool_acquire_buffer(self->buffer_pool, &video_buffer, NULL) != GST_FLOW_OK) { GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), @@ -1914,7 +1917,23 @@ restart: } else { g_mutex_unlock(&self->queue_lock); + + // If we don't have a frame for 32 iterations (512ms) then consider + // this as signal loss too even if the driver still reports the + // expected mode above + if (have_signal && iterations_without_frame < 32) { + iterations_without_frame++; + } else { + frames_dropped_last = G_MAXUINT64; + if (have_signal) { + GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, + ("Signal lost"), ("No frames captured")); + have_signal = FALSE; + } + } + self->device->device->WaitForInputVerticalInterrupt(self->channel); + g_mutex_lock(&self->queue_lock); } } From 4d95164c0ca21b495187de49223cf85230b1925e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 18 Aug 2021 10:50:15 +0300 Subject: [PATCH 26/73] In quad mode, stop autocirculate on the other channels before starting It might theoretically be running from single link mode from a previous run that was not cleaned up properly. --- gstajasrc.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index fb27f7fa5e..f3ad18aab0 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1622,6 +1622,12 @@ restart: ShmMutexLocker locker; self->device->device->AutoCirculateStop(self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->AutoCirculateStop( + (NTV2Channel)(self->channel + i)); + } + } self->device->device->EnableInputInterrupt(self->channel); self->device->device->SubscribeInputVerticalEvent(self->channel); From 08308c28a2f6649c79153730197c449003aa32f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 18 Aug 2021 11:36:24 +0300 Subject: [PATCH 27/73] Include detected video format in warning message if it is different from the configured one --- gstajasrc.cpp | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index f3ad18aab0..3223836d66 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1687,16 +1687,25 @@ restart: } else if (current_video_format != effective_video_format && current_video_format != self->video_format) { // TODO: Handle GST_AJA_VIDEO_FORMAT_AUTO here + + std::string current_string = + NTV2VideoFormatToString(current_video_format); + std::string configured_string = + NTV2VideoFormatToString(self->video_format); + std::string effective_string = + NTV2VideoFormatToString(effective_video_format); + GST_DEBUG_OBJECT(self, - "Different input format %u than configured %u " - "(effective %u), waiting", - current_video_format, self->video_format, - effective_video_format); + "Different input format %s than configured %s " + "(effective %s), waiting", + current_string.c_str(), configured_string.c_str(), + effective_string.c_str()); 
g_mutex_unlock(&self->queue_lock); frames_dropped_last = G_MAXUINT64; if (have_signal) { GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, ("Signal lost"), - ("Different input source was detected")); + ("Different input source (%s) was detected", + current_string.c_str())); have_signal = FALSE; } self->device->device->WaitForInputVerticalInterrupt(self->channel); From 02519523df0ee2e332adfbb71f289304910fbdc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 18 Aug 2021 12:22:28 +0300 Subject: [PATCH 28/73] Only report signal recovery after actually capturing a frame --- gstajasrc.cpp | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 3223836d66..91f8b2a013 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1713,12 +1713,6 @@ restart: continue; } - if (!have_signal) { - GST_ELEMENT_INFO(GST_ELEMENT(self), RESOURCE, READ, ("Signal recovered"), - ("Input source detected")); - have_signal = TRUE; - } - AUTOCIRCULATE_STATUS status; self->device->device->AutoCirculateGetStatus(self->channel, status); @@ -1771,6 +1765,12 @@ restart: GstMapInfo anc_map2 = GST_MAP_INFO_INIT; AUTOCIRCULATE_TRANSFER transfer; + if (!have_signal) { + GST_ELEMENT_INFO(GST_ELEMENT(self), RESOURCE, READ, ("Signal recovered"), + ("Input source detected")); + have_signal = TRUE; + } + iterations_without_frame = 0; if (gst_buffer_pool_acquire_buffer(self->buffer_pool, &video_buffer, From d03f733bbaeb12ff4d5d84efb302b58d168b7eab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 18 Aug 2021 12:33:52 +0300 Subject: [PATCH 29/73] Implement auto mode detection in ajasrc This also allows switching modes while running. --- gstajacommon.cpp | 2 +- gstajacommon.h | 2 +- gstajasrc.cpp | 1339 ++++++++++++++++++++++++---------------------- 3 files changed, 696 insertions(+), 647 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index dadcbb7a50..1c53bba0cf 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -789,7 +789,7 @@ GType gst_aja_sdi_mode_get_type(void) { GType gst_aja_video_format_get_type(void) { static gsize id = 0; static const GEnumValue modes[] = { - // TODO: Implement: {GST_AJA_VIDEO_FORMAT_AUTO, "auto", "Autodetect"}, + {GST_AJA_VIDEO_FORMAT_AUTO, "auto", "Auto detect format"}, {GST_AJA_VIDEO_FORMAT_1080i_5000, "1080i-5000", "1080i 5000"}, {GST_AJA_VIDEO_FORMAT_1080i_5994, "1080i-5994", "1080i 5994"}, {GST_AJA_VIDEO_FORMAT_1080i_6000, "1080i-6000", "1080i 6000"}, diff --git a/gstajacommon.h b/gstajacommon.h index 41fb588596..5f49bb8c16 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -182,7 +182,7 @@ GType gst_aja_sdi_mode_get_type(void); typedef enum { GST_AJA_VIDEO_FORMAT_INVALID = -1, - // TODO: Implement: GST_AJA_VIDEO_FORMAT_AUTO, + GST_AJA_VIDEO_FORMAT_AUTO, GST_AJA_VIDEO_FORMAT_1080i_5000, GST_AJA_VIDEO_FORMAT_1080i_5994, GST_AJA_VIDEO_FORMAT_1080i_6000, diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 91f8b2a013..1cb991f39a 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -34,7 +34,6 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); #define DEFAULT_DEVICE_IDENTIFIER ("0") #define DEFAULT_CHANNEL (::NTV2_CHANNEL1) -// TODO: GST_AJA_VIDEO_FORMAT_AUTO #define DEFAULT_VIDEO_FORMAT (GST_AJA_VIDEO_FORMAT_1080i_5000) #define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) #define DEFAULT_INPUT_SOURCE (GST_AJA_INPUT_SOURCE_AUTO) @@ -93,7 +92,6 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer); static gboolean gst_aja_src_open(GstAjaSrc *src); static 
gboolean gst_aja_src_close(GstAjaSrc *src); -static gboolean gst_aja_src_start(GstAjaSrc *src); static gboolean gst_aja_src_stop(GstAjaSrc *src); static GstStateChangeReturn gst_aja_src_change_state(GstElement *element, @@ -250,6 +248,8 @@ static void gst_aja_src_init(GstAjaSrc *self) { gst_queue_array_new_for_struct(sizeof(QueueItem), self->queue_size); gst_base_src_set_live(GST_BASE_SRC_CAST(self), TRUE); gst_base_src_set_format(GST_BASE_SRC_CAST(self), GST_FORMAT_TIME); + + self->video_format = NTV2_FORMAT_UNKNOWN; } void gst_aja_src_set_property(GObject *object, guint property_id, @@ -412,652 +412,686 @@ static gboolean gst_aja_src_close(GstAjaSrc *self) { return TRUE; } -static gboolean gst_aja_src_start(GstAjaSrc *self) { +// Must be called with ShmMutexLocker +static gboolean gst_aja_src_configure(GstAjaSrc *self) { GST_DEBUG_OBJECT(self, "Starting"); - { - // Make sure to globally lock here as the routing settings and others are - // global shared state - ShmMutexLocker locker; - #define NEEDS_QUAD_MODE(self) \ (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_SQD || \ self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI || \ (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && \ self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4)) - self->quad_mode = NEEDS_QUAD_MODE(self); - self->video_format = gst_ntv2_video_format_from_aja_format( - self->video_format_setting, self->quad_mode); + self->quad_mode = NEEDS_QUAD_MODE(self); #undef NEEDS_QUAD_MODE - if (self->video_format == NTV2_FORMAT_UNKNOWN) { - GST_ERROR_OBJECT(self, "Unsupported mode"); - return FALSE; + self->device->device->AutoCirculateStop(self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->AutoCirculateStop((NTV2Channel)(self->channel + i)); } + } - if (!::NTV2DeviceCanDoVideoFormat(self->device_id, self->video_format)) { - GST_ERROR_OBJECT(self, "Device does not support mode %d", - (int)self->video_format); - return FALSE; - } + if (self->buffer_pool) { + gst_buffer_pool_set_active(self->buffer_pool, FALSE); + gst_clear_object(&self->buffer_pool); + } - if (self->quad_mode) { - if (self->channel != ::NTV2_CHANNEL1 && - self->channel != ::NTV2_CHANNEL5) { - GST_ERROR_OBJECT(self, "Quad modes require channels 1 or 5"); + if (self->audio_buffer_pool) { + gst_buffer_pool_set_active(self->audio_buffer_pool, FALSE); + gst_clear_object(&self->audio_buffer_pool); + } + + if (self->anc_buffer_pool) { + gst_buffer_pool_set_active(self->anc_buffer_pool, FALSE); + gst_clear_object(&self->anc_buffer_pool); + } + + NTV2VANCMode vanc_mode; + NTV2InputSource input_source; + NTV2OutputCrosspointID input_source_id; + switch (self->input_source) { + case GST_AJA_INPUT_SOURCE_AUTO: + input_source = ::NTV2ChannelToInputSource(self->channel); + input_source_id = ::GetSDIInputOutputXptFromChannel(self->channel, false); + vanc_mode = ::NTV2DeviceCanDoCustomAnc(self->device_id) + ? 
::NTV2_VANCMODE_OFF + : ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_ANALOG1: + input_source = ::NTV2_INPUTSOURCE_ANALOG1; + input_source_id = ::NTV2_XptAnalogIn; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_HDMI1: + input_source = ::NTV2_INPUTSOURCE_HDMI1; + input_source_id = ::NTV2_XptHDMIIn1; + vanc_mode = ::NTV2_VANCMODE_OFF; + break; + case GST_AJA_INPUT_SOURCE_HDMI2: + input_source = ::NTV2_INPUTSOURCE_HDMI2; + input_source_id = ::NTV2_XptHDMIIn2; + vanc_mode = ::NTV2_VANCMODE_OFF; + break; + case GST_AJA_INPUT_SOURCE_HDMI3: + input_source = ::NTV2_INPUTSOURCE_HDMI3; + input_source_id = ::NTV2_XptHDMIIn3; + vanc_mode = ::NTV2_VANCMODE_OFF; + break; + case GST_AJA_INPUT_SOURCE_HDMI4: + input_source = ::NTV2_INPUTSOURCE_HDMI4; + input_source_id = ::NTV2_XptHDMIIn4; + vanc_mode = ::NTV2_VANCMODE_OFF; + break; + case GST_AJA_INPUT_SOURCE_SDI1: + input_source = ::NTV2_INPUTSOURCE_SDI1; + input_source_id = ::NTV2_XptSDIIn1; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI2: + input_source = ::NTV2_INPUTSOURCE_SDI2; + input_source_id = ::NTV2_XptSDIIn2; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI3: + input_source = ::NTV2_INPUTSOURCE_SDI3; + input_source_id = ::NTV2_XptSDIIn3; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI4: + input_source = ::NTV2_INPUTSOURCE_SDI4; + input_source_id = ::NTV2_XptSDIIn4; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI5: + input_source = ::NTV2_INPUTSOURCE_SDI5; + input_source_id = ::NTV2_XptSDIIn5; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI6: + input_source = ::NTV2_INPUTSOURCE_SDI6; + input_source_id = ::NTV2_XptSDIIn6; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI7: + input_source = ::NTV2_INPUTSOURCE_SDI7; + input_source_id = ::NTV2_XptSDIIn7; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI8: + input_source = ::NTV2_INPUTSOURCE_SDI8; + input_source_id = ::NTV2_XptSDIIn8; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + default: + g_assert_not_reached(); + break; + } + + self->configured_input_source = input_source; + + self->vanc_mode = vanc_mode; + + if (!self->device->device->EnableChannel(self->channel)) { + GST_ERROR_OBJECT(self, "Failed to enable channel"); + return FALSE; + } + + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + if (!self->device->device->EnableChannel( + (NTV2Channel)(self->channel + i))) { + GST_ERROR_OBJECT(self, "Failed to enable channel"); return FALSE; } } + } - gst_video_info_from_ntv2_video_format(&self->configured_info, - self->video_format); + self->device->device->EnableInputInterrupt(self->channel); + self->device->device->SubscribeInputVerticalEvent(self->channel); - if (self->quad_mode) { - if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && - self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { - self->device->device->SetQuadQuadFrameEnable(false, self->channel); - self->device->device->SetQuadQuadSquaresEnable(false, self->channel); - self->device->device->Set4kSquaresEnable(true, self->channel); - self->device->device->SetTsiFrameEnable(true, self->channel); - } else { - switch (self->sdi_mode) { - case GST_AJA_SDI_MODE_SINGLE_LINK: - g_assert_not_reached(); - break; - case GST_AJA_SDI_MODE_QUAD_LINK_SQD: - if (self->configured_info.height > 2160) { - self->device->device->Set4kSquaresEnable(false, self->channel); - 
self->device->device->SetTsiFrameEnable(false, self->channel); - self->device->device->SetQuadQuadFrameEnable(true, self->channel); - self->device->device->SetQuadQuadSquaresEnable(true, - self->channel); - } else { - self->device->device->SetQuadQuadFrameEnable(false, - self->channel); - self->device->device->SetQuadQuadSquaresEnable(false, - self->channel); - self->device->device->Set4kSquaresEnable(true, self->channel); - self->device->device->SetTsiFrameEnable(false, self->channel); - } - break; - case GST_AJA_SDI_MODE_QUAD_LINK_TSI: - if (self->configured_info.height > 2160) { - self->device->device->Set4kSquaresEnable(false, self->channel); - self->device->device->SetTsiFrameEnable(false, self->channel); - self->device->device->SetQuadQuadFrameEnable(true, self->channel); - self->device->device->SetQuadQuadSquaresEnable(false, - self->channel); - } else { - self->device->device->SetQuadQuadFrameEnable(false, - self->channel); - self->device->device->SetQuadQuadSquaresEnable(false, - self->channel); - self->device->device->Set4kSquaresEnable(false, self->channel); - self->device->device->SetTsiFrameEnable(true, self->channel); - } - break; - } - } - } else { - self->device->device->Set4kSquaresEnable(false, self->channel); - self->device->device->SetTsiFrameEnable(false, self->channel); - self->device->device->SetQuadQuadFrameEnable(false, self->channel); - self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + if (self->video_format_setting == GST_AJA_VIDEO_FORMAT_AUTO) { + self->device->device->WaitForInputVerticalInterrupt(self->channel, 10); + self->video_format = self->device->device->GetInputVideoFormat( + self->configured_input_source); + if (self->video_format == NTV2_FORMAT_UNKNOWN) { + GST_ERROR_OBJECT(self, "Input video format not detected"); + return TRUE; } + std::string configured_string = NTV2VideoFormatToString(self->video_format); + GST_DEBUG_OBJECT(self, "Detected input video format %s (%d)", + configured_string.c_str(), (int)self->video_format); + } else { + self->video_format = gst_ntv2_video_format_from_aja_format( + self->video_format_setting, self->quad_mode); + } - self->device->device->SetMode(self->channel, NTV2_MODE_CAPTURE, false); - if (self->quad_mode) { - for (int i = 1; i < 4; i++) - self->device->device->SetMode((NTV2Channel)(self->channel + i), - NTV2_MODE_CAPTURE, false); - } + if (self->video_format == NTV2_FORMAT_UNKNOWN) { + GST_ERROR_OBJECT(self, "Unsupported mode"); + return FALSE; + } - GST_DEBUG_OBJECT(self, "Configuring video format %d on channel %d", - (int)self->video_format, (int)self->channel); - self->device->device->SetVideoFormat(self->video_format, false, false, - self->channel); + if (!::NTV2DeviceCanDoVideoFormat(self->device_id, self->video_format)) { + GST_ERROR_OBJECT(self, "Device does not support mode %d", + (int)self->video_format); + return FALSE; + } - if (!::NTV2DeviceCanDoFrameBufferFormat(self->device_id, - ::NTV2_FBF_10BIT_YCBCR)) { - GST_ERROR_OBJECT(self, "Device does not support frame buffer format %d", - (int)::NTV2_FBF_10BIT_YCBCR); + if (self->quad_mode) { + if (self->channel != ::NTV2_CHANNEL1 && self->channel != ::NTV2_CHANNEL5) { + GST_ERROR_OBJECT(self, "Quad modes require channels 1 or 5"); return FALSE; } - self->device->device->SetFrameBufferFormat(self->channel, - ::NTV2_FBF_10BIT_YCBCR); - if (self->quad_mode) { - for (int i = 1; i < 4; i++) - self->device->device->SetFrameBufferFormat( - (NTV2Channel)(self->channel + i), ::NTV2_FBF_10BIT_YCBCR); - } + } - 
self->device->device->DMABufferAutoLock(false, true, 0); + gst_video_info_from_ntv2_video_format(&self->configured_info, + self->video_format); - if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) { - self->device->device->SetSDITransmitEnable(self->channel, false); - if (self->quad_mode) { - for (int i = 1; i < 4; i++) - self->device->device->SetSDITransmitEnable( - (NTV2Channel)(self->channel + i), false); - } - } - - // Always use the framebuffer associated with the channel - NTV2InputCrosspointID framebuffer_id = - ::GetFrameBufferInputXptFromChannel(self->channel, false); - - NTV2VANCMode vanc_mode; - NTV2InputSource input_source; - NTV2OutputCrosspointID input_source_id; - switch (self->input_source) { - case GST_AJA_INPUT_SOURCE_AUTO: - input_source = ::NTV2ChannelToInputSource(self->channel); - input_source_id = - ::GetSDIInputOutputXptFromChannel(self->channel, false); - vanc_mode = ::NTV2DeviceCanDoCustomAnc(self->device_id) - ? ::NTV2_VANCMODE_OFF - : ::NTV2_VANCMODE_TALL; - break; - case GST_AJA_INPUT_SOURCE_ANALOG1: - input_source = ::NTV2_INPUTSOURCE_ANALOG1; - input_source_id = ::NTV2_XptAnalogIn; - vanc_mode = ::NTV2_VANCMODE_TALL; - break; - case GST_AJA_INPUT_SOURCE_HDMI1: - input_source = ::NTV2_INPUTSOURCE_HDMI1; - input_source_id = ::NTV2_XptHDMIIn1; - vanc_mode = ::NTV2_VANCMODE_OFF; - break; - case GST_AJA_INPUT_SOURCE_HDMI2: - input_source = ::NTV2_INPUTSOURCE_HDMI2; - input_source_id = ::NTV2_XptHDMIIn2; - vanc_mode = ::NTV2_VANCMODE_OFF; - break; - case GST_AJA_INPUT_SOURCE_HDMI3: - input_source = ::NTV2_INPUTSOURCE_HDMI3; - input_source_id = ::NTV2_XptHDMIIn3; - vanc_mode = ::NTV2_VANCMODE_OFF; - break; - case GST_AJA_INPUT_SOURCE_HDMI4: - input_source = ::NTV2_INPUTSOURCE_HDMI4; - input_source_id = ::NTV2_XptHDMIIn4; - vanc_mode = ::NTV2_VANCMODE_OFF; - break; - case GST_AJA_INPUT_SOURCE_SDI1: - input_source = ::NTV2_INPUTSOURCE_SDI1; - input_source_id = ::NTV2_XptSDIIn1; - vanc_mode = ::NTV2_VANCMODE_TALL; - break; - case GST_AJA_INPUT_SOURCE_SDI2: - input_source = ::NTV2_INPUTSOURCE_SDI2; - input_source_id = ::NTV2_XptSDIIn2; - vanc_mode = ::NTV2_VANCMODE_TALL; - break; - case GST_AJA_INPUT_SOURCE_SDI3: - input_source = ::NTV2_INPUTSOURCE_SDI3; - input_source_id = ::NTV2_XptSDIIn3; - vanc_mode = ::NTV2_VANCMODE_TALL; - break; - case GST_AJA_INPUT_SOURCE_SDI4: - input_source = ::NTV2_INPUTSOURCE_SDI4; - input_source_id = ::NTV2_XptSDIIn4; - vanc_mode = ::NTV2_VANCMODE_TALL; - break; - case GST_AJA_INPUT_SOURCE_SDI5: - input_source = ::NTV2_INPUTSOURCE_SDI5; - input_source_id = ::NTV2_XptSDIIn5; - vanc_mode = ::NTV2_VANCMODE_TALL; - break; - case GST_AJA_INPUT_SOURCE_SDI6: - input_source = ::NTV2_INPUTSOURCE_SDI6; - input_source_id = ::NTV2_XptSDIIn6; - vanc_mode = ::NTV2_VANCMODE_TALL; - break; - case GST_AJA_INPUT_SOURCE_SDI7: - input_source = ::NTV2_INPUTSOURCE_SDI7; - input_source_id = ::NTV2_XptSDIIn7; - vanc_mode = ::NTV2_VANCMODE_TALL; - break; - case GST_AJA_INPUT_SOURCE_SDI8: - input_source = ::NTV2_INPUTSOURCE_SDI8; - input_source_id = ::NTV2_XptSDIIn8; - vanc_mode = ::NTV2_VANCMODE_TALL; - break; - default: - g_assert_not_reached(); - break; - } - - self->configured_input_source = input_source; - - self->vanc_mode = vanc_mode; - - const NTV2Standard standard( - ::GetNTV2StandardFromVideoFormat(self->video_format)); - self->device->device->SetStandard(standard, self->channel); - if (self->quad_mode) { - for (int i = 1; i < 4; i++) - self->device->device->SetStandard(standard, - (NTV2Channel)(self->channel + i)); - } - - const NTV2FrameGeometry 
geometry = - ::GetNTV2FrameGeometryFromVideoFormat(self->video_format); - - self->vanc_mode = - ::HasVANCGeometries(geometry) ? vanc_mode : ::NTV2_VANCMODE_OFF; - if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { - self->device->device->SetFrameGeometry(geometry, false, self->channel); - self->device->device->SetVANCMode(self->vanc_mode, self->channel); - - if (self->quad_mode) { - for (int i = 1; i < 4; i++) { - self->device->device->SetFrameGeometry( - geometry, false, (NTV2Channel)(self->channel + i)); - self->device->device->SetVANCMode(self->vanc_mode, - (NTV2Channel)(self->channel + i)); - } - } + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + self->device->device->Set4kSquaresEnable(true, self->channel); + self->device->device->SetTsiFrameEnable(true, self->channel); } else { - const NTV2FrameGeometry vanc_geometry = - ::GetVANCFrameGeometry(geometry, self->vanc_mode); - - self->device->device->SetFrameGeometry(vanc_geometry, false, - self->channel); - self->device->device->SetVANCMode(self->vanc_mode, self->channel); - - if (self->quad_mode) { - for (int i = 1; i < 4; i++) { - self->device->device->SetFrameGeometry( - vanc_geometry, false, (NTV2Channel)(self->channel + i)); - self->device->device->SetVANCMode(self->vanc_mode, - (NTV2Channel)(self->channel + i)); - } - } - } - - CNTV2SignalRouter router; - - self->device->device->GetRouting(router); - - // Need to remove old routes for the output and framebuffer we're going to - // use - NTV2ActualConnections connections = router.GetConnections(); - - if (self->quad_mode) { - if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && - self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { - // Need to disconnect the 4 inputs corresponding to this channel from - // their framebuffers/muxers, and muxers from their framebuffers - for (auto iter = connections.begin(); iter != connections.end(); - iter++) { - if (iter->first == NTV2_XptFrameBuffer1Input || - iter->first == NTV2_XptFrameBuffer1BInput || - iter->first == NTV2_XptFrameBuffer2Input || - iter->first == NTV2_XptFrameBuffer2BInput || - iter->second == NTV2_Xpt425Mux1AYUV || - iter->second == NTV2_Xpt425Mux1BYUV || - iter->second == NTV2_Xpt425Mux2AYUV || - iter->second == NTV2_Xpt425Mux2BYUV || - iter->first == NTV2_Xpt425Mux1AInput || - iter->first == NTV2_Xpt425Mux1BInput || - iter->first == NTV2_Xpt425Mux2AInput || - iter->first == NTV2_Xpt425Mux2BInput || - iter->second == NTV2_XptHDMIIn1 || - iter->second == NTV2_XptHDMIIn1Q2 || - iter->second == NTV2_XptHDMIIn1Q3 || - iter->second == NTV2_XptHDMIIn1Q4) - router.RemoveConnection(iter->first, iter->second); - } - } else if (self->channel == NTV2_CHANNEL1) { - for (auto iter = connections.begin(); iter != connections.end(); - iter++) { - if (iter->first == NTV2_XptFrameBuffer1Input || - iter->first == NTV2_XptFrameBuffer1BInput || - iter->first == NTV2_XptFrameBuffer1DS2Input || - iter->first == NTV2_XptFrameBuffer2Input || - iter->first == NTV2_XptFrameBuffer2BInput || - iter->first == NTV2_XptFrameBuffer2DS2Input || - iter->second == NTV2_Xpt425Mux1AYUV || - iter->second == NTV2_Xpt425Mux1BYUV || - iter->second == NTV2_Xpt425Mux2AYUV || - iter->second == NTV2_Xpt425Mux2BYUV || - iter->first == NTV2_Xpt425Mux1AInput || - iter->first == NTV2_Xpt425Mux1BInput || - iter->first == NTV2_Xpt425Mux2AInput || - 
iter->first == NTV2_Xpt425Mux2BInput || - iter->second == NTV2_XptSDIIn1 || - iter->second == NTV2_XptSDIIn2 || - iter->second == NTV2_XptSDIIn3 || - iter->second == NTV2_XptSDIIn4 || - iter->second == NTV2_XptSDIIn1DS2 || - iter->second == NTV2_XptSDIIn2DS2 || - iter->first == NTV2_XptFrameBuffer1Input || - iter->first == NTV2_XptFrameBuffer2Input || - iter->first == NTV2_XptFrameBuffer3Input || - iter->first == NTV2_XptFrameBuffer4Input) - router.RemoveConnection(iter->first, iter->second); - } - } else if (self->channel == NTV2_CHANNEL5) { - for (auto iter = connections.begin(); iter != connections.end(); - iter++) { - if (iter->first == NTV2_XptFrameBuffer5Input || - iter->first == NTV2_XptFrameBuffer5BInput || - iter->first == NTV2_XptFrameBuffer5DS2Input || - iter->first == NTV2_XptFrameBuffer6Input || - iter->first == NTV2_XptFrameBuffer6BInput || - iter->first == NTV2_XptFrameBuffer6DS2Input || - iter->second == NTV2_Xpt425Mux3AYUV || - iter->second == NTV2_Xpt425Mux3BYUV || - iter->second == NTV2_Xpt425Mux4AYUV || - iter->second == NTV2_Xpt425Mux4BYUV || - iter->first == NTV2_Xpt425Mux3AInput || - iter->first == NTV2_Xpt425Mux3BInput || - iter->first == NTV2_Xpt425Mux4AInput || - iter->first == NTV2_Xpt425Mux4BInput || - iter->second == NTV2_XptSDIIn5 || - iter->second == NTV2_XptSDIIn6 || - iter->second == NTV2_XptSDIIn7 || - iter->second == NTV2_XptSDIIn8 || - iter->second == NTV2_XptSDIIn5DS2 || - iter->second == NTV2_XptSDIIn6DS2 || - iter->first == NTV2_XptFrameBuffer5Input || - iter->first == NTV2_XptFrameBuffer6Input || - iter->first == NTV2_XptFrameBuffer7Input || - iter->first == NTV2_XptFrameBuffer8Input) - router.RemoveConnection(iter->first, iter->second); - } - } else { - g_assert_not_reached(); - } - } else { - for (auto iter = connections.begin(); iter != connections.end(); iter++) { - if (iter->first == framebuffer_id || iter->second == input_source_id) - router.RemoveConnection(iter->first, iter->second); - - if (((input_source_id == NTV2_XptSDIIn6 || - input_source_id == NTV2_XptSDIIn8) && - iter->first == NTV2_XptFrameBuffer6BInput) || - ((input_source_id == NTV2_XptSDIIn5 || - input_source_id == NTV2_XptSDIIn6) && - iter->first == NTV2_XptFrameBuffer5BInput) || - ((input_source_id == NTV2_XptSDIIn4 || - input_source_id == NTV2_XptSDIIn2) && - iter->first == NTV2_XptFrameBuffer2BInput) || - ((input_source_id == NTV2_XptSDIIn1 || - input_source_id == NTV2_XptSDIIn2) && - iter->first == NTV2_XptFrameBuffer1BInput)) - router.RemoveConnection(iter->first, iter->second); - } - } - - if (self->quad_mode) { - if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && - self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { - input_source_id = NTV2_Xpt425Mux1AYUV; - } else if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI && - !NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format) && - !NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { - if (self->channel == NTV2_CHANNEL1) - input_source_id = NTV2_Xpt425Mux1AYUV; - else if (self->channel == NTV2_CHANNEL5) - input_source_id = NTV2_Xpt425Mux3AYUV; - else + switch (self->sdi_mode) { + case GST_AJA_SDI_MODE_SINGLE_LINK: g_assert_not_reached(); + break; + case GST_AJA_SDI_MODE_QUAD_LINK_SQD: + if (self->configured_info.height > 2160) { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(true, self->channel); + self->device->device->SetQuadQuadSquaresEnable(true, self->channel); + } else { + 
self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, + self->channel); + self->device->device->Set4kSquaresEnable(true, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + } + break; + case GST_AJA_SDI_MODE_QUAD_LINK_TSI: + if (self->configured_info.height > 2160) { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(true, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, + self->channel); + } else { + self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, + self->channel); + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(true, self->channel); + } + break; } } + } else { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + } - GST_DEBUG_OBJECT(self, "Creating connection %d - %d", framebuffer_id, - input_source_id); - router.AddConnection(framebuffer_id, input_source_id); + self->device->device->SetMode(self->channel, NTV2_MODE_CAPTURE, false); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetMode((NTV2Channel)(self->channel + i), + NTV2_MODE_CAPTURE, false); + } + + std::string configured_string = NTV2VideoFormatToString(self->video_format); + GST_DEBUG_OBJECT(self, "Configuring video format %s (%d) on channel %d", + configured_string.c_str(), (int)self->video_format, + (int)self->channel); + self->device->device->SetVideoFormat(self->video_format, false, false, + self->channel); + + if (!::NTV2DeviceCanDoFrameBufferFormat(self->device_id, + ::NTV2_FBF_10BIT_YCBCR)) { + GST_ERROR_OBJECT(self, "Device does not support frame buffer format %d", + (int)::NTV2_FBF_10BIT_YCBCR); + return FALSE; + } + self->device->device->SetFrameBufferFormat(self->channel, + ::NTV2_FBF_10BIT_YCBCR); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetFrameBufferFormat( + (NTV2Channel)(self->channel + i), ::NTV2_FBF_10BIT_YCBCR); + } + + self->device->device->DMABufferAutoLock(false, true, 0); + + if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) { + self->device->device->SetSDITransmitEnable(self->channel, false); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetSDITransmitEnable( + (NTV2Channel)(self->channel + i), false); + } + } + + // Always use the framebuffer associated with the channel + NTV2InputCrosspointID framebuffer_id = + ::GetFrameBufferInputXptFromChannel(self->channel, false); + + const NTV2Standard standard( + ::GetNTV2StandardFromVideoFormat(self->video_format)); + self->device->device->SetStandard(standard, self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetStandard(standard, + (NTV2Channel)(self->channel + i)); + } + + const NTV2FrameGeometry geometry = + ::GetNTV2FrameGeometryFromVideoFormat(self->video_format); + + self->vanc_mode = + ::HasVANCGeometries(geometry) ? 
vanc_mode : ::NTV2_VANCMODE_OFF; + if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { + self->device->device->SetFrameGeometry(geometry, false, self->channel); + self->device->device->SetVANCMode(self->vanc_mode, self->channel); if (self->quad_mode) { - if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && - self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { - router.AddConnection(NTV2_XptFrameBuffer1BInput, NTV2_Xpt425Mux1BYUV); - router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_Xpt425Mux2AYUV); - router.AddConnection(NTV2_XptFrameBuffer2BInput, NTV2_Xpt425Mux2BYUV); + for (int i = 1; i < 4; i++) { + self->device->device->SetFrameGeometry( + geometry, false, (NTV2Channel)(self->channel + i)); + self->device->device->SetVANCMode(self->vanc_mode, + (NTV2Channel)(self->channel + i)); + } + } + } else { + const NTV2FrameGeometry vanc_geometry = + ::GetVANCFrameGeometry(geometry, self->vanc_mode); - router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptHDMIIn1); - router.AddConnection(NTV2_Xpt425Mux1BInput, NTV2_XptHDMIIn1Q2); - router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptHDMIIn1Q3); - router.AddConnection(NTV2_Xpt425Mux2BInput, NTV2_XptHDMIIn1Q4); - } else { - if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI) { - if (NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format)) { - if (self->channel == NTV2_CHANNEL1) { - router.AddConnection(NTV2_XptFrameBuffer1DS2Input, - NTV2_XptSDIIn2); - router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn3); - router.AddConnection(NTV2_XptFrameBuffer2DS2Input, - NTV2_XptSDIIn4); - } else if (self->channel == NTV2_CHANNEL5) { - router.AddConnection(NTV2_XptFrameBuffer5DS2Input, - NTV2_XptSDIIn6); - router.AddConnection(NTV2_XptFrameBuffer5Input, NTV2_XptSDIIn7); - router.AddConnection(NTV2_XptFrameBuffer6DS2Input, - NTV2_XptSDIIn8); - } else { - g_assert_not_reached(); - } - } else if (NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { - if (self->channel == NTV2_CHANNEL1) { - router.AddConnection(NTV2_XptFrameBuffer1DS2Input, - NTV2_XptSDIIn1DS2); - router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn2); - router.AddConnection(NTV2_XptFrameBuffer2DS2Input, - NTV2_XptSDIIn2DS2); - } else if (self->channel == NTV2_CHANNEL5) { - router.AddConnection(NTV2_XptFrameBuffer5DS2Input, - NTV2_XptSDIIn5DS2); - router.AddConnection(NTV2_XptFrameBuffer5Input, NTV2_XptSDIIn6); - router.AddConnection(NTV2_XptFrameBuffer6DS2Input, - NTV2_XptSDIIn6DS2); - } else { - g_assert_not_reached(); - } - // FIXME: Need special handling of NTV2_IS_4K_HFR_VIDEO_FORMAT for - // TSI? 
+ self->device->device->SetFrameGeometry(vanc_geometry, false, self->channel); + self->device->device->SetVANCMode(self->vanc_mode, self->channel); + + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->SetFrameGeometry( + vanc_geometry, false, (NTV2Channel)(self->channel + i)); + self->device->device->SetVANCMode(self->vanc_mode, + (NTV2Channel)(self->channel + i)); + } + } + } + + CNTV2SignalRouter router; + + self->device->device->GetRouting(router); + + // Need to remove old routes for the output and framebuffer we're going to + // use + NTV2ActualConnections connections = router.GetConnections(); + + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + // Need to disconnect the 4 inputs corresponding to this channel from + // their framebuffers/muxers, and muxers from their framebuffers + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == NTV2_XptFrameBuffer1Input || + iter->first == NTV2_XptFrameBuffer1BInput || + iter->first == NTV2_XptFrameBuffer2Input || + iter->first == NTV2_XptFrameBuffer2BInput || + iter->second == NTV2_Xpt425Mux1AYUV || + iter->second == NTV2_Xpt425Mux1BYUV || + iter->second == NTV2_Xpt425Mux2AYUV || + iter->second == NTV2_Xpt425Mux2BYUV || + iter->first == NTV2_Xpt425Mux1AInput || + iter->first == NTV2_Xpt425Mux1BInput || + iter->first == NTV2_Xpt425Mux2AInput || + iter->first == NTV2_Xpt425Mux2BInput || + iter->second == NTV2_XptHDMIIn1 || + iter->second == NTV2_XptHDMIIn1Q2 || + iter->second == NTV2_XptHDMIIn1Q3 || + iter->second == NTV2_XptHDMIIn1Q4) + router.RemoveConnection(iter->first, iter->second); + } + } else if (self->channel == NTV2_CHANNEL1) { + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == NTV2_XptFrameBuffer1Input || + iter->first == NTV2_XptFrameBuffer1BInput || + iter->first == NTV2_XptFrameBuffer1DS2Input || + iter->first == NTV2_XptFrameBuffer2Input || + iter->first == NTV2_XptFrameBuffer2BInput || + iter->first == NTV2_XptFrameBuffer2DS2Input || + iter->second == NTV2_Xpt425Mux1AYUV || + iter->second == NTV2_Xpt425Mux1BYUV || + iter->second == NTV2_Xpt425Mux2AYUV || + iter->second == NTV2_Xpt425Mux2BYUV || + iter->first == NTV2_Xpt425Mux1AInput || + iter->first == NTV2_Xpt425Mux1BInput || + iter->first == NTV2_Xpt425Mux2AInput || + iter->first == NTV2_Xpt425Mux2BInput || + iter->second == NTV2_XptSDIIn1 || iter->second == NTV2_XptSDIIn2 || + iter->second == NTV2_XptSDIIn3 || iter->second == NTV2_XptSDIIn4 || + iter->second == NTV2_XptSDIIn1DS2 || + iter->second == NTV2_XptSDIIn2DS2 || + iter->first == NTV2_XptFrameBuffer1Input || + iter->first == NTV2_XptFrameBuffer2Input || + iter->first == NTV2_XptFrameBuffer3Input || + iter->first == NTV2_XptFrameBuffer4Input) + router.RemoveConnection(iter->first, iter->second); + } + } else if (self->channel == NTV2_CHANNEL5) { + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == NTV2_XptFrameBuffer5Input || + iter->first == NTV2_XptFrameBuffer5BInput || + iter->first == NTV2_XptFrameBuffer5DS2Input || + iter->first == NTV2_XptFrameBuffer6Input || + iter->first == NTV2_XptFrameBuffer6BInput || + iter->first == NTV2_XptFrameBuffer6DS2Input || + iter->second == NTV2_Xpt425Mux3AYUV || + iter->second == NTV2_Xpt425Mux3BYUV || + iter->second == NTV2_Xpt425Mux4AYUV || + iter->second == NTV2_Xpt425Mux4BYUV || + iter->first == NTV2_Xpt425Mux3AInput || + 
iter->first == NTV2_Xpt425Mux3BInput || + iter->first == NTV2_Xpt425Mux4AInput || + iter->first == NTV2_Xpt425Mux4BInput || + iter->second == NTV2_XptSDIIn5 || iter->second == NTV2_XptSDIIn6 || + iter->second == NTV2_XptSDIIn7 || iter->second == NTV2_XptSDIIn8 || + iter->second == NTV2_XptSDIIn5DS2 || + iter->second == NTV2_XptSDIIn6DS2 || + iter->first == NTV2_XptFrameBuffer5Input || + iter->first == NTV2_XptFrameBuffer6Input || + iter->first == NTV2_XptFrameBuffer7Input || + iter->first == NTV2_XptFrameBuffer8Input) + router.RemoveConnection(iter->first, iter->second); + } + } else { + g_assert_not_reached(); + } + } else { + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == framebuffer_id || iter->second == input_source_id) + router.RemoveConnection(iter->first, iter->second); + + if (((input_source_id == NTV2_XptSDIIn6 || + input_source_id == NTV2_XptSDIIn8) && + iter->first == NTV2_XptFrameBuffer6BInput) || + ((input_source_id == NTV2_XptSDIIn5 || + input_source_id == NTV2_XptSDIIn6) && + iter->first == NTV2_XptFrameBuffer5BInput) || + ((input_source_id == NTV2_XptSDIIn4 || + input_source_id == NTV2_XptSDIIn2) && + iter->first == NTV2_XptFrameBuffer2BInput) || + ((input_source_id == NTV2_XptSDIIn1 || + input_source_id == NTV2_XptSDIIn2) && + iter->first == NTV2_XptFrameBuffer1BInput)) + router.RemoveConnection(iter->first, iter->second); + } + } + + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + input_source_id = NTV2_Xpt425Mux1AYUV; + } else if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI && + !NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format) && + !NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) + input_source_id = NTV2_Xpt425Mux1AYUV; + else if (self->channel == NTV2_CHANNEL5) + input_source_id = NTV2_Xpt425Mux3AYUV; + else + g_assert_not_reached(); + } + } + + GST_DEBUG_OBJECT(self, "Creating connection %d - %d", framebuffer_id, + input_source_id); + router.AddConnection(framebuffer_id, input_source_id); + + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + router.AddConnection(NTV2_XptFrameBuffer1BInput, NTV2_Xpt425Mux1BYUV); + router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_Xpt425Mux2AYUV); + router.AddConnection(NTV2_XptFrameBuffer2BInput, NTV2_Xpt425Mux2BYUV); + + router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptHDMIIn1); + router.AddConnection(NTV2_Xpt425Mux1BInput, NTV2_XptHDMIIn1Q2); + router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptHDMIIn1Q3); + router.AddConnection(NTV2_Xpt425Mux2BInput, NTV2_XptHDMIIn1Q4); + } else { + if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI) { + if (NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptFrameBuffer1DS2Input, NTV2_XptSDIIn2); + router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn3); + router.AddConnection(NTV2_XptFrameBuffer2DS2Input, NTV2_XptSDIIn4); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptFrameBuffer5DS2Input, NTV2_XptSDIIn6); + router.AddConnection(NTV2_XptFrameBuffer5Input, NTV2_XptSDIIn7); + router.AddConnection(NTV2_XptFrameBuffer6DS2Input, NTV2_XptSDIIn8); } else { - if (self->channel == NTV2_CHANNEL1) { - router.AddConnection(NTV2_XptFrameBuffer1BInput, - NTV2_Xpt425Mux1BYUV); - 
router.AddConnection(NTV2_XptFrameBuffer2Input, - NTV2_Xpt425Mux2AYUV); - router.AddConnection(NTV2_XptFrameBuffer2BInput, - NTV2_Xpt425Mux2BYUV); - - router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptSDIIn1); - router.AddConnection(NTV2_Xpt425Mux1BInput, NTV2_XptSDIIn2); - router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptSDIIn3); - router.AddConnection(NTV2_Xpt425Mux2BInput, NTV2_XptSDIIn4); - } else if (self->channel == NTV2_CHANNEL5) { - router.AddConnection(NTV2_XptFrameBuffer5BInput, - NTV2_Xpt425Mux3BYUV); - router.AddConnection(NTV2_XptFrameBuffer6Input, - NTV2_Xpt425Mux4AYUV); - router.AddConnection(NTV2_XptFrameBuffer6BInput, - NTV2_Xpt425Mux4BYUV); - - router.AddConnection(NTV2_Xpt425Mux3AInput, NTV2_XptSDIIn5); - router.AddConnection(NTV2_Xpt425Mux3BInput, NTV2_XptSDIIn6); - router.AddConnection(NTV2_Xpt425Mux4AInput, NTV2_XptSDIIn7); - router.AddConnection(NTV2_Xpt425Mux4BInput, NTV2_XptSDIIn8); - } else { - g_assert_not_reached(); - } + g_assert_not_reached(); } + } else if (NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptFrameBuffer1DS2Input, + NTV2_XptSDIIn1DS2); + router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn2); + router.AddConnection(NTV2_XptFrameBuffer2DS2Input, + NTV2_XptSDIIn2DS2); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptFrameBuffer5DS2Input, + NTV2_XptSDIIn5DS2); + router.AddConnection(NTV2_XptFrameBuffer5Input, NTV2_XptSDIIn6); + router.AddConnection(NTV2_XptFrameBuffer6DS2Input, + NTV2_XptSDIIn6DS2); + } else { + g_assert_not_reached(); + } + // FIXME: Need special handling of NTV2_IS_4K_HFR_VIDEO_FORMAT for + // TSI? } else { if (self->channel == NTV2_CHANNEL1) { - router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn2); - router.AddConnection(NTV2_XptFrameBuffer3Input, NTV2_XptSDIIn3); - router.AddConnection(NTV2_XptFrameBuffer4Input, NTV2_XptSDIIn4); + router.AddConnection(NTV2_XptFrameBuffer1BInput, + NTV2_Xpt425Mux1BYUV); + router.AddConnection(NTV2_XptFrameBuffer2Input, + NTV2_Xpt425Mux2AYUV); + router.AddConnection(NTV2_XptFrameBuffer2BInput, + NTV2_Xpt425Mux2BYUV); + + router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptSDIIn1); + router.AddConnection(NTV2_Xpt425Mux1BInput, NTV2_XptSDIIn2); + router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptSDIIn3); + router.AddConnection(NTV2_Xpt425Mux2BInput, NTV2_XptSDIIn4); } else if (self->channel == NTV2_CHANNEL5) { - router.AddConnection(NTV2_XptFrameBuffer6Input, NTV2_XptSDIIn6); - router.AddConnection(NTV2_XptFrameBuffer7Input, NTV2_XptSDIIn7); - router.AddConnection(NTV2_XptFrameBuffer8Input, NTV2_XptSDIIn8); + router.AddConnection(NTV2_XptFrameBuffer5BInput, + NTV2_Xpt425Mux3BYUV); + router.AddConnection(NTV2_XptFrameBuffer6Input, + NTV2_Xpt425Mux4AYUV); + router.AddConnection(NTV2_XptFrameBuffer6BInput, + NTV2_Xpt425Mux4BYUV); + + router.AddConnection(NTV2_Xpt425Mux3AInput, NTV2_XptSDIIn5); + router.AddConnection(NTV2_Xpt425Mux3BInput, NTV2_XptSDIIn6); + router.AddConnection(NTV2_Xpt425Mux4AInput, NTV2_XptSDIIn7); + router.AddConnection(NTV2_Xpt425Mux4BInput, NTV2_XptSDIIn8); } else { g_assert_not_reached(); } } + } else { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn2); + router.AddConnection(NTV2_XptFrameBuffer3Input, NTV2_XptSDIIn3); + router.AddConnection(NTV2_XptFrameBuffer4Input, NTV2_XptSDIIn4); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptFrameBuffer6Input, NTV2_XptSDIIn6); + 
router.AddConnection(NTV2_XptFrameBuffer7Input, NTV2_XptSDIIn7); + router.AddConnection(NTV2_XptFrameBuffer8Input, NTV2_XptSDIIn8); + } else { + g_assert_not_reached(); + } } } + } - { - std::stringstream os; - CNTV2SignalRouter oldRouter; - self->device->device->GetRouting(oldRouter); - oldRouter.Print(os); - GST_DEBUG_OBJECT(self, "Previous routing:\n%s", os.str().c_str()); - } - self->device->device->ApplySignalRoute(router, true); - { - std::stringstream os; - CNTV2SignalRouter currentRouter; - self->device->device->GetRouting(currentRouter); - currentRouter.Print(os); - GST_DEBUG_OBJECT(self, "New routing:\n%s", os.str().c_str()); - } + { + std::stringstream os; + CNTV2SignalRouter oldRouter; + self->device->device->GetRouting(oldRouter); + oldRouter.Print(os); + GST_DEBUG_OBJECT(self, "Previous routing:\n%s", os.str().c_str()); + } + self->device->device->ApplySignalRoute(router, true); + { + std::stringstream os; + CNTV2SignalRouter currentRouter; + self->device->device->GetRouting(currentRouter); + currentRouter.Print(os); + GST_DEBUG_OBJECT(self, "New routing:\n%s", os.str().c_str()); + } - switch (self->audio_system_setting) { - case GST_AJA_AUDIO_SYSTEM_1: + switch (self->audio_system_setting) { + case GST_AJA_AUDIO_SYSTEM_1: + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + break; + case GST_AJA_AUDIO_SYSTEM_2: + self->audio_system = ::NTV2_AUDIOSYSTEM_2; + break; + case GST_AJA_AUDIO_SYSTEM_3: + self->audio_system = ::NTV2_AUDIOSYSTEM_3; + break; + case GST_AJA_AUDIO_SYSTEM_4: + self->audio_system = ::NTV2_AUDIOSYSTEM_4; + break; + case GST_AJA_AUDIO_SYSTEM_5: + self->audio_system = ::NTV2_AUDIOSYSTEM_5; + break; + case GST_AJA_AUDIO_SYSTEM_6: + self->audio_system = ::NTV2_AUDIOSYSTEM_6; + break; + case GST_AJA_AUDIO_SYSTEM_7: + self->audio_system = ::NTV2_AUDIOSYSTEM_7; + break; + case GST_AJA_AUDIO_SYSTEM_8: + self->audio_system = ::NTV2_AUDIOSYSTEM_8; + break; + case GST_AJA_AUDIO_SYSTEM_AUTO: + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + if (::NTV2DeviceGetNumAudioSystems(self->device_id) > 1) + self->audio_system = ::NTV2ChannelToAudioSystem(self->channel); + if (!::NTV2DeviceCanDoFrameStore1Display(self->device_id)) self->audio_system = ::NTV2_AUDIOSYSTEM_1; - break; - case GST_AJA_AUDIO_SYSTEM_2: - self->audio_system = ::NTV2_AUDIOSYSTEM_2; - break; - case GST_AJA_AUDIO_SYSTEM_3: - self->audio_system = ::NTV2_AUDIOSYSTEM_3; - break; - case GST_AJA_AUDIO_SYSTEM_4: - self->audio_system = ::NTV2_AUDIOSYSTEM_4; - break; - case GST_AJA_AUDIO_SYSTEM_5: - self->audio_system = ::NTV2_AUDIOSYSTEM_5; - break; - case GST_AJA_AUDIO_SYSTEM_6: - self->audio_system = ::NTV2_AUDIOSYSTEM_6; - break; - case GST_AJA_AUDIO_SYSTEM_7: - self->audio_system = ::NTV2_AUDIOSYSTEM_7; - break; - case GST_AJA_AUDIO_SYSTEM_8: - self->audio_system = ::NTV2_AUDIOSYSTEM_8; - break; - case GST_AJA_AUDIO_SYSTEM_AUTO: - self->audio_system = ::NTV2_AUDIOSYSTEM_1; - if (::NTV2DeviceGetNumAudioSystems(self->device_id) > 1) - self->audio_system = ::NTV2ChannelToAudioSystem(self->channel); - if (!::NTV2DeviceCanDoFrameStore1Display(self->device_id)) - self->audio_system = ::NTV2_AUDIOSYSTEM_1; - break; - default: - g_assert_not_reached(); - break; - } + break; + default: + g_assert_not_reached(); + break; + } - GST_DEBUG_OBJECT(self, "Using audio system %d", self->audio_system); + GST_DEBUG_OBJECT(self, "Using audio system %d", self->audio_system); - NTV2AudioSource audio_source; - switch (self->audio_source) { - case GST_AJA_AUDIO_SOURCE_EMBEDDED: - audio_source = ::NTV2_AUDIO_EMBEDDED; - break; - case 
GST_AJA_AUDIO_SOURCE_AES: - audio_source = ::NTV2_AUDIO_AES; - break; - case GST_AJA_AUDIO_SOURCE_ANALOG: - audio_source = ::NTV2_AUDIO_ANALOG; - break; - case GST_AJA_AUDIO_SOURCE_HDMI: - audio_source = ::NTV2_AUDIO_HDMI; - break; - case GST_AJA_AUDIO_SOURCE_MIC: - audio_source = ::NTV2_AUDIO_MIC; - break; - default: - g_assert_not_reached(); - break; - } + NTV2AudioSource audio_source; + switch (self->audio_source) { + case GST_AJA_AUDIO_SOURCE_EMBEDDED: + audio_source = ::NTV2_AUDIO_EMBEDDED; + break; + case GST_AJA_AUDIO_SOURCE_AES: + audio_source = ::NTV2_AUDIO_AES; + break; + case GST_AJA_AUDIO_SOURCE_ANALOG: + audio_source = ::NTV2_AUDIO_ANALOG; + break; + case GST_AJA_AUDIO_SOURCE_HDMI: + audio_source = ::NTV2_AUDIO_HDMI; + break; + case GST_AJA_AUDIO_SOURCE_MIC: + audio_source = ::NTV2_AUDIO_MIC; + break; + default: + g_assert_not_reached(); + break; + } - self->device->device->SetAudioSystemInputSource( - self->audio_system, audio_source, - ::NTV2InputSourceToEmbeddedAudioInput(input_source)); - self->configured_audio_channels = - ::NTV2DeviceGetMaxAudioChannels(self->device_id); - self->device->device->SetNumberAudioChannels( - self->configured_audio_channels, self->audio_system); - self->device->device->SetAudioRate(::NTV2_AUDIO_48K, self->audio_system); - self->device->device->SetAudioBufferSize(::NTV2_AUDIO_BUFFER_BIG, - self->audio_system); - self->device->device->SetAudioLoopBack(::NTV2_AUDIO_LOOPBACK_OFF, + self->device->device->SetAudioSystemInputSource( + self->audio_system, audio_source, + ::NTV2InputSourceToEmbeddedAudioInput(input_source)); + self->configured_audio_channels = + ::NTV2DeviceGetMaxAudioChannels(self->device_id); + self->device->device->SetNumberAudioChannels(self->configured_audio_channels, + self->audio_system); + self->device->device->SetAudioRate(::NTV2_AUDIO_48K, self->audio_system); + self->device->device->SetAudioBufferSize(::NTV2_AUDIO_BUFFER_BIG, self->audio_system); - self->device->device->SetEmbeddedAudioClock( - ::NTV2_EMBEDDED_AUDIO_CLOCK_VIDEO_INPUT, self->audio_system); + self->device->device->SetAudioLoopBack(::NTV2_AUDIO_LOOPBACK_OFF, + self->audio_system); + self->device->device->SetEmbeddedAudioClock( + ::NTV2_EMBEDDED_AUDIO_CLOCK_VIDEO_INPUT, self->audio_system); - NTV2ReferenceSource reference_source; - switch (self->reference_source) { - case GST_AJA_REFERENCE_SOURCE_AUTO: - reference_source = ::NTV2InputSourceToReferenceSource(input_source); - break; - case GST_AJA_REFERENCE_SOURCE_EXTERNAL: - reference_source = ::NTV2_REFERENCE_EXTERNAL; - break; - case GST_AJA_REFERENCE_SOURCE_FREERUN: - reference_source = ::NTV2_REFERENCE_FREERUN; - break; - case GST_AJA_REFERENCE_SOURCE_INPUT_1: - reference_source = ::NTV2_REFERENCE_INPUT1; - break; - case GST_AJA_REFERENCE_SOURCE_INPUT_2: - reference_source = ::NTV2_REFERENCE_INPUT2; - break; - case GST_AJA_REFERENCE_SOURCE_INPUT_3: - reference_source = ::NTV2_REFERENCE_INPUT3; - break; - case GST_AJA_REFERENCE_SOURCE_INPUT_4: - reference_source = ::NTV2_REFERENCE_INPUT4; - break; - case GST_AJA_REFERENCE_SOURCE_INPUT_5: - reference_source = ::NTV2_REFERENCE_INPUT5; - break; - case GST_AJA_REFERENCE_SOURCE_INPUT_6: - reference_source = ::NTV2_REFERENCE_INPUT6; - break; - case GST_AJA_REFERENCE_SOURCE_INPUT_7: - reference_source = ::NTV2_REFERENCE_INPUT7; - break; - case GST_AJA_REFERENCE_SOURCE_INPUT_8: - reference_source = ::NTV2_REFERENCE_INPUT8; - break; - default: - g_assert_not_reached(); - break; - } - GST_DEBUG_OBJECT(self, "Configuring reference source %d", - (int)reference_source); + 
NTV2ReferenceSource reference_source; + switch (self->reference_source) { + case GST_AJA_REFERENCE_SOURCE_AUTO: + reference_source = ::NTV2InputSourceToReferenceSource(input_source); + break; + case GST_AJA_REFERENCE_SOURCE_EXTERNAL: + reference_source = ::NTV2_REFERENCE_EXTERNAL; + break; + case GST_AJA_REFERENCE_SOURCE_FREERUN: + reference_source = ::NTV2_REFERENCE_FREERUN; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_1: + reference_source = ::NTV2_REFERENCE_INPUT1; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_2: + reference_source = ::NTV2_REFERENCE_INPUT2; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_3: + reference_source = ::NTV2_REFERENCE_INPUT3; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_4: + reference_source = ::NTV2_REFERENCE_INPUT4; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_5: + reference_source = ::NTV2_REFERENCE_INPUT5; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_6: + reference_source = ::NTV2_REFERENCE_INPUT6; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_7: + reference_source = ::NTV2_REFERENCE_INPUT7; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_8: + reference_source = ::NTV2_REFERENCE_INPUT8; + break; + default: + g_assert_not_reached(); + break; + } + GST_DEBUG_OBJECT(self, "Configuring reference source %d", + (int)reference_source); - self->device->device->SetReference(reference_source); + self->device->device->SetReference(reference_source); - switch (self->timecode_index) { - case GST_AJA_TIMECODE_INDEX_VITC: - self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, false); - break; - case GST_AJA_TIMECODE_INDEX_ATC_LTC: - self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, true); - break; - case GST_AJA_TIMECODE_INDEX_LTC1: - self->tc_index = ::NTV2_TCINDEX_LTC1; - break; - case GST_AJA_TIMECODE_INDEX_LTC2: - self->tc_index = ::NTV2_TCINDEX_LTC2; - break; - default: - g_assert_not_reached(); - break; - } + switch (self->timecode_index) { + case GST_AJA_TIMECODE_INDEX_VITC: + self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, false); + break; + case GST_AJA_TIMECODE_INDEX_ATC_LTC: + self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, true); + break; + case GST_AJA_TIMECODE_INDEX_LTC1: + self->tc_index = ::NTV2_TCINDEX_LTC1; + break; + case GST_AJA_TIMECODE_INDEX_LTC2: + self->tc_index = ::NTV2_TCINDEX_LTC2; + break; + default: + g_assert_not_reached(); + break; } guint video_buffer_size = ::GetVideoActiveSize( @@ -1100,6 +1134,17 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { gst_buffer_pool_set_active(self->anc_buffer_pool, TRUE); } + gst_element_post_message(GST_ELEMENT_CAST(self), + gst_message_new_latency(GST_OBJECT_CAST(self))); + + return TRUE; +} + +static gboolean gst_aja_src_start(GstAjaSrc *self) { + GST_DEBUG_OBJECT(self, "Starting"); + + self->video_format = NTV2_FORMAT_UNKNOWN; + self->capture_thread = new AJAThread(); self->capture_thread->Attach(capture_thread_func, self); self->capture_thread->SetPriority(AJA_ThreadPriority_High); @@ -1111,9 +1156,6 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { g_cond_signal(&self->queue_cond); g_mutex_unlock(&self->queue_lock); - gst_element_post_message(GST_ELEMENT_CAST(self), - gst_message_new_latency(GST_OBJECT_CAST(self))); - return TRUE; } @@ -1165,6 +1207,8 @@ static gboolean gst_aja_src_stop(GstAjaSrc *self) { gst_clear_object(&self->anc_buffer_pool); } + self->video_format = NTV2_FORMAT_UNKNOWN; + GST_DEBUG_OBJECT(self, "Stopped"); return TRUE; @@ -1597,53 +1641,6 @@ restart: GST_DEBUG_OBJECT(self, "Starting capture"); 
g_mutex_unlock(&self->queue_lock); - // TODO: Wait for stable input signal - - if (!self->device->device->EnableChannel(self->channel)) { - GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), - ("Failed to enable channel")); - goto out; - } - - if (self->quad_mode) { - for (int i = 1; i < 4; i++) { - if (!self->device->device->EnableChannel( - (NTV2Channel)(self->channel + i))) { - GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), - ("Failed to enable channel")); - goto out; - } - } - } - - { - // Make sure to globally lock here as the routing settings and others are - // global shared state - ShmMutexLocker locker; - - self->device->device->AutoCirculateStop(self->channel); - if (self->quad_mode) { - for (int i = 1; i < 4; i++) { - self->device->device->AutoCirculateStop( - (NTV2Channel)(self->channel + i)); - } - } - - self->device->device->EnableInputInterrupt(self->channel); - self->device->device->SubscribeInputVerticalEvent(self->channel); - if (!self->device->device->AutoCirculateInitForInput( - self->channel, self->queue_size / 2, self->audio_system, - AUTOCIRCULATE_WITH_RP188 | - (self->vanc_mode == ::NTV2_VANCMODE_OFF ? AUTOCIRCULATE_WITH_ANC - : 0), - 1)) { - GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), - ("Failed to initialize autocirculate")); - goto out; - } - self->device->device->AutoCirculateStart(self->channel); - } - gst_clear_object(&clock); clock = gst_element_get_clock(GST_ELEMENT_CAST(self)); @@ -1652,6 +1649,51 @@ restart: g_mutex_lock(&self->queue_lock); while (self->playing && !self->shutdown) { + // If we don't have a video format configured, configure the device now + // and potentially auto-detect the video format + if (self->video_format == NTV2_FORMAT_UNKNOWN) { + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + // Don't keep queue locked while configuring as this might take a while + g_mutex_unlock(&self->queue_lock); + + if (!gst_aja_src_configure(self)) { + g_mutex_lock(&self->queue_lock); + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to configure device")); + goto out; + } + g_mutex_lock(&self->queue_lock); + + if (self->video_format == ::NTV2_FORMAT_UNKNOWN) { + GST_DEBUG_OBJECT(self, "No signal, waiting"); + frames_dropped_last = G_MAXUINT64; + if (have_signal) { + GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, + ("Signal lost"), + ("No input source was detected")); + have_signal = FALSE; + } + self->device->device->WaitForInputVerticalInterrupt(self->channel); + continue; + } + + if (!self->device->device->AutoCirculateInitForInput( + self->channel, self->queue_size / 2, self->audio_system, + AUTOCIRCULATE_WITH_RP188 | (self->vanc_mode == ::NTV2_VANCMODE_OFF + ? 
AUTOCIRCULATE_WITH_ANC + : 0), + 1)) { + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to initialize autocirculate")); + goto out; + } + + self->device->device->AutoCirculateStart(self->channel); + } + // Check for valid signal first NTV2VideoFormat current_video_format = self->device->device->GetInputVideoFormat( @@ -1673,6 +1715,9 @@ restart: } if (current_video_format == ::NTV2_FORMAT_UNKNOWN) { + if (self->video_format_setting == GST_AJA_VIDEO_FORMAT_AUTO) + self->video_format = NTV2_FORMAT_UNKNOWN; + GST_DEBUG_OBJECT(self, "No signal, waiting"); g_mutex_unlock(&self->queue_lock); frames_dropped_last = G_MAXUINT64; @@ -1686,7 +1731,11 @@ restart: continue; } else if (current_video_format != effective_video_format && current_video_format != self->video_format) { - // TODO: Handle GST_AJA_VIDEO_FORMAT_AUTO here + // Try reconfiguring with the newly detected video format + if (self->video_format_setting == GST_AJA_VIDEO_FORMAT_AUTO) { + self->video_format = NTV2_FORMAT_UNKNOWN; + continue; + } std::string current_string = NTV2VideoFormatToString(current_video_format); @@ -1766,8 +1815,8 @@ restart: AUTOCIRCULATE_TRANSFER transfer; if (!have_signal) { - GST_ELEMENT_INFO(GST_ELEMENT(self), RESOURCE, READ, ("Signal recovered"), - ("Input source detected")); + GST_ELEMENT_INFO(GST_ELEMENT(self), RESOURCE, READ, + ("Signal recovered"), ("Input source detected")); have_signal = TRUE; } From 57a2768cf6e1d3636e8ee866f9aaeba349d2762e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 18 Aug 2021 12:37:08 +0300 Subject: [PATCH 30/73] Change default ajasrc video format to AUTO --- gstajasrc.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 1cb991f39a..8971f50976 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -34,7 +34,7 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); #define DEFAULT_DEVICE_IDENTIFIER ("0") #define DEFAULT_CHANNEL (::NTV2_CHANNEL1) -#define DEFAULT_VIDEO_FORMAT (GST_AJA_VIDEO_FORMAT_1080i_5000) +#define DEFAULT_VIDEO_FORMAT (GST_AJA_VIDEO_FORMAT_AUTO) #define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) #define DEFAULT_INPUT_SOURCE (GST_AJA_INPUT_SOURCE_AUTO) #define DEFAULT_SDI_MODE (GST_AJA_SDI_MODE_SINGLE_LINK) From 0c2a2559634f15232e38e2ada1bce5e219818188 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 18 Aug 2021 14:08:17 +0300 Subject: [PATCH 31/73] Register GstAjaTimecodeIndex correctly --- gstajacommon.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 1c53bba0cf..440408002e 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -892,7 +892,7 @@ GType gst_aja_timecode_index_get_type(void) { static gsize id = 0; static const GEnumValue modes[] = { {GST_AJA_TIMECODE_INDEX_VITC, "vitc", "Embedded SDI VITC"}, - {GST_AJA_TIMECODE_INDEX_VITC, "atc-ltc", "Embedded SDI ATC LTC"}, + {GST_AJA_TIMECODE_INDEX_ATC_LTC, "atc-ltc", "Embedded SDI ATC LTC"}, {GST_AJA_TIMECODE_INDEX_LTC1, "ltc-1", "Analog LTC 1"}, {GST_AJA_TIMECODE_INDEX_LTC2, "ltc-2", "Analog LTC 2"}, {0, NULL, NULL}}; From 8e75c37ce4e5137143f414ae9a4893f26fd899f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 18 Aug 2021 17:36:46 +0300 Subject: [PATCH 32/73] Add signal property to ajasrc to allow checking/notify about signal availability at any time --- gstajasrc.cpp | 37 +++++++++++++++++++++++++++++++++++++ gstajasrc.h | 1 + 2 files changed, 38 insertions(+) diff --git a/gstajasrc.cpp b/gstajasrc.cpp 
index 8971f50976..46b8ac4db1 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -57,6 +57,7 @@ enum { PROP_REFERENCE_SOURCE, PROP_QUEUE_SIZE, PROP_CAPTURE_CPU_CORE, + PROP_SIGNAL, }; typedef enum { @@ -205,6 +206,13 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) { (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT))); + g_object_class_install_property( + gobject_class, PROP_SIGNAL, + g_param_spec_boolean( + "signal", "Input signal available", + "True if there is a valid input signal available", FALSE, + (GParamFlags)(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS))); + element_class->change_state = GST_DEBUG_FUNCPTR(gst_aja_src_change_state); basesrc_class->get_caps = GST_DEBUG_FUNCPTR(gst_aja_src_get_caps); @@ -335,6 +343,9 @@ void gst_aja_src_get_property(GObject *object, guint property_id, GValue *value, case PROP_CAPTURE_CPU_CORE: g_value_set_uint(value, self->capture_cpu_core); break; + case PROP_SIGNAL: + g_value_set_boolean(value, self->signal); + break; default: G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); break; @@ -1144,6 +1155,7 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) { GST_DEBUG_OBJECT(self, "Starting"); self->video_format = NTV2_FORMAT_UNKNOWN; + self->signal = FALSE; self->capture_thread = new AJAThread(); self->capture_thread->Attach(capture_thread_func, self); @@ -1209,6 +1221,11 @@ static gboolean gst_aja_src_stop(GstAjaSrc *self) { self->video_format = NTV2_FORMAT_UNKNOWN; + if (self->signal) { + self->signal = FALSE; + g_object_notify(G_OBJECT(self), "signal"); + } + GST_DEBUG_OBJECT(self, "Stopped"); return TRUE; @@ -1676,6 +1693,10 @@ restart: ("No input source was detected")); have_signal = FALSE; } + if (self->signal) { + self->signal = FALSE; + g_object_notify(G_OBJECT(self), "signal"); + } self->device->device->WaitForInputVerticalInterrupt(self->channel); continue; } @@ -1726,6 +1747,10 @@ restart: ("No input source was detected")); have_signal = FALSE; } + if (self->signal) { + self->signal = FALSE; + g_object_notify(G_OBJECT(self), "signal"); + } self->device->device->WaitForInputVerticalInterrupt(self->channel); g_mutex_lock(&self->queue_lock); continue; @@ -1757,6 +1782,10 @@ restart: current_string.c_str())); have_signal = FALSE; } + if (self->signal) { + self->signal = FALSE; + g_object_notify(G_OBJECT(self), "signal"); + } self->device->device->WaitForInputVerticalInterrupt(self->channel); g_mutex_lock(&self->queue_lock); continue; @@ -1819,6 +1848,10 @@ restart: ("Signal recovered"), ("Input source detected")); have_signal = TRUE; } + if (!self->signal) { + self->signal = TRUE; + g_object_notify(G_OBJECT(self), "signal"); + } iterations_without_frame = 0; @@ -1994,6 +2027,10 @@ restart: ("Signal lost"), ("No frames captured")); have_signal = FALSE; } + if (self->signal) { + self->signal = FALSE; + g_object_notify(G_OBJECT(self), "signal"); + } } self->device->device->WaitForInputVerticalInterrupt(self->channel); diff --git a/gstajasrc.h b/gstajasrc.h index dcfeeff1b1..4cbddbac22 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -71,6 +71,7 @@ struct _GstAjaSrc { GstAjaReferenceSource reference_source; guint queue_size; guint capture_cpu_core; + gboolean signal; NTV2AudioSystem audio_system; NTV2VideoFormat video_format; From 3d40e2f5473ed7f6e1e60cb16983e54b797ba9b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 20 Aug 2021 11:57:14 +0300 Subject: [PATCH 33/73] Don't include chroma-site and colorimetry in generic template caps --- gstajacommon.cpp | 13 ++++++++++++- 
1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 440408002e..ce1b01cd49 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -244,10 +244,21 @@ bool gst_video_info_from_aja_video_format(GstVideoInfo *info, GstCaps *gst_ntv2_video_format_to_caps(NTV2VideoFormat format) { GstVideoInfo info; + GstCaps *caps; if (!gst_video_info_from_ntv2_video_format(&info, format)) return NULL; - return gst_video_info_to_caps(&info); + caps = gst_video_info_to_caps(&info); + if (!caps) return caps; + + guint n = gst_caps_get_size(caps); + for (guint i = 0; i < n; i++) { + GstStructure *s = gst_caps_get_structure(caps, i); + + gst_structure_remove_fields(s, "chroma-site", "colorimetry", NULL); + } + + return caps; } bool gst_video_info_from_ntv2_video_format(GstVideoInfo *info, From 420a90b00bae0457c3d34d90c9b6de578b333923 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 20 Aug 2021 11:58:32 +0300 Subject: [PATCH 34/73] Add NTSC/PAL widescreen formats always to the template caps --- gstajacommon.cpp | 47 ++++++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index ce1b01cd49..8b9efff32a 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -166,33 +166,34 @@ GstCaps *gst_ntv2_supported_caps(NTV2DeviceID device_id) { for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { const FormatMapEntry &format = format_map[i]; + GstCaps *tmp = NULL; if (device_id == DEVICE_ID_INVALID) { - gst_caps_append(caps, gst_aja_video_format_to_caps(format.gst_format)); - } else { - if ((format.aja_format != NTV2_FORMAT_UNKNOWN && - ::NTV2DeviceCanDoVideoFormat(device_id, format.aja_format)) || - (format.quad_format != NTV2_FORMAT_UNKNOWN && - ::NTV2DeviceCanDoVideoFormat(device_id, format.quad_format))) { - GstCaps *tmp = gst_aja_video_format_to_caps(format.gst_format); + tmp = gst_aja_video_format_to_caps(format.gst_format); + } else if ((format.aja_format != NTV2_FORMAT_UNKNOWN && + ::NTV2DeviceCanDoVideoFormat(device_id, format.aja_format)) || + (format.quad_format != NTV2_FORMAT_UNKNOWN && + ::NTV2DeviceCanDoVideoFormat(device_id, format.quad_format))) { + tmp = gst_aja_video_format_to_caps(format.gst_format); + } - // Widescreen PAL/NTSC - if (format.gst_format == GST_AJA_VIDEO_FORMAT_525_2398 || - format.gst_format == GST_AJA_VIDEO_FORMAT_525_2400 || - format.gst_format == GST_AJA_VIDEO_FORMAT_525_5994) { - GstCaps *tmp2 = gst_caps_copy(tmp); - gst_caps_set_simple(tmp2, "pixel-aspect-ratio", GST_TYPE_FRACTION, 40, - 33, NULL); - gst_caps_append(tmp, tmp2); - } else if (format.gst_format == GST_AJA_VIDEO_FORMAT_625_5000) { - GstCaps *tmp2 = gst_caps_copy(tmp); - gst_caps_set_simple(tmp2, "pixel-aspect-ratio", GST_TYPE_FRACTION, 16, - 11, NULL); - gst_caps_append(tmp, tmp2); - } - - gst_caps_append(caps, tmp); + if (tmp) { + // Widescreen PAL/NTSC + if (format.gst_format == GST_AJA_VIDEO_FORMAT_525_2398 || + format.gst_format == GST_AJA_VIDEO_FORMAT_525_2400 || + format.gst_format == GST_AJA_VIDEO_FORMAT_525_5994) { + GstCaps *tmp2 = gst_caps_copy(tmp); + gst_caps_set_simple(tmp2, "pixel-aspect-ratio", GST_TYPE_FRACTION, 40, + 33, NULL); + gst_caps_append(tmp, tmp2); + } else if (format.gst_format == GST_AJA_VIDEO_FORMAT_625_5000) { + GstCaps *tmp2 = gst_caps_copy(tmp); + gst_caps_set_simple(tmp2, "pixel-aspect-ratio", GST_TYPE_FRACTION, 16, + 11, NULL); + gst_caps_append(tmp, tmp2); } + + gst_caps_append(caps, tmp); } } From 
b3f5169d55f4ad9acc022b4b3cca35d3b7eedea8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 20 Aug 2021 12:03:24 +0300 Subject: [PATCH 35/73] Intersect caps with configured mode if not auto in ajasrc get_caps() to provide more constrained caps --- gstajasrc.cpp | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 46b8ac4db1..8b1bb6be5f 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1288,6 +1288,32 @@ static GstCaps *gst_aja_src_get_caps(GstBaseSrc *bsrc, GstCaps *filter) { caps = gst_pad_get_pad_template_caps(GST_BASE_SRC_PAD(self)); } + // Intersect with the configured video format if any to constrain the caps + // further. + if (self->video_format_setting != GST_AJA_VIDEO_FORMAT_AUTO) { + GstCaps *configured_caps = + gst_aja_video_format_to_caps(self->video_format_setting); + + if (configured_caps) { + GstCaps *tmp; + + // Remove pixel-aspect-ratio from the configured caps to allow for both + // widescreen and non-widescreen PAL/NTSC. It's added back by the + // template caps above when intersecting. + guint n = gst_caps_get_size(configured_caps); + for (guint i = 0; i < n; i++) { + GstStructure *s = gst_caps_get_structure(configured_caps, i); + + gst_structure_remove_fields(s, "pixel-aspect-ratio", NULL); + } + + tmp = gst_caps_intersect(caps, configured_caps); + gst_caps_unref(caps); + gst_caps_unref(configured_caps); + caps = tmp; + } + } + if (filter) { GstCaps *tmp = gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST); From 0b3c585beab22366f940adc176b09935ad780208 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 20 Aug 2021 13:44:35 +0300 Subject: [PATCH 36/73] Enable LTC capture if requested --- gstajasrc.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 8b1bb6be5f..ad60eb8de1 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1096,9 +1096,11 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { break; case GST_AJA_TIMECODE_INDEX_LTC1: self->tc_index = ::NTV2_TCINDEX_LTC1; + self->device->device->SetLTCInputEnable(true); break; case GST_AJA_TIMECODE_INDEX_LTC2: self->tc_index = ::NTV2_TCINDEX_LTC2; + self->device->device->SetLTCInputEnable(true); break; default: g_assert_not_reached(); From e3e1d09469596696a040e231d36cd788ed156cb9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 20 Aug 2021 13:44:47 +0300 Subject: [PATCH 37/73] Print captured timecodes to the debug log --- gstajasrc.cpp | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index ad60eb8de1..94de5af981 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1438,6 +1438,13 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { (GstVideoTimeCodeFlags)(flags | GST_VIDEO_TIME_CODE_FLAGS_INTERLACED); CRP188 rp188(item.tc, tc_format); + + { + std::stringstream os; + os << rp188; + GST_DEBUG_OBJECT(self, "Adding timecode %s", os.str().c_str()); + } + guint hours, minutes, seconds, frames; rp188.GetRP188Hrs(hours); rp188.GetRP188Mins(minutes); From 5a79150aa3da2fdd06946faf6a0b499cc01dfb14 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 20 Aug 2021 13:48:04 +0300 Subject: [PATCH 38/73] Output per-frame debug logs only at TRACE level --- gstajacommon.cpp | 6 +++--- gstajasrc.cpp | 6 ++++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 8b9efff32a..1584060819 100644 --- 
a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -412,7 +412,7 @@ static gboolean gst_aja_audio_meta_transform(GstBuffer *dest, GstMeta *meta, if (GST_META_TRANSFORM_IS_COPY(type)) { smeta = (GstAjaAudioMeta *)meta; - GST_DEBUG("copy AJA audio metadata"); + GST_TRACE("copy AJA audio metadata"); dmeta = gst_buffer_add_aja_audio_meta(dest, smeta->buffer); if (!dmeta) return FALSE; } else { @@ -506,7 +506,7 @@ static GstAjaMemory *_aja_memory_new_block(GstAjaAllocator *alloc, mem = (GstAjaMemory *)g_slice_alloc(sizeof(GstAjaMemory)); data = (guint8 *)AJAMemory::AllocateAligned(maxsize, 4096); - GST_DEBUG_OBJECT(alloc, "Allocated %" G_GSIZE_FORMAT " at %p", maxsize, data); + GST_TRACE_OBJECT(alloc, "Allocated %" G_GSIZE_FORMAT " at %p", maxsize, data); if (!alloc->device->device->DMABufferLock((ULWord *)data, maxsize, true)) { GST_WARNING_OBJECT(alloc, "Failed to pre-lock memory"); } @@ -575,7 +575,7 @@ static void gst_aja_allocator_free(GstAllocator *alloc, GstMemory *mem) { if (!mem->parent) { GstAjaAllocator *aja_alloc = GST_AJA_ALLOCATOR(alloc); - GST_DEBUG_OBJECT(alloc, "Freeing memory at %p", dmem->data); + GST_TRACE_OBJECT(alloc, "Freeing memory at %p", dmem->data); aja_alloc->device->device->DMABufferUnlock((ULWord *)dmem->data, mem->maxsize); AJAMemory::FreeAligned(dmem->data); diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 94de5af981..a23757c053 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1442,7 +1442,7 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { { std::stringstream os; os << rp188; - GST_DEBUG_OBJECT(self, "Adding timecode %s", os.str().c_str()); + GST_TRACE_OBJECT(self, "Adding timecode %s", os.str().c_str()); } guint hours, minutes, seconds, frames; @@ -1654,6 +1654,8 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { gst_caps_unref(caps); } + GST_TRACE_OBJECT(self, "Outputting buffer %" GST_PTR_FORMAT, *buffer); + return flow_ret; } @@ -1759,7 +1761,7 @@ restart: ULWord vpid_b = 0; self->device->device->ReadSDIInVPID(self->channel, vpid_a, vpid_b); - GST_DEBUG_OBJECT(self, + GST_TRACE_OBJECT(self, "Detected input video format %u with VPID %08x / %08x", current_video_format, vpid_a, vpid_b); From 86f7633adbed4886bd6b75ef930bc9083e7b5452 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Mon, 23 Aug 2021 11:58:47 +0300 Subject: [PATCH 39/73] Fix support for 1080p5000 mode --- gstajacommon.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 1584060819..e14219afb6 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -63,7 +63,7 @@ static const FormatMapEntry format_map[] = { NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_720p_5000, NTV2_FORMAT_720p_2500, NTV2_FORMAT_UNKNOWN}, - {GST_AJA_VIDEO_FORMAT_1080p_3000, NTV2_FORMAT_1080p_5000_A, + {GST_AJA_VIDEO_FORMAT_1080p_5000_A, NTV2_FORMAT_1080p_5000_A, NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_1080p_5994_A, NTV2_FORMAT_1080p_5994_A, NTV2_FORMAT_UNKNOWN}, From a2b30015c74e3e41f4a2cfdf7c6dc769dcefee3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 27 Aug 2021 14:40:29 +0300 Subject: [PATCH 40/73] Add AJA device provider --- gstajacommon.cpp | 18 ++--- gstajacommon.h | 12 +-- gstajadeviceprovider.cpp | 170 +++++++++++++++++++++++++++++++++++++++ gstajadeviceprovider.h | 70 ++++++++++++++++ gstajasink.cpp | 6 +- gstajasink.h | 2 +- gstajasrc.cpp | 14 ++-- gstajasrc.h | 2 +- meson.build | 1 + plugin.cpp | 4 + 10 files changed, 274 insertions(+), 25 
deletions(-) create mode 100644 gstajadeviceprovider.cpp create mode 100644 gstajadeviceprovider.h diff --git a/gstajacommon.cpp b/gstajacommon.cpp index e14219afb6..004981a5e9 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -589,7 +589,7 @@ static void gst_aja_allocator_finalize(GObject *alloc) { GST_DEBUG_OBJECT(alloc, "Freeing allocator"); - gst_aja_device_unref(aja_alloc->device); + gst_aja_ntv2_device_unref(aja_alloc->device); G_OBJECT_CLASS(gst_aja_allocator_parent_class)->finalize(alloc); } @@ -617,11 +617,11 @@ static void gst_aja_allocator_init(GstAjaAllocator *aja_alloc) { alloc->mem_share = (GstMemoryShareFunction)_aja_memory_share; } -GstAllocator *gst_aja_allocator_new(GstAjaDevice *device) { +GstAllocator *gst_aja_allocator_new(GstAjaNtv2Device *device) { GstAjaAllocator *alloc = (GstAjaAllocator *)g_object_new(GST_TYPE_AJA_ALLOCATOR, NULL); - alloc->device = gst_aja_device_ref(device); + alloc->device = gst_aja_ntv2_device_ref(device); GST_DEBUG_OBJECT(alloc, "Creating allocator for device %d", device->device->GetIndexNumber()); @@ -629,7 +629,7 @@ GstAllocator *gst_aja_allocator_new(GstAjaDevice *device) { return GST_ALLOCATOR(alloc); } -GstAjaDevice *gst_aja_device_obtain(const gchar *device_identifier) { +GstAjaNtv2Device *gst_aja_ntv2_device_obtain(const gchar *device_identifier) { CNTV2Device *device = new CNTV2Device(); if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument(device_identifier, @@ -638,19 +638,19 @@ GstAjaDevice *gst_aja_device_obtain(const gchar *device_identifier) { return NULL; } - GstAjaDevice *dev = g_atomic_rc_box_new0(GstAjaDevice); + GstAjaNtv2Device *dev = g_atomic_rc_box_new0(GstAjaNtv2Device); dev->device = device; return dev; } -GstAjaDevice *gst_aja_device_ref(GstAjaDevice *device) { - return (GstAjaDevice *)g_atomic_rc_box_acquire(device); +GstAjaNtv2Device *gst_aja_ntv2_device_ref(GstAjaNtv2Device *device) { + return (GstAjaNtv2Device *)g_atomic_rc_box_acquire(device); } -void gst_aja_device_unref(GstAjaDevice *device) { +void gst_aja_ntv2_device_unref(GstAjaNtv2Device *device) { g_atomic_rc_box_release_full(device, [](gpointer data) { - GstAjaDevice *dev = (GstAjaDevice *)data; + GstAjaNtv2Device *dev = (GstAjaNtv2Device *)data; delete dev->device; }); diff --git a/gstajacommon.h b/gstajacommon.h index 5f49bb8c16..a976ded5a5 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -56,14 +56,14 @@ GstAjaAudioMeta *gst_buffer_add_aja_audio_meta(GstBuffer *buffer, typedef struct { CNTV2Card *device; -} GstAjaDevice; +} GstAjaNtv2Device; G_GNUC_INTERNAL -GstAjaDevice *gst_aja_device_obtain(const gchar *device_identifier); +GstAjaNtv2Device *gst_aja_ntv2_device_obtain(const gchar *device_identifier); G_GNUC_INTERNAL -GstAjaDevice *gst_aja_device_ref(GstAjaDevice *device); +GstAjaNtv2Device *gst_aja_ntv2_device_ref(GstAjaNtv2Device *device); G_GNUC_INTERNAL -void gst_aja_device_unref(GstAjaDevice *device); +void gst_aja_ntv2_device_unref(GstAjaNtv2Device *device); #define GST_AJA_ALLOCATOR_MEMTYPE "aja" @@ -85,7 +85,7 @@ typedef struct _GstAjaAllocatorClass GstAjaAllocatorClass; struct _GstAjaAllocator { GstAllocator allocator; - GstAjaDevice *device; + GstAjaNtv2Device *device; }; struct _GstAjaAllocatorClass { @@ -95,7 +95,7 @@ struct _GstAjaAllocatorClass { G_GNUC_INTERNAL GType gst_aja_allocator_get_type(void); G_GNUC_INTERNAL -GstAllocator *gst_aja_allocator_new(GstAjaDevice *device); +GstAllocator *gst_aja_allocator_new(GstAjaNtv2Device *device); typedef enum { GST_AJA_AUDIO_SYSTEM_AUTO, diff --git a/gstajadeviceprovider.cpp 
b/gstajadeviceprovider.cpp new file mode 100644 index 0000000000..361aac8cfe --- /dev/null +++ b/gstajadeviceprovider.cpp @@ -0,0 +1,170 @@ +/* + * Copyright (C) 2019 Mathieu Duponchelle + * Copyright (C) 2019,2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstajacommon.h" +#include "gstajadeviceprovider.h" + +static GstDevice *gst_aja_device_new(NTV2DeviceInfo &device, gboolean video); + +G_DEFINE_TYPE(GstAjaDeviceProvider, gst_aja_device_provider, + GST_TYPE_DEVICE_PROVIDER); + +static void gst_aja_device_provider_init(GstAjaDeviceProvider *self) {} + +static GList *gst_aja_device_provider_probe(GstDeviceProvider *provider) { + GList *ret = NULL; + + CNTV2DeviceScanner scanner; + + NTV2DeviceInfoList devices = scanner.GetDeviceInfoList(); + for (NTV2DeviceInfoList::iterator it = devices.begin(); it != devices.end(); + it++) { + // Skip non-input / non-output devices + if (it->numVidInputs == 0 && it->numVidOutputs == 0) continue; + + if (it->numVidInputs > 0) + ret = g_list_prepend(ret, gst_aja_device_new(*it, TRUE)); + if (it->numVidOutputs > 0) + ret = g_list_prepend(ret, gst_aja_device_new(*it, FALSE)); + } + + ret = g_list_reverse(ret); + + return ret; +} + +static void gst_aja_device_provider_class_init( + GstAjaDeviceProviderClass *klass) { + GstDeviceProviderClass *dm_class = GST_DEVICE_PROVIDER_CLASS(klass); + + dm_class->probe = GST_DEBUG_FUNCPTR(gst_aja_device_provider_probe); + + gst_device_provider_class_set_static_metadata( + dm_class, "AJA Device Provider", "Source/Audio/Video", + "List and provides AJA capture devices", + "Sebastian Dröge "); +} + +G_DEFINE_TYPE(GstAjaDevice, gst_aja_device, GST_TYPE_DEVICE); + +static void gst_aja_device_init(GstAjaDevice *self) {} + +static GstElement *gst_aja_device_create_element(GstDevice *device, + const gchar *name) { + GstAjaDevice *self = GST_AJA_DEVICE(device); + GstElement *ret = NULL; + + if (self->is_capture) { + ret = gst_element_factory_make("ajasrc", name); + } else { + ret = gst_element_factory_make("ajasink", name); + } + + if (ret) { + gchar *device_identifier = g_strdup_printf("%u", self->device_index); + + g_object_set(ret, "device-identifier", device_identifier, NULL); + g_free(device_identifier); + } + + return ret; +} + +static void gst_aja_device_class_init(GstAjaDeviceClass *klass) { + GstDeviceClass *gst_device_class = GST_DEVICE_CLASS(klass); + + gst_device_class->create_element = + GST_DEBUG_FUNCPTR(gst_aja_device_create_element); +} + +static GstDevice *gst_aja_device_new(NTV2DeviceInfo &device, + gboolean is_capture) { + GstDevice *ret; + gchar *display_name; + const gchar *device_class; + GstCaps *caps = NULL; + GstStructure *properties; + + device_class = is_capture ? 
"Audio/Video/Source" : "Audio/Video/Sink"; + display_name = g_strdup_printf("AJA %s (%s)", device.deviceIdentifier.c_str(), + is_capture ? "Source" : "Sink"); + + caps = gst_ntv2_supported_caps(device.deviceID); + + properties = gst_structure_new_empty("properties"); + + gst_structure_set( + properties, "device-id", G_TYPE_UINT, device.deviceID, "device-index", + G_TYPE_UINT, device.deviceIndex, "pci-slot", G_TYPE_UINT, device.pciSlot, + "serial-number", G_TYPE_UINT64, device.deviceSerialNumber, + "device-identifier", G_TYPE_STRING, device.deviceIdentifier.c_str(), + "num-audio-streams", G_TYPE_UINT, device.numAudioStreams, + "dual-link-support", G_TYPE_BOOLEAN, device.dualLinkSupport, + "sdi-3g-support", G_TYPE_BOOLEAN, device.sdi3GSupport, "sdi-12g-support", + G_TYPE_BOOLEAN, device.sdi12GSupport, "ip-support", G_TYPE_BOOLEAN, + device.ipSupport, "bi-directional-sdi", G_TYPE_BOOLEAN, + device.biDirectionalSDI, "ltc-in-support", G_TYPE_BOOLEAN, + device.ltcInSupport, "ltc-in-on-ref-port", G_TYPE_BOOLEAN, + device.ltcInOnRefPort, "2k-support", G_TYPE_BOOLEAN, device.has2KSupport, + "4k-support", G_TYPE_BOOLEAN, device.has4KSupport, "8k-support", + G_TYPE_BOOLEAN, device.has8KSupport, "multiformat-support", + G_TYPE_BOOLEAN, device.multiFormat, NULL); + + if (is_capture) { + gst_structure_set( + properties, "num-vid-inputs", G_TYPE_UINT, device.numVidInputs, + "num-anlg-vid-inputs", G_TYPE_UINT, device.numAnlgVidInputs, + "num-hdmi-vid-inputs", G_TYPE_UINT, device.numHDMIVidInputs, + "num-analog-audio-input-channels", G_TYPE_UINT, + device.numAnalogAudioInputChannels, "num-aes-audio-input-channels", + G_TYPE_UINT, device.numAESAudioInputChannels, + "num-embedded-audio-input-channels", G_TYPE_UINT, + device.numEmbeddedAudioInputChannels, "num-hdmi-audio-input-channels", + G_TYPE_UINT, device.numHDMIAudioInputChannels, NULL); + } else { + gst_structure_set( + properties, "num-vid-outputs", G_TYPE_UINT, device.numVidOutputs, + "num-anlg-vid-outputs", G_TYPE_UINT, device.numAnlgVidOutputs, + "num-hdmi-vid-outputs", G_TYPE_UINT, device.numHDMIVidOutputs, + "num-analog-audio-output-channels", G_TYPE_UINT, + device.numAnalogAudioOutputChannels, "num-aes-audio-output-channels", + G_TYPE_UINT, device.numAESAudioOutputChannels, + "num-embedded-audio-output-channels", G_TYPE_UINT, + device.numEmbeddedAudioOutputChannels, "num-hdmi-audio-output-channels", + G_TYPE_UINT, device.numHDMIAudioOutputChannels, NULL); + } + + ret = GST_DEVICE(g_object_new(GST_TYPE_AJA_DEVICE, "display-name", + display_name, "device-class", device_class, + "caps", caps, "properties", properties, NULL)); + + g_free(display_name); + gst_caps_unref(caps); + gst_structure_free(properties); + + GST_AJA_DEVICE(ret)->is_capture = is_capture; + GST_AJA_DEVICE(ret)->device_index = device.deviceIndex; + + return ret; +} diff --git a/gstajadeviceprovider.h b/gstajadeviceprovider.h new file mode 100644 index 0000000000..94829bdcfb --- /dev/null +++ b/gstajadeviceprovider.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2019 Mathieu Duponchelle + * Copyright (C) 2019,2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301, USA. + */ + +#ifndef _GST_AJA_DEVICE_PROVIDER_H_ +#define _GST_AJA_DEVICE_PROVIDER_H_ + +#include +#include + +G_BEGIN_DECLS + +#define GST_TYPE_AJA_DEVICE_PROVIDER gst_aja_device_provider_get_type() +#define GST_AJA_DEVICE_PROVIDER(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_DEVICE_PROVIDER, \ + GstAjaDeviceProvider)) + +typedef struct _GstAjaDeviceProvider GstAjaDeviceProvider; +typedef struct _GstAjaDeviceProviderClass GstAjaDeviceProviderClass; + +struct _GstAjaDeviceProviderClass { + GstDeviceProviderClass parent_class; +}; + +struct _GstAjaDeviceProvider { + GstDeviceProvider parent; +}; + +G_GNUC_INTERNAL +GType gst_aja_device_provider_get_type(void); + +#define GST_TYPE_AJA_DEVICE gst_aja_device_get_type() +#define GST_AJA_DEVICE(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_DEVICE, GstAjaDevice)) + +typedef struct _GstAjaDevice GstAjaDevice; +typedef struct _GstAjaDeviceClass GstAjaDeviceClass; + +struct _GstAjaDeviceClass { + GstDeviceClass parent_class; +}; + +struct _GstAjaDevice { + GstDevice parent; + gboolean is_capture; + guint device_index; +}; + +G_GNUC_INTERNAL +GType gst_aja_device_get_type(void); + +G_END_DECLS + +#endif /* _GST_AJA_DEVICE_PROVIDER_H_ */ diff --git a/gstajasink.cpp b/gstajasink.cpp index d4008a8fb7..5fec1fbab8 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -326,14 +326,14 @@ static gboolean gst_aja_sink_open(GstAjaSink *self) { g_assert(self->device == NULL); - self->device = gst_aja_device_obtain(self->device_identifier); + self->device = gst_aja_ntv2_device_obtain(self->device_identifier); if (!self->device) { GST_ERROR_OBJECT(self, "Failed to open device"); return FALSE; } if (!self->device->device->IsDeviceReady(false)) { - g_clear_pointer(&self->device, gst_aja_device_unref); + g_clear_pointer(&self->device, gst_aja_ntv2_device_unref); return FALSE; } @@ -371,7 +371,7 @@ static gboolean gst_aja_sink_open(GstAjaSink *self) { static gboolean gst_aja_sink_close(GstAjaSink *self) { gst_clear_object(&self->allocator); - g_clear_pointer(&self->device, gst_aja_device_unref); + g_clear_pointer(&self->device, gst_aja_ntv2_device_unref); self->device_id = DEVICE_ID_INVALID; GST_DEBUG_OBJECT(self, "Closed device"); diff --git a/gstajasink.h b/gstajasink.h index 1cc8e237b2..054a67e98c 100644 --- a/gstajasink.h +++ b/gstajasink.h @@ -56,7 +56,7 @@ struct _GstAjaSink { GCond drain_cond; gboolean flushing; - GstAjaDevice *device; + GstAjaNtv2Device *device; NTV2DeviceID device_id; GstAllocator *allocator; diff --git a/gstajasrc.cpp b/gstajasrc.cpp index a23757c053..6c97cb5c15 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -230,7 +230,7 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) { gst_caps_unref(templ_caps); gst_element_class_set_static_metadata( - element_class, "AJA audio/video src", "Audio/Video/Src", + element_class, "AJA audio/video src", "Audio/Video/Source", "Captures audio/video frames with AJA devices", "Sebastian Dröge "); @@ -370,14 +370,14 @@ static gboolean gst_aja_src_open(GstAjaSrc *self) { g_assert(self->device == NULL); - self->device = gst_aja_device_obtain(self->device_identifier); + self->device = gst_aja_ntv2_device_obtain(self->device_identifier); if (!self->device) { GST_ERROR_OBJECT(self, "Failed to open 
device"); return FALSE; } if (!self->device->device->IsDeviceReady(false)) { - g_clear_pointer(&self->device, gst_aja_device_unref); + g_clear_pointer(&self->device, gst_aja_ntv2_device_unref); return FALSE; } @@ -415,7 +415,7 @@ static gboolean gst_aja_src_open(GstAjaSrc *self) { static gboolean gst_aja_src_close(GstAjaSrc *self) { gst_clear_object(&self->allocator); - g_clear_pointer(&self->device, gst_aja_device_unref); + g_clear_pointer(&self->device, gst_aja_ntv2_device_unref); self->device_id = DEVICE_ID_INVALID; GST_DEBUG_OBJECT(self, "Closed device"); @@ -1561,7 +1561,11 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { switch (vpid.GetTransferCharacteristics()) { default: case NTV2_VPID_TC_SDR_TV: - // SDR is the default, do nothing here. + if (info.height < 720) { + info.colorimetry.transfer = GST_VIDEO_TRANSFER_BT601; + } else { + info.colorimetry.transfer = GST_VIDEO_TRANSFER_BT709; + } break; case NTV2_VPID_TC_HLG: info.colorimetry.transfer = GST_VIDEO_TRANSFER_ARIB_STD_B67; diff --git a/gstajasrc.h b/gstajasrc.h index 4cbddbac22..f73b4c646d 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -52,7 +52,7 @@ struct _GstAjaSrc { gboolean shutdown; gboolean flushing; - GstAjaDevice *device; + GstAjaNtv2Device *device; NTV2DeviceID device_id; GstAllocator *allocator; GstBufferPool *buffer_pool; diff --git a/meson.build b/meson.build index 66cc7c2858..b46abe89ac 100644 --- a/meson.build +++ b/meson.build @@ -84,6 +84,7 @@ gstaja = library('gstaja', 'gstajasinkcombiner.cpp', 'gstajasrc.cpp', 'gstajasrcdemux.cpp', + 'gstajadeviceprovider.cpp', ], cpp_args : [ aja_includedirs, diff --git a/plugin.cpp b/plugin.cpp index 1423c69c7e..128de52e48 100644 --- a/plugin.cpp +++ b/plugin.cpp @@ -21,6 +21,7 @@ #include #include "gstajacommon.h" +#include "gstajadeviceprovider.h" #include "gstajasink.h" #include "gstajasinkcombiner.h" #include "gstajasrc.h" @@ -38,6 +39,9 @@ static gboolean plugin_init(GstPlugin* plugin) { gst_element_register(plugin, "ajasinkcombiner", GST_RANK_NONE, GST_TYPE_AJA_SINK_COMBINER); + gst_device_provider_register(plugin, "ajadeviceprovider", GST_RANK_PRIMARY, + GST_TYPE_AJA_DEVICE_PROVIDER); + return TRUE; } From d61b415230a8e1a987c2a50112ad1a59ae6f5fcb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 27 Aug 2021 15:02:50 +0300 Subject: [PATCH 41/73] Add properties for selecting capture/output frame buffers to use And calculate them automatically be default based on the number of frame buffers available and the number of channels. This works around various bugs in the AJA SDK when selecting these manually. See AJA support ticket #5056. 
--- gstajasink.cpp | 76 ++++++++++++++++++++++++++++++++++++++++++-------- gstajasink.h | 1 + gstajasrc.cpp | 55 ++++++++++++++++++++++++++++++++++-- gstajasrc.h | 1 + 4 files changed, 120 insertions(+), 13 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 5fec1fbab8..d48c7497b9 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -39,6 +39,8 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_sink_debug); #define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_AUTO) #define DEFAULT_QUEUE_SIZE (16) +#define DEFAULT_START_FRAME (0) +#define DEFAULT_END_FRAME (0) #define DEFAULT_OUTPUT_CPU_CORE (G_MAXUINT) enum { @@ -51,6 +53,8 @@ enum { PROP_TIMECODE_INDEX, PROP_REFERENCE_SOURCE, PROP_QUEUE_SIZE, + PROP_START_FRAME, + PROP_END_FRAME, PROP_OUTPUT_CPU_CORE, }; @@ -135,6 +139,24 @@ static void gst_aja_sink_class_init(GstAjaSinkClass *klass) { 1, G_MAXINT, DEFAULT_QUEUE_SIZE, (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property( + gobject_class, PROP_START_FRAME, + g_param_spec_uint( + "start-frame", "Start Frame", + "Start frame buffer to be used for output (auto if same number as " + "end-frame).", + 0, G_MAXINT, DEFAULT_START_FRAME, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_END_FRAME, + g_param_spec_uint( + "end-frame", "End Frame", + "End frame buffer to be used for output (auto if same number as " + "start-frame).", + 0, G_MAXINT, DEFAULT_END_FRAME, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property( gobject_class, PROP_AUDIO_SYSTEM, g_param_spec_enum( @@ -217,14 +239,14 @@ static void gst_aja_sink_init(GstAjaSink *self) { self->device_identifier = g_strdup(DEFAULT_DEVICE_IDENTIFIER); self->channel = DEFAULT_CHANNEL; self->queue_size = DEFAULT_QUEUE_SIZE; + self->start_frame = DEFAULT_START_FRAME; + self->end_frame = DEFAULT_END_FRAME; self->audio_system_setting = DEFAULT_AUDIO_SYSTEM; self->output_destination = DEFAULT_OUTPUT_DESTINATION; self->timecode_index = DEFAULT_TIMECODE_INDEX; self->reference_source = DEFAULT_REFERENCE_SOURCE; self->output_cpu_core = DEFAULT_OUTPUT_CPU_CORE; - gst_base_sink_set_render_delay(GST_BASE_SINK(self), - (self->queue_size / 2) * GST_SECOND / 30); self->queue = gst_queue_array_new_for_struct(sizeof(QueueItem), self->queue_size); } @@ -244,6 +266,12 @@ void gst_aja_sink_set_property(GObject *object, guint property_id, case PROP_QUEUE_SIZE: self->queue_size = g_value_get_uint(value); break; + case PROP_START_FRAME: + self->start_frame = g_value_get_uint(value); + break; + case PROP_END_FRAME: + self->end_frame = g_value_get_uint(value); + break; case PROP_AUDIO_SYSTEM: self->audio_system_setting = (GstAjaAudioSystem)g_value_get_enum(value); break; @@ -283,6 +311,12 @@ void gst_aja_sink_get_property(GObject *object, guint property_id, case PROP_QUEUE_SIZE: g_value_set_uint(value, self->queue_size); break; + case PROP_START_FRAME: + g_value_set_uint(value, self->start_frame); + break; + case PROP_END_FRAME: + g_value_set_uint(value, self->end_frame); + break; case PROP_AUDIO_SYSTEM: g_value_set_enum(value, self->audio_system_setting); break; @@ -566,13 +600,6 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { self->quad_mode = quad_mode; self->video_format = video_format; - // Configure render delay based on the framerate and queue size - gst_base_sink_set_render_delay( - GST_BASE_SINK(self), - 
gst_util_uint64_scale(self->queue_size / 2, - self->configured_info.fps_d * GST_SECOND, - self->configured_info.fps_n)); - g_assert(self->device != NULL); // Make sure to globally lock here as the routing settings and others are @@ -1700,12 +1727,39 @@ restart: self->device->device->EnableOutputInterrupt(self->channel); self->device->device->SubscribeOutputVerticalEvent(self->channel); + + guint16 start_frame = self->start_frame; + guint16 end_frame = self->end_frame; + + if (start_frame == end_frame) { + guint16 num_frames = ::NTV2DeviceGetNumberFrameBuffers(self->device_id); + guint16 num_channels = ::NTV2DeviceGetNumFrameStores(self->device_id); + + start_frame = self->channel * (num_frames / num_channels); + end_frame = ((self->channel + 1) * (num_frames / num_channels)) - 1; + + // Don't configure too many frames here. It needs to be in relation to + // our input queue. + end_frame = MIN(start_frame + self->queue_size / 2, end_frame); + } + + GST_DEBUG_OBJECT( + self, "Configuring channel %u with start frame %u and end frame %u", + self->channel, start_frame, end_frame); + + // Configure render delay based on the framerate and queue size + gst_base_sink_set_render_delay( + GST_BASE_SINK(self), + gst_util_uint64_scale(end_frame - start_frame + 1, + self->configured_info.fps_d * GST_SECOND, + self->configured_info.fps_n)); + if (!self->device->device->AutoCirculateInitForOutput( - self->channel, self->queue_size / 2, self->audio_system, + self->channel, 0, self->audio_system, AUTOCIRCULATE_WITH_RP188 | (self->vanc_mode == ::NTV2_VANCMODE_OFF ? AUTOCIRCULATE_WITH_ANC : 0), - 1)) { + 1, start_frame, end_frame)) { GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), ("Failed to initialize autocirculate")); goto out; diff --git a/gstajasink.h b/gstajasink.h index 054a67e98c..e724c45b6b 100644 --- a/gstajasink.h +++ b/gstajasink.h @@ -69,6 +69,7 @@ struct _GstAjaSink { gchar *device_identifier; NTV2Channel channel; guint queue_size; + guint start_frame, end_frame; guint output_cpu_core; GstAjaAudioSystem audio_system_setting; diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 6c97cb5c15..f1b8f5023e 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -42,6 +42,8 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); #define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_FREERUN) #define DEFAULT_QUEUE_SIZE (16) +#define DEFAULT_START_FRAME (0) +#define DEFAULT_END_FRAME (0) #define DEFAULT_CAPTURE_CPU_CORE (G_MAXUINT) enum { @@ -55,6 +57,8 @@ enum { PROP_AUDIO_SOURCE, PROP_TIMECODE_INDEX, PROP_REFERENCE_SOURCE, + PROP_START_FRAME, + PROP_END_FRAME, PROP_QUEUE_SIZE, PROP_CAPTURE_CPU_CORE, PROP_SIGNAL, @@ -148,6 +152,24 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) { 1, G_MAXINT, DEFAULT_QUEUE_SIZE, (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property( + gobject_class, PROP_START_FRAME, + g_param_spec_uint( + "start-frame", "Start Frame", + "Start frame buffer to be used for capturing (auto if same number as " + "end-frame).", + 0, G_MAXINT, DEFAULT_START_FRAME, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_END_FRAME, + g_param_spec_uint( + "end-frame", "End Frame", + "End frame buffer to be used for capturing (auto if same number as " + "start-frame).", + 0, G_MAXINT, DEFAULT_END_FRAME, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + g_object_class_install_property( gobject_class, 
PROP_AUDIO_SYSTEM, g_param_spec_enum( @@ -244,6 +266,8 @@ static void gst_aja_src_init(GstAjaSrc *self) { self->device_identifier = g_strdup(DEFAULT_DEVICE_IDENTIFIER); self->channel = DEFAULT_CHANNEL; self->queue_size = DEFAULT_QUEUE_SIZE; + self->start_frame = DEFAULT_START_FRAME; + self->end_frame = DEFAULT_END_FRAME; self->video_format_setting = DEFAULT_VIDEO_FORMAT; self->audio_system_setting = DEFAULT_AUDIO_SYSTEM; self->input_source = DEFAULT_INPUT_SOURCE; @@ -275,6 +299,12 @@ void gst_aja_src_set_property(GObject *object, guint property_id, case PROP_QUEUE_SIZE: self->queue_size = g_value_get_uint(value); break; + case PROP_START_FRAME: + self->start_frame = g_value_get_uint(value); + break; + case PROP_END_FRAME: + self->end_frame = g_value_get_uint(value); + break; case PROP_VIDEO_FORMAT: self->video_format_setting = (GstAjaVideoFormat)g_value_get_enum(value); break; @@ -319,6 +349,12 @@ void gst_aja_src_get_property(GObject *object, guint property_id, GValue *value, case PROP_QUEUE_SIZE: g_value_set_uint(value, self->queue_size); break; + case PROP_START_FRAME: + g_value_set_uint(value, self->start_frame); + break; + case PROP_END_FRAME: + g_value_set_uint(value, self->end_frame); + break; case PROP_VIDEO_FORMAT: g_value_set_enum(value, self->video_format_setting); break; @@ -1742,12 +1778,27 @@ restart: continue; } + guint16 start_frame = self->start_frame; + guint16 end_frame = self->end_frame; + + if (start_frame == end_frame) { + guint16 num_frames = ::NTV2DeviceGetNumberFrameBuffers(self->device_id); + guint16 num_channels = ::NTV2DeviceGetNumFrameStores(self->device_id); + + start_frame = self->channel * (num_frames / num_channels); + end_frame = ((self->channel + 1) * (num_frames / num_channels)) - 1; + } + + GST_DEBUG_OBJECT( + self, "Configuring channel %u with start frame %u and end frame %u", + self->channel, start_frame, end_frame); + if (!self->device->device->AutoCirculateInitForInput( - self->channel, self->queue_size / 2, self->audio_system, + self->channel, 0, self->audio_system, AUTOCIRCULATE_WITH_RP188 | (self->vanc_mode == ::NTV2_VANCMODE_OFF ? 
AUTOCIRCULATE_WITH_ANC : 0), - 1)) { + 1, start_frame, end_frame)) { GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), ("Failed to initialize autocirculate")); goto out; diff --git a/gstajasrc.h b/gstajasrc.h index f73b4c646d..4d0841a2fb 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -70,6 +70,7 @@ struct _GstAjaSrc { GstAjaTimecodeIndex timecode_index; GstAjaReferenceSource reference_source; guint queue_size; + guint start_frame, end_frame; guint capture_cpu_core; gboolean signal; From 22c0b9ed418b59de6842149f87351ba628f2fc28 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 27 Aug 2021 15:12:07 +0300 Subject: [PATCH 42/73] Disable channel when exiting from the capture/output thread --- gstajasink.cpp | 41 +++++++++++++++++++++++++++-------------- gstajasrc.cpp | 7 +++++++ 2 files changed, 34 insertions(+), 14 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index d48c7497b9..7a81f2425a 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -684,20 +684,6 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { (int)reference_source); self->device->device->SetFramePulseReference(reference_source); - if (!self->device->device->EnableChannel(self->channel)) { - GST_ERROR_OBJECT(self, "Failed to enable channel"); - return FALSE; - } - if (self->quad_mode) { - for (int i = 1; i < 4; i++) { - if (!self->device->device->EnableChannel( - (NTV2Channel)(self->channel + i))) { - GST_ERROR_OBJECT(self, "Failed to enable channel"); - return FALSE; - } - } - } - self->device->device->DMABufferAutoLock(false, true, 0); if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) @@ -1725,6 +1711,26 @@ restart: self->device->device->AutoCirculateStop(self->channel); + if (!self->device->device->EnableChannel(self->channel)) { + GST_ERROR_OBJECT(self, "Failed to enable channel"); + g_mutex_lock(&self->queue_lock); + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to configure device")); + goto out; + } + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + if (!self->device->device->EnableChannel( + (NTV2Channel)(self->channel + i))) { + GST_ERROR_OBJECT(self, "Failed to enable channel"); + g_mutex_lock(&self->queue_lock); + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to configure device")); + goto out; + } + } + } + self->device->device->EnableOutputInterrupt(self->channel); self->device->device->SubscribeOutputVerticalEvent(self->channel); @@ -2007,6 +2013,13 @@ out : { self->device->device->AutoCirculateStop(self->channel); self->device->device->UnsubscribeOutputVerticalEvent(self->channel); self->device->device->DisableOutputInterrupt(self->channel); + + self->device->device->DisableChannel(self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->DisableChannel((NTV2Channel)(self->channel + i)); + } + } } if ((!self->playing || self->draining) && !self->shutdown) goto restart; diff --git a/gstajasrc.cpp b/gstajasrc.cpp index f1b8f5023e..9839227537 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -2139,6 +2139,13 @@ out : { self->device->device->AutoCirculateStop(self->channel); self->device->device->UnsubscribeInputVerticalEvent(self->channel); self->device->device->DisableInputInterrupt(self->channel); + + self->device->device->DisableChannel(self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->DisableChannel((NTV2Channel)(self->channel + i)); + } + } } if (!self->playing && !self->shutdown) goto restart; From 
569aad78b75a5126d744cc831fdd8c97df2a5e16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Mon, 27 Sep 2021 10:10:39 +0300 Subject: [PATCH 43/73] Improve debug output for the currently detected video format --- gstajasrc.cpp | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 9839227537..a2f236c90a 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1816,9 +1816,13 @@ restart: ULWord vpid_b = 0; self->device->device->ReadSDIInVPID(self->channel, vpid_a, vpid_b); - GST_TRACE_OBJECT(self, - "Detected input video format %u with VPID %08x / %08x", - current_video_format, vpid_a, vpid_b); + { + std::string current_string = + NTV2VideoFormatToString(current_video_format); + GST_TRACE_OBJECT( + self, "Detected input video format %s (%d) with VPID %08x / %08x", + current_string.c_str(), (int)current_video_format, vpid_a, vpid_b); + } NTV2VideoFormat effective_video_format = self->video_format; // Can't call this unconditionally as it also maps e.g. 3840x2160p to 1080p From a2d4058c4bc8edad9ff597e31d0220632da9d9c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Mon, 27 Sep 2021 10:12:19 +0300 Subject: [PATCH 44/73] Report no signal if not all quadrants have the same format in quad-link mode --- gstajasrc.cpp | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index a2f236c90a..c7839c5c6d 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1812,6 +1812,29 @@ restart: self->device->device->GetInputVideoFormat( self->configured_input_source); + bool all_quads_equal = true; + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + NTV2VideoFormat other_video_format = + self->device->device->GetInputVideoFormat( + (NTV2InputSource)(self->configured_input_source + i)); + if (other_video_format != current_video_format) { + std::string current_string = + NTV2VideoFormatToString(current_video_format); + std::string other_string = + NTV2VideoFormatToString(other_video_format); + GST_DEBUG_OBJECT( + self, + "Not all quadrants had the same format in " + "quad-link-mode: %s (%d) on input 1 vs. 
%s (%d) on input %d", + current_string.c_str(), current_video_format, + other_string.c_str(), other_video_format, i + 1); + all_quads_equal = false; + break; + } + } + } + ULWord vpid_a = 0; ULWord vpid_b = 0; self->device->device->ReadSDIInVPID(self->channel, vpid_a, vpid_b); @@ -1831,7 +1854,7 @@ restart: ::GetQuarterSizedVideoFormat(effective_video_format); } - if (current_video_format == ::NTV2_FORMAT_UNKNOWN) { + if (current_video_format == ::NTV2_FORMAT_UNKNOWN || !all_quads_equal) { if (self->video_format_setting == GST_AJA_VIDEO_FORMAT_AUTO) self->video_format = NTV2_FORMAT_UNKNOWN; From 50d90f6dca82822bb8e19f8157cedc93699559d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 12 Oct 2021 19:52:06 +0300 Subject: [PATCH 45/73] Allocate more frame buffers for quad modes --- gstajasrc.cpp | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index c7839c5c6d..8666b1ffe1 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1784,9 +1784,13 @@ restart: if (start_frame == end_frame) { guint16 num_frames = ::NTV2DeviceGetNumberFrameBuffers(self->device_id); guint16 num_channels = ::NTV2DeviceGetNumFrameStores(self->device_id); + guint start_channel = self->channel; + guint end_channel = self->channel + 1; - start_frame = self->channel * (num_frames / num_channels); - end_frame = ((self->channel + 1) * (num_frames / num_channels)) - 1; + if (self->quad_mode) end_channel = self->channel + 4; + + start_frame = start_channel * (num_frames / num_channels); + end_frame = (end_channel * (num_frames / num_channels)) - 1; } GST_DEBUG_OBJECT( From e2f1953fa72d86a80e4233aa1e0f3641f930d2e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 12 Oct 2021 19:56:31 +0300 Subject: [PATCH 46/73] Fix compilation with gcc 11 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ../gstajacommon.cpp: In function ‘GType gst_aja_audio_meta_api_get_type()’: /usr/include/glib-2.0/glib/gatomic.h:113:19: error: argument 2 of ‘__atomic_load’ must not be a pointer to a ‘volatile’ type 113 | __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \ | ~~~~~~~~~~~~~~^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ /usr/include/glib-2.0/glib/gthread.h:260:7: note: in expansion of macro ‘g_atomic_pointer_get’ 260 | (!g_atomic_pointer_get (location) && \ | ^~~~~~~~~~~~~~~~~~~~ ../gstajacommon.cpp:398:7: note: in expansion of macro ‘g_once_init_enter’ 398 | if (g_once_init_enter(&type)) { | ^~~~~~~~~~~~~~~~~ --- gstajacommon.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 004981a5e9..dbb7da8c8d 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -393,7 +393,7 @@ bool gst_ntv2_video_format_is_quad(NTV2VideoFormat format) { } GType gst_aja_audio_meta_api_get_type(void) { - static volatile GType type; + static GType type; if (g_once_init_enter(&type)) { static const gchar *tags[] = {NULL}; From bfdf93ecd1a21a6b2d63f522357418381140ccb7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Mon, 1 Nov 2021 16:12:05 +0200 Subject: [PATCH 47/73] Configure fewer frames per channel by default The number of frame stores / frame buffers can't be used like this as the actual number of frames that can be used also depends on the resolution. If too many frames are used and the card's memory runs full, strange problems (green frames, channels leaking into each other, etc.) will appear. 
As such, configure 8 frames for the source by default and half the queue length for the sink. If anything else is needed, manual configuration via the properties has to happen based on the expected workload on all channels and the characteristics of the card in use. --- gstajasink.cpp | 13 +++++-------- gstajasrc.cpp | 13 +++++-------- 2 files changed, 10 insertions(+), 16 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 7a81f2425a..500b1b97dc 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -1737,16 +1737,13 @@ restart: guint16 start_frame = self->start_frame; guint16 end_frame = self->end_frame; + // If nothing was configured, work with a number of frames that is half + // the queue size and assume that all other channels work the same. if (start_frame == end_frame) { - guint16 num_frames = ::NTV2DeviceGetNumberFrameBuffers(self->device_id); - guint16 num_channels = ::NTV2DeviceGetNumFrameStores(self->device_id); + guint16 num_frames = self->queue_size / 2; - start_frame = self->channel * (num_frames / num_channels); - end_frame = ((self->channel + 1) * (num_frames / num_channels)) - 1; - - // Don't configure too many frames here. It needs to be in relation to - // our input queue. - end_frame = MIN(start_frame + self->queue_size / 2, end_frame); + start_frame = self->channel * num_frames; + end_frame = (self->channel + 1) * num_frames - 1; } GST_DEBUG_OBJECT( diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 8666b1ffe1..8b41853bd7 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1781,16 +1781,13 @@ restart: guint16 start_frame = self->start_frame; guint16 end_frame = self->end_frame; + // If nothing was configured, work with 8 frames and assume that all + // other channels work the same. if (start_frame == end_frame) { - guint16 num_frames = ::NTV2DeviceGetNumberFrameBuffers(self->device_id); - guint16 num_channels = ::NTV2DeviceGetNumFrameStores(self->device_id); - guint start_channel = self->channel; - guint end_channel = self->channel + 1; + const guint16 num_frames = 8; - if (self->quad_mode) end_channel = self->channel + 4; - - start_frame = start_channel * (num_frames / num_channels); - end_frame = (end_channel * (num_frames / num_channels)) - 1; + start_frame = self->channel * num_frames; + end_frame = (self->channel + 1) * num_frames - 1; } GST_DEBUG_OBJECT( From 484cb51445125d165c8e72686edbb5b488e4782b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 10 Nov 2021 17:22:30 +0200 Subject: [PATCH 48/73] Implement hopefully working dynamic framebuffer assignments We try to allocate as many frames as were configured by looking for a big enough range of unused contiguous frames while taking the configured modes into account. 
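For reference, the search described above is essentially a first-fit scan over the frame numbers already claimed by running AutoCirculate channels. A minimal sketch of that scan in plain C++ (hypothetical helper name, ignoring the quad/UHD frame multiplier and the rounding the real code applies):

    #include <cstdint>
    #include <set>

    // Returns the first start frame so that [start, start + count - 1]
    // touches no used frame and stays within [0, last], or -1 if no
    // such contiguous range exists.
    int find_free_range(const std::set<uint16_t> &used, uint16_t count,
                        uint16_t last) {
      uint16_t start = 0;
      for (uint16_t frame : used) {
        if (frame >= start + count) break;      // gap before this frame is big enough
        if (frame >= start) start = frame + 1;  // bump past the allocated frame
      }
      if (start + count - 1 > last) return -1;  // would run past the end of memory
      return start;
    }
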
--- gstajacommon.cpp | 128 +++++++++++++++++++++++++++++++++++++++++++++++ gstajacommon.h | 5 ++ gstajasink.cpp | 17 +++++-- gstajasrc.cpp | 29 +++++++---- 4 files changed, 164 insertions(+), 15 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index dbb7da8c8d..ce343cb63c 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -684,6 +684,134 @@ ShmMutexLocker::~ShmMutexLocker() { if (s != SEM_FAILED) sem_post(s); } +static guint gst_aja_device_get_frame_multiplier(GstAjaNtv2Device *device, + NTV2Channel channel) { + // quad formats use 4x as many frames, quad-quad formats 8x + bool quad_enabled = false; + device->device->GetQuadFrameEnable(quad_enabled, channel); + bool quad_quad_enabled = false; + device->device->GetQuadQuadFrameEnable(quad_quad_enabled, channel); + + NTV2VideoFormat format = NTV2_FORMAT_UNKNOWN; + device->device->GetVideoFormat(format, channel); + + GST_TRACE("Channel %d uses mode %d (quad: %d, quad quad: %d)", (gint)channel, + (gint)format, quad_enabled, quad_quad_enabled); + + // Similarly, 2k/UHD use 4x as many frames and 4k/UHD2 use 8x as many + // frames + if (format != NTV2_FORMAT_UNKNOWN) { + guint width = ::GetDisplayWidth(format); + guint height = ::GetDisplayHeight(format); + + if (height <= 1080 && width <= 1920) { + // SD and HD but not 2k! + } else if (height <= 2160 && width <= 3840) { + // 2k and UHD but not 4k + quad_enabled = true; + } else if (height <= 4320 && width <= 7680) { + // 4k and UHD2 but not 8k + quad_quad_enabled = true; + } else { + // 8k FIXME + quad_quad_enabled = true; + } + } + + if (quad_enabled) { + g_assert(!quad_quad_enabled); + + return 4; + } else if (quad_quad_enabled) { + g_assert(!quad_enabled); + + return 8; + } + + return 1; +} + +// Returns -1 on failure or otherwise the start_frame. 
+// end_frame would be start_frame + frame_count - 1 +gint gst_aja_ntv2_device_find_unallocated_frames(GstAjaNtv2Device *device, + NTV2Channel channel, + guint frame_count) { + g_assert(frame_count != 0); + g_assert(device != NULL); + g_assert(device->device->IsOpen()); + + // Adapted from CNTV2Card::FindUnallocatedFrames() with + // quad/quad-quad/UHD/UHD2 support + std::set used_frames; + + for (NTV2Channel c = ::NTV2_CHANNEL1; c < NTV2_MAX_NUM_CHANNELS; + c = (NTV2Channel)(c + 1)) { + AUTOCIRCULATE_STATUS ac_status; + + if (device->device->AutoCirculateGetStatus(c, ac_status) && + !ac_status.IsStopped()) { + guint16 start_frame = ac_status.GetStartFrame(); + guint16 end_frame = ac_status.GetEndFrame(); + + guint multiplier = gst_aja_device_get_frame_multiplier(device, c); + + GST_TRACE("Channel %d uses frames %u-%u (multiplier: %u)", c, start_frame, + end_frame, multiplier); + + start_frame *= multiplier; + end_frame *= multiplier; + end_frame += (multiplier - 1); + + GST_TRACE("Channel %d uses HD frames %u-%u", c, start_frame, end_frame); + for (guint16 i = start_frame; i <= end_frame; i++) { + used_frames.insert(i); + } + } + } + + guint multiplier = gst_aja_device_get_frame_multiplier(device, channel); + frame_count *= multiplier; + + const guint16 last_frame = + ::NTV2DeviceGetNumberFrameBuffers(device->device->GetDeviceID()) - 1; + guint16 start_frame = 0; + guint16 end_frame = start_frame + frame_count - 1; + + auto iter = used_frames.cbegin(); + while (iter != used_frames.cend()) { + guint16 allocated_start_frame = *iter; + guint16 allocated_end_frame = allocated_start_frame; + + // Find end of the allocation + while (++iter != used_frames.cend() && *iter == (allocated_end_frame + 1)) + allocated_end_frame++; + + // Free block before this allocation + if (start_frame < allocated_start_frame && + end_frame < allocated_start_frame) + break; + + // Move after this allocation and check if there is enough space before + // the next allocation + start_frame = GST_ROUND_UP_N(allocated_end_frame + 1, multiplier); + end_frame = start_frame + frame_count - 1; + } + + // If above we moved after the end of the available frames error out + if (start_frame > last_frame || end_frame > last_frame) { + GST_WARNING("Did not find a contiguous unused range of %u frames", + frame_count); + return -1; + } + + // Otherwise we have enough space after the last allocation + GST_INFO("Using HD frames %u-%u", start_frame, end_frame); + GST_INFO("Using frames %u-%u", start_frame / multiplier, + start_frame / multiplier + frame_count / multiplier - 1); + + return start_frame / multiplier; +} + GType gst_aja_audio_system_get_type(void) { static gsize id = 0; static const GEnumValue modes[] = { diff --git a/gstajacommon.h b/gstajacommon.h index a976ded5a5..619035b8e0 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -65,6 +65,11 @@ GstAjaNtv2Device *gst_aja_ntv2_device_ref(GstAjaNtv2Device *device); G_GNUC_INTERNAL void gst_aja_ntv2_device_unref(GstAjaNtv2Device *device); +G_GNUC_INTERNAL +gint gst_aja_ntv2_device_find_unallocated_frames(GstAjaNtv2Device *device, + NTV2Channel channel, + guint frame_count); + #define GST_AJA_ALLOCATOR_MEMTYPE "aja" #define GST_TYPE_AJA_ALLOCATOR (gst_aja_allocator_get_type()) diff --git a/gstajasink.cpp b/gstajasink.cpp index 500b1b97dc..b08624ec38 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -1737,13 +1737,22 @@ restart: guint16 start_frame = self->start_frame; guint16 end_frame = self->end_frame; - // If nothing was configured, work with a number of frames that is 
half - // the queue size and assume that all other channels work the same. + // If both are the same, try to find queue_size/2 unallocated frames and + // use those. if (start_frame == end_frame) { guint16 num_frames = self->queue_size / 2; - start_frame = self->channel * num_frames; - end_frame = (self->channel + 1) * num_frames - 1; + gint assigned_start_frame = gst_aja_ntv2_device_find_unallocated_frames( + self->device, self->channel, num_frames); + + if (assigned_start_frame == -1) { + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to allocate %u frames", num_frames)); + goto out; + } + + start_frame = assigned_start_frame; + end_frame = start_frame + num_frames - 1; } GST_DEBUG_OBJECT( diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 8b41853bd7..9568ff192b 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -42,8 +42,8 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); #define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_FREERUN) #define DEFAULT_QUEUE_SIZE (16) -#define DEFAULT_START_FRAME (0) -#define DEFAULT_END_FRAME (0) +#define DEFAULT_START_FRAME (8) +#define DEFAULT_END_FRAME (8) #define DEFAULT_CAPTURE_CPU_CORE (G_MAXUINT) enum { @@ -156,8 +156,8 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) { gobject_class, PROP_START_FRAME, g_param_spec_uint( "start-frame", "Start Frame", - "Start frame buffer to be used for capturing (auto if same number as " - "end-frame).", + "Start frame buffer to be used for capturing (automatically assign " + "that many frames if same number as end-frame).", 0, G_MAXINT, DEFAULT_START_FRAME, (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); @@ -165,8 +165,8 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) { gobject_class, PROP_END_FRAME, g_param_spec_uint( "end-frame", "End Frame", - "End frame buffer to be used for capturing (auto if same number as " - "start-frame).", + "End frame buffer to be used for capturing (automatically assign " + "that many frames if same number as start-frame).", 0, G_MAXINT, DEFAULT_END_FRAME, (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); @@ -1781,13 +1781,20 @@ restart: guint16 start_frame = self->start_frame; guint16 end_frame = self->end_frame; - // If nothing was configured, work with 8 frames and assume that all - // other channels work the same. + // If both are set to the same value, try to find that many unallocated + // frames and use those. 
if (start_frame == end_frame) { - const guint16 num_frames = 8; + gint assigned_start_frame = gst_aja_ntv2_device_find_unallocated_frames( + self->device, self->channel, self->start_frame); - start_frame = self->channel * num_frames; - end_frame = (self->channel + 1) * num_frames - 1; + if (assigned_start_frame == -1) { + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to allocate %u frames", start_frame)); + goto out; + } + + start_frame = assigned_start_frame; + end_frame = start_frame + self->start_frame - 1; } GST_DEBUG_OBJECT( From a5c9cffea3cdec308694f28c3a7d73f6aec20aea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 12 Nov 2021 10:37:18 +0200 Subject: [PATCH 49/73] Print VPID information into the logs in the source element --- gstajasrc.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 9568ff192b..8581954d0b 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1593,6 +1593,12 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { if (vpid.IsValid()) { GstVideoInfo info; + { + std::stringstream os; + vpid.Print(os); + GST_TRACE_OBJECT(self, "Got valid VPID %s", os.str().c_str()); + } + if (gst_video_info_from_ntv2_video_format(&info, item.detected_format)) { switch (vpid.GetTransferCharacteristics()) { default: From a23b634323f5b59e7df28b8fb8df2d13f661e779 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Mon, 15 Nov 2021 14:45:31 +0200 Subject: [PATCH 50/73] Fix mixup between VITC and ATC/LTC timecode indices in the source --- gstajasrc.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 8581954d0b..08d12eae27 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1125,10 +1125,10 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { switch (self->timecode_index) { case GST_AJA_TIMECODE_INDEX_VITC: - self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, false); + self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, true); break; case GST_AJA_TIMECODE_INDEX_ATC_LTC: - self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, true); + self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, false); break; case GST_AJA_TIMECODE_INDEX_LTC1: self->tc_index = ::NTV2_TCINDEX_LTC1; From 87303fa05fca0643b72a8513c643f63e036d208c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Mon, 22 Nov 2021 10:21:35 +0200 Subject: [PATCH 51/73] Don't keep queue lock in the source locked while ShmMutexLocker is locked for reconfiguring This can lead to a deadlock. 
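The pattern being avoided is an inconsistent lock order between the element's queue lock and the process-wide shared-memory mutex: one thread must never wait for the global lock while still holding the queue lock if another path takes them in the opposite order. A minimal illustration with plain std::mutex (placeholder names, not the element's actual members):

    #include <mutex>

    std::mutex queue_lock;   // stands in for self->queue_lock
    std::mutex global_lock;  // stands in for the ShmMutexLocker's mutex

    void reconfigure_safely() {
      // Take the global lock on its own; only afterwards re-take the
      // queue lock, so both locks are never held while waiting.
      {
        std::lock_guard<std::mutex> g(global_lock);
        // ... global device / routing reconfiguration ...
      }
      std::lock_guard<std::mutex> q(queue_lock);
      // ... element-local work on the queue ...
    }
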
--- gstajasrc.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 08d12eae27..9f43164409 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1752,13 +1752,13 @@ restart: // If we don't have a video format configured, configure the device now // and potentially auto-detect the video format if (self->video_format == NTV2_FORMAT_UNKNOWN) { + // Don't keep queue locked while configuring as this might take a while + g_mutex_unlock(&self->queue_lock); + // Make sure to globally lock here as the routing settings and others are // global shared state ShmMutexLocker locker; - // Don't keep queue locked while configuring as this might take a while - g_mutex_unlock(&self->queue_lock); - if (!gst_aja_src_configure(self)) { g_mutex_lock(&self->queue_lock); GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), From d4d8ca79cce4d42e57d0b1fe00a64481a8e0da76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Mon, 22 Nov 2021 10:22:12 +0200 Subject: [PATCH 52/73] Directly wait again if the source is not playing anymore after reconfiguring --- gstajasrc.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 9f43164409..a351db9d8f 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1767,6 +1767,8 @@ restart: } g_mutex_lock(&self->queue_lock); + if (!self->playing || self->shutdown) goto restart; + if (self->video_format == ::NTV2_FORMAT_UNKNOWN) { GST_DEBUG_OBJECT(self, "No signal, waiting"); frames_dropped_last = G_MAXUINT64; From 21b6a77140aabe07f81420ed022eb343d722d946 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 23 Nov 2021 17:46:46 +0200 Subject: [PATCH 53/73] Remove all routes from a previously configured quad layout when reconfiguring a channel --- gstajasink.cpp | 89 +++++++++++++++++++++++++++++++++++------------ gstajasrc.cpp | 93 +++++++++++++++++++++++++++++++++++++++----------- 2 files changed, 141 insertions(+), 41 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index b08624ec38..656af0262d 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -910,6 +910,18 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { } } + bool had_quad_enabled = false, had_quad_quad_enabled = false; + + if (self->channel < ::NTV2_CHANNEL5) { + self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL1); + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL1); + } else { + self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL5); + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL5); + } + if (self->quad_mode) { switch (self->sdi_mode) { case GST_AJA_SDI_MODE_SINGLE_LINK: @@ -943,10 +955,17 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { break; } } else { - self->device->device->Set4kSquaresEnable(false, self->channel); - self->device->device->SetTsiFrameEnable(false, self->channel); - self->device->device->SetQuadQuadFrameEnable(false, self->channel); - self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + NTV2Channel quad_channel; + + if (self->channel < ::NTV2_CHANNEL5) + quad_channel = ::NTV2_CHANNEL1; + else + quad_channel = ::NTV2_CHANNEL5; + + self->device->device->Set4kSquaresEnable(false, quad_channel); + self->device->device->SetTsiFrameEnable(false, quad_channel); + self->device->device->SetQuadQuadFrameEnable(false, quad_channel); + self->device->device->SetQuadQuadSquaresEnable(false, 
quad_channel); } NTV2SmpteLineNumber smpte_line_num_info = ::GetSmpteLineNumber(standard); @@ -1025,24 +1044,52 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { g_assert_not_reached(); } } else { - for (auto iter = connections.begin(); iter != connections.end(); iter++) { - if (iter->first == output_destination_id || - iter->second == framebuffer_id) - router.RemoveConnection(iter->first, iter->second); + // This also removes all connections for any previous quad mode on the + // corresponding channels. - if (((output_destination_id == NTV2_XptSDIOut6Input || - output_destination_id == NTV2_XptSDIOut8Input) && - iter->second == NTV2_XptFrameBuffer6_DS2YUV) || - ((output_destination_id == NTV2_XptSDIOut5Input || - output_destination_id == NTV2_XptSDIOut6Input) && - iter->second == NTV2_XptFrameBuffer5_DS2YUV) || - ((output_destination_id == NTV2_XptSDIOut2Input || - output_destination_id == NTV2_XptSDIOut4Input) && - iter->second == NTV2_XptFrameBuffer2_DS2YUV) || - ((output_destination_id == NTV2_XptSDIOut1Input || - output_destination_id == NTV2_XptSDIOut2Input) && - iter->second == NTV2_XptFrameBuffer1_DS2YUV)) - router.RemoveConnection(iter->first, iter->second); + NTV2InputCrosspointID quad_output_source_ids[10]; + + if (output_destination_id == NTV2_XptSDIOut1Input || + output_destination_id == NTV2_XptSDIOut2Input || + output_destination_id == NTV2_XptSDIOut3Input || + output_destination_id == NTV2_XptSDIOut4Input) { + quad_output_source_ids[0] = NTV2_XptSDIOut1Input; + quad_output_source_ids[1] = NTV2_XptSDIOut2Input; + quad_output_source_ids[2] = NTV2_XptSDIOut3Input; + quad_output_source_ids[3] = NTV2_XptSDIOut4Input; + quad_output_source_ids[4] = NTV2_XptSDIOut1InputDS2; + quad_output_source_ids[5] = NTV2_XptSDIOut2InputDS2; + quad_output_source_ids[6] = NTV2_Xpt425Mux1AInput; + quad_output_source_ids[7] = NTV2_Xpt425Mux1BInput; + quad_output_source_ids[8] = NTV2_Xpt425Mux2AInput; + quad_output_source_ids[9] = NTV2_Xpt425Mux2BInput; + } else if (output_destination_id == NTV2_XptSDIOut5Input || + output_destination_id == NTV2_XptSDIOut6Input || + output_destination_id == NTV2_XptSDIOut7Input || + output_destination_id == NTV2_XptSDIOut8Input) { + quad_output_source_ids[0] = NTV2_XptSDIOut5Input; + quad_output_source_ids[1] = NTV2_XptSDIOut6Input; + quad_output_source_ids[2] = NTV2_XptSDIOut7Input; + quad_output_source_ids[3] = NTV2_XptSDIOut8Input; + quad_output_source_ids[4] = NTV2_XptSDIOut5InputDS2; + quad_output_source_ids[5] = NTV2_XptSDIOut6InputDS2; + quad_output_source_ids[6] = NTV2_Xpt425Mux3AInput; + quad_output_source_ids[7] = NTV2_Xpt425Mux3BInput; + quad_output_source_ids[8] = NTV2_Xpt425Mux4AInput; + quad_output_source_ids[9] = NTV2_Xpt425Mux4BInput; + } + + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (had_quad_enabled || had_quad_quad_enabled) { + for (auto quad_output_source_id : quad_output_source_ids) { + if (iter->first == quad_output_source_id) + router.RemoveConnection(iter->first, iter->second); + } + } else { + if (iter->first == output_destination_id || + iter->second == framebuffer_id) + router.RemoveConnection(iter->first, iter->second); + } } } diff --git a/gstajasrc.cpp b/gstajasrc.cpp index a351db9d8f..58b3174107 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -635,6 +635,18 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { gst_video_info_from_ntv2_video_format(&self->configured_info, self->video_format); + bool had_quad_enabled = false, had_quad_quad_enabled = false; + + if 
(self->channel < ::NTV2_CHANNEL5) { + self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL1); + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL1); + } else { + self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL5); + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL5); + } + if (self->quad_mode) { if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { @@ -679,10 +691,17 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { } } } else { - self->device->device->Set4kSquaresEnable(false, self->channel); - self->device->device->SetTsiFrameEnable(false, self->channel); - self->device->device->SetQuadQuadFrameEnable(false, self->channel); - self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + NTV2Channel quad_channel; + + if (self->channel < ::NTV2_CHANNEL5) + quad_channel = ::NTV2_CHANNEL1; + else + quad_channel = ::NTV2_CHANNEL5; + + self->device->device->Set4kSquaresEnable(false, quad_channel); + self->device->device->SetTsiFrameEnable(false, quad_channel); + self->device->device->SetQuadQuadFrameEnable(false, quad_channel); + self->device->device->SetQuadQuadSquaresEnable(false, quad_channel); } self->device->device->SetMode(self->channel, NTV2_MODE_CAPTURE, false); @@ -859,23 +878,57 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { g_assert_not_reached(); } } else { - for (auto iter = connections.begin(); iter != connections.end(); iter++) { - if (iter->first == framebuffer_id || iter->second == input_source_id) - router.RemoveConnection(iter->first, iter->second); + // This also removes all connections for any previous quad mode on the + // corresponding channels. 
- if (((input_source_id == NTV2_XptSDIIn6 || - input_source_id == NTV2_XptSDIIn8) && - iter->first == NTV2_XptFrameBuffer6BInput) || - ((input_source_id == NTV2_XptSDIIn5 || - input_source_id == NTV2_XptSDIIn6) && - iter->first == NTV2_XptFrameBuffer5BInput) || - ((input_source_id == NTV2_XptSDIIn4 || - input_source_id == NTV2_XptSDIIn2) && - iter->first == NTV2_XptFrameBuffer2BInput) || - ((input_source_id == NTV2_XptSDIIn1 || - input_source_id == NTV2_XptSDIIn2) && - iter->first == NTV2_XptFrameBuffer1BInput)) - router.RemoveConnection(iter->first, iter->second); + NTV2OutputCrosspointID quad_input_source_ids[10]; + + if (input_source_id == NTV2_XptSDIIn1 || + input_source_id == NTV2_XptSDIIn2 || + input_source_id == NTV2_XptSDIIn3 || + input_source_id == NTV2_XptSDIIn4) { + if (had_quad_enabled || had_quad_quad_enabled) { + quad_input_source_ids[0] = NTV2_XptSDIIn1; + quad_input_source_ids[1] = NTV2_XptSDIIn2; + quad_input_source_ids[2] = NTV2_XptSDIIn3; + quad_input_source_ids[3] = NTV2_XptSDIIn4; + quad_input_source_ids[4] = NTV2_XptSDIIn1DS2; + quad_input_source_ids[5] = NTV2_XptSDIIn2DS2; + quad_input_source_ids[6] = NTV2_Xpt425Mux1AYUV; + quad_input_source_ids[7] = NTV2_Xpt425Mux1BYUV; + quad_input_source_ids[8] = NTV2_Xpt425Mux2AYUV; + quad_input_source_ids[9] = NTV2_Xpt425Mux2BYUV; + } + } else if (input_source_id == NTV2_XptSDIIn5 || + input_source_id == NTV2_XptSDIIn6 || + input_source_id == NTV2_XptSDIIn7 || + input_source_id == NTV2_XptSDIIn8) { + if (had_quad_enabled || had_quad_quad_enabled) { + quad_input_source_ids[0] = NTV2_XptSDIIn5; + quad_input_source_ids[1] = NTV2_XptSDIIn6; + quad_input_source_ids[2] = NTV2_XptSDIIn7; + quad_input_source_ids[3] = NTV2_XptSDIIn8; + quad_input_source_ids[4] = NTV2_XptSDIIn5DS2; + quad_input_source_ids[5] = NTV2_XptSDIIn6DS2; + quad_input_source_ids[6] = NTV2_Xpt425Mux3AYUV; + quad_input_source_ids[7] = NTV2_Xpt425Mux3BYUV; + quad_input_source_ids[8] = NTV2_Xpt425Mux4AYUV; + quad_input_source_ids[9] = NTV2_Xpt425Mux4BYUV; + } + } else { + g_assert_not_reached(); + } + + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (had_quad_enabled || had_quad_quad_enabled) { + for (auto quad_input_source_id : quad_input_source_ids) { + if (iter->second == quad_input_source_id) + router.RemoveConnection(iter->first, iter->second); + } + } else { + if (iter->first == framebuffer_id || iter->second == input_source_id) + router.RemoveConnection(iter->first, iter->second); + } } } From 9b8f452078f9844840d9be88f4651cf08db28845 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 23 Nov 2021 17:47:16 +0200 Subject: [PATCH 54/73] Overwrite the whole routing table if currently no channel is running --- gstajasink.cpp | 21 ++++++++++++++++++++- gstajasrc.cpp | 21 ++++++++++++++++++++- 2 files changed, 40 insertions(+), 2 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 656af0262d..2546521751 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -976,7 +976,26 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { CNTV2SignalRouter router; - self->device->device->GetRouting(router); + // If any channels are currently running, initialize the router with the + // existing routing setup. Otherwise overwrite the whole routing table. 
+ { + bool have_channels_running = false; + + for (NTV2Channel c = ::NTV2_CHANNEL1; c < NTV2_MAX_NUM_CHANNELS; + c = (NTV2Channel)(c + 1)) { + AUTOCIRCULATE_STATUS ac_status; + + if (c == self->channel) continue; + + if (self->device->device->AutoCirculateGetStatus(c, ac_status) && + !ac_status.IsStopped()) { + have_channels_running = true; + break; + } + } + + if (have_channels_running) self->device->device->GetRouting(router); + } // Need to remove old routes for the output and framebuffer we're going to use NTV2ActualConnections connections = router.GetConnections(); diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 58b3174107..16292b5439 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -792,7 +792,26 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { CNTV2SignalRouter router; - self->device->device->GetRouting(router); + // If any channels are currently running, initialize the router with the + // existing routing setup. Otherwise overwrite the whole routing table. + { + bool have_channels_running = false; + + for (NTV2Channel c = ::NTV2_CHANNEL1; c < NTV2_MAX_NUM_CHANNELS; + c = (NTV2Channel)(c + 1)) { + AUTOCIRCULATE_STATUS ac_status; + + if (c == self->channel) continue; + + if (self->device->device->AutoCirculateGetStatus(c, ac_status) && + !ac_status.IsStopped()) { + have_channels_running = true; + break; + } + } + + if (have_channels_running) self->device->device->GetRouting(router); + } // Need to remove old routes for the output and framebuffer we're going to // use From d1c7e0e7bbdc7e57bc17352a3a7f0740ce9dab38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 26 Nov 2021 13:15:27 +0200 Subject: [PATCH 55/73] Stop autocirculate of all quad channels if a quad mode was used before And also move the channel==1 || channel==5 check for enabling quad modes to the very beginning. 
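In other words, quad-link groups are anchored at channel 1 (SDI 1-4) and channel 5 (SDI 5-8), and a previous quad configuration has to be torn down group-wide. A small sketch over plain integers (1-8 standing in for NTV2Channel values, hypothetical helper name):

    #include <vector>

    // Which channels need AutoCirculateStop() before (re)configuring `channel`.
    std::vector<int> channels_to_stop(int channel, bool quad_involved) {
      if (!quad_involved) return {channel};  // single-link: only this channel
      int base = channel < 5 ? 1 : 5;        // quad group anchor
      return {base, base + 1, base + 2, base + 3};
    }
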
--- gstajasrc.cpp | 51 ++++++++++++++++++++++++++++++--------------------- 1 file changed, 30 insertions(+), 21 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 16292b5439..22fc1a5b8d 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -473,10 +473,38 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { #undef NEEDS_QUAD_MODE + bool had_quad_enabled = false, had_quad_quad_enabled = false; + + if (self->channel < ::NTV2_CHANNEL5) { + self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL1); + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL1); + } else { + self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL5); + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL5); + } + + // Stop any previously running quad mode, or other configurations on the + // quad channels self->device->device->AutoCirculateStop(self->channel); + if (self->quad_mode || had_quad_enabled || had_quad_enabled) { + NTV2Channel quad_channel; + + if (self->channel < ::NTV2_CHANNEL5) + quad_channel = ::NTV2_CHANNEL1; + else + quad_channel = ::NTV2_CHANNEL5; + + for (int i = 0; i < 4; i++) { + self->device->device->AutoCirculateStop((NTV2Channel)(quad_channel + i)); + } + } + if (self->quad_mode) { - for (int i = 1; i < 4; i++) { - self->device->device->AutoCirculateStop((NTV2Channel)(self->channel + i)); + if (self->channel != ::NTV2_CHANNEL1 && self->channel != ::NTV2_CHANNEL5) { + GST_ERROR_OBJECT(self, "Quad modes require channels 1 or 5"); + return FALSE; } } @@ -625,28 +653,9 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { return FALSE; } - if (self->quad_mode) { - if (self->channel != ::NTV2_CHANNEL1 && self->channel != ::NTV2_CHANNEL5) { - GST_ERROR_OBJECT(self, "Quad modes require channels 1 or 5"); - return FALSE; - } - } - gst_video_info_from_ntv2_video_format(&self->configured_info, self->video_format); - bool had_quad_enabled = false, had_quad_quad_enabled = false; - - if (self->channel < ::NTV2_CHANNEL5) { - self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL1); - self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, - ::NTV2_CHANNEL1); - } else { - self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL5); - self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, - ::NTV2_CHANNEL5); - } - if (self->quad_mode) { if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { From de8e5ed9f4327c1bea4ee9674405906b24242e20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 3 Dec 2021 11:40:16 +0200 Subject: [PATCH 56/73] Require the default input source/output destination and channel 1 or 5 for quad modes --- gstajasink.cpp | 14 ++++++++++++++ gstajasrc.cpp | 23 ++++++++++++++++------- 2 files changed, 30 insertions(+), 7 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 2546521751..7b73df0ce5 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -597,6 +597,20 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { return FALSE; } + if (quad_mode) { + if (self->output_destination != GST_AJA_OUTPUT_DESTINATION_AUTO) { + GST_ERROR_OBJECT(self, + "Quad modes require usage of the channel's default " + "output destination"); + return FALSE; + } + + if (self->channel != ::NTV2_CHANNEL1 && self->channel != ::NTV2_CHANNEL5) { + GST_ERROR_OBJECT(self, "Quad modes require channels 1 or 5"); + return FALSE; + } + } + 
self->quad_mode = quad_mode; self->video_format = video_format; diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 22fc1a5b8d..70037bd3d1 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -473,6 +473,22 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { #undef NEEDS_QUAD_MODE + if (self->quad_mode) { + if (self->input_source != GST_AJA_INPUT_SOURCE_AUTO && + !(self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4)) { + GST_ERROR_OBJECT( + self, + "Quad modes require usage of the channel's default input source"); + return FALSE; + } + + if (self->channel != ::NTV2_CHANNEL1 && self->channel != ::NTV2_CHANNEL5) { + GST_ERROR_OBJECT(self, "Quad modes require channels 1 or 5"); + return FALSE; + } + } + bool had_quad_enabled = false, had_quad_quad_enabled = false; if (self->channel < ::NTV2_CHANNEL5) { @@ -501,13 +517,6 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { } } - if (self->quad_mode) { - if (self->channel != ::NTV2_CHANNEL1 && self->channel != ::NTV2_CHANNEL5) { - GST_ERROR_OBJECT(self, "Quad modes require channels 1 or 5"); - return FALSE; - } - } - if (self->buffer_pool) { gst_buffer_pool_set_active(self->buffer_pool, FALSE); gst_clear_object(&self->buffer_pool); From 083aa0bc862f2ebe5960bd817ed8d2fb7e85b532 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 3 Dec 2021 12:14:16 +0200 Subject: [PATCH 57/73] Don't disable quad modes on ch1 if a 12G UHD mode runs there --- gstajasrc.cpp | 46 +++++++++++++++++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 9 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 70037bd3d1..5ffe3eef74 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -491,14 +491,42 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { bool had_quad_enabled = false, had_quad_quad_enabled = false; - if (self->channel < ::NTV2_CHANNEL5) { - self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL1); - self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, - ::NTV2_CHANNEL1); - } else { - self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL5); - self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, - ::NTV2_CHANNEL5); + // HDMI can also be internally quad mode but it runs on a single channel. + if (!(self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4)) { + if (self->channel < ::NTV2_CHANNEL5) { + self->device->device->GetQuadFrameEnable(had_quad_enabled, + ::NTV2_CHANNEL1); + + // 12G UHD is also internally considered quad modes but they run on a + // single channel. + if (had_quad_enabled && ::NTV2DeviceCanDo12gRouting(self->device_id)) { + NTV2VideoFormat fmt = + self->device->device->GetInputVideoFormat(::NTV2_INPUTSOURCE_SDI1); + if (fmt >= NTV2_FORMAT_FIRST_UHD_TSI_DEF_FORMAT && + fmt < NTV2_FORMAT_END_4K_TSI_DEF_FORMATS) + had_quad_enabled = false; + } + + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL1); + } else { + self->device->device->GetQuadFrameEnable(had_quad_enabled, + ::NTV2_CHANNEL5); + + // 12G UHD is also internally considered quad modes but they run on a + // single channel. 
+ if (had_quad_enabled && ::NTV2DeviceCanDo12gRouting(self->device_id)) { + NTV2VideoFormat fmt = + self->device->device->GetInputVideoFormat(::NTV2_INPUTSOURCE_SDI5); + if (fmt >= NTV2_FORMAT_FIRST_UHD_TSI_DEF_FORMAT && + fmt < NTV2_FORMAT_END_4K_TSI_DEF_FORMATS) + had_quad_enabled = false; + } + + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL5); + } } // Stop any previously running quad mode, or other configurations on the @@ -708,7 +736,7 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { break; } } - } else { + } else if (had_quad_enabled || had_quad_quad_enabled) { NTV2Channel quad_channel; if (self->channel < ::NTV2_CHANNEL5) From a122044e2c80240a7c20dc4ca93cb24c12920147 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 10 Dec 2021 11:05:55 +0200 Subject: [PATCH 58/73] Include field-order in the caps for interlaced formats --- gstajacommon.cpp | 12 ++++++++++++ gstajasrc.cpp | 11 +++++++++++ 2 files changed, 23 insertions(+) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index ce343cb63c..7576091dec 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -266,6 +266,7 @@ bool gst_video_info_from_ntv2_video_format(GstVideoInfo *info, NTV2VideoFormat format) { if (format == NTV2_FORMAT_UNKNOWN) return false; + NTV2Standard standard = ::GetNTV2StandardFromVideoFormat(format); guint width = ::GetDisplayWidth(format); guint height = ::GetDisplayHeight(format); NTV2FrameRate fps = ::GetNTV2FrameRateFromVideoFormat(format); @@ -286,6 +287,17 @@ bool gst_video_info_from_ntv2_video_format(GstVideoInfo *info, ? GST_VIDEO_INTERLACE_MODE_INTERLEAVED : GST_VIDEO_INTERLACE_MODE_PROGRESSIVE; + if (!::IsProgressiveTransport(format)) { + NTV2SmpteLineNumber line_number = ::GetSmpteLineNumber(standard); + + if (line_number.firstFieldTop) { + GST_VIDEO_INFO_FIELD_ORDER(info) = GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST; + } else { + GST_VIDEO_INFO_FIELD_ORDER(info) = + GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST; + } + } + return true; } diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 5ffe3eef74..8494952ada 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1818,6 +1818,17 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { gst_caps_unref(caps); } + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { + GST_BUFFER_FLAG_SET(*buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED); + switch (GST_VIDEO_INFO_FIELD_ORDER(&self->configured_info)) { + case GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST: + GST_BUFFER_FLAG_SET(*buffer, GST_VIDEO_BUFFER_FLAG_TFF); + default: + break; + } + } + GST_TRACE_OBJECT(self, "Outputting buffer %" GST_PTR_FORMAT, *buffer); return flow_ret; From 0487634aa64467c0ea747a44e4169a5b396084a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 10 Dec 2021 11:10:30 +0200 Subject: [PATCH 59/73] Don't set frame geometry and VANC mode The former already does the latter. --- gstajasrc.cpp | 6 ------ 1 file changed, 6 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 8494952ada..e74019bb0d 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -809,14 +809,11 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { ::HasVANCGeometries(geometry) ? 
vanc_mode : ::NTV2_VANCMODE_OFF; if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { self->device->device->SetFrameGeometry(geometry, false, self->channel); - self->device->device->SetVANCMode(self->vanc_mode, self->channel); if (self->quad_mode) { for (int i = 1; i < 4; i++) { self->device->device->SetFrameGeometry( geometry, false, (NTV2Channel)(self->channel + i)); - self->device->device->SetVANCMode(self->vanc_mode, - (NTV2Channel)(self->channel + i)); } } } else { @@ -824,14 +821,11 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { ::GetVANCFrameGeometry(geometry, self->vanc_mode); self->device->device->SetFrameGeometry(vanc_geometry, false, self->channel); - self->device->device->SetVANCMode(self->vanc_mode, self->channel); if (self->quad_mode) { for (int i = 1; i < 4; i++) { self->device->device->SetFrameGeometry( vanc_geometry, false, (NTV2Channel)(self->channel + i)); - self->device->device->SetVANCMode(self->vanc_mode, - (NTV2Channel)(self->channel + i)); } } } From eadd35363ae089272e01e885f17e52f803f1abe6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 10 Dec 2021 16:00:10 +0200 Subject: [PATCH 60/73] Workaround for sometimes setting the video format not actually changing the register values Let's just try again. --- gstajasrc.cpp | 38 ++++++++++++++++++++++++++++++++++---- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index e74019bb0d..5e77590c69 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -761,8 +761,13 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { GST_DEBUG_OBJECT(self, "Configuring video format %s (%d) on channel %d", configured_string.c_str(), (int)self->video_format, (int)self->channel); - self->device->device->SetVideoFormat(self->video_format, false, false, - self->channel); + if (!self->device->device->SetVideoFormat(self->video_format, false, false, + self->channel)) { + GST_DEBUG_OBJECT( + self, "Failed configuring video format %s (%d) on channel %d", + configured_string.c_str(), (int)self->video_format, (int)self->channel); + return FALSE; + } if (!::NTV2DeviceCanDoFrameBufferFormat(self->device_id, ::NTV2_FBF_10BIT_YCBCR)) { @@ -770,8 +775,33 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { (int)::NTV2_FBF_10BIT_YCBCR); return FALSE; } - self->device->device->SetFrameBufferFormat(self->channel, - ::NTV2_FBF_10BIT_YCBCR); + + if (!self->device->device->SetFrameBufferFormat(self->channel, + ::NTV2_FBF_10BIT_YCBCR)) { + GST_ERROR_OBJECT(self, "Failed configuring frame buffer format %d", + (int)::NTV2_FBF_10BIT_YCBCR); + return FALSE; + } + + // FIXME: Workaround for sometimes setting the video format not actually + // changing the register values. Let's just try again. 
+ { + NTV2VideoFormat fmt; + self->device->device->GetVideoFormat(fmt, self->channel); + + if (fmt != self->video_format) { + std::string actual_string = NTV2VideoFormatToString(fmt); + + GST_ERROR_OBJECT(self, + "Configured video format %s (%d) on channel %d but %s " + "(%d) is configured instead, trying again", + configured_string.c_str(), (int)self->video_format, + (int)self->channel, actual_string.c_str(), (int)fmt); + self->video_format = ::NTV2_FORMAT_UNKNOWN; + return TRUE; + } + } + if (self->quad_mode) { for (int i = 1; i < 4; i++) self->device->device->SetFrameBufferFormat( From e53a017ee7aa18fbeda2b1e4631f9fe3b362f229 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 29 Mar 2022 10:05:36 +0300 Subject: [PATCH 61/73] Add support for extracting CEA608 S334-1 Closed Captions and add a property to select which ones to extract --- gstajacommon.cpp | 28 +++++++++++++++ gstajacommon.h | 14 ++++++++ gstajasrc.cpp | 91 ++++++++++++++++++++++++++++++++++++++++++++++-- gstajasrc.h | 1 + 4 files changed, 131 insertions(+), 3 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 7576091dec..3f990724aa 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -1057,6 +1057,34 @@ GType gst_aja_timecode_index_get_type(void) { return (GType)id; } +GType gst_aja_closed_caption_capture_mode_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_AND_CEA608, + "cea708-and-cea608", + "CEA708 S334-2 and CEA608 S334-1 Annex A Closed Captions"}, + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_OR_CEA608, "cea708-or-cea608", + "CEA708 S334-2 or if not existing CEA608 S334-1 Annex A Closed " + "Captions"}, + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_OR_CEA708, "cea608-or-cea708", + "CEA608 S334-1 Annex A or if not existing CEA708 S334-2 Closed " + "Captions"}, + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_ONLY, "cea708-only", + "CEA708 S334-2 Closed Captions only"}, + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_ONLY, "cea608-only", + "CEA608 S334-1 Annex A Closed Captions only"}, + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_NONE, "none", + "Don't capture Closed Captions"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaClosedCaptionCaptureMode", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + void gst_aja_common_init(void) { GST_DEBUG_CATEGORY_INIT(gst_aja_debug, "aja", 0, "Debug category for AJA plugin"); diff --git a/gstajacommon.h b/gstajacommon.h index 619035b8e0..0203c68650 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -277,6 +277,20 @@ typedef enum { G_GNUC_INTERNAL GType gst_aja_timecode_index_get_type(void); +typedef enum { + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_AND_CEA608, + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_OR_CEA608, + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_OR_CEA708, + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_ONLY, + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_ONLY, + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_NONE, +} GstAjaClosedCaptionCaptureMode; + +#define GST_TYPE_AJA_CLOSED_CAPTION_CAPTURE_MODE \ + (gst_aja_closed_caption_capture_mode_get_type()) +G_GNUC_INTERNAL +GType gst_aja_closed_caption_capture_mode_get_type(void); + G_GNUC_INTERNAL void gst_aja_common_init(void); diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 5e77590c69..58ef91917f 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -21,6 +21,7 @@ #include "config.h" #endif +#include #include #include #include @@ -41,6 +42,8 @@ 
GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); #define DEFAULT_AUDIO_SOURCE (GST_AJA_AUDIO_SOURCE_EMBEDDED) #define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_FREERUN) +#define DEFAULT_CLOSED_CAPTION_CAPTURE_MODE \ + (GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_AND_CEA608) #define DEFAULT_QUEUE_SIZE (16) #define DEFAULT_START_FRAME (8) #define DEFAULT_END_FRAME (8) @@ -57,6 +60,7 @@ enum { PROP_AUDIO_SOURCE, PROP_TIMECODE_INDEX, PROP_REFERENCE_SOURCE, + PROP_CLOSED_CAPTION_CAPTURE_MODE, PROP_START_FRAME, PROP_END_FRAME, PROP_QUEUE_SIZE, @@ -218,6 +222,16 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) { (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT))); + g_object_class_install_property( + gobject_class, PROP_CLOSED_CAPTION_CAPTURE_MODE, + g_param_spec_enum( + "closed-caption-capture-mode", "Closed Caption Capture Mode", + "Closed Caption Capture Mode", + GST_TYPE_AJA_CLOSED_CAPTION_CAPTURE_MODE, + DEFAULT_CLOSED_CAPTION_CAPTURE_MODE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + g_object_class_install_property( gobject_class, PROP_CAPTURE_CPU_CORE, g_param_spec_uint( @@ -274,6 +288,7 @@ static void gst_aja_src_init(GstAjaSrc *self) { self->audio_source = DEFAULT_AUDIO_SOURCE; self->timecode_index = DEFAULT_TIMECODE_INDEX; self->reference_source = DEFAULT_REFERENCE_SOURCE; + self->closed_caption_capture_mode = DEFAULT_CLOSED_CAPTION_CAPTURE_MODE; self->capture_cpu_core = DEFAULT_CAPTURE_CPU_CORE; self->queue = @@ -326,6 +341,10 @@ void gst_aja_src_set_property(GObject *object, guint property_id, case PROP_REFERENCE_SOURCE: self->reference_source = (GstAjaReferenceSource)g_value_get_enum(value); break; + case PROP_CLOSED_CAPTION_CAPTURE_MODE: + self->closed_caption_capture_mode = + (GstAjaClosedCaptionCaptureMode)g_value_get_enum(value); + break; case PROP_CAPTURE_CPU_CORE: self->capture_cpu_core = g_value_get_uint(value); break; @@ -376,6 +395,9 @@ void gst_aja_src_get_property(GObject *object, guint property_id, GValue *value, case PROP_REFERENCE_SOURCE: g_value_set_enum(value, self->reference_source); break; + case PROP_CLOSED_CAPTION_CAPTURE_MODE: + g_value_set_enum(value, self->closed_caption_capture_mode); + break; case PROP_CAPTURE_CPU_CORE: g_value_set_uint(value, self->capture_cpu_core); break; @@ -1680,17 +1702,80 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { // // See AJA SDK support ticket #4844. guint32 n_vanc_packets = anc_packets.CountAncillaryData(); + + // Check if we have either CEA608 or CEA708 packets, or both. 
+ bool have_cea608 = false; + bool have_cea708 = false; + for (guint32 i = 0; i < n_vanc_packets; i++) { + AJAAncillaryData *packet = anc_packets.GetAncillaryDataAtIndex(i); + + if (packet->GetDID() == AJAAncillaryData_Cea608_Vanc_DID && + packet->GetSID() == AJAAncillaryData_Cea608_Vanc_SID && + packet->GetPayloadData() && packet->GetPayloadByteCount() && + AJA_SUCCESS(packet->ParsePayloadData())) { + GST_TRACE_OBJECT( + self, "Found CEA608 VANC of %" G_GSIZE_FORMAT " bytes at line %u", + packet->GetPayloadByteCount(), packet->GetLocationLineNumber()); + have_cea608 = true; + } else if (packet->GetDID() == AJAAncillaryData_CEA708_DID && + packet->GetSID() == AJAAncillaryData_CEA708_SID && + packet->GetPayloadData() && packet->GetPayloadByteCount() && + AJA_SUCCESS(packet->ParsePayloadData())) { + GST_TRACE_OBJECT( + self, "Found CEA708 CDP VANC of %" G_GSIZE_FORMAT " bytes at line %u", + packet->GetPayloadByteCount(), packet->GetLocationLineNumber()); + have_cea708 = true; + } + } + + // Decide based on the closed-caption-capture-mode property and closed + // caption availability which ones to add as metadata to the output buffer. + bool want_cea608 = + have_cea608 && + (self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_AND_CEA608 || + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_OR_CEA708 || + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_ONLY || + (!have_cea708 && + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_OR_CEA608)); + + bool want_cea708 = + have_cea708 && + (self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_AND_CEA608 || + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_OR_CEA608 || + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_ONLY || + (!have_cea608 && + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_OR_CEA708)); + bool aspect_ratio_flag = false; bool have_afd_bar = false; for (guint32 i = 0; i < n_vanc_packets; i++) { AJAAncillaryData *packet = anc_packets.GetAncillaryDataAtIndex(i); - if (packet->GetDID() == AJAAncillaryData_CEA708_DID && - packet->GetSID() == AJAAncillaryData_CEA708_SID && + if (want_cea608 && packet->GetDID() == AJAAncillaryData_Cea608_Vanc_DID && + packet->GetSID() == AJAAncillaryData_Cea608_Vanc_SID && packet->GetPayloadData() && packet->GetPayloadByteCount() && AJA_SUCCESS(packet->ParsePayloadData())) { GST_TRACE_OBJECT( - self, "Found CEA708 CDP VANC of %" G_GSIZE_FORMAT " bytes at line %u", + self, "Adding CEA608 VANC of %" G_GSIZE_FORMAT " bytes at line %u", + packet->GetPayloadByteCount(), packet->GetLocationLineNumber()); + gst_buffer_add_video_caption_meta( + *buffer, GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A, + packet->GetPayloadData(), packet->GetPayloadByteCount()); + } else if (want_cea708 && packet->GetDID() == AJAAncillaryData_CEA708_DID && + packet->GetSID() == AJAAncillaryData_CEA708_SID && + packet->GetPayloadData() && packet->GetPayloadByteCount() && + AJA_SUCCESS(packet->ParsePayloadData())) { + GST_TRACE_OBJECT( + self, + "Adding CEA708 CDP VANC of %" G_GSIZE_FORMAT " bytes at line %u", packet->GetPayloadByteCount(), packet->GetLocationLineNumber()); gst_buffer_add_video_caption_meta( *buffer, GST_VIDEO_CAPTION_TYPE_CEA708_CDP, packet->GetPayloadData(), diff --git a/gstajasrc.h b/gstajasrc.h index 4d0841a2fb..bd93033961 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ 
-69,6 +69,7 @@ struct _GstAjaSrc { GstAjaAudioSource audio_source; GstAjaTimecodeIndex timecode_index; GstAjaReferenceSource reference_source; + GstAjaClosedCaptionCaptureMode closed_caption_capture_mode; guint queue_size; guint start_frame, end_frame; guint capture_cpu_core; From 2bddfeac8acf8adab654e2c9982c263ac5a98f36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 29 Mar 2022 10:24:45 +0300 Subject: [PATCH 62/73] Add support for writing CEA608 S334-1 Annex A Closed Captions And allow configuring the line number to use for CEA708/608 captions. --- gstajasink.cpp | 89 ++++++++++++++++++++++++++++++++++++++++++++------ gstajasink.h | 3 ++ 2 files changed, 82 insertions(+), 10 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 7b73df0ce5..57e7506886 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -21,6 +21,7 @@ #include "config.h" #endif +#include #include #include #include @@ -38,6 +39,8 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_sink_debug); #define DEFAULT_SDI_MODE (GST_AJA_SDI_MODE_SINGLE_LINK) #define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_AUTO) +#define DEFAULT_CEA608_LINE_NUMBER (12) +#define DEFAULT_CEA708_LINE_NUMBER (12) #define DEFAULT_QUEUE_SIZE (16) #define DEFAULT_START_FRAME (0) #define DEFAULT_END_FRAME (0) @@ -52,6 +55,8 @@ enum { PROP_SDI_MODE, PROP_TIMECODE_INDEX, PROP_REFERENCE_SOURCE, + PROP_CEA608_LINE_NUMBER, + PROP_CEA708_LINE_NUMBER, PROP_QUEUE_SIZE, PROP_START_FRAME, PROP_END_FRAME, @@ -198,6 +203,27 @@ static void gst_aja_sink_class_init(GstAjaSinkClass *klass) { (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT))); + g_object_class_install_property( + gobject_class, PROP_CEA608_LINE_NUMBER, + g_param_spec_uint( + "cea608-line-number", "CEA608 Line Number", + "Sets the line number to use for CEA608 S334-1 Annex A Closed " + "Captions " + "(-1=disabled)", + 0, G_MAXUINT, DEFAULT_CEA608_LINE_NUMBER, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_CEA708_LINE_NUMBER, + g_param_spec_uint( + "cea708-line-number", "CEA708 Line Number", + "Sets the line number to use for CEA708 S334-2 Closed Captions " + "(-1=disabled)", + 0, G_MAXUINT, DEFAULT_CEA608_LINE_NUMBER, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + g_object_class_install_property( gobject_class, PROP_OUTPUT_CPU_CORE, g_param_spec_uint( @@ -288,6 +314,12 @@ void gst_aja_sink_set_property(GObject *object, guint property_id, case PROP_REFERENCE_SOURCE: self->reference_source = (GstAjaReferenceSource)g_value_get_enum(value); break; + case PROP_CEA608_LINE_NUMBER: + self->cea608_line_number = g_value_get_uint(value); + break; + case PROP_CEA708_LINE_NUMBER: + self->cea708_line_number = g_value_get_uint(value); + break; case PROP_OUTPUT_CPU_CORE: self->output_cpu_core = g_value_get_uint(value); break; @@ -332,6 +364,12 @@ void gst_aja_sink_get_property(GObject *object, guint property_id, case PROP_REFERENCE_SOURCE: g_value_set_enum(value, self->reference_source); break; + case PROP_CEA608_LINE_NUMBER: + g_value_set_uint(value, self->cea608_line_number); + break; + case PROP_CEA708_LINE_NUMBER: + g_value_set_uint(value, self->cea708_line_number); + break; case PROP_OUTPUT_CPU_CORE: g_value_set_uint(value, self->output_cpu_core); break; @@ -1578,19 +1616,50 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, (caption_meta = 
(GstVideoCaptionMeta *)gst_buffer_iterate_meta_filtered( buffer, &iter, GST_VIDEO_CAPTION_META_API_TYPE))) { if (caption_meta->caption_type == GST_VIDEO_CAPTION_TYPE_CEA708_CDP) { - const AJAAncillaryDataLocation kCEA708LocF1( - AJAAncillaryDataLink_A, AJAAncillaryDataVideoStream_Y, - AJAAncillaryDataSpace_VANC, 12, AJAAncDataHorizOffset_AnyVanc); + if (self->cea708_line_number != -1) { + const AJAAncillaryDataLocation kCEA708LocF1( + AJAAncillaryDataLink_A, AJAAncillaryDataVideoStream_Y, + AJAAncillaryDataSpace_VANC, self->cea708_line_number, + AJAAncDataHorizOffset_AnyVanc); - AJAAncillaryData_Cea708 pkt; + AJAAncillaryData_Cea708 pkt; - pkt.SetDID(GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 >> 8); - pkt.SetSID(GST_VIDEO_ANCILLARY_DID16_S334_EIA_708 & 0xff); - pkt.SetDataLocation(kCEA708LocF1); - pkt.SetDataCoding(AJAAncillaryDataCoding_Digital); - pkt.SetPayloadData(caption_meta->data, caption_meta->size); + pkt.SetDID(AJAAncillaryData_CEA708_DID); + pkt.SetSID(AJAAncillaryData_CEA708_SID); + pkt.SetDataLocation(kCEA708LocF1); + pkt.SetDataCoding(AJAAncillaryDataCoding_Digital); + pkt.SetPayloadData(caption_meta->data, caption_meta->size); - anc_packet_list.AddAncillaryData(pkt); + GST_TRACE_OBJECT( + self, + "Adding CEA708 CDP VANC of %" G_GSIZE_FORMAT " bytes at line %u", + pkt.GetPayloadByteCount(), pkt.GetLocationLineNumber()); + + anc_packet_list.AddAncillaryData(pkt); + } + } else if (caption_meta->caption_type == + GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A) { + if (self->cea608_line_number != -1) { + const AJAAncillaryDataLocation kCEA608LocF1( + AJAAncillaryDataLink_A, AJAAncillaryDataVideoStream_Y, + AJAAncillaryDataSpace_VANC, self->cea608_line_number, + AJAAncDataHorizOffset_AnyVanc); + + AJAAncillaryData_Cea608_Vanc pkt; + + pkt.SetDID(AJAAncillaryData_Cea608_Vanc_DID); + pkt.SetSID(AJAAncillaryData_Cea608_Vanc_SID); + pkt.SetDataLocation(kCEA608LocF1); + pkt.SetDataCoding(AJAAncillaryDataCoding_Digital); + pkt.SetPayloadData(caption_meta->data, caption_meta->size); + pkt.ParsePayloadData(); + + GST_TRACE_OBJECT( + self, "Adding CEA608 VANC of %" G_GSIZE_FORMAT " bytes at line %u", + pkt.GetPayloadByteCount(), pkt.GetLocationLineNumber()); + + anc_packet_list.AddAncillaryData(pkt); + } } else { GST_WARNING_OBJECT(self, "Unhandled caption type %d", caption_meta->caption_type); diff --git a/gstajasink.h b/gstajasink.h index e724c45b6b..ac7864d142 100644 --- a/gstajasink.h +++ b/gstajasink.h @@ -78,6 +78,9 @@ struct _GstAjaSink { GstAjaTimecodeIndex timecode_index; GstAjaReferenceSource reference_source; + gint cea608_line_number; + gint cea708_line_number; + NTV2AudioSystem audio_system; NTV2VideoFormat video_format; bool quad_mode; From 5fa487a4494f58941f3a16648463d41ca3e1ba21 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Fri, 8 Apr 2022 12:07:31 +0300 Subject: [PATCH 63/73] Add support for setting the embedded audio input to use in ajasrc --- gstajacommon.cpp | 22 +++++++++++++++++++ gstajacommon.h | 17 +++++++++++++++ gstajasrc.cpp | 57 ++++++++++++++++++++++++++++++++++++++++++++++-- gstajasrc.h | 1 + 4 files changed, 95 insertions(+), 2 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 3f990724aa..1f3dc511b7 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -1040,6 +1040,28 @@ GType gst_aja_audio_source_get_type(void) { return (GType)id; } +GType gst_aja_embedded_audio_input_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO, "auto", "auto"}, + 
{GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO1, "video-1", "Video 1"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO2, "video-2", "Video 2"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO3, "video-3", "Video 3"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO4, "video-4", "Video 4"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO5, "video-5", "Video 5"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO6, "video-6", "Video 6"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO7, "video-7", "Video 7"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO8, "video-8", "Video 8"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaEmbeddedAudioInput", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + GType gst_aja_timecode_index_get_type(void) { static gsize id = 0; static const GEnumValue modes[] = { diff --git a/gstajacommon.h b/gstajacommon.h index 0203c68650..48eed7c08e 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -266,6 +266,23 @@ typedef enum { G_GNUC_INTERNAL GType gst_aja_audio_source_get_type(void); +typedef enum { + GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO1, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO2, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO3, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO4, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO5, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO6, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO7, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO8, +} GstAjaEmbeddedAudioInput; + +#define GST_TYPE_AJA_EMBEDDED_AUDIO_INPUT \ + (gst_aja_embedded_audio_input_get_type()) +G_GNUC_INTERNAL +GType gst_aja_embedded_audio_input_get_type(void); + typedef enum { GST_AJA_TIMECODE_INDEX_VITC, GST_AJA_TIMECODE_INDEX_ATC_LTC, diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 58ef91917f..d77a0afcd0 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -40,6 +40,7 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); #define DEFAULT_INPUT_SOURCE (GST_AJA_INPUT_SOURCE_AUTO) #define DEFAULT_SDI_MODE (GST_AJA_SDI_MODE_SINGLE_LINK) #define DEFAULT_AUDIO_SOURCE (GST_AJA_AUDIO_SOURCE_EMBEDDED) +#define DEFAULT_EMBEDDED_AUDIO_INPUT (GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO) #define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_FREERUN) #define DEFAULT_CLOSED_CAPTION_CAPTURE_MODE \ @@ -58,6 +59,7 @@ enum { PROP_INPUT_SOURCE, PROP_SDI_MODE, PROP_AUDIO_SOURCE, + PROP_EMBEDDED_AUDIO_INPUT, PROP_TIMECODE_INDEX, PROP_REFERENCE_SOURCE, PROP_CLOSED_CAPTION_CAPTURE_MODE, @@ -206,6 +208,15 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) { (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT))); + g_object_class_install_property( + gobject_class, PROP_EMBEDDED_AUDIO_INPUT, + g_param_spec_enum( + "embedded-audio-input", "Embedded Audio Input", + "Embedded Audio Input to use", GST_TYPE_AJA_EMBEDDED_AUDIO_INPUT, + DEFAULT_EMBEDDED_AUDIO_INPUT, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + g_object_class_install_property( gobject_class, PROP_TIMECODE_INDEX, g_param_spec_enum( @@ -286,6 +297,7 @@ static void gst_aja_src_init(GstAjaSrc *self) { self->audio_system_setting = DEFAULT_AUDIO_SYSTEM; self->input_source = DEFAULT_INPUT_SOURCE; self->audio_source = DEFAULT_AUDIO_SOURCE; + self->embedded_audio_input = DEFAULT_EMBEDDED_AUDIO_INPUT; self->timecode_index = DEFAULT_TIMECODE_INDEX; self->reference_source = DEFAULT_REFERENCE_SOURCE; self->closed_caption_capture_mode = DEFAULT_CLOSED_CAPTION_CAPTURE_MODE; @@ -335,6 +347,10 @@ void gst_aja_src_set_property(GObject *object, guint property_id, case 
PROP_AUDIO_SOURCE: self->audio_source = (GstAjaAudioSource)g_value_get_enum(value); break; + case PROP_EMBEDDED_AUDIO_INPUT: + self->embedded_audio_input = + (GstAjaEmbeddedAudioInput)g_value_get_enum(value); + break; case PROP_TIMECODE_INDEX: self->timecode_index = (GstAjaTimecodeIndex)g_value_get_enum(value); break; @@ -389,6 +405,9 @@ void gst_aja_src_get_property(GObject *object, guint property_id, GValue *value, case PROP_AUDIO_SOURCE: g_value_set_enum(value, self->audio_source); break; + case PROP_EMBEDDED_AUDIO_INPUT: + g_value_set_enum(value, self->embedded_audio_input); + break; case PROP_TIMECODE_INDEX: g_value_set_enum(value, self->timecode_index); break; @@ -1228,9 +1247,43 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { break; } + NTV2EmbeddedAudioInput embedded_audio_input; + switch (self->embedded_audio_input) { + case GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO: + embedded_audio_input = + ::NTV2InputSourceToEmbeddedAudioInput(input_source); + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO1: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_1; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO2: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_2; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO3: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_3; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO4: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_4; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO5: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_5; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO6: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_6; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO7: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_7; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO8: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_8; + break; + default: + g_assert_not_reached(); + break; + } + self->device->device->SetAudioSystemInputSource( - self->audio_system, audio_source, - ::NTV2InputSourceToEmbeddedAudioInput(input_source)); + self->audio_system, audio_source, embedded_audio_input); self->configured_audio_channels = ::NTV2DeviceGetMaxAudioChannels(self->device_id); self->device->device->SetNumberAudioChannels(self->configured_audio_channels, diff --git a/gstajasrc.h b/gstajasrc.h index bd93033961..091a5810d4 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -67,6 +67,7 @@ struct _GstAjaSrc { GstAjaSdiMode sdi_mode; GstAjaInputSource input_source; GstAjaAudioSource audio_source; + GstAjaEmbeddedAudioInput embedded_audio_input; GstAjaTimecodeIndex timecode_index; GstAjaReferenceSource reference_source; GstAjaClosedCaptionCaptureMode closed_caption_capture_mode; From 0df1d6dad9f25ed257fa16312e4d7b64eaec83b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Tue, 7 Jun 2022 14:16:09 +0300 Subject: [PATCH 64/73] Allow switching the timecode index at runtime in the source element --- gstajasrc.cpp | 45 ++++++++++++++++++++++++--------------------- gstajasrc.h | 1 - 2 files changed, 24 insertions(+), 22 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index d77a0afcd0..3789fd08c9 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -1339,26 +1339,8 @@ static gboolean gst_aja_src_configure(GstAjaSrc *self) { (int)reference_source); self->device->device->SetReference(reference_source); - - switch (self->timecode_index) { - case GST_AJA_TIMECODE_INDEX_VITC: - self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, true); - break; - case 
GST_AJA_TIMECODE_INDEX_ATC_LTC: - self->tc_index = ::NTV2InputSourceToTimecodeIndex(input_source, false); - break; - case GST_AJA_TIMECODE_INDEX_LTC1: - self->tc_index = ::NTV2_TCINDEX_LTC1; - self->device->device->SetLTCInputEnable(true); - break; - case GST_AJA_TIMECODE_INDEX_LTC2: - self->tc_index = ::NTV2_TCINDEX_LTC2; - self->device->device->SetLTCInputEnable(true); - break; - default: - g_assert_not_reached(); - break; - } + self->device->device->SetLTCInputEnable(true); + self->device->device->SetRP188SourceFilter(self->channel, 0xff); guint video_buffer_size = ::GetVideoActiveSize( self->video_format, ::NTV2_FBF_10BIT_YCBCR, self->vanc_mode); @@ -2366,9 +2348,30 @@ restart: gst_buffer_set_size(anc_buffer2, transfer.GetCapturedAncByteCount(true)); + NTV2TCIndex tc_index; + switch (self->timecode_index) { + case GST_AJA_TIMECODE_INDEX_VITC: + tc_index = ::NTV2InputSourceToTimecodeIndex( + self->configured_input_source, true); + break; + case GST_AJA_TIMECODE_INDEX_ATC_LTC: + tc_index = ::NTV2InputSourceToTimecodeIndex( + self->configured_input_source, false); + break; + case GST_AJA_TIMECODE_INDEX_LTC1: + tc_index = ::NTV2_TCINDEX_LTC1; + break; + case GST_AJA_TIMECODE_INDEX_LTC2: + tc_index = ::NTV2_TCINDEX_LTC2; + break; + default: + g_assert_not_reached(); + break; + } + NTV2_RP188 time_code; transfer.acTransferStatus.acFrameStamp.GetInputTimeCode(time_code, - self->tc_index); + tc_index); gint64 frame_time = transfer.acTransferStatus.acFrameStamp.acFrameTime; gint64 now_sys = g_get_real_time(); diff --git a/gstajasrc.h b/gstajasrc.h index 091a5810d4..8bfcd3fd1d 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -81,7 +81,6 @@ struct _GstAjaSrc { bool quad_mode; NTV2VANCMode vanc_mode; NTV2InputSource configured_input_source; - NTV2TCIndex tc_index; GstVideoInfo configured_info; // Based on properties GstVideoInfo current_info; // Based on properties + stream metadata From 3dbbf2045159172671e57188c4efd1c804e694b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Thu, 1 Sep 2022 11:39:09 +0300 Subject: [PATCH 65/73] Proxy CAPS query from ajasrcdemux source pads to ajasrc --- gstajasrcdemux.cpp | 134 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 132 insertions(+), 2 deletions(-) diff --git a/gstajasrcdemux.cpp b/gstajasrcdemux.cpp index 67edc6dd52..5ae841ed90 100644 --- a/gstajasrcdemux.cpp +++ b/gstajasrcdemux.cpp @@ -43,6 +43,12 @@ static GstFlowReturn gst_aja_src_demux_sink_chain(GstPad *pad, GstBuffer *buffer); static gboolean gst_aja_src_demux_sink_event(GstPad *pad, GstObject *parent, GstEvent *event); +static gboolean gst_aja_src_demux_audio_src_query(GstPad *pad, + GstObject *parent, + GstQuery *query); +static gboolean gst_aja_src_demux_video_src_query(GstPad *pad, + GstObject *parent, + GstQuery *query); #define parent_class gst_aja_src_demux_parent_class G_DEFINE_TYPE(GstAjaSrcDemux, gst_aja_src_demux, GST_TYPE_ELEMENT); @@ -73,12 +79,14 @@ static void gst_aja_src_demux_init(GstAjaSrcDemux *self) { self->audio_src = gst_pad_new_from_static_template(&audio_src_template, "audio"); - gst_pad_use_fixed_caps(self->audio_src); + gst_pad_set_query_function( + self->audio_src, GST_DEBUG_FUNCPTR(gst_aja_src_demux_audio_src_query)); gst_element_add_pad(GST_ELEMENT(self), self->audio_src); self->video_src = gst_pad_new_from_static_template(&video_src_template, "video"); - gst_pad_use_fixed_caps(self->video_src); + gst_pad_set_query_function( + self->video_src, GST_DEBUG_FUNCPTR(gst_aja_src_demux_video_src_query)); 
gst_element_add_pad(GST_ELEMENT(self), self->video_src); } @@ -160,3 +168,125 @@ static gboolean gst_aja_src_demux_sink_event(GstPad *pad, GstObject *parent, return gst_pad_event_default(pad, parent, event); } } + +static gboolean gst_aja_src_demux_audio_src_query(GstPad *pad, + GstObject *parent, + GstQuery *query) { + GstAjaSrcDemux *self = GST_AJA_SRC_DEMUX(parent); + + switch (GST_QUERY_TYPE(query)) { + case GST_QUERY_CAPS: { + GstCaps *filter, *caps; + + gst_query_parse_caps(query, &filter); + if ((caps = gst_pad_get_current_caps(pad))) { + GST_DEBUG_OBJECT( + pad, "Returning currently negotiated caps %" GST_PTR_FORMAT, caps); + } else if ((caps = gst_pad_peer_query_caps(self->sink, NULL))) { + guint n; + GstAudioInfo audio_info; + gint audio_channels = 0; + GstCaps *tmp; + + GST_DEBUG_OBJECT(pad, "Got upstream caps %" GST_PTR_FORMAT, caps); + + n = gst_caps_get_size(caps); + for (guint i = 0; i < n; i++) { + GstStructure *s = gst_caps_get_structure(caps, i); + gint tmp; + + if (!gst_structure_get_int(s, "audio-channels", &tmp)) { + tmp = 0; + } + + // No audio channels in all caps + if (tmp == 0 || (audio_channels != 0 && audio_channels != tmp)) { + audio_channels = 0; + break; + } + + audio_channels = tmp; + } + + gst_audio_info_init(&audio_info); + gst_audio_info_set_format(&audio_info, GST_AUDIO_FORMAT_S32LE, 48000, + audio_channels ? audio_channels : 1, NULL); + tmp = gst_audio_info_to_caps(&audio_info); + gst_caps_unref(caps); + caps = tmp; + + if (!audio_channels) { + gst_caps_set_simple(caps, "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, + NULL); + } + + GST_DEBUG_OBJECT(pad, "Returning caps %" GST_PTR_FORMAT, caps); + } else { + caps = gst_pad_get_pad_template_caps(pad); + + GST_DEBUG_OBJECT(pad, "Returning template caps %" GST_PTR_FORMAT, caps); + } + + if (filter) { + GstCaps *tmp = + gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST); + gst_caps_unref(caps); + caps = tmp; + } + + gst_query_set_caps_result(query, caps); + gst_caps_unref(caps); + + return TRUE; + } + default: + return gst_pad_query_default(pad, parent, query); + } +} + +static gboolean gst_aja_src_demux_video_src_query(GstPad *pad, + GstObject *parent, + GstQuery *query) { + GstAjaSrcDemux *self = GST_AJA_SRC_DEMUX(parent); + + switch (GST_QUERY_TYPE(query)) { + case GST_QUERY_CAPS: { + GstCaps *filter, *caps; + + gst_query_parse_caps(query, &filter); + if ((caps = gst_pad_get_current_caps(pad))) { + GST_DEBUG_OBJECT( + pad, "Returning currently negotiated caps %" GST_PTR_FORMAT, caps); + } else if ((caps = gst_pad_peer_query_caps(self->sink, NULL))) { + guint n; + + GST_DEBUG_OBJECT(pad, "Returning upstream caps %" GST_PTR_FORMAT, caps); + + caps = gst_caps_make_writable(caps); + n = gst_caps_get_size(caps); + for (guint i = 0; i < n; i++) { + GstStructure *s = gst_caps_get_structure(caps, i); + gst_structure_remove_field(s, "audio-channels"); + } + } else { + caps = gst_pad_get_pad_template_caps(pad); + + GST_DEBUG_OBJECT(pad, "Returning template caps %" GST_PTR_FORMAT, caps); + } + + if (filter) { + GstCaps *tmp = + gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST); + gst_caps_unref(caps); + caps = tmp; + } + + gst_query_set_caps_result(query, caps); + gst_caps_unref(caps); + + return TRUE; + } + default: + return gst_pad_query_default(pad, parent, query); + } +} From 1f6ea294db5949a4468510b5cc375fc599a9b6c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Thu, 15 Sep 2022 11:00:37 +0300 Subject: [PATCH 66/73] Add support for HD psf modes in ajasrc --- 
gstajacommon.cpp | 36 ++++++++++++++++++++++++++---------- gstajacommon.h | 9 +++++++-- gstajasrc.cpp | 16 ++++++++++++++++ 3 files changed, 49 insertions(+), 12 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 1f3dc511b7..9ba7e06664 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -47,6 +47,10 @@ static const FormatMapEntry format_map[] = { NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_720p_6000, NTV2_FORMAT_720p_6000, NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080psf_2398, NTV2_FORMAT_1080psf_2398, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080psf_2400, NTV2_FORMAT_1080psf_2400, + NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_1080p_2997, NTV2_FORMAT_1080p_2997, NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_1080p_3000, NTV2_FORMAT_1080p_3000, @@ -59,16 +63,22 @@ static const FormatMapEntry format_map[] = { NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_720p_5000, NTV2_FORMAT_720p_5000, NTV2_FORMAT_UNKNOWN}, - {GST_AJA_VIDEO_FORMAT_720p_2398, NTV2_FORMAT_720p_2398, - NTV2_FORMAT_UNKNOWN}, - {GST_AJA_VIDEO_FORMAT_720p_5000, NTV2_FORMAT_720p_2500, - NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_1080p_5000_A, NTV2_FORMAT_1080p_5000_A, NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_1080p_5994_A, NTV2_FORMAT_1080p_5994_A, NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_1080p_6000_A, NTV2_FORMAT_1080p_6000_A, NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_720p_2398, NTV2_FORMAT_720p_2398, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_720p_5000, NTV2_FORMAT_720p_2500, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080psf_2500_2, NTV2_FORMAT_1080psf_2500_2, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080psf_2997_2, NTV2_FORMAT_1080psf_2997_2, + NTV2_FORMAT_UNKNOWN}, + {GST_AJA_VIDEO_FORMAT_1080psf_3000_2, NTV2_FORMAT_1080psf_3000_2, + NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_625_5000, NTV2_FORMAT_625_5000, NTV2_FORMAT_UNKNOWN}, {GST_AJA_VIDEO_FORMAT_525_5994, NTV2_FORMAT_525_5994, NTV2_FORMAT_UNKNOWN}, @@ -283,11 +293,12 @@ bool gst_video_info_from_ntv2_video_format(GstVideoInfo *info, info->par_n = 12; info->par_d = 11; } - info->interlace_mode = !::IsProgressiveTransport(format) - ? GST_VIDEO_INTERLACE_MODE_INTERLEAVED - : GST_VIDEO_INTERLACE_MODE_PROGRESSIVE; + info->interlace_mode = + (!::IsProgressivePicture(format) && !NTV2_IS_PSF_VIDEO_FORMAT(format)) + ? 
GST_VIDEO_INTERLACE_MODE_INTERLEAVED + : GST_VIDEO_INTERLACE_MODE_PROGRESSIVE; - if (!::IsProgressiveTransport(format)) { + if (!::IsProgressiveTransport(format) && !NTV2_IS_PSF_VIDEO_FORMAT(format)) { NTV2SmpteLineNumber line_number = ::GetSmpteLineNumber(standard); if (line_number.firstFieldTop) { @@ -947,17 +958,22 @@ GType gst_aja_video_format_get_type(void) { {GST_AJA_VIDEO_FORMAT_1080i_6000, "1080i-6000", "1080i 6000"}, {GST_AJA_VIDEO_FORMAT_720p_5994, "720p-5994", "720p 5994"}, {GST_AJA_VIDEO_FORMAT_720p_6000, "720p-6000", "720p 6000"}, + {GST_AJA_VIDEO_FORMAT_1080psf_2398, "1080psf-2398", "1080psf 2398"}, + {GST_AJA_VIDEO_FORMAT_1080psf_2400, "1080psf-2400", "1080psf 2400"}, {GST_AJA_VIDEO_FORMAT_1080p_2997, "1080p-2997", "1080p 2997"}, {GST_AJA_VIDEO_FORMAT_1080p_3000, "1080p-3000", "1080p 3000"}, {GST_AJA_VIDEO_FORMAT_1080p_2500, "1080p-2500", "1080p 2500"}, {GST_AJA_VIDEO_FORMAT_1080p_2398, "1080p-2398", "1080p 2398"}, {GST_AJA_VIDEO_FORMAT_1080p_2400, "1080p-2400", "1080p 2400"}, {GST_AJA_VIDEO_FORMAT_720p_5000, "720p-5000", "720p 5000"}, - {GST_AJA_VIDEO_FORMAT_720p_2398, "720p-2398", "720p 2398"}, - {GST_AJA_VIDEO_FORMAT_720p_2500, "720p-2500", "720p 2500"}, {GST_AJA_VIDEO_FORMAT_1080p_5000_A, "1080p-5000-a", "1080p 5000 A"}, {GST_AJA_VIDEO_FORMAT_1080p_5994_A, "1080p-5994-a", "1080p 5994 A"}, {GST_AJA_VIDEO_FORMAT_1080p_6000_A, "1080p-6000-a", "1080p 6000 A"}, + {GST_AJA_VIDEO_FORMAT_720p_2398, "720p-2398", "720p 2398"}, + {GST_AJA_VIDEO_FORMAT_720p_2500, "720p-2500", "720p 2500"}, + {GST_AJA_VIDEO_FORMAT_1080psf_2500_2, "1080psf-2500-2", "1080psf 2500 2"}, + {GST_AJA_VIDEO_FORMAT_1080psf_2997_2, "1080psf-2997-2", "1080psf 2997 2"}, + {GST_AJA_VIDEO_FORMAT_1080psf_3000_2, "1080psf-3000-2", "1080psf 3000 2"}, {GST_AJA_VIDEO_FORMAT_625_5000, "625-5000", "625 5000"}, {GST_AJA_VIDEO_FORMAT_525_5994, "525-5994", "525 5994"}, diff --git a/gstajacommon.h b/gstajacommon.h index 48eed7c08e..1293cff280 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -193,17 +193,22 @@ typedef enum { GST_AJA_VIDEO_FORMAT_1080i_6000, GST_AJA_VIDEO_FORMAT_720p_5994, GST_AJA_VIDEO_FORMAT_720p_6000, + GST_AJA_VIDEO_FORMAT_1080psf_2398, + GST_AJA_VIDEO_FORMAT_1080psf_2400, GST_AJA_VIDEO_FORMAT_1080p_2997, GST_AJA_VIDEO_FORMAT_1080p_3000, GST_AJA_VIDEO_FORMAT_1080p_2500, GST_AJA_VIDEO_FORMAT_1080p_2398, GST_AJA_VIDEO_FORMAT_1080p_2400, GST_AJA_VIDEO_FORMAT_720p_5000, - GST_AJA_VIDEO_FORMAT_720p_2398, - GST_AJA_VIDEO_FORMAT_720p_2500, GST_AJA_VIDEO_FORMAT_1080p_5000_A, GST_AJA_VIDEO_FORMAT_1080p_5994_A, GST_AJA_VIDEO_FORMAT_1080p_6000_A, + GST_AJA_VIDEO_FORMAT_720p_2398, + GST_AJA_VIDEO_FORMAT_720p_2500, + GST_AJA_VIDEO_FORMAT_1080psf_2500_2, + GST_AJA_VIDEO_FORMAT_1080psf_2997_2, + GST_AJA_VIDEO_FORMAT_1080psf_3000_2, GST_AJA_VIDEO_FORMAT_625_5000, GST_AJA_VIDEO_FORMAT_525_5994, GST_AJA_VIDEO_FORMAT_525_2398, diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 3789fd08c9..5b11982439 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -2142,6 +2142,22 @@ restart: effective_video_format = ::GetQuarterSizedVideoFormat(effective_video_format); } + switch (self->video_format) { + case NTV2_FORMAT_1080psf_2500_2: + if (current_video_format == NTV2_FORMAT_1080i_5000) + current_video_format = NTV2_FORMAT_1080psf_2500_2; + break; + case NTV2_FORMAT_1080psf_2997_2: + if (current_video_format == NTV2_FORMAT_1080i_5994) + current_video_format = NTV2_FORMAT_1080psf_2997_2; + break; + case NTV2_FORMAT_1080psf_3000_2: + if (current_video_format == NTV2_FORMAT_1080i_6000) + current_video_format = NTV2_FORMAT_1080psf_3000_2; 
+ break; + default: + break; + } if (current_video_format == ::NTV2_FORMAT_UNKNOWN || !all_quads_equal) { if (self->video_format_setting == GST_AJA_VIDEO_FORMAT_AUTO) From 27fb7d380a8ba57e8ec6fb9bcc66d4e5c0d55ef8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Mon, 5 Dec 2022 12:37:01 +0200 Subject: [PATCH 67/73] Add property for enabling/disabling of RP188 receival/transmission Enabling this can lead to frame drops in certain setups so enabling it always is not ideal. --- gstajasink.cpp | 17 ++++++++++++++++- gstajasink.h | 1 + gstajasrc.cpp | 22 +++++++++++++++++++--- gstajasrc.h | 1 + 4 files changed, 37 insertions(+), 4 deletions(-) diff --git a/gstajasink.cpp b/gstajasink.cpp index 57e7506886..c729938123 100644 --- a/gstajasink.cpp +++ b/gstajasink.cpp @@ -38,6 +38,7 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_sink_debug); #define DEFAULT_OUTPUT_DESTINATION (GST_AJA_OUTPUT_DESTINATION_AUTO) #define DEFAULT_SDI_MODE (GST_AJA_SDI_MODE_SINGLE_LINK) #define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) +#define DEFAULT_RP188 (TRUE) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_AUTO) #define DEFAULT_CEA608_LINE_NUMBER (12) #define DEFAULT_CEA708_LINE_NUMBER (12) @@ -54,6 +55,7 @@ enum { PROP_OUTPUT_DESTINATION, PROP_SDI_MODE, PROP_TIMECODE_INDEX, + PROP_RP188, PROP_REFERENCE_SOURCE, PROP_CEA608_LINE_NUMBER, PROP_CEA708_LINE_NUMBER, @@ -195,6 +197,13 @@ static void gst_aja_sink_class_init(GstAjaSinkClass *klass) { (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT))); + g_object_class_install_property( + gobject_class, PROP_RP188, + g_param_spec_boolean( + "rp188", "RP188", "Enable RP188 timecode transmission", DEFAULT_RP188, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + g_object_class_install_property( gobject_class, PROP_REFERENCE_SOURCE, g_param_spec_enum( @@ -311,6 +320,9 @@ void gst_aja_sink_set_property(GObject *object, guint property_id, case PROP_TIMECODE_INDEX: self->timecode_index = (GstAjaTimecodeIndex)g_value_get_enum(value); break; + case PROP_RP188: + self->rp188 = g_value_get_boolean(value); + break; case PROP_REFERENCE_SOURCE: self->reference_source = (GstAjaReferenceSource)g_value_get_enum(value); break; @@ -361,6 +373,9 @@ void gst_aja_sink_get_property(GObject *object, guint property_id, case PROP_TIMECODE_INDEX: g_value_set_enum(value, self->timecode_index); break; + case PROP_RP188: + g_value_set_boolean(value, self->rp188); + break; case PROP_REFERENCE_SOURCE: g_value_set_enum(value, self->reference_source); break; @@ -1917,7 +1932,7 @@ restart: if (!self->device->device->AutoCirculateInitForOutput( self->channel, 0, self->audio_system, - AUTOCIRCULATE_WITH_RP188 | + (self->rp188 ? AUTOCIRCULATE_WITH_RP188 : 0) | (self->vanc_mode == ::NTV2_VANCMODE_OFF ? 
AUTOCIRCULATE_WITH_ANC : 0), 1, start_frame, end_frame)) { diff --git a/gstajasink.h b/gstajasink.h index ac7864d142..0e2aafc37a 100644 --- a/gstajasink.h +++ b/gstajasink.h @@ -76,6 +76,7 @@ struct _GstAjaSink { GstAjaOutputDestination output_destination; GstAjaSdiMode sdi_mode; GstAjaTimecodeIndex timecode_index; + gboolean rp188; GstAjaReferenceSource reference_source; gint cea608_line_number; diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 5b11982439..0141d6893b 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -42,6 +42,7 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); #define DEFAULT_AUDIO_SOURCE (GST_AJA_AUDIO_SOURCE_EMBEDDED) #define DEFAULT_EMBEDDED_AUDIO_INPUT (GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO) #define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) +#define DEFAULT_RP188 (TRUE) #define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_FREERUN) #define DEFAULT_CLOSED_CAPTION_CAPTURE_MODE \ (GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_AND_CEA608) @@ -61,6 +62,7 @@ enum { PROP_AUDIO_SOURCE, PROP_EMBEDDED_AUDIO_INPUT, PROP_TIMECODE_INDEX, + PROP_RP188, PROP_REFERENCE_SOURCE, PROP_CLOSED_CAPTION_CAPTURE_MODE, PROP_START_FRAME, @@ -225,6 +227,13 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) { (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT))); + g_object_class_install_property( + gobject_class, PROP_RP188, + g_param_spec_boolean( + "rp188", "RP188", "Enable RP188 timecode retrieval", DEFAULT_RP188, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + g_object_class_install_property( gobject_class, PROP_REFERENCE_SOURCE, g_param_spec_enum( @@ -354,6 +363,9 @@ void gst_aja_src_set_property(GObject *object, guint property_id, case PROP_TIMECODE_INDEX: self->timecode_index = (GstAjaTimecodeIndex)g_value_get_enum(value); break; + case PROP_RP188: + self->rp188 = g_value_get_boolean(value); + break; case PROP_REFERENCE_SOURCE: self->reference_source = (GstAjaReferenceSource)g_value_get_enum(value); break; @@ -411,6 +423,9 @@ void gst_aja_src_get_property(GObject *object, guint property_id, GValue *value, case PROP_TIMECODE_INDEX: g_value_set_enum(value, self->timecode_index); break; + case PROP_RP188: + g_value_set_boolean(value, self->rp188); + break; case PROP_REFERENCE_SOURCE: g_value_set_enum(value, self->reference_source); break; @@ -2084,9 +2099,10 @@ restart: if (!self->device->device->AutoCirculateInitForInput( self->channel, 0, self->audio_system, - AUTOCIRCULATE_WITH_RP188 | (self->vanc_mode == ::NTV2_VANCMODE_OFF - ? AUTOCIRCULATE_WITH_ANC - : 0), + (self->rp188 ? AUTOCIRCULATE_WITH_RP188 : 0) | + (self->vanc_mode == ::NTV2_VANCMODE_OFF + ? 
AUTOCIRCULATE_WITH_ANC + : 0), 1, start_frame, end_frame)) { GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), ("Failed to initialize autocirculate")); diff --git a/gstajasrc.h b/gstajasrc.h index 8bfcd3fd1d..0ed297165e 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -69,6 +69,7 @@ struct _GstAjaSrc { GstAjaAudioSource audio_source; GstAjaEmbeddedAudioInput embedded_audio_input; GstAjaTimecodeIndex timecode_index; + gboolean rp188; GstAjaReferenceSource reference_source; GstAjaClosedCaptionCaptureMode closed_caption_capture_mode; guint queue_size; From 6490e853fd848ae62d4c84eae173f16233cbd03f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Thu, 9 Feb 2023 10:27:03 +0200 Subject: [PATCH 68/73] Use malloc()/free() instead of GSlice allocator for AJA memory --- gstajacommon.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 9ba7e06664..5fd324f310 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -513,7 +513,7 @@ static inline GstAjaMemory *_aja_memory_new(GstAjaAllocator *alloc, gsize size) { GstAjaMemory *mem; - mem = (GstAjaMemory *)g_slice_alloc(sizeof(GstAjaMemory)); + mem = (GstAjaMemory *)g_new0(GstAjaMemory, 1); _aja_memory_init(alloc, mem, flags, (GstMemory *)parent, data, maxsize, offset, size); @@ -526,7 +526,7 @@ static GstAjaMemory *_aja_memory_new_block(GstAjaAllocator *alloc, GstAjaMemory *mem; guint8 *data; - mem = (GstAjaMemory *)g_slice_alloc(sizeof(GstAjaMemory)); + mem = (GstAjaMemory *)g_new0(GstAjaMemory, 1); data = (guint8 *)AJAMemory::AllocateAligned(maxsize, 4096); GST_TRACE_OBJECT(alloc, "Allocated %" G_GSIZE_FORMAT " at %p", maxsize, data); @@ -604,7 +604,7 @@ static void gst_aja_allocator_free(GstAllocator *alloc, GstMemory *mem) { AJAMemory::FreeAligned(dmem->data); } - g_slice_free1(sizeof(GstAjaMemory), dmem); + g_free(dmem); } static void gst_aja_allocator_finalize(GObject *alloc) { From 2d13fb16e80d39e1dfa4b6eec510d375c3dca0a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Thu, 9 Feb 2023 10:47:49 +0200 Subject: [PATCH 69/73] Add caching of freed memories to the allocator Memories might be freed outside the pool if the buffer was copied and the copy is freed after the original buffer. Caching up to 8 freed memories makes it possible to recycle these memories in most cases. 
--- gstajacommon.cpp | 70 +++++++++++++++++++++++++++++++++++++++++------- gstajacommon.h | 1 + 2 files changed, 62 insertions(+), 9 deletions(-) diff --git a/gstajacommon.cpp b/gstajacommon.cpp index 5fd324f310..1fef9c8f13 100644 --- a/gstajacommon.cpp +++ b/gstajacommon.cpp @@ -494,6 +494,11 @@ typedef struct { guint8 *data; } GstAjaMemory; +typedef struct { + guint8 *data; + gsize size; +} FreedMemory; + G_DEFINE_TYPE(GstAjaAllocator, gst_aja_allocator, GST_TYPE_ALLOCATOR); static inline void _aja_memory_init(GstAjaAllocator *alloc, GstAjaMemory *mem, @@ -524,14 +529,35 @@ static GstAjaMemory *_aja_memory_new_block(GstAjaAllocator *alloc, GstMemoryFlags flags, gsize maxsize, gsize offset, gsize size) { GstAjaMemory *mem; - guint8 *data; + guint8 *data = NULL; mem = (GstAjaMemory *)g_new0(GstAjaMemory, 1); - data = (guint8 *)AJAMemory::AllocateAligned(maxsize, 4096); - GST_TRACE_OBJECT(alloc, "Allocated %" G_GSIZE_FORMAT " at %p", maxsize, data); - if (!alloc->device->device->DMABufferLock((ULWord *)data, maxsize, true)) { - GST_WARNING_OBJECT(alloc, "Failed to pre-lock memory"); + GST_OBJECT_LOCK(alloc); + guint n = gst_queue_array_get_length(alloc->freed_mems); + for (guint i = 0; i < n; i++) { + FreedMemory *fmem = + (FreedMemory *)gst_queue_array_peek_nth_struct(alloc->freed_mems, i); + + if (fmem->size == size) { + data = fmem->data; + GST_TRACE_OBJECT( + alloc, "Using cached freed memory of size %" G_GSIZE_FORMAT " at %p", + fmem->size, fmem->data); + gst_queue_array_drop_struct(alloc->freed_mems, i, NULL); + break; + } + } + GST_OBJECT_UNLOCK(alloc); + + if (!data) { + data = (guint8 *)AJAMemory::AllocateAligned(maxsize, 4096); + GST_TRACE_OBJECT(alloc, + "Allocated memory of size %" G_GSIZE_FORMAT " at %p", + maxsize, data); + if (!alloc->device->device->DMABufferLock((ULWord *)data, maxsize, true)) { + GST_WARNING_OBJECT(alloc, "Failed to pre-lock memory"); + } } _aja_memory_init(alloc, mem, flags, NULL, data, maxsize, offset, size); @@ -598,10 +624,27 @@ static void gst_aja_allocator_free(GstAllocator *alloc, GstMemory *mem) { if (!mem->parent) { GstAjaAllocator *aja_alloc = GST_AJA_ALLOCATOR(alloc); - GST_TRACE_OBJECT(alloc, "Freeing memory at %p", dmem->data); - aja_alloc->device->device->DMABufferUnlock((ULWord *)dmem->data, - mem->maxsize); - AJAMemory::FreeAligned(dmem->data); + GST_OBJECT_LOCK(aja_alloc); + while (gst_queue_array_get_length(aja_alloc->freed_mems) > 8) { + FreedMemory *fmem = + (FreedMemory *)gst_queue_array_pop_head_struct(aja_alloc->freed_mems); + + GST_TRACE_OBJECT( + alloc, "Freeing cached memory of size %" G_GSIZE_FORMAT " at %p", + fmem->size, fmem->data); + aja_alloc->device->device->DMABufferUnlock((ULWord *)fmem->data, + fmem->size); + AJAMemory::FreeAligned(fmem->data); + } + + FreedMemory fmem; + GST_TRACE_OBJECT(alloc, + "Caching freed memory of size %" G_GSIZE_FORMAT " at %p", + mem->maxsize, dmem->data); + fmem.data = dmem->data; + fmem.size = mem->size; + gst_queue_array_push_tail_struct(aja_alloc->freed_mems, &fmem); + GST_OBJECT_UNLOCK(aja_alloc); } g_free(dmem); @@ -612,6 +655,14 @@ static void gst_aja_allocator_finalize(GObject *alloc) { GST_DEBUG_OBJECT(alloc, "Freeing allocator"); + FreedMemory *mem; + while ((mem = (FreedMemory *)gst_queue_array_pop_head_struct( + aja_alloc->freed_mems))) { + GST_TRACE_OBJECT(alloc, "Freeing cached memory at %p", mem->data); + aja_alloc->device->device->DMABufferUnlock((ULWord *)mem->data, mem->size); + AJAMemory::FreeAligned(mem->data); + } + gst_aja_ntv2_device_unref(aja_alloc->device); 
G_OBJECT_CLASS(gst_aja_allocator_parent_class)->finalize(alloc); @@ -645,6 +696,7 @@ GstAllocator *gst_aja_allocator_new(GstAjaNtv2Device *device) { (GstAjaAllocator *)g_object_new(GST_TYPE_AJA_ALLOCATOR, NULL); alloc->device = gst_aja_ntv2_device_ref(device); + alloc->freed_mems = gst_queue_array_new_for_struct(sizeof(FreedMemory), 16); GST_DEBUG_OBJECT(alloc, "Creating allocator for device %d", device->device->GetIndexNumber()); diff --git a/gstajacommon.h b/gstajacommon.h index 1293cff280..1629cf08a2 100644 --- a/gstajacommon.h +++ b/gstajacommon.h @@ -91,6 +91,7 @@ struct _GstAjaAllocator { GstAllocator allocator; GstAjaNtv2Device *device; + GstQueueArray *freed_mems; }; struct _GstAjaAllocatorClass { From 002437f37d3eafdf9a3e7f53d88e4fa0f3d062b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 14 Jun 2023 10:54:35 +0300 Subject: [PATCH 70/73] Post messages never from the capture thread but instead handle everything from the streaming thread --- gstajasrc.cpp | 407 ++++++++++++++++++++++++++++++++++++-------------- gstajasrc.h | 1 + 2 files changed, 293 insertions(+), 115 deletions(-) diff --git a/gstajasrc.cpp b/gstajasrc.cpp index 0141d6893b..56e5c505ba 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -72,24 +72,79 @@ enum { PROP_SIGNAL, }; +// Make these plain C structs for usage in GstQueueArray +G_BEGIN_DECLS + typedef enum { + QUEUE_ITEM_TYPE_DUMMY, QUEUE_ITEM_TYPE_FRAME, + QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + QUEUE_ITEM_TYPE_ERROR, + QUEUE_ITEM_TYPE_FRAMES_DROPPED, } QueueItemType; typedef struct { QueueItemType type; - // For FRAME - GstClockTime capture_time; - GstBuffer *video_buffer; - GstBuffer *audio_buffer; - GstBuffer *anc_buffer, *anc_buffer2; - NTV2_RP188 tc; + union { + // For DUMMY + struct { + gchar dummy; + } dummy; + // For FRAME + struct { + GstClockTime capture_time; + GstBuffer *video_buffer; + GstBuffer *audio_buffer; + GstBuffer *anc_buffer, *anc_buffer2; + NTV2_RP188 tc; - NTV2VideoFormat detected_format; - guint32 vpid; + NTV2VideoFormat detected_format; + guint32 vpid; + } frame; + // For SIGNAL_CHANGE + struct { + gboolean have_signal; + NTV2VideoFormat detected_format; + guint32 vpid; + } signal_change; + // For ERROR + struct { + GstMessage *msg; + } error; + // For FRAMES_DROPPED + struct { + gboolean driver_side; + GstClockTime timestamp_start, timestamp_end; + } frames_dropped; + }; } QueueItem; +G_END_DECLS + +static void queue_item_clear(QueueItem *item) { + switch (item->type) { + case QUEUE_ITEM_TYPE_DUMMY: + break; + case QUEUE_ITEM_TYPE_FRAME: + gst_clear_buffer(&item->frame.video_buffer); + gst_clear_buffer(&item->frame.audio_buffer); + gst_clear_buffer(&item->frame.anc_buffer); + gst_clear_buffer(&item->frame.anc_buffer2); + item->frame.tc.~NTV2_RP188(); + break; + case QUEUE_ITEM_TYPE_SIGNAL_CHANGE: + break; + case QUEUE_ITEM_TYPE_ERROR: + gst_clear_message(&item->error.msg); + break; + case QUEUE_ITEM_TYPE_FRAMES_DROPPED: + break; + } + + item->type = QUEUE_ITEM_TYPE_DUMMY; +} + static void gst_aja_src_set_property(GObject *object, guint property_id, const GValue *value, GParamSpec *pspec); static void gst_aja_src_get_property(GObject *object, guint property_id, @@ -1448,13 +1503,9 @@ static gboolean gst_aja_src_stop(GstAjaSrc *self) { GST_OBJECT_UNLOCK(self); while ((item = (QueueItem *)gst_queue_array_pop_head_struct(self->queue))) { - if (item->type == QUEUE_ITEM_TYPE_FRAME) { - gst_clear_buffer(&item->video_buffer); - gst_clear_buffer(&item->audio_buffer); - gst_clear_buffer(&item->anc_buffer); - 
gst_clear_buffer(&item->anc_buffer2); - } + queue_item_clear(item); } + self->queue_num_frames = 0; if (self->buffer_pool) { gst_buffer_pool_set_active(self->buffer_pool, FALSE); @@ -1629,7 +1680,12 @@ static gboolean gst_aja_src_unlock_stop(GstBaseSrc *bsrc) { static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { GstAjaSrc *self = GST_AJA_SRC(psrc); GstFlowReturn flow_ret = GST_FLOW_OK; - QueueItem item; + QueueItem item = { + .type = QUEUE_ITEM_TYPE_DUMMY, + }; + +next_item: + item.type = QUEUE_ITEM_TYPE_DUMMY; g_mutex_lock(&self->queue_lock); while (gst_queue_array_is_empty(self->queue) && !self->flushing) { @@ -1643,13 +1699,80 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { } item = *(QueueItem *)gst_queue_array_pop_head_struct(self->queue); + if (item.type == QUEUE_ITEM_TYPE_FRAME) { + self->queue_num_frames -= 1; + } g_mutex_unlock(&self->queue_lock); - *buffer = item.video_buffer; - gst_buffer_add_aja_audio_meta(*buffer, item.audio_buffer); - gst_buffer_unref(item.audio_buffer); + switch (item.type) { + case QUEUE_ITEM_TYPE_DUMMY: + queue_item_clear(&item); + goto next_item; + case QUEUE_ITEM_TYPE_SIGNAL_CHANGE: + // These are already only produced when signal status is changing + if (item.signal_change.have_signal) { + GST_ELEMENT_INFO(GST_ELEMENT(self), RESOURCE, READ, + ("Signal recovered"), ("Input source detected")); + self->signal = TRUE; + g_object_notify(G_OBJECT(self), "signal"); + } else if (!item.signal_change.have_signal) { + if (item.signal_change.detected_format != ::NTV2_FORMAT_UNKNOWN) { + std::string format_string = + NTV2VideoFormatToString(item.signal_change.detected_format); - if (item.tc.IsValid()) { + GST_ELEMENT_WARNING_WITH_DETAILS( + GST_ELEMENT(self), RESOURCE, READ, ("Signal lost"), + ("Input source with different mode %s was detected", + format_string.c_str()), + ("detected-format", G_TYPE_STRING, format_string.c_str(), "vpid", + G_TYPE_UINT, item.signal_change.vpid, NULL)); + } else { + GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, + ("Signal lost"), + ("No input source was detected")); + } + self->signal = FALSE; + g_object_notify(G_OBJECT(self), "signal"); + } + queue_item_clear(&item); + goto next_item; + case QUEUE_ITEM_TYPE_ERROR: + GST_ERROR_OBJECT(self, "Stopping because of error on capture thread"); + gst_element_post_message(GST_ELEMENT(self), + (GstMessage *)g_steal_pointer(&item.error.msg)); + queue_item_clear(&item); + return GST_FLOW_ERROR; + case QUEUE_ITEM_TYPE_FRAMES_DROPPED: + GST_WARNING_OBJECT( + self, "Dropped frames from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT, + GST_TIME_ARGS(item.frames_dropped.timestamp_start), + GST_TIME_ARGS(item.frames_dropped.timestamp_end)); + gst_element_post_message( + GST_ELEMENT(self), + gst_message_new_qos(GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, + GST_CLOCK_TIME_NONE, + item.frames_dropped.timestamp_start, + item.frames_dropped.timestamp_end - + item.frames_dropped.timestamp_start)); + queue_item_clear(&item); + goto next_item; + case QUEUE_ITEM_TYPE_FRAME: + // fall through below + break; + } + + g_assert(item.type == QUEUE_ITEM_TYPE_FRAME); + + if (!self->signal) { + self->signal = TRUE; + g_object_notify(G_OBJECT(self), "signal"); + } + + *buffer = (GstBuffer *)g_steal_pointer(&item.frame.video_buffer); + gst_buffer_add_aja_audio_meta(*buffer, item.frame.audio_buffer); + gst_clear_buffer(&item.frame.audio_buffer); + + if (item.frame.tc.IsValid()) { TimecodeFormat tc_format = ::kTCFormatUnknown; GstVideoTimeCodeFlags 
flags = GST_VIDEO_TIME_CODE_FLAGS_NONE; @@ -1687,7 +1810,7 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { flags = (GstVideoTimeCodeFlags)(flags | GST_VIDEO_TIME_CODE_FLAGS_INTERLACED); - CRP188 rp188(item.tc, tc_format); + CRP188 rp188(item.frame.tc, tc_format); { std::stringstream os; @@ -1710,40 +1833,41 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { AJAAncillaryList anc_packets; - if (item.anc_buffer) { + if (item.frame.anc_buffer) { GstMapInfo map = GST_MAP_INFO_INIT; GstMapInfo map2 = GST_MAP_INFO_INIT; - gst_buffer_map(item.anc_buffer, &map, GST_MAP_READ); - if (item.anc_buffer2) gst_buffer_map(item.anc_buffer2, &map2, GST_MAP_READ); + gst_buffer_map(item.frame.anc_buffer, &map, GST_MAP_READ); + if (item.frame.anc_buffer2) + gst_buffer_map(item.frame.anc_buffer2, &map2, GST_MAP_READ); NTV2_POINTER ptr1(map.data, map.size); NTV2_POINTER ptr2(map2.data, map2.size); AJAAncillaryList::SetFromDeviceAncBuffers(ptr1, ptr2, anc_packets); - if (item.anc_buffer2) gst_buffer_unmap(item.anc_buffer2, &map2); - gst_buffer_unmap(item.anc_buffer, &map); + if (item.frame.anc_buffer2) gst_buffer_unmap(item.frame.anc_buffer2, &map2); + gst_buffer_unmap(item.frame.anc_buffer, &map); } else if (self->vanc_mode != ::NTV2_VANCMODE_OFF) { GstMapInfo map; NTV2FormatDescriptor format_desc(self->video_format, ::NTV2_FBF_10BIT_YCBCR, self->vanc_mode); - gst_buffer_map(item.video_buffer, &map, GST_MAP_READ); + gst_buffer_map(*buffer, &map, GST_MAP_READ); NTV2_POINTER ptr(map.data, map.size); AJAAncillaryList::SetFromVANCData(ptr, format_desc, anc_packets); - gst_buffer_unmap(item.video_buffer, &map); + gst_buffer_unmap(*buffer, &map); guint offset = format_desc.RasterLineToByteOffset(format_desc.GetFirstActiveLine()); guint size = format_desc.GetVisibleRasterBytes(); - gst_buffer_resize(item.video_buffer, offset, size); + gst_buffer_resize(*buffer, offset, size); } - gst_clear_buffer(&item.anc_buffer); - gst_clear_buffer(&item.anc_buffer2); + gst_clear_buffer(&item.frame.anc_buffer); + gst_clear_buffer(&item.frame.anc_buffer2); // Not using CountAncillaryDataWithType(AJAAncillaryDataType_Cea708) etc // here because for SD it doesn't recognize the packets. 
It assumes they @@ -1849,7 +1973,7 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { is_letterbox, bar1, bar2); const NTV2Standard standard( - ::GetNTV2StandardFromVideoFormat(item.detected_format)); + ::GetNTV2StandardFromVideoFormat(item.frame.detected_format)); const NTV2SmpteLineNumber smpte_line_num_info = ::GetSmpteLineNumber(standard); bool field2 = @@ -1866,7 +1990,7 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { bool caps_changed = false; - CNTV2VPID vpid(item.vpid); + CNTV2VPID vpid(item.frame.vpid); if (vpid.IsValid()) { GstVideoInfo info; @@ -1876,7 +2000,8 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { GST_TRACE_OBJECT(self, "Got valid VPID %s", os.str().c_str()); } - if (gst_video_info_from_ntv2_video_format(&info, item.detected_format)) { + if (gst_video_info_from_ntv2_video_format(&info, + item.frame.detected_format)) { switch (vpid.GetTransferCharacteristics()) { default: case NTV2_VPID_TC_SDR_TV: @@ -1937,7 +2062,8 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { } else { GstVideoInfo info; - if (gst_video_info_from_ntv2_video_format(&info, item.detected_format)) { + if (gst_video_info_from_ntv2_video_format(&info, + item.frame.detected_format)) { // Widescreen PAL/NTSC if (aspect_ratio_flag && info.height == 486) { info.par_n = 40; @@ -1988,11 +2114,43 @@ static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { } } + queue_item_clear(&item); + GST_TRACE_OBJECT(self, "Outputting buffer %" GST_PTR_FORMAT, *buffer); return flow_ret; } +#define AJA_SRC_ERROR(el, domain, code, text, debug) \ + G_STMT_START { \ + gchar *__txt = _gst_element_error_printf text; \ + gchar *__dbg = _gst_element_error_printf debug; \ + GstMessage *__msg; \ + GError *__err; \ + gchar *__name, *__fmt_dbg; \ + if (__txt) GST_WARNING_OBJECT(el, "error: %s", __txt); \ + if (__dbg) GST_WARNING_OBJECT(el, "error: %s", __dbg); \ + if (!__txt) \ + __txt = gst_error_get_message(GST_##domain##_ERROR, \ + GST_##domain##_ERROR_##code); \ + __err = g_error_new_literal(GST_##domain##_ERROR, \ + GST_##domain##_ERROR_##code, __txt); \ + __name = gst_object_get_path_string(GST_OBJECT_CAST(el)); \ + if (__dbg) \ + __fmt_dbg = g_strdup_printf("%s(%d): %s (): %s:\n%s", __FILE__, \ + __LINE__, GST_FUNCTION, __name, __dbg); \ + else \ + __fmt_dbg = g_strdup_printf("%s(%d): %s (): %s", __FILE__, __LINE__, \ + GST_FUNCTION, __name); \ + g_free(__name); \ + g_free(__dbg); \ + __msg = gst_message_new_error(GST_OBJECT(el), __err, __fmt_dbg); \ + QueueItem item = {.type = QUEUE_ITEM_TYPE_ERROR, .error{.msg = __msg}}; \ + gst_queue_array_push_tail_struct(el->queue, &item); \ + g_cond_signal(&el->queue_cond); \ + } \ + G_STMT_END; + static void capture_thread_func(AJAThread *thread, void *data) { GstAjaSrc *self = GST_AJA_SRC(data); GstClock *clock = NULL; @@ -2000,6 +2158,7 @@ static void capture_thread_func(AJAThread *thread, void *data) { guint64 frames_dropped_last = G_MAXUINT64; gboolean have_signal = TRUE; guint iterations_without_frame = 0; + NTV2VideoFormat last_detected_video_format = ::NTV2_FORMAT_UNKNOWN; if (self->capture_cpu_core != G_MAXUINT) { cpu_set_t mask; @@ -2049,8 +2208,8 @@ restart: if (!gst_aja_src_configure(self)) { g_mutex_lock(&self->queue_lock); - GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), - ("Failed to configure device")); + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to configure device")); goto out; } 
g_mutex_lock(&self->queue_lock); @@ -2061,15 +2220,15 @@ restart: GST_DEBUG_OBJECT(self, "No signal, waiting"); frames_dropped_last = G_MAXUINT64; if (have_signal) { - GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, - ("Signal lost"), - ("No input source was detected")); + QueueItem item = { + .type = QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + .signal_change = {.have_signal = FALSE, + .detected_format = ::NTV2_FORMAT_UNKNOWN, + .vpid = 0}}; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); have_signal = FALSE; } - if (self->signal) { - self->signal = FALSE; - g_object_notify(G_OBJECT(self), "signal"); - } self->device->device->WaitForInputVerticalInterrupt(self->channel); continue; } @@ -2084,8 +2243,8 @@ restart: self->device, self->channel, self->start_frame); if (assigned_start_frame == -1) { - GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), - ("Failed to allocate %u frames", start_frame)); + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to allocate %u frames", start_frame)); goto out; } @@ -2104,8 +2263,8 @@ restart: ? AUTOCIRCULATE_WITH_ANC : 0), 1, start_frame, end_frame)) { - GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), - ("Failed to initialize autocirculate")); + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to initialize autocirculate")); goto out; } @@ -2183,14 +2342,16 @@ restart: g_mutex_unlock(&self->queue_lock); frames_dropped_last = G_MAXUINT64; if (have_signal) { - GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, ("Signal lost"), - ("No input source was detected")); + QueueItem item = { + .type = QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + .signal_change = {.have_signal = FALSE, + .detected_format = ::NTV2_FORMAT_UNKNOWN, + .vpid = 0}}; + last_detected_video_format = ::NTV2_FORMAT_UNKNOWN; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); have_signal = FALSE; } - if (self->signal) { - self->signal = FALSE; - g_object_notify(G_OBJECT(self), "signal"); - } self->device->device->WaitForInputVerticalInterrupt(self->channel); g_mutex_lock(&self->queue_lock); continue; @@ -2216,16 +2377,17 @@ restart: effective_string.c_str()); g_mutex_unlock(&self->queue_lock); frames_dropped_last = G_MAXUINT64; - if (have_signal) { - GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, ("Signal lost"), - ("Different input source (%s) was detected", - current_string.c_str())); + if (have_signal || current_video_format != last_detected_video_format) { + QueueItem item = { + .type = QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + .signal_change = {.have_signal = FALSE, + .detected_format = current_video_format, + .vpid = vpid_a}}; + last_detected_video_format = current_video_format; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); have_signal = FALSE; } - if (self->signal) { - self->signal = FALSE; - g_object_notify(G_OBJECT(self), "signal"); - } self->device->device->WaitForInputVerticalInterrupt(self->channel); g_mutex_lock(&self->queue_lock); continue; @@ -2265,10 +2427,13 @@ restart: status.acFramesProcessed + status.acFramesDropped, self->configured_info.fps_n, self->configured_info.fps_d * GST_SECOND); - GstMessage *msg = gst_message_new_qos( - GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, - timestamp, timestamp_end - timestamp); - gst_element_post_message(GST_ELEMENT_CAST(self), msg); + + QueueItem item = {.type = QUEUE_ITEM_TYPE_FRAMES_DROPPED, + .frames_dropped = {.driver_side = TRUE, + .timestamp_start = timestamp, + .timestamp_end = 
timestamp_end}}; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); frames_dropped_last = status.acFramesDropped; } @@ -2284,29 +2449,30 @@ restart: AUTOCIRCULATE_TRANSFER transfer; if (!have_signal) { - GST_ELEMENT_INFO(GST_ELEMENT(self), RESOURCE, READ, - ("Signal recovered"), ("Input source detected")); + QueueItem item = { + .type = QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + .signal_change = {.have_signal = TRUE, + .detected_format = current_video_format, + .vpid = vpid_a}}; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); have_signal = TRUE; } - if (!self->signal) { - self->signal = TRUE; - g_object_notify(G_OBJECT(self), "signal"); - } iterations_without_frame = 0; if (gst_buffer_pool_acquire_buffer(self->buffer_pool, &video_buffer, NULL) != GST_FLOW_OK) { - GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), - ("Failed to acquire video buffer")); + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire video buffer")); break; } if (gst_buffer_pool_acquire_buffer(self->audio_buffer_pool, &audio_buffer, NULL) != GST_FLOW_OK) { gst_buffer_unref(video_buffer); - GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), - ("Failed to acquire audio buffer")); + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire audio buffer")); break; } @@ -2316,8 +2482,8 @@ restart: NULL) != GST_FLOW_OK) { gst_buffer_unref(audio_buffer); gst_buffer_unref(video_buffer); - GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), - ("Failed to acquire anc buffer")); + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire anc buffer")); break; } @@ -2328,8 +2494,8 @@ restart: gst_buffer_unref(anc_buffer); gst_buffer_unref(audio_buffer); gst_buffer_unref(video_buffer); - GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), - ("Failed to acquire anc buffer")); + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire anc buffer")); break; } } @@ -2434,45 +2600,54 @@ restart: QueueItem item = { .type = QUEUE_ITEM_TYPE_FRAME, - .capture_time = now_gst, - .video_buffer = video_buffer, - .audio_buffer = audio_buffer, - .anc_buffer = anc_buffer, - .anc_buffer2 = anc_buffer2, - .tc = time_code, - .detected_format = - (self->quad_mode ? ::GetQuadSizedVideoFormat(current_video_format) - : current_video_format), - .vpid = vpid_a}; + .frame = {.capture_time = now_gst, + .video_buffer = video_buffer, + .audio_buffer = audio_buffer, + .anc_buffer = anc_buffer, + .anc_buffer2 = anc_buffer2, + .tc = time_code, + .detected_format = + (self->quad_mode + ? 
::GetQuadSizedVideoFormat(current_video_format) + : current_video_format), + .vpid = vpid_a}}; - while (gst_queue_array_get_length(self->queue) >= self->queue_size) { - QueueItem *tmp = - (QueueItem *)gst_queue_array_pop_head_struct(self->queue); + while (self->queue_num_frames >= self->queue_size) { + guint n = gst_queue_array_get_length(self->queue); - if (tmp->type == QUEUE_ITEM_TYPE_FRAME) { - GST_WARNING_OBJECT(self, "Element queue overrun, dropping old frame"); + for (guint i = 0; i < n; i++) { + QueueItem *tmp = + (QueueItem *)gst_queue_array_peek_nth_struct(self->queue, i); + if (tmp->type == QUEUE_ITEM_TYPE_FRAME) { + GST_WARNING_OBJECT(self, + "Element queue overrun, dropping old frame"); - GstMessage *msg = gst_message_new_qos( - GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, - GST_CLOCK_TIME_NONE, tmp->capture_time, - gst_util_uint64_scale(GST_SECOND, self->configured_info.fps_d, - self->configured_info.fps_n)); - gst_element_post_message(GST_ELEMENT_CAST(self), msg); - - gst_clear_buffer(&tmp->video_buffer); - gst_clear_buffer(&tmp->audio_buffer); - gst_clear_buffer(&tmp->anc_buffer); - gst_clear_buffer(&tmp->anc_buffer2); + QueueItem item = { + .type = QUEUE_ITEM_TYPE_FRAMES_DROPPED, + .frames_dropped = { + .driver_side = FALSE, + .timestamp_start = tmp->frame.capture_time, + .timestamp_end = + tmp->frame.capture_time + + gst_util_uint64_scale(GST_SECOND, + self->configured_info.fps_d, + self->configured_info.fps_n)}}; + queue_item_clear(tmp); + gst_queue_array_drop_struct(self->queue, i, NULL); + gst_queue_array_push_tail_struct(self->queue, &item); + self->queue_num_frames -= 1; + g_cond_signal(&self->queue_cond); + break; + } } } GST_TRACE_OBJECT(self, "Queuing frame %" GST_TIME_FORMAT, GST_TIME_ARGS(now_gst)); gst_queue_array_push_tail_struct(self->queue, &item); - GST_TRACE_OBJECT(self, "%u frames queued", - gst_queue_array_get_length(self->queue)); + self->queue_num_frames += 1; + GST_TRACE_OBJECT(self, "%u frames queued", self->queue_num_frames); g_cond_signal(&self->queue_cond); - } else { g_mutex_unlock(&self->queue_lock); @@ -2483,15 +2658,17 @@ restart: iterations_without_frame++; } else { frames_dropped_last = G_MAXUINT64; - if (have_signal) { - GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, - ("Signal lost"), ("No frames captured")); + if (have_signal || last_detected_video_format != current_video_format) { + QueueItem item = { + .type = QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + .signal_change = {.have_signal = TRUE, + .detected_format = current_video_format, + .vpid = vpid_a}}; + last_detected_video_format = current_video_format; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); have_signal = FALSE; } - if (self->signal) { - self->signal = FALSE; - g_object_notify(G_OBJECT(self), "signal"); - } } self->device->device->WaitForInputVerticalInterrupt(self->channel); diff --git a/gstajasrc.h b/gstajasrc.h index 0ed297165e..119fcc807e 100644 --- a/gstajasrc.h +++ b/gstajasrc.h @@ -48,6 +48,7 @@ struct _GstAjaSrc { GMutex queue_lock; GCond queue_cond; GstQueueArray *queue; + guint queue_num_frames; gboolean playing; gboolean shutdown; gboolean flushing; From 63b1e261b2a8f135df26794dcd687034855060db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 14 Jun 2023 12:02:51 +0300 Subject: [PATCH 71/73] Set DISCONT flag after signal loss or frame drop --- gstajasrc.cpp | 45 ++++++++++++++++++++++++++++----------------- 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/gstajasrc.cpp 
b/gstajasrc.cpp index 56e5c505ba..2b3434560c 100644 --- a/gstajasrc.cpp +++ b/gstajasrc.cpp @@ -2156,7 +2156,7 @@ static void capture_thread_func(AJAThread *thread, void *data) { GstClock *clock = NULL; AUTOCIRCULATE_TRANSFER transfer; guint64 frames_dropped_last = G_MAXUINT64; - gboolean have_signal = TRUE; + gboolean have_signal = TRUE, discont = TRUE; guint iterations_without_frame = 0; NTV2VideoFormat last_detected_video_format = ::NTV2_FORMAT_UNKNOWN; @@ -2228,6 +2228,7 @@ restart: gst_queue_array_push_tail_struct(self->queue, &item); g_cond_signal(&self->queue_cond); have_signal = FALSE; + discont = TRUE; } self->device->device->WaitForInputVerticalInterrupt(self->channel); continue; @@ -2351,6 +2352,7 @@ restart: gst_queue_array_push_tail_struct(self->queue, &item); g_cond_signal(&self->queue_cond); have_signal = FALSE; + discont = TRUE; } self->device->device->WaitForInputVerticalInterrupt(self->channel); g_mutex_lock(&self->queue_lock); @@ -2387,6 +2389,7 @@ restart: gst_queue_array_push_tail_struct(self->queue, &item); g_cond_signal(&self->queue_cond); have_signal = FALSE; + discont = TRUE; } self->device->device->WaitForInputVerticalInterrupt(self->channel); g_mutex_lock(&self->queue_lock); @@ -2436,6 +2439,7 @@ restart: g_cond_signal(&self->queue_cond); frames_dropped_last = status.acFramesDropped; + discont = TRUE; } if (status.IsRunning() && status.acBufferLevel > 1) { @@ -2589,6 +2593,7 @@ restart: else now_gst = 0; + // TODO: Drift detection and compensation GST_BUFFER_PTS(video_buffer) = now_gst; GST_BUFFER_DURATION(video_buffer) = gst_util_uint64_scale( GST_SECOND, self->configured_info.fps_d, self->configured_info.fps_n); @@ -2596,22 +2601,6 @@ restart: GST_BUFFER_DURATION(audio_buffer) = gst_util_uint64_scale( GST_SECOND, self->configured_info.fps_d, self->configured_info.fps_n); - // TODO: Drift detection and compensation - - QueueItem item = { - .type = QUEUE_ITEM_TYPE_FRAME, - .frame = {.capture_time = now_gst, - .video_buffer = video_buffer, - .audio_buffer = audio_buffer, - .anc_buffer = anc_buffer, - .anc_buffer2 = anc_buffer2, - .tc = time_code, - .detected_format = - (self->quad_mode - ? ::GetQuadSizedVideoFormat(current_video_format) - : current_video_format), - .vpid = vpid_a}}; - while (self->queue_num_frames >= self->queue_size) { guint n = gst_queue_array_get_length(self->queue); @@ -2636,12 +2625,33 @@ restart: gst_queue_array_drop_struct(self->queue, i, NULL); gst_queue_array_push_tail_struct(self->queue, &item); self->queue_num_frames -= 1; + discont = TRUE; g_cond_signal(&self->queue_cond); break; } } } + if (discont) { + GST_BUFFER_FLAG_SET(video_buffer, GST_BUFFER_FLAG_DISCONT); + GST_BUFFER_FLAG_SET(audio_buffer, GST_BUFFER_FLAG_DISCONT); + discont = FALSE; + } + + QueueItem item = { + .type = QUEUE_ITEM_TYPE_FRAME, + .frame = {.capture_time = now_gst, + .video_buffer = video_buffer, + .audio_buffer = audio_buffer, + .anc_buffer = anc_buffer, + .anc_buffer2 = anc_buffer2, + .tc = time_code, + .detected_format = + (self->quad_mode + ? 
::GetQuadSizedVideoFormat(current_video_format) + : current_video_format), + .vpid = vpid_a}}; + GST_TRACE_OBJECT(self, "Queuing frame %" GST_TIME_FORMAT, GST_TIME_ARGS(now_gst)); gst_queue_array_push_tail_struct(self->queue, &item); @@ -2668,6 +2678,7 @@ restart: gst_queue_array_push_tail_struct(self->queue, &item); g_cond_signal(&self->queue_cond); have_signal = FALSE; + discont = TRUE; } } From 71be705fe6bd3817b2106a8fea22424be54ebf09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Sat, 21 Oct 2023 10:02:43 +0300 Subject: [PATCH 72/73] Add subproject for building the AJA NTV2 SDK from github By default that subproject will be built now instead of requiring the user to provide a location for the SDK. --- README.md | 7 +- meson.build | 50 ++++-- meson_options.txt | 2 +- subprojects/ntv2.wrap | 9 + .../ntv2-16.2-bugfix5.meson.patch | 167 ++++++++++++++++++ 5 files changed, 214 insertions(+), 21 deletions(-) create mode 100644 subprojects/ntv2.wrap create mode 100644 subprojects/packagefiles/ntv2-16.2-bugfix5/ntv2-16.2-bugfix5.meson.patch diff --git a/README.md b/README.md index 41d737d93e..0934a55d06 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,12 @@ [GStreamer](https://gstreamer.freedesktop.org/) plugin for [AJA](https://www.aja.com) capture and output cards. -This plugin requires the NTV2 SDK version 16 or newer. +This plugin requires the AJA NTV2 SDK version 16 or newer. + +The location of the SDK can be configured via the `aja-sdk-dir` meson option. +If no location is given then the NTV2 SDK from +[GitHub](https://github.com/aja-video/ntv2.git) is compiled as a meson +subproject as part of the plugin. ## Example usage diff --git a/meson.build b/meson.build index b46abe89ac..7ae140dbd9 100644 --- a/meson.build +++ b/meson.build @@ -1,6 +1,6 @@ project('gst-aja', 'cpp', version : '0.1.0', - meson_version : '>= 0.54.0', + meson_version : '>= 0.63.0', default_options : [ 'warning_level=1', 'buildtype=debugoptimized', 'cpp_std=c++11', @@ -57,26 +57,39 @@ thread_dep = dependency('threads') rt_dep = cxx.find_library('rt', required : false) aja_sdk_dir = get_option('aja-sdk-dir') -aja_includedirs = [ - '-I@0@/ajalibraries'.format(aja_sdk_dir), - '-I@0@/ajalibraries/ajantv2/includes'.format(aja_sdk_dir), - '-I@0@/ajalibraries/ajantv2/src/lin'.format(aja_sdk_dir), -] +if aja_sdk_dir == '' + ajantv2_dep = dependency('libajantv2') + aja_includedirs = [] -message('Looking for AJA SDK in directory ' + aja_sdk_dir) -if not cxx.has_header('ajabase/common/videotypes.h', - args : aja_includedirs, - ) - error('Cannot find AJA SDK') + if not ajantv2_dep.found() + subdir_done() + endif +else + aja_includedirs = include_directories( + f'@aja_sdk_dir@/ajalibraries', + f'@aja_sdk_dir@/ajalibraries/ajantv2/includes', + f'@aja_sdk_dir@/ajalibraries/ajantv2/src/lin', + ) + + message('Looking for AJA SDK in directory ' + aja_sdk_dir) + if not cxx.has_header('ajabase/common/videotypes.h', + include_directories : aja_includedirs, + ) + error('Cannot find AJA SDK') + endif + + + ajantv2_lib = cxx.find_library('ajantv2', + # If the header is found, this should also be + required : true, + dirs : [f'@aja_sdk_dir@/lib'], + ) + ajantv2_dep = declare_dependency( + dependencies: ajantv2_lib, + include_directories: aja_includedirs, + ) endif -aja_libdir = '@0@/lib'.format(aja_sdk_dir) - -ajantv2_dep = cxx.find_library('ajantv2', - required : true, - dirs : [aja_libdir], -) - gstaja = library('gstaja', ['plugin.cpp', 'gstajacommon.cpp', @@ -87,7 +100,6 @@ gstaja = library('gstaja', 
'gstajadeviceprovider.cpp', ], cpp_args : [ - aja_includedirs, '-DPACKAGE="gst-aja"', '-DGST_PACKAGE_NAME="gstreamer-aja"', '-DGST_PACKAGE_ORIGIN="https://github.com/centricular/gstreamer-aja"', diff --git a/meson_options.txt b/meson_options.txt index db37bb9ef4..d03c4d5426 100644 --- a/meson_options.txt +++ b/meson_options.txt @@ -1,2 +1,2 @@ -option('aja-sdk-dir', type : 'string', value : 'ntv2sdklinux_16.0.0.4', +option('aja-sdk-dir', type : 'string', value : '', description : 'Directory with AJA SDK, e.g. ntv2sdklinux_16.0.0.4') diff --git a/subprojects/ntv2.wrap b/subprojects/ntv2.wrap new file mode 100644 index 0000000000..a4dc981fc0 --- /dev/null +++ b/subprojects/ntv2.wrap @@ -0,0 +1,9 @@ +[wrap-file] +directory = ntv2-16.2-bugfix5 +source_url = https://github.com/aja-video/ntv2/archive/refs/tags/v16.2-bugfix5.tar.gz +source_filename = ntv2-16.2-bugfix5.tar.gz +source_hash = 560c798c3a43aa0cef1cba6be5adb669ec72e648c28814158eb649275efc9f88 +diff_files = ntv2-16.2-bugfix5/ntv2-16.2-bugfix5.meson.patch + +[provide] +libajantv2 = libajantv2_dep diff --git a/subprojects/packagefiles/ntv2-16.2-bugfix5/ntv2-16.2-bugfix5.meson.patch b/subprojects/packagefiles/ntv2-16.2-bugfix5/ntv2-16.2-bugfix5.meson.patch new file mode 100644 index 0000000000..99963f371b --- /dev/null +++ b/subprojects/packagefiles/ntv2-16.2-bugfix5/ntv2-16.2-bugfix5.meson.patch @@ -0,0 +1,167 @@ +--- /dev/null 2023-10-13 08:29:31.027000134 +0300 ++++ ntv2-16.2-bugfix5/meson.build 2023-10-21 09:58:37.680821179 +0300 +@@ -0,0 +1,164 @@ ++project('ntv2', 'cpp', ++ version : '16.2-bugfix5', ++ meson_version : '>= 0.54.0', ++ default_options : [ 'warning_level=1', ++ 'buildtype=debugoptimized', ++ 'cpp_std=c++11', ++ 'cpp_eh=none', ++ 'cpp_rtti=false', ++ ] ++) ++ ++cxx = meson.get_compiler('cpp') ++test_cppflags = ['-Wno-non-virtual-dtor'] ++ ++common_flags = [ ++ '-DAJALinux=1', ++ '-DAJA_LINUX=1', ++] ++foreach cxxflag: test_cppflags ++ if cxx.has_argument(cxxflag) ++ common_flags += [ cxxflag ] ++ endif ++endforeach ++ ++thread_dep = dependency('threads') ++rt_dep = cxx.find_library('rt', required : false) ++ ++ajantv2_sources = [ ++ 'ajalibraries/ajaanc/src/ancillarydata.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydatafactory.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_cea608.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_cea608_line21.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_cea608_vanc.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_cea708.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_framestatusinfo524D.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_framestatusinfo5251.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_hdr_hdr10.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_hdr_hlg.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_hdr_sdr.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_timecode.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_timecode_atc.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_timecode_vitc.cpp', ++ 'ajalibraries/ajaanc/src/ancillarylist.cpp', ++ 'ajalibraries/ajabase/system/atomic.cpp', ++ 'ajalibraries/ajabase/common/audioutilities.cpp', ++ 'ajalibraries/ajabase/common/buffer.cpp', ++ 'ajalibraries/ajabase/common/common.cpp', ++ 'ajalibraries/ajabase/system/debug.cpp', ++ 'ajalibraries/ajabase/common/dpx_hdr.cpp', ++ 'ajalibraries/ajabase/common/dpxfileio.cpp', ++ 'ajalibraries/ajabase/system/event.cpp', ++ 'ajalibraries/ajabase/system/linux/eventimpl.cpp', ++ 'ajalibraries/ajabase/system/file_io.cpp', ++ 'ajalibraries/ajabase/common/guid.cpp', ++ 
'ajalibraries/ajabase/system/info.cpp', ++ 'ajalibraries/ajabase/system/linux/infoimpl.cpp', ++ 'ajalibraries/ajabase/network/ip_socket.cpp', ++ 'ajalibraries/ajabase/system/lock.cpp', ++ 'ajalibraries/ajabase/system/linux/lockimpl.cpp', ++ 'ajalibraries/ajabase/system/memory.cpp', ++ 'ajalibraries/ajabase/common/options_popt.cpp', ++ 'ajalibraries/ajabase/common/performance.cpp', ++ 'ajalibraries/ajabase/common/pixelformat.cpp', ++ 'ajalibraries/ajabase/pnp/pnp.cpp', ++ 'ajalibraries/ajabase/pnp/linux/pnpimpl.cpp', ++ 'ajalibraries/ajabase/system/process.cpp', ++ 'ajalibraries/ajabase/system/linux/processimpl.cpp', ++ 'ajalibraries/ajabase/system/system.cpp', ++ 'ajalibraries/ajabase/system/systemtime.cpp', ++ 'ajalibraries/ajabase/common/testpatterngen.cpp', ++ 'ajalibraries/ajabase/system/thread.cpp', ++ 'ajalibraries/ajabase/system/linux/threadimpl.cpp', ++ 'ajalibraries/ajabase/common/timebase.cpp', ++ 'ajalibraries/ajabase/common/timecode.cpp', ++ 'ajalibraries/ajabase/common/timecodeburn.cpp', ++ 'ajalibraries/ajabase/common/timer.cpp', ++ 'ajalibraries/ajabase/network/udp_socket.cpp', ++ 'ajalibraries/ajabase/common/videoutilities.cpp', ++ 'ajalibraries/ajabase/common/wavewriter.cpp', ++ 'ajalibraries/ajabase/persistence/persistence.cpp', ++ 'ajalibraries/ajantv2/src/ntv2audio.cpp', ++ 'ajalibraries/ajantv2/src/ntv2anc.cpp', ++ 'ajalibraries/ajantv2/src/ntv2autocirculate.cpp', ++ 'ajalibraries/ajantv2/src/ntv2bitfile.cpp', ++ 'ajalibraries/ajantv2/src/ntv2bitfilemanager.cpp', ++ 'ajalibraries/ajantv2/src/ntv2card.cpp', ++ 'ajalibraries/ajantv2/src/ntv2config2022.cpp', ++ 'ajalibraries/ajantv2/src/ntv2config2110.cpp', ++ 'ajalibraries/ajantv2/src/ntv2configts2022.cpp', ++ 'ajalibraries/ajantv2/src/ntv2csclut.cpp', ++ 'ajalibraries/ajantv2/src/ntv2cscmatrix.cpp', ++ 'ajalibraries/ajantv2/src/ntv2debug.cpp', ++ 'ajalibraries/ajantv2/src/ntv2devicefeatures.cpp', ++ 'ajalibraries/ajantv2/src/ntv2devicescanner.cpp', ++ 'ajalibraries/ajantv2/src/ntv2discover.cpp', ++ 'ajalibraries/ajantv2/src/ntv2dma.cpp', ++ 'ajalibraries/ajantv2/src/ntv2dynamicdevice.cpp', ++ 'ajalibraries/ajantv2/src/ntv2hdmi.cpp', ++ 'ajalibraries/ajantv2/src/ntv2hevc.cpp', ++ 'ajalibraries/ajantv2/src/ntv2driverinterface.cpp', ++ 'ajalibraries/ajantv2/src/ntv2enhancedcsc.cpp', ++ 'ajalibraries/ajantv2/src/ntv2formatdescriptor.cpp', ++ 'ajalibraries/ajantv2/src/ntv2interrupts.cpp', ++ 'ajalibraries/ajantv2/src/ntv2konaflashprogram.cpp', ++ 'ajalibraries/ajantv2/src/lin/ntv2linuxdriverinterface.cpp', ++ 'ajalibraries/ajantv2/src/ntv2mailbox.cpp', ++ 'ajalibraries/ajantv2/src/ntv2mbcontroller.cpp', ++ 'ajalibraries/ajantv2/src/ntv2mcsfile.cpp', ++ 'ajalibraries/ajantv2/src/ntv2nubaccess.cpp', ++ 'ajalibraries/ajantv2/src/ntv2nubpktcom.cpp', ++ 'ajalibraries/ajantv2/src/ntv2publicinterface.cpp', ++ 'ajalibraries/ajantv2/src/ntv2register.cpp', ++ 'ajalibraries/ajantv2/src/ntv2registerexpert.cpp', ++ 'ajalibraries/ajantv2/src/ntv2resample.cpp', ++ 'ajalibraries/ajantv2/src/ntv2routingexpert.cpp', ++ 'ajalibraries/ajantv2/src/ntv2rp188.cpp', ++ 'ajalibraries/ajantv2/src/ntv2serialcontrol.cpp', ++ 'ajalibraries/ajantv2/src/ntv2signalrouter.cpp', ++ 'ajalibraries/ajantv2/src/ntv2spiinterface.cpp', ++ 'ajalibraries/ajantv2/src/ntv2subscriptions.cpp', ++ 'ajalibraries/ajantv2/src/ntv2supportlogger.cpp', ++ 'ajalibraries/ajantv2/src/ntv2transcode.cpp', ++ 'ajalibraries/ajantv2/src/ntv2utf8.cpp', ++ 'ajalibraries/ajantv2/src/ntv2utils.cpp', ++ 'ajalibraries/ajantv2/src/ntv2verticalfilter.cpp', ++ 
'ajalibraries/ajantv2/src/ntv2vpid.cpp', ++ 'ajalibraries/ajantv2/src/ntv2vpidfromspec.cpp', ++ 'ajalibraries/ajantv2/src/ntv2task.cpp', ++ 'ajalibraries/ajantv2/src/ntv2testpatterngen.cpp', ++] ++ ++ajantv2_args = [ ++ '-D_REENTRANT', ++ '-DAJASTATIC', ++ '-DAJALinux', ++ '-DAJA_LINUX', ++ '-D_LARGEFILE_SOURCE', ++ '-D_LARGEFILE64_SOURCE', ++ '-D_FILE_OFFSET_BITS=64', ++] ++ ++ajantv2_inc = include_directories( ++ 'ajalibraries/ajaanc/includes', ++ 'ajalibraries/ajantv2/includes', ++ 'ajalibraries/ajantv2/src', ++ 'ajalibraries/ajantv2/src/lin', ++ 'ajalibraries', ++ 'ajalibraries/ajabase', ++) ++ ++libajantv2 = static_library( ++ 'libajantv2', ++ sources: ajantv2_sources, ++ cpp_args: ajantv2_args, ++ include_directories: ajantv2_inc, ++ pic: true, ++ override_options: ['cpp_eh=default', 'werror=false'], ++ install: false ++) ++ ++libajantv2_dep = declare_dependency( ++ link_with: libajantv2, ++ include_directories: ajantv2_inc, ++) From f1a1dadbf34aac2b929b2895d39f3d57e342e0f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Sat, 21 Oct 2023 10:22:39 +0300 Subject: [PATCH 73/73] Prepare for merging into GStreamer --- .gitignore | 3 - COPYING | 502 ------------------ .../gst-plugins-bad/sys/aja/README.md | 0 .../gst-plugins-bad/sys/aja/gstajacommon.cpp | 0 .../gst-plugins-bad/sys/aja/gstajacommon.h | 0 .../sys/aja/gstajadeviceprovider.cpp | 0 .../sys/aja/gstajadeviceprovider.h | 0 .../gst-plugins-bad/sys/aja/gstajasink.cpp | 0 .../gst-plugins-bad/sys/aja/gstajasink.h | 0 .../sys/aja/gstajasinkcombiner.cpp | 0 .../sys/aja/gstajasinkcombiner.h | 0 .../gst-plugins-bad/sys/aja/gstajasrc.cpp | 0 .../gst-plugins-bad/sys/aja/gstajasrc.h | 0 .../sys/aja/gstajasrcdemux.cpp | 0 .../gst-plugins-bad/sys/aja/gstajasrcdemux.h | 0 .../gst-plugins-bad/sys/aja/meson.build | 0 .../gst-plugins-bad/sys/aja/meson_options.txt | 0 .../gst-plugins-bad/sys/aja/plugin.cpp | 5 +- 18 files changed, 4 insertions(+), 506 deletions(-) delete mode 100644 .gitignore delete mode 100644 COPYING rename README.md => subprojects/gst-plugins-bad/sys/aja/README.md (100%) rename gstajacommon.cpp => subprojects/gst-plugins-bad/sys/aja/gstajacommon.cpp (100%) rename gstajacommon.h => subprojects/gst-plugins-bad/sys/aja/gstajacommon.h (100%) rename gstajadeviceprovider.cpp => subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.cpp (100%) rename gstajadeviceprovider.h => subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.h (100%) rename gstajasink.cpp => subprojects/gst-plugins-bad/sys/aja/gstajasink.cpp (100%) rename gstajasink.h => subprojects/gst-plugins-bad/sys/aja/gstajasink.h (100%) rename gstajasinkcombiner.cpp => subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.cpp (100%) rename gstajasinkcombiner.h => subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.h (100%) rename gstajasrc.cpp => subprojects/gst-plugins-bad/sys/aja/gstajasrc.cpp (100%) rename gstajasrc.h => subprojects/gst-plugins-bad/sys/aja/gstajasrc.h (100%) rename gstajasrcdemux.cpp => subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.cpp (100%) rename gstajasrcdemux.h => subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.h (100%) rename meson.build => subprojects/gst-plugins-bad/sys/aja/meson.build (100%) rename meson_options.txt => subprojects/gst-plugins-bad/sys/aja/meson_options.txt (100%) rename plugin.cpp => subprojects/gst-plugins-bad/sys/aja/plugin.cpp (94%) diff --git a/.gitignore b/.gitignore deleted file mode 100644 index c82e9a4fdc..0000000000 --- a/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -*build*/ -.cache/ 
-compile_commands.json diff --git a/COPYING b/COPYING deleted file mode 100644 index 4362b49151..0000000000 --- a/COPYING +++ /dev/null @@ -1,502 +0,0 @@ - GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1, February 1999 - - Copyright (C) 1991, 1999 Free Software Foundation, Inc. - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - -[This is the first released version of the Lesser GPL. It also counts - as the successor of the GNU Library Public License, version 2, hence - the version number 2.1.] - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -Licenses are intended to guarantee your freedom to share and change -free software--to make sure the software is free for all its users. - - This license, the Lesser General Public License, applies to some -specially designated software packages--typically libraries--of the -Free Software Foundation and other authors who decide to use it. You -can use it too, but we suggest you first think carefully about whether -this license or the ordinary General Public License is the better -strategy to use in any particular case, based on the explanations below. - - When we speak of free software, we are referring to freedom of use, -not price. Our General Public Licenses are designed to make sure that -you have the freedom to distribute copies of free software (and charge -for this service if you wish); that you receive source code or can get -it if you want it; that you can change the software and use pieces of -it in new free programs; and that you are informed that you can do -these things. - - To protect your rights, we need to make restrictions that forbid -distributors to deny you these rights or to ask you to surrender these -rights. These restrictions translate to certain responsibilities for -you if you distribute copies of the library or if you modify it. - - For example, if you distribute copies of the library, whether gratis -or for a fee, you must give the recipients all the rights that we gave -you. You must make sure that they, too, receive or can get the source -code. If you link other code with the library, you must provide -complete object files to the recipients, so that they can relink them -with the library after making changes to the library and recompiling -it. And you must show them these terms so they know their rights. - - We protect your rights with a two-step method: (1) we copyright the -library, and (2) we offer you this license, which gives you legal -permission to copy, distribute and/or modify the library. - - To protect each distributor, we want to make it very clear that -there is no warranty for the free library. Also, if the library is -modified by someone else and passed on, the recipients should know -that what they have is not the original version, so that the original -author's reputation will not be affected by problems that might be -introduced by others. - - Finally, software patents pose a constant threat to the existence of -any free program. We wish to make sure that a company cannot -effectively restrict the users of a free program by obtaining a -restrictive license from a patent holder. Therefore, we insist that -any patent license obtained for a version of the library must be -consistent with the full freedom of use specified in this license. 
- - Most GNU software, including some libraries, is covered by the -ordinary GNU General Public License. This license, the GNU Lesser -General Public License, applies to certain designated libraries, and -is quite different from the ordinary General Public License. We use -this license for certain libraries in order to permit linking those -libraries into non-free programs. - - When a program is linked with a library, whether statically or using -a shared library, the combination of the two is legally speaking a -combined work, a derivative of the original library. The ordinary -General Public License therefore permits such linking only if the -entire combination fits its criteria of freedom. The Lesser General -Public License permits more lax criteria for linking other code with -the library. - - We call this license the "Lesser" General Public License because it -does Less to protect the user's freedom than the ordinary General -Public License. It also provides other free software developers Less -of an advantage over competing non-free programs. These disadvantages -are the reason we use the ordinary General Public License for many -libraries. However, the Lesser license provides advantages in certain -special circumstances. - - For example, on rare occasions, there may be a special need to -encourage the widest possible use of a certain library, so that it becomes -a de-facto standard. To achieve this, non-free programs must be -allowed to use the library. A more frequent case is that a free -library does the same job as widely used non-free libraries. In this -case, there is little to gain by limiting the free library to free -software only, so we use the Lesser General Public License. - - In other cases, permission to use a particular library in non-free -programs enables a greater number of people to use a large body of -free software. For example, permission to use the GNU C Library in -non-free programs enables many more people to use the whole GNU -operating system, as well as its variant, the GNU/Linux operating -system. - - Although the Lesser General Public License is Less protective of the -users' freedom, it does ensure that the user of a program that is -linked with the Library has the freedom and the wherewithal to run -that program using a modified version of the Library. - - The precise terms and conditions for copying, distribution and -modification follow. Pay close attention to the difference between a -"work based on the library" and a "work that uses the library". The -former contains code derived from the library, whereas the latter must -be combined with the library in order to run. - - GNU LESSER GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License Agreement applies to any software library or other -program which contains a notice placed by the copyright holder or -other authorized party saying it may be distributed under the terms of -this Lesser General Public License (also called "this License"). -Each licensee is addressed as "you". - - A "library" means a collection of software functions and/or data -prepared so as to be conveniently linked with application programs -(which use some of those functions and data) to form executables. - - The "Library", below, refers to any such software library or work -which has been distributed under these terms. 
A "work based on the -Library" means either the Library or any derivative work under -copyright law: that is to say, a work containing the Library or a -portion of it, either verbatim or with modifications and/or translated -straightforwardly into another language. (Hereinafter, translation is -included without limitation in the term "modification".) - - "Source code" for a work means the preferred form of the work for -making modifications to it. For a library, complete source code means -all the source code for all modules it contains, plus any associated -interface definition files, plus the scripts used to control compilation -and installation of the library. - - Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running a program using the Library is not restricted, and output from -such a program is covered only if its contents constitute a work based -on the Library (independent of the use of the Library in a tool for -writing it). Whether that is true depends on what the Library does -and what the program that uses the Library does. - - 1. You may copy and distribute verbatim copies of the Library's -complete source code as you receive it, in any medium, provided that -you conspicuously and appropriately publish on each copy an -appropriate copyright notice and disclaimer of warranty; keep intact -all the notices that refer to this License and to the absence of any -warranty; and distribute a copy of this License along with the -Library. - - You may charge a fee for the physical act of transferring a copy, -and you may at your option offer warranty protection in exchange for a -fee. - - 2. You may modify your copy or copies of the Library or any portion -of it, thus forming a work based on the Library, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) The modified work must itself be a software library. - - b) You must cause the files modified to carry prominent notices - stating that you changed the files and the date of any change. - - c) You must cause the whole of the work to be licensed at no - charge to all third parties under the terms of this License. - - d) If a facility in the modified Library refers to a function or a - table of data to be supplied by an application program that uses - the facility, other than as an argument passed when the facility - is invoked, then you must make a good faith effort to ensure that, - in the event an application does not supply such function or - table, the facility still operates, and performs whatever part of - its purpose remains meaningful. - - (For example, a function in a library to compute square roots has - a purpose that is entirely well-defined independent of the - application. Therefore, Subsection 2d requires that any - application-supplied function or table used by this function must - be optional: if the application does not supply it, the square - root function must still compute square roots.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Library, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. 
But when you -distribute the same sections as part of a whole which is a work based -on the Library, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote -it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Library. - -In addition, mere aggregation of another work not based on the Library -with the Library (or with a work based on the Library) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. You may opt to apply the terms of the ordinary GNU General Public -License instead of this License to a given copy of the Library. To do -this, you must alter all the notices that refer to this License, so -that they refer to the ordinary GNU General Public License, version 2, -instead of to this License. (If a newer version than version 2 of the -ordinary GNU General Public License has appeared, then you can specify -that version instead if you wish.) Do not make any other change in -these notices. - - Once this change is made in a given copy, it is irreversible for -that copy, so the ordinary GNU General Public License applies to all -subsequent copies and derivative works made from that copy. - - This option is useful when you wish to copy part of the code of -the Library into a program that is not a library. - - 4. You may copy and distribute the Library (or a portion or -derivative of it, under Section 2) in object code or executable form -under the terms of Sections 1 and 2 above provided that you accompany -it with the complete corresponding machine-readable source code, which -must be distributed under the terms of Sections 1 and 2 above on a -medium customarily used for software interchange. - - If distribution of object code is made by offering access to copy -from a designated place, then offering equivalent access to copy the -source code from the same place satisfies the requirement to -distribute the source code, even though third parties are not -compelled to copy the source along with the object code. - - 5. A program that contains no derivative of any portion of the -Library, but is designed to work with the Library by being compiled or -linked with it, is called a "work that uses the Library". Such a -work, in isolation, is not a derivative work of the Library, and -therefore falls outside the scope of this License. - - However, linking a "work that uses the Library" with the Library -creates an executable that is a derivative of the Library (because it -contains portions of the Library), rather than a "work that uses the -library". The executable is therefore covered by this License. -Section 6 states terms for distribution of such executables. - - When a "work that uses the Library" uses material from a header file -that is part of the Library, the object code for the work may be a -derivative work of the Library even though the source code is not. -Whether this is true is especially significant if the work can be -linked without the Library, or if the work is itself a library. The -threshold for this to be true is not precisely defined by law. 
- - If such an object file uses only numerical parameters, data -structure layouts and accessors, and small macros and small inline -functions (ten lines or less in length), then the use of the object -file is unrestricted, regardless of whether it is legally a derivative -work. (Executables containing this object code plus portions of the -Library will still fall under Section 6.) - - Otherwise, if the work is a derivative of the Library, you may -distribute the object code for the work under the terms of Section 6. -Any executables containing that work also fall under Section 6, -whether or not they are linked directly with the Library itself. - - 6. As an exception to the Sections above, you may also combine or -link a "work that uses the Library" with the Library to produce a -work containing portions of the Library, and distribute that work -under terms of your choice, provided that the terms permit -modification of the work for the customer's own use and reverse -engineering for debugging such modifications. - - You must give prominent notice with each copy of the work that the -Library is used in it and that the Library and its use are covered by -this License. You must supply a copy of this License. If the work -during execution displays copyright notices, you must include the -copyright notice for the Library among them, as well as a reference -directing the user to the copy of this License. Also, you must do one -of these things: - - a) Accompany the work with the complete corresponding - machine-readable source code for the Library including whatever - changes were used in the work (which must be distributed under - Sections 1 and 2 above); and, if the work is an executable linked - with the Library, with the complete machine-readable "work that - uses the Library", as object code and/or source code, so that the - user can modify the Library and then relink to produce a modified - executable containing the modified Library. (It is understood - that the user who changes the contents of definitions files in the - Library will not necessarily be able to recompile the application - to use the modified definitions.) - - b) Use a suitable shared library mechanism for linking with the - Library. A suitable mechanism is one that (1) uses at run time a - copy of the library already present on the user's computer system, - rather than copying library functions into the executable, and (2) - will operate properly with a modified version of the library, if - the user installs one, as long as the modified version is - interface-compatible with the version that the work was made with. - - c) Accompany the work with a written offer, valid for at - least three years, to give the same user the materials - specified in Subsection 6a, above, for a charge no more - than the cost of performing this distribution. - - d) If distribution of the work is made by offering access to copy - from a designated place, offer equivalent access to copy the above - specified materials from the same place. - - e) Verify that the user has already received a copy of these - materials or that you have already sent this user a copy. - - For an executable, the required form of the "work that uses the -Library" must include any data and utility programs needed for -reproducing the executable from it. 
However, as a special exception, -the materials to be distributed need not include anything that is -normally distributed (in either source or binary form) with the major -components (compiler, kernel, and so on) of the operating system on -which the executable runs, unless that component itself accompanies -the executable. - - It may happen that this requirement contradicts the license -restrictions of other proprietary libraries that do not normally -accompany the operating system. Such a contradiction means you cannot -use both them and the Library together in an executable that you -distribute. - - 7. You may place library facilities that are a work based on the -Library side-by-side in a single library together with other library -facilities not covered by this License, and distribute such a combined -library, provided that the separate distribution of the work based on -the Library and of the other library facilities is otherwise -permitted, and provided that you do these two things: - - a) Accompany the combined library with a copy of the same work - based on the Library, uncombined with any other library - facilities. This must be distributed under the terms of the - Sections above. - - b) Give prominent notice with the combined library of the fact - that part of it is a work based on the Library, and explaining - where to find the accompanying uncombined form of the same work. - - 8. You may not copy, modify, sublicense, link with, or distribute -the Library except as expressly provided under this License. Any -attempt otherwise to copy, modify, sublicense, link with, or -distribute the Library is void, and will automatically terminate your -rights under this License. However, parties who have received copies, -or rights, from you under this License will not have their licenses -terminated so long as such parties remain in full compliance. - - 9. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Library or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Library (or any work based on the -Library), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Library or works based on it. - - 10. Each time you redistribute the Library (or any work based on the -Library), the recipient automatically receives a license from the -original licensor to copy, distribute, link with or modify the Library -subject to these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties with -this License. - - 11. If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Library at all. 
For example, if a patent -license would not permit royalty-free redistribution of the Library by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Library. - -If any portion of this section is held invalid or unenforceable under any -particular circumstance, the balance of the section is intended to apply, -and the section as a whole is intended to apply in other circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 12. If the distribution and/or use of the Library is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Library under this License may add -an explicit geographical distribution limitation excluding those countries, -so that distribution is permitted only in or among countries not thus -excluded. In such case, this License incorporates the limitation as if -written in the body of this License. - - 13. The Free Software Foundation may publish revised and/or new -versions of the Lesser General Public License from time to time. -Such new versions will be similar in spirit to the present version, -but may differ in detail to address new problems or concerns. - -Each version is given a distinguishing version number. If the Library -specifies a version number of this License which applies to it and -"any later version", you have the option of following the terms and -conditions either of that version or of any later version published by -the Free Software Foundation. If the Library does not specify a -license version number, you may choose any version ever published by -the Free Software Foundation. - - 14. If you wish to incorporate parts of the Library into other free -programs whose distribution conditions are incompatible with these, -write to the author to ask for permission. For software which is -copyrighted by the Free Software Foundation, write to the Free -Software Foundation; we sometimes make exceptions for this. Our -decision will be guided by the two goals of preserving the free status -of all derivatives of our free software and of promoting the sharing -and reuse of software generally. - - NO WARRANTY - - 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO -WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. -EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR -OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY -KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE -LIBRARY IS WITH YOU. 
SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME -THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN -WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY -AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU -FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR -CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE -LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING -RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A -FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF -SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH -DAMAGES. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Libraries - - If you develop a new library, and you want it to be of the greatest -possible use to the public, we recommend making it free software that -everyone can redistribute and change. You can do so by permitting -redistribution under these terms (or, alternatively, under the terms of the -ordinary General Public License). - - To apply these terms, attach the following notices to the library. It is -safest to attach them to the start of each source file to most effectively -convey the exclusion of warranty; and each file should have at least the -"copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - -Also add information on how to contact you by electronic and paper mail. - -You should also get your employer (if you work as a programmer) or your -school, if any, to sign a "copyright disclaimer" for the library, if -necessary. Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the - library `Frob' (a library for tweaking knobs) written by James Random Hacker. - - , 1 April 1990 - Ty Coon, President of Vice - -That's all there is to it! 
diff --git a/README.md b/subprojects/gst-plugins-bad/sys/aja/README.md similarity index 100% rename from README.md rename to subprojects/gst-plugins-bad/sys/aja/README.md diff --git a/gstajacommon.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajacommon.cpp similarity index 100% rename from gstajacommon.cpp rename to subprojects/gst-plugins-bad/sys/aja/gstajacommon.cpp diff --git a/gstajacommon.h b/subprojects/gst-plugins-bad/sys/aja/gstajacommon.h similarity index 100% rename from gstajacommon.h rename to subprojects/gst-plugins-bad/sys/aja/gstajacommon.h diff --git a/gstajadeviceprovider.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.cpp similarity index 100% rename from gstajadeviceprovider.cpp rename to subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.cpp diff --git a/gstajadeviceprovider.h b/subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.h similarity index 100% rename from gstajadeviceprovider.h rename to subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.h diff --git a/gstajasink.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajasink.cpp similarity index 100% rename from gstajasink.cpp rename to subprojects/gst-plugins-bad/sys/aja/gstajasink.cpp diff --git a/gstajasink.h b/subprojects/gst-plugins-bad/sys/aja/gstajasink.h similarity index 100% rename from gstajasink.h rename to subprojects/gst-plugins-bad/sys/aja/gstajasink.h diff --git a/gstajasinkcombiner.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.cpp similarity index 100% rename from gstajasinkcombiner.cpp rename to subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.cpp diff --git a/gstajasinkcombiner.h b/subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.h similarity index 100% rename from gstajasinkcombiner.h rename to subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.h diff --git a/gstajasrc.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajasrc.cpp similarity index 100% rename from gstajasrc.cpp rename to subprojects/gst-plugins-bad/sys/aja/gstajasrc.cpp diff --git a/gstajasrc.h b/subprojects/gst-plugins-bad/sys/aja/gstajasrc.h similarity index 100% rename from gstajasrc.h rename to subprojects/gst-plugins-bad/sys/aja/gstajasrc.h diff --git a/gstajasrcdemux.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.cpp similarity index 100% rename from gstajasrcdemux.cpp rename to subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.cpp diff --git a/gstajasrcdemux.h b/subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.h similarity index 100% rename from gstajasrcdemux.h rename to subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.h diff --git a/meson.build b/subprojects/gst-plugins-bad/sys/aja/meson.build similarity index 100% rename from meson.build rename to subprojects/gst-plugins-bad/sys/aja/meson.build diff --git a/meson_options.txt b/subprojects/gst-plugins-bad/sys/aja/meson_options.txt similarity index 100% rename from meson_options.txt rename to subprojects/gst-plugins-bad/sys/aja/meson_options.txt diff --git a/plugin.cpp b/subprojects/gst-plugins-bad/sys/aja/plugin.cpp similarity index 94% rename from plugin.cpp rename to subprojects/gst-plugins-bad/sys/aja/plugin.cpp index 128de52e48..bfd6f7f96e 100644 --- a/plugin.cpp +++ b/subprojects/gst-plugins-bad/sys/aja/plugin.cpp @@ -16,6 +16,9 @@ * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, * Boston, MA 02110-1335, USA. 
*/ +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif #include #include @@ -47,4 +50,4 @@ static gboolean plugin_init(GstPlugin* plugin) { GST_PLUGIN_DEFINE(GST_VERSION_MAJOR, GST_VERSION_MINOR, aja, "GStreamer AJA plugin", plugin_init, VERSION, "LGPL", - GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) + PACKAGE_NAME, GST_PACKAGE_ORIGIN)
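
The DISCONT handling added in patch 71 boils down to a small pattern: remember that a
discontinuity happened (signal loss, a driver-side frame drop, or an element queue
overrun), then tag the next buffer that actually gets queued with
GST_BUFFER_FLAG_DISCONT and clear the state again. The sketch below is a minimal,
standalone illustration of only that flag handling; it is not code from the plugin,
and the 4096-byte buffer size and the make_captured_buffer() helper are made up for
the example.

#include <gst/gst.h>

// Hypothetical stand-in for the capture thread's per-frame work.
static GstBuffer *make_captured_buffer(gboolean *discont) {
  GstBuffer *buffer = gst_buffer_new_allocate(NULL, 4096, NULL);

  if (*discont) {
    // First buffer after a discontinuity: mark it, then reset the state so
    // subsequent buffers are treated as continuous again.
    GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DISCONT);
    *discont = FALSE;
  }

  return buffer;
}

int main(int argc, char **argv) {
  gst_init(&argc, &argv);

  // In the plugin this would be set on signal loss, driver frame drops or
  // element queue overruns; here it simply starts out TRUE.
  gboolean discont = TRUE;
  GstBuffer *first = make_captured_buffer(&discont);
  GstBuffer *second = make_captured_buffer(&discont);

  g_print("first DISCONT: %s, second DISCONT: %s\n",
          GST_BUFFER_FLAG_IS_SET(first, GST_BUFFER_FLAG_DISCONT) ? "yes" : "no",
          GST_BUFFER_FLAG_IS_SET(second, GST_BUFFER_FLAG_DISCONT) ? "yes" : "no");

  gst_buffer_unref(first);
  gst_buffer_unref(second);
  return 0;
}

Compiled against gstreamer-1.0, this prints "yes" for the first buffer and "no" for the
second, which is the behaviour downstream elements rely on to resynchronize after a gap.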