diff --git a/catalog_validation/ix-source/.github/workflows/ci.yml b/catalog_validation/ix-source/.github/workflows/ci.yml new file mode 100644 index 00000000..a964f544 --- /dev/null +++ b/catalog_validation/ix-source/.github/workflows/ci.yml @@ -0,0 +1,34 @@ +name: CI + +on: [push] + +jobs: + build-deb: + runs-on: ubuntu-latest + container: + image: ixsystems/catalog_validation:latest + + steps: + - name: Checkout + uses: actions/checkout@v1 + + - name: Build deb package + run: > + dpkg-buildpackage + -B + --no-sign + -jauto + + - name: Create artifacts dir + run: mkdir artifacts + if: success() + + - name: Move artifacts + run: mv ../*.deb artifacts + if: success() + + - uses: actions/upload-artifact@v1 + with: + name: py-catalog-validation + path: artifacts + if: success() diff --git a/catalog_validation/ix-source/.github/workflows/docker_image.yml b/catalog_validation/ix-source/.github/workflows/docker_image.yml new file mode 100644 index 00000000..91f66097 --- /dev/null +++ b/catalog_validation/ix-source/.github/workflows/docker_image.yml @@ -0,0 +1,28 @@ +name: build_image + +on: + push: + branches: + - 'master' + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Login to DockerHub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Build and push + id: docker_build + uses: docker/build-push-action@v2 + with: + push: true + tags: ixsystems/catalog_validation:latest + - name: Image digest + run: echo ${{ steps.docker_build.outputs.digest }} diff --git a/catalog_validation/ix-source/.github/workflows/lint.yml b/catalog_validation/ix-source/.github/workflows/lint.yml new file mode 100644 index 00000000..8cd12cb7 --- /dev/null +++ b/catalog_validation/ix-source/.github/workflows/lint.yml @@ -0,0 +1,21 @@ +name: flake8 + +on: [push] + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v1 + with: + python-version: 3.8 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install flake8 + - name: Analysing the code with flake8 + run: flake8 . 
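The workflows above package and lint the validator; the test workflow that follows exercises the validation logic itself. A central piece of that logic (see `ci/utils.py` later in this diff) is deciding whether an app's version has been bumped, which reduces to a semantic-version comparison. Below is a minimal sketch of that comparison using the `semantic_version` package the code depends on; the version list is made up for illustration.

```python
from semantic_version import Version

# Published versions of an app are directory names such as '1.0.2';
# these sample values are hypothetical.
published = sorted(Version(v) for v in ('1.0.0', '1.0.1', '1.0.2'))
new_version = Version('1.0.3')

# The CI gate passes only when nothing has been published yet, or the
# new version sorts strictly after the newest published one.
has_been_bumped = not published or new_version > published[-1]
assert has_been_bumped
```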
diff --git a/catalog_validation/ix-source/.github/workflows/test.yaml b/catalog_validation/ix-source/.github/workflows/test.yaml new file mode 100644 index 00000000..81880fe3 --- /dev/null +++ b/catalog_validation/ix-source/.github/workflows/test.yaml @@ -0,0 +1,27 @@ +name: catalog_validation_test + +on: [push] + +jobs: + build: + + runs-on: ubuntu-latest + container: + image: ghcr.io/truenas/middleware:master + + steps: + - uses: actions/checkout@v2 + - name: Setup dependencies + run: | + /usr/bin/install-dev-tools + + - name: Install dependencies + run: | + pip install --break-system-packages -r catalog_validation/pytest/requirements.txt + pip install --break-system-packages -r requirements.txt + + - name: Installing catalog validation + run: python setup.py install + + - name: Running test + run: pytest catalog_validation/pytest/ diff --git a/catalog_validation/ix-source/.gitignore b/catalog_validation/ix-source/.gitignore new file mode 100644 index 00000000..99ac9116 --- /dev/null +++ b/catalog_validation/ix-source/.gitignore @@ -0,0 +1,2 @@ +catalog_validation/__pycache__ +catalog_validation/schema/__pycache__ diff --git a/catalog_validation/ix-source/Dockerfile b/catalog_validation/ix-source/Dockerfile new file mode 100644 index 00000000..1d29a605 --- /dev/null +++ b/catalog_validation/ix-source/Dockerfile @@ -0,0 +1,23 @@ +FROM ghcr.io/truenas/middleware:master + +RUN /usr/bin/install-dev-tools + +RUN apt-get install -y \ + debhelper-compat \ + dh-python \ + python3-dev \ + python3-setuptools \ + devscripts \ + python3-jsonschema \ + python3-semantic-version \ + python3-kubernetes \ + python3-yaml + +ENV PYTHONUNBUFFERED 1 +ENV WORK_DIR /app +RUN mkdir -p ${WORK_DIR} +WORKDIR ${WORK_DIR} + +ADD . ${WORK_DIR}/ +RUN pip install --break-system-packages -r requirements.txt +RUN pip install --break-system-packages -U . diff --git a/catalog_validation/ix-source/LICENSE b/catalog_validation/ix-source/LICENSE new file mode 100644 index 00000000..f288702d --- /dev/null +++ b/catalog_validation/ix-source/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. 
Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/licenses/why-not-lgpl.html>. diff --git a/catalog_validation/ix-source/README.md b/catalog_validation/ix-source/README.md new file mode 100644 index 00000000..e142d111 --- /dev/null +++ b/catalog_validation/ix-source/README.md @@ -0,0 +1,2 @@ +# catalogs_validation +Validate TrueNAS-compliant catalog structure / format.
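The Python modules that follow share one error-handling convention: validators accumulate problems in a `ValidationErrors` container (defined in `exceptions.py` below) and raise them in bulk via `check()`. Here is a minimal sketch of that pattern; the attribute path and the condition are hypothetical, chosen only for illustration.

```python
import errno

from catalog_validation.exceptions import ValidationErrors

verrors = ValidationErrors()
chart_yaml_found = False  # stand-in for a real filesystem check

if not chart_yaml_found:
    # 'charts.myapp.chart' is a made-up schema path for this example
    verrors.add('charts.myapp.chart', 'Chart.yaml must be present', errno.ENOENT)

# check() raises the container itself when any error was recorded;
# str(verrors) renders lines like '[ENOENT] charts.myapp.chart: ...'
try:
    verrors.check()
except ValidationErrors as e:
    print(e)
```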
diff --git a/catalog_validation/ix-source/catalog_validation/__init__.py b/catalog_validation/ix-source/catalog_validation/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/catalog_validation/ix-source/catalog_validation/ci/__init__.py b/catalog_validation/ix-source/catalog_validation/ci/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/catalog_validation/ix-source/catalog_validation/ci/utils.py b/catalog_validation/ix-source/catalog_validation/ci/utils.py new file mode 100644 index 00000000..2b8f0914 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/ci/utils.py @@ -0,0 +1,54 @@ +import os +import yaml + +from catalog_validation.items.utils import DEVELOPMENT_DIR +from jsonschema import validate as json_schema_validate +from semantic_version import Version + + +DEV_DIRECTORY_RELATIVE_PATH: str = os.path.join('library', DEVELOPMENT_DIR) +TO_KEEP_VERSIONS = 'to_keep_versions.yaml' +OPTIONAL_METADATA_FILES = ['upgrade_info.json', 'upgrade_strategy', TO_KEEP_VERSIONS] +REQUIRED_METADATA_FILES = ['item.yaml'] +UPDATE_STRATEGY_FILE = 'upgrade_strategy' + + +REQUIRED_VERSIONS_SCHEMA = { + 'type': 'array', + 'items': { + 'type': 'string', + 'pattern': '^[0-9]+\.[0-9]+\.[0-9]+$' + } +} + + +def get_app_version(app_path: str) -> str: + # Assumes Chart.yaml exists and specifies a valid version + with open(os.path.join(app_path, 'Chart.yaml'), 'r') as f: + return yaml.safe_load(f.read())['version'] + + +def get_ci_development_directory(catalog_path: str) -> str: + return os.path.join(catalog_path, DEV_DIRECTORY_RELATIVE_PATH) + + +def get_to_keep_versions(app_dir_path: str) -> list: + required_version_path = os.path.join(app_dir_path, TO_KEEP_VERSIONS) + if not os.path.exists(required_version_path): + return [] + + with open(required_version_path, 'r') as f: + data = yaml.safe_load(f.read()) + json_schema_validate(data, REQUIRED_VERSIONS_SCHEMA) + return data + + +def version_has_been_bumped(app_path: str, new_version: str) -> bool: + if not os.path.isdir(app_path): + return True + + versions = [ + Version(version) for version in filter(lambda v: os.path.isdir(os.path.join(app_path, v)), os.listdir(app_path)) + ] + versions.sort() + return not versions or Version(new_version) > versions[-1] diff --git a/catalog_validation/ix-source/catalog_validation/ci/validate.py b/catalog_validation/ix-source/catalog_validation/ci/validate.py new file mode 100644 index 00000000..6de3a360 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/ci/validate.py @@ -0,0 +1,96 @@ +import os +import yaml + +from catalog_validation.exceptions import ValidationErrors +from catalog_validation.validation import validate_catalog_item_version, validate_chart_version +from jsonschema import ValidationError as JsonValidationError + +from .utils import ( + get_app_version, get_ci_development_directory, REQUIRED_METADATA_FILES, version_has_been_bumped, + TO_KEEP_VERSIONS, get_to_keep_versions, UPDATE_STRATEGY_FILE +) + + +def validate_dev_directory_structure(catalog_path: str, to_check_apps: dict) -> None: + verrors = ValidationErrors() + dev_directory = get_ci_development_directory(catalog_path) + if not os.path.exists(dev_directory): + return + + for train_name in filter( + lambda name: name in to_check_apps and os.path.isdir(os.path.join(dev_directory, name)), + os.listdir(dev_directory) + ): + validate_train( + catalog_path, os.path.join(dev_directory, train_name), f'dev.{train_name}', to_check_apps[train_name] + ) + 
verrors.check() + + +def validate_train(catalog_path: str, train_path: str, schema: str, to_check_apps: list) -> None: + verrors = ValidationErrors() + train_name = os.path.basename(train_path) + for app_name in filter( + lambda name: os.path.isdir(os.path.join(train_path, name)), os.listdir(train_path) + ): + if app_name not in to_check_apps: + continue + + app_path = os.path.join(train_path, app_name) + try: + validate_app(app_path, f'{schema}.{app_name}') + except ValidationErrors as ve: + verrors.extend(ve) + else: + published_train_app_path = os.path.join(catalog_path, train_name, app_name) + if not os.path.exists(published_train_app_path): + # The application is new and we are good + continue + + if not version_has_been_bumped(published_train_app_path, get_app_version(app_path)): + verrors.add( + f'{schema}.{app_name}.version', + 'Version must be bumped as the app has changed but its version has not been updated' + ) + + verrors.check() + + +def validate_keep_versions(app_dir_path: str, schema: str, verrors: ValidationErrors) -> None: + try: + get_to_keep_versions(app_dir_path) + except yaml.YAMLError: + verrors.add(f'{schema}.{TO_KEEP_VERSIONS}', 'Invalid YAML format') + except JsonValidationError: + verrors.add( + f'{schema}.{TO_KEEP_VERSIONS}', + f'Invalid JSON schema: {TO_KEEP_VERSIONS} must contain a list of required versions' + ) + + +def validate_upgrade_strategy(app_path, schema, verrors): + upgrade_strategy_path = os.path.join(app_path, UPDATE_STRATEGY_FILE) + if os.path.exists(upgrade_strategy_path) and not os.access(upgrade_strategy_path, os.X_OK): + verrors.add(schema, f'{upgrade_strategy_path!r} is not executable') + + +def validate_app(app_dir_path: str, schema: str) -> None: + app_name = os.path.basename(app_dir_path) + chart_version_path = os.path.join(app_dir_path, 'Chart.yaml') + verrors = validate_chart_version(ValidationErrors(), chart_version_path, schema, app_name) + validate_keep_versions(app_dir_path, schema, verrors) + verrors.check() + + validate_catalog_item_version(app_dir_path, schema, get_app_version(app_dir_path), app_name, True) + + required_files = set(REQUIRED_METADATA_FILES) + available_files = set( + f for f in filter(lambda f: os.path.isfile(os.path.join(app_dir_path, f)), os.listdir(app_dir_path)) + ) + if missing_files := required_files - available_files: + verrors.add( + f'{schema}.required_files', + f'{", ".join(missing_files)!r} file(s) must be specified' + ) + validate_upgrade_strategy(app_dir_path, f'{schema}.{UPDATE_STRATEGY_FILE}', verrors) + verrors.check() diff --git a/catalog_validation/ix-source/catalog_validation/exceptions.py b/catalog_validation/ix-source/catalog_validation/exceptions.py new file mode 100644 index 00000000..5fba74a2 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/exceptions.py @@ -0,0 +1,67 @@ +import errno + + +class ValidationException(Exception): + def __init__(self, error_msg, error_no=errno.EFAULT): + self.errmsg = error_msg + self.errno = error_no + + def get_error_name(self): + return errno.errorcode.get(self.errno) or 'EUNKNOWN' + + def __str__(self): + return f'[{self.get_error_name()}] {self.errmsg}' + + +class ValidationError(ValidationException): + def __init__(self, attribute, errmsg, errno=errno.EFAULT): + self.attribute = attribute + self.errmsg = errmsg + self.errno = errno + + def __str__(self): + return f'[{self.get_error_name()}] {self.attribute}: {self.errmsg}' + + +class ValidationErrors(ValidationException): + def __init__(self, errors=None): + 
self.errors = errors or [] + + def add(self, attribute, errmsg, errno=errno.EINVAL): + self.errors.append(ValidationError(attribute, errmsg, errno)) + + def add_validation_error(self, validation_error): + self.errors.append(validation_error) + + def add_child(self, attribute, child): + for e in child.errors: + self.add(f"{attribute}.{e.attribute}", e.errmsg, e.errno) + + def check(self): + if self: + raise self + + def extend(self, errors): + for e in errors.errors: + self.add(e.attribute, e.errmsg, e.errno) + + def __iter__(self): + for e in self.errors: + yield e.attribute, e.errmsg, e.errno + + def __bool__(self): + return bool(self.errors) + + def __str__(self): + output = '' + for e in self.errors: + output += str(e) + '\n' + return output + + def __contains__(self, item): + return item in [e.attribute for e in self.errors] + + +class CatalogDoesNotExist(ValidationException): + def __init__(self, path): + super().__init__(f'Failed to find a catalog at {path}', errno.ENOENT) diff --git a/catalog_validation/ix-source/catalog_validation/git_utils.py b/catalog_validation/ix-source/catalog_validation/git_utils.py new file mode 100644 index 00000000..adbcf5fb --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/git_utils.py @@ -0,0 +1,42 @@ +import os +import subprocess + +from catalog_validation.ci.utils import DEV_DIRECTORY_RELATIVE_PATH, get_ci_development_directory +from catalog_validation.items.utils import valid_train +from collections import defaultdict + +from .ci.utils import OPTIONAL_METADATA_FILES +from .exceptions import CatalogDoesNotExist + + +def get_changed_apps(catalog_path: str, base_branch: str = 'master') -> dict: + if not os.path.exists(catalog_path): + raise CatalogDoesNotExist(catalog_path) + + cp = subprocess.run( + ['git', '-C', catalog_path, '--no-pager', 'diff', '--name-only', base_branch], + capture_output=True, check=True, + ) + dev_directory_path = get_ci_development_directory(catalog_path) + to_check_apps = defaultdict(list) + for file_path in filter( + lambda path: path and path.startswith(f'{DEV_DIRECTORY_RELATIVE_PATH}/'), + map(str.strip, cp.stdout.decode().split('\n')) + ): + # removeprefix, not strip: str.strip() treats its argument as a set of + # characters and would eat leading characters of the train name too + dev_dir_relative_path = file_path.removeprefix(f'{DEV_DIRECTORY_RELATIVE_PATH}/') + train_name = dev_dir_relative_path.split('/', 1)[0] + if not valid_train(train_name, os.path.join(dev_directory_path, train_name)): + continue + + app_name = dev_dir_relative_path.split('/')[1] + base_name = os.path.basename(file_path) + + if base_name in OPTIONAL_METADATA_FILES: + continue + if not os.path.isdir(os.path.join(dev_directory_path, train_name, app_name)): + continue + + if app_name not in to_check_apps[train_name]: + to_check_apps[train_name].append(app_name) + + return to_check_apps diff --git a/catalog_validation/ix-source/catalog_validation/items/__init__.py b/catalog_validation/ix-source/catalog_validation/items/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/catalog_validation/ix-source/catalog_validation/items/catalog.py b/catalog_validation/ix-source/catalog_validation/items/catalog.py new file mode 100644 index 00000000..b45dff64 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/items/catalog.py @@ -0,0 +1,83 @@ +import concurrent.futures +import functools +import os +import typing +import yaml + +from jsonschema import validate as json_schema_validate, ValidationError as JsonValidationError + +from .items_util import get_item_details, get_default_questions_context +from .utils import RECOMMENDED_APPS_FILENAME, 
RECOMMENDED_APPS_SCHEMA, valid_train + + +def item_details(items: dict, location: str, questions_context: typing.Optional[dict], item_key: str) -> dict: + train = items[item_key] + item = item_key.removesuffix(f'_{train}') + item_location = os.path.join(location, train, item) + return get_item_details(item_location, questions_context, {'retrieve_versions': True}) + + +def retrieve_train_names(location: str, all_trains=True, trains_filter=None) -> list: + train_names = [] + trains_filter = trains_filter or [] + for train in os.listdir(location): + if not (all_trains or train in trains_filter) or not valid_train(train, os.path.join(location, train)): + continue + train_names.append(train) + return train_names + + +def get_items_in_trains(trains_to_traverse: list, catalog_location: str) -> dict: + items = {} + for train in trains_to_traverse: + items.update({ + f'{i}_{train}': train for i in os.listdir(os.path.join(catalog_location, train)) + if os.path.isdir(os.path.join(catalog_location, train, i)) + }) + + return items + + +def retrieve_trains_data( + items: dict, catalog_location: str, preferred_trains: list, + trains_to_traverse: list, job: typing.Any = None, questions_context: typing.Optional[dict] = None +) -> typing.Tuple[dict, set]: + questions_context = questions_context or get_default_questions_context() + trains = { + 'charts': {}, + 'test': {}, + **{k: {} for k in trains_to_traverse}, + } + unhealthy_apps = set() + + total_items = len(items) + with concurrent.futures.ProcessPoolExecutor(max_workers=(5 if total_items > 10 else 2)) as exc: + for index, result in enumerate(zip(items, exc.map( + functools.partial(item_details, items, catalog_location, questions_context), + items, chunksize=(10 if total_items > 10 else 5) + ))): + item_key = result[0] + item_info = result[1] + train = items[item_key] + item = item_key.removesuffix(f'_{train}') + if job: + job.set_progress( + int((index / total_items) * 80) + 10, + f'Retrieved information of {item!r} item from {train!r} train' + ) + trains[train][item] = item_info + if train in preferred_trains and not trains[train][item]['healthy']: + unhealthy_apps.add(f'{item} ({train} train)') + + return trains, unhealthy_apps + + +def retrieve_recommended_apps(catalog_location: str) -> typing.Dict[str, list]: + try: + with open(os.path.join(catalog_location, RECOMMENDED_APPS_FILENAME), 'r') as f: + data = yaml.safe_load(f.read()) + json_schema_validate(data, RECOMMENDED_APPS_SCHEMA) + except (FileNotFoundError, JsonValidationError, yaml.YAMLError): + return {} + else: + return data diff --git a/catalog_validation/ix-source/catalog_validation/items/features.py b/catalog_validation/ix-source/catalog_validation/items/features.py new file mode 100644 index 00000000..3ec64c8f --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/items/features.py @@ -0,0 +1,19 @@ +SUPPORTED_FEATURES = { + 'normalize/interfaceConfiguration', + 'normalize/ixVolume', + 'definitions/certificate', + 'definitions/certificateAuthority', + 'definitions/interface', + 'definitions/gpuConfiguration', + 'definitions/timezone', + 'definitions/nodeIP', + 'validations/containerImage', + 'validations/nodePort', + 'validations/hostPath', + 'validations/lockedHostPath', + 'validations/hostPathAttachments', +} + + +def version_supported(version_details: dict) -> bool: + return not bool(set(version_details['required_features']) - SUPPORTED_FEATURES) diff --git a/catalog_validation/ix-source/catalog_validation/items/items_util.py 
b/catalog_validation/ix-source/catalog_validation/items/items_util.py new file mode 100644 index 00000000..468b4bbb --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/items/items_util.py @@ -0,0 +1,216 @@ +import markdown +import os +import typing +import yaml + +from pkg_resources import parse_version + +from catalog_validation.exceptions import ValidationErrors + +from .features import version_supported +from .questions_utils import normalise_questions +from .utils import get_last_updated_date +from .validate_utils import validate_item, validate_item_version + + +ITEM_KEYS = ['icon_url'] + + +def get_item_details_base(retrieve_complete_item_keys: bool = True) -> dict: + return { + 'app_readme': None, + 'categories': [], + 'description': None, + 'healthy': False, # healthy means that each version the item hosts is valid and healthy + 'healthy_error': None, # An error string explaining why the item is not healthy + 'home': None, + 'location': None, + 'latest_version': None, + 'latest_app_version': None, + 'latest_human_version': None, + 'last_update': None, + 'name': None, + 'recommended': False, + 'title': None, + 'maintainers': [], + 'tags': [], + 'screenshots': [], + 'sources': [], + **({ + 'versions': {}, + } if retrieve_complete_item_keys else {}), + } + + +def get_item_details( + item_location: str, questions_context: typing.Optional[dict] = None, options: typing.Optional[dict] = None +) -> dict: + catalog_path = item_location.rstrip('/').rsplit('/', 2)[0] + item = item_location.rsplit('/', 1)[-1] + train = item_location.rsplit('/', 2)[-2] + + options = options or {} + retrieve_versions = options.get('retrieve_versions', True) + item_data = get_item_details_base() + item_data.update({ + 'location': item_location, + 'last_update': get_last_updated_date(catalog_path, item_location), + 'name': item, + 'title': item.capitalize(), + }) + + schema = f'{train}.{item}' + try: + validate_item(item_location, schema, False) + except ValidationErrors as verrors: + item_data['healthy_error'] = f'Following error(s) were found with {item!r}:\n' + for verror in verrors: + item_data['healthy_error'] += f'{verror[0]}: {verror[1]}' + + # If the item format is not valid - there is no point descending any further into versions + if not retrieve_versions: + item_data.pop('versions') + return item_data + + item_data.update(get_item_details_impl(item_location, schema, questions_context, { + 'retrieve_latest_version': not retrieve_versions, + 'default_values_callable': options.get('default_values_callable'), + })) + unhealthy_versions = [] + for k, v in sorted(item_data['versions'].items(), key=lambda v: parse_version(v[0]), reverse=True): + if not v['healthy']: + unhealthy_versions.append(k) + else: + chart_metadata = v['chart_metadata'] + if not item_data['app_readme']: + item_data['app_readme'] = v['app_readme'] + if not item_data['maintainers'] and chart_metadata.get('maintainers'): + item_data['maintainers'] = chart_metadata['maintainers'] + if not item_data['latest_version']: + item_data['latest_version'] = k + item_data['latest_app_version'] = chart_metadata.get('appVersion') + item_data['latest_human_version'] = '' + if item_data['latest_app_version']: + item_data['latest_human_version'] = f'{item_data["latest_app_version"]}_' + item_data['latest_human_version'] += k + if not item_data['description'] and chart_metadata.get('description'): + item_data['description'] = v['chart_metadata']['description'] + if item_data['title'] == item_data['name'].capitalize() and chart_metadata.get( + 
'annotations', {} + ).get('title'): + item_data['title'] = chart_metadata['annotations']['title'] + if item_data['home'] is None and chart_metadata.get('home'): + item_data['home'] = chart_metadata['home'] + if not item_data['sources'] and chart_metadata.get('sources'): + item_data['sources'] = chart_metadata['sources'] + + if unhealthy_versions: + item_data['healthy_error'] = f'Errors were found with {", ".join(unhealthy_versions)} version(s)' + else: + item_data['healthy'] = True + if not retrieve_versions: + item_data.pop('versions') + + return item_data + + +def get_item_details_impl( + item_path: str, schema: str, questions_context: typing.Optional[dict], options: typing.Optional[dict] +) -> dict: + # Each directory under item path represents a version of the item and we need to retrieve details + # for each version available under the item + retrieve_latest_version = options.get('retrieve_latest_version') + item_data = { + 'categories': [], + 'icon_url': None, + 'screenshots': [], + 'tags': [], + 'versions': {}, + } + with open(os.path.join(item_path, 'item.yaml'), 'r') as f: + item_data.update(yaml.safe_load(f.read())) + + item_data.update({k: item_data.get(k) for k in ITEM_KEYS}) + + for version in sorted( + filter(lambda p: os.path.isdir(os.path.join(item_path, p)), os.listdir(item_path)), + reverse=True, key=parse_version, + ): + catalog_path = item_path.rstrip('/').rsplit('/', 2)[0] + version_path = os.path.join(item_path, version) + item_data['versions'][version] = version_details = { + 'healthy': False, + 'supported': False, + 'healthy_error': None, + 'location': version_path, + 'last_update': get_last_updated_date(catalog_path, version_path), + 'required_features': [], + 'human_version': version, + 'version': version, + } + try: + validate_item_version(version_details['location'], f'{schema}.{version}') + except ValidationErrors as verrors: + version_details['healthy_error'] = f'Following error(s) were found with {schema}.{version!r}:\n' + for verror in verrors: + version_details['healthy_error'] += f'{verror[0]}: {verror[1]}' + + # There is no point in trying to see what questions etc the version has as it's invalid + continue + + version_details.update({ + 'healthy': True, + **get_item_version_details(version_details['location'], questions_context) + }) + if retrieve_latest_version: + break + + return item_data + + +def get_item_version_details( + version_path: str, questions_context: typing.Optional[dict], options: typing.Optional[dict] = None +) -> dict: + version_data = {'location': version_path, 'required_features': set()} + for key, filename, parser in ( + ('chart_metadata', 'Chart.yaml', yaml.safe_load), + ('app_metadata', 'metadata.yaml', yaml.safe_load), + ('schema', 'questions.yaml', yaml.safe_load), + ('app_readme', 'app-readme.md', markdown.markdown), + ('detailed_readme', 'README.md', markdown.markdown), + ('changelog', 'CHANGELOG.md', markdown.markdown), + ): + if os.path.exists(os.path.join(version_path, filename)): + with open(os.path.join(version_path, filename), 'r') as f: + version_data[key] = parser(f.read()) + else: + version_data[key] = None + + # We will normalise questions now so that if they have any references, we render them accordingly + # like a field referring to available interfaces on the system + normalise_questions(version_data, questions_context or get_default_questions_context()) + + version_data.update({ + 'supported': version_supported(version_data), + 'required_features': list(version_data['required_features']), + }) + if options and 
options.get('default_values_callable'): + version_data['values'] = options['default_values_callable'](version_data) + chart_metadata = version_data['chart_metadata'] + if chart_metadata['name'] != 'ix-chart' and chart_metadata.get('appVersion'): + version_data['human_version'] = f'{chart_metadata["appVersion"]}_{chart_metadata["version"]}' + + return version_data + + +def get_default_questions_context() -> dict: + return { + 'nic_choices': [], + 'gpus': {}, + 'timezones': {'Asia/Saigon': 'Asia/Saigon', 'Asia/Damascus': 'Asia/Damascus'}, + 'node_ip': '192.168.0.10', + 'certificates': [], + 'certificate_authorities': [], + 'system.general.config': {'timezone': 'America/Los_Angeles'}, + 'unused_ports': [i for i in range(1025, 65535)], + } diff --git a/catalog_validation/ix-source/catalog_validation/items/ix_values_utils.py b/catalog_validation/ix-source/catalog_validation/items/ix_values_utils.py new file mode 100644 index 00000000..0fcc046b --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/items/ix_values_utils.py @@ -0,0 +1,58 @@ +from jsonschema import validate as json_schema_validate, ValidationError as JsonValidationError + +from catalog_validation.exceptions import ValidationErrors + + +CUSTOM_PORTALS_JSON_SCHEMA = { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'portalName': { + 'type': 'string', + }, + 'protocol': { + 'type': 'string', 'enum': ['http', 'https'], + }, + 'useNodeIP': { + 'type': 'boolean', + }, + 'port': { + 'type': 'integer', + }, + 'path': { + 'type': 'string', + }, + }, + 'allOf': [ + { + 'if': { + 'properties': { + 'useNodeIP': { + 'const': False, + }, + }, + }, + 'then': { + 'required': ['host'], + 'properties': { + 'host': { + 'type': 'string', + }, + }, + }, + }], + 'required': ['portalName', 'protocol', 'useNodeIP', 'port'], + }, +} + + +def validate_ix_values_schema(schema, data): + verrors = ValidationErrors() + + try: + json_schema_validate(data, CUSTOM_PORTALS_JSON_SCHEMA) + except JsonValidationError as e: + verrors.add(schema, f'Failed to validate schema: {e}') + + verrors.check() diff --git a/catalog_validation/ix-source/catalog_validation/items/questions_utils.py b/catalog_validation/ix-source/catalog_validation/items/questions_utils.py new file mode 100644 index 00000000..d0255d2a --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/items/questions_utils.py @@ -0,0 +1,186 @@ +import itertools + +from .utils import ACL_QUESTION, IX_VOLUMES_ACL_QUESTION + + +CUSTOM_PORTALS_KEY = 'iXPortals' +CUSTOM_PORTALS_ENABLE_KEY = 'enableIXPortals' +CUSTOM_PORTAL_GROUP_KEY = 'iXPortalsGroupName' + + +def get_custom_portal_question(group_name: str) -> dict: + return { + 'variable': CUSTOM_PORTALS_KEY, + 'label': 'User Specified Web Portals', + 'description': 'User(s) can specify custom webUI portals', + 'group': group_name, + 'schema': { + 'type': 'list', + 'items': [{ + 'variable': 'portalConfiguration', + 'label': 'Portal Configuration', + 'description': 'Configure WebUI Portal', + 'schema': { + 'type': 'dict', + 'attrs': [ + { + 'variable': 'portalName', + 'label': 'Portal Name', + 'description': 'Specify a UI Portal name to use which would be displayed in the UI', + 'schema': { + 'type': 'string', + 'default': 'Web Portal', + 'empty': False, + }, + }, + { + 'variable': 'protocol', + 'label': 'Protocol for Portal', + 'description': 'Specify protocol for Portal', + 'schema': { + 'type': 'string', + 'default': 'http', + 'enum': [ + {'value': 'http', 'description': 'HTTP Protocol'}, + {'value': 'https', 
'description': 'HTTPS Protocol'}, + ], + }, + }, + { + 'variable': 'useNodeIP', + 'label': 'Use Node IP for Portal IP/Domain', + 'schema': { + 'type': 'boolean', + 'default': True, + }, + }, + { + 'variable': 'host', + 'label': 'Portal IP/Domain', + 'schema': { + 'type': 'string', + 'show_if': [['useNodeIP', '=', False]], + '$ref': ['definitions/nodeIP'], + }, + }, + { + 'variable': 'port', + 'label': 'Port', + 'description': 'Specify port to be used for Portal access', + 'schema': { + 'type': 'int', + 'max': 65535, + 'default': 15000, + }, + }, + { + 'variable': 'path', + 'label': 'Path (optional - leave empty if not required)', + 'description': 'Some app(s) might have a sub path i.e http://192.168.0.10:9000/api/', + 'schema': { + 'type': 'string', + }, + }, + ], + }, + }], + }, + } + + +def normalise_questions(version_data: dict, context: dict) -> None: + version_data['required_features'] = set() + version_data['schema']['questions'].extend( + [ + get_custom_portal_question(version_data['schema'][CUSTOM_PORTAL_GROUP_KEY]) + ] if version_data['schema'].get(CUSTOM_PORTALS_ENABLE_KEY) else [] + ) + for question in version_data['schema']['questions']: + normalise_question(question, version_data, context) + version_data['required_features'] = list(version_data['required_features']) + + +def normalise_question(question: dict, version_data: dict, context: dict) -> None: + schema = question['schema'] + for attr in itertools.chain(*[schema.get(k, []) for k in ('attrs', 'items', 'subquestions')]): + normalise_question(attr, version_data, context) + + if '$ref' not in schema: + return + + data = {} + for ref in schema['$ref']: + version_data['required_features'].add(ref) + if ref == 'definitions/interface': + data['enum'] = [ + {'value': i, 'description': f'{i!r} Interface'} for i in context['nic_choices'] + ] + elif ref == 'definitions/gpuConfiguration': + data['attrs'] = [ + { + 'variable': gpu, + 'label': f'GPU Resource ({gpu})', + 'description': 'Please enter the number of GPUs to allocate', + 'schema': { + 'type': 'int', + 'max': int(quantity), + 'enum': [ + {'value': i, 'description': f'Allocate {i!r} {gpu} GPU'} + for i in range(int(quantity) + 1) + ], + 'default': 0, + } + } for gpu, quantity in context['gpus'].items() + ] + elif ref == 'definitions/timezone': + data.update({ + 'enum': [{'value': t, 'description': f'{t!r} timezone'} for t in sorted(context['timezones'])], + 'default': context['system.general.config']['timezone'] + }) + elif ref == 'definitions/nodeIP': + data['default'] = context['node_ip'] + elif ref == 'definitions/certificate': + get_cert_ca_options(schema, data, {'value': None, 'description': 'No Certificate'}) + data['enum'] += [ + {'value': i['id'], 'description': f'{i["name"]!r} Certificate'} + for i in context['certificates'] + ] + elif ref == 'definitions/certificateAuthority': + get_cert_ca_options(schema, data, {'value': None, 'description': 'No Certificate Authority'}) + data['enum'] += [{'value': None, 'description': 'No Certificate Authority'}] + [ + {'value': i['id'], 'description': f'{i["name"]!r} Certificate Authority'} + for i in context['certificate_authorities'] + ] + elif ref == 'definitions/port': + data['enum'] = [{'value': None, 'description': 'No Port Selected'}] if schema.get('null') else [] + data['enum'] += [ + {'value': i, 'description': f'{i!r} Port'} + for i in filter( + lambda p: schema.get('min', 9000) <= p <= schema.get('max', 65534), + context['unused_ports'] + ) + ] + elif ref == 'normalize/acl': + data['attrs'] = ACL_QUESTION + elif ref 
== 'normalize/ixVolume': + if schema['type'] == 'dict' and any(i['variable'] == 'aclEntries' for i in schema['attrs']): + # get index of aclEntries from attrs + acl_index = next(i for i, v in enumerate(schema['attrs']) if v['variable'] == 'aclEntries') + # insert acl question before aclEntries + schema['attrs'][acl_index]['schema']['attrs'] = IX_VOLUMES_ACL_QUESTION + + schema.update(data) + + +def get_cert_ca_options(schema: dict, data: dict, default_entry: dict): + if schema.get('null', True): + data.update({ + 'enum': [default_entry], + 'default': None, + 'null': True, + }) + else: + data.update({ + 'enum': [], + 'required': True, + }) diff --git a/catalog_validation/ix-source/catalog_validation/items/utils.py b/catalog_validation/ix-source/catalog_validation/items/utils.py new file mode 100644 index 00000000..64abe39f --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/items/utils.py @@ -0,0 +1,243 @@ +import contextlib +import os +import subprocess + +from datetime import datetime +from typing import Optional + +from catalog_validation.schema.migration_schema import MIGRATION_DIRS +from catalog_validation.utils import VALID_TRAIN_REGEX + + +DEVELOPMENT_DIR = 'ix-dev' +RECOMMENDED_APPS_FILENAME = 'recommended_apps.yaml' +RECOMMENDED_APPS_SCHEMA = { + 'type': 'object', + 'patternProperties': { + '.*': { + 'type': 'array', + 'items': {'type': 'string'}, + } + }, +} +TRAIN_IGNORE_DIRS = ['library', 'docs', DEVELOPMENT_DIR] + MIGRATION_DIRS + + +ACL_QUESTION = [ + { + 'variable': 'path', + 'label': 'Host Path', + 'description': 'Host Path to perform ACL', + 'schema': { + 'type': 'hostpath', + 'required': True, + 'empty': False, + } + }, + { + 'variable': 'entries', + 'label': 'ACL Entries', + 'description': 'ACL Entries', + 'schema': { + 'type': 'list', + 'items': [{ + 'variable': 'aclEntry', + 'label': 'ACL Entry', + 'schema': { + 'type': 'dict', + 'attrs': [ + { + 'variable': 'id_type', + 'label': 'ID Type', + 'schema': { + 'type': 'string', + 'enum': [ + {'value': 'USER', 'description': 'Entry is for a USER'}, + {'value': 'GROUP', 'description': 'Entry is for a GROUP'}, + ], + 'default': 'USER', + } + }, + { + 'variable': 'id', + 'label': 'ID', + 'description': 'Make sure to check the ID value is correct and aligns with ' + 'RunAs user context of the application', + 'schema': { + 'type': 'int', + 'required': True, + 'min': 0, + } + }, + { + 'variable': 'access', + 'label': 'Access', + 'schema': { + 'type': 'string', + 'enum': [ + {'value': 'READ', 'description': 'Read Access'}, + {'value': 'MODIFY', 'description': 'Modify Access'}, + {'value': 'FULL_CONTROL', 'description': 'FULL_CONTROL Access'}, + ], + } + } + ], + } + }] + } + }, + { + 'variable': 'options', + 'label': 'ACL Options', + 'schema': { + 'type': 'dict', + 'attrs': [ + { + 'variable': 'force', + 'label': 'Force Flag', + 'description': 'Enabling `Force` applies ACL even if the path has existing data', + 'schema': { + 'type': 'boolean', + 'default': False, + } + }, + ], + }, + }, +] + +IX_VOLUMES_ACL_QUESTION = [ + { + 'variable': 'path', + 'label': 'Path', + 'description': 'Path to perform ACL', + 'schema': { + 'type': 'string', + 'hidden': True + } + }, + ACL_QUESTION[1] +] + + +def get_catalog_json_schema() -> dict: + return { + 'type': 'object', + 'patternProperties': { + '.*': { + 'type': 'object', + 'title': 'Train', + 'patternProperties': { + '.*': { + 'type': 'object', + 'title': 'Item', + 'properties': { + 'name': { + 'type': 'string', + 'title': 'Name', + }, + 'categories': { + 'type': 'array', + 
'items': { + 'type': 'string' + }, + }, + 'app_readme': { + 'type': 'string', + }, + 'location': { + 'type': 'string', + }, + 'healthy': { + 'type': 'boolean', + }, + 'healthy_error': { + 'type': ['string', 'null'], + }, + 'last_update': { + 'type': 'string', + 'pattern': r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$', + }, + 'latest_version': { + 'type': 'string', + }, + 'latest_app_version': { + 'type': 'string', + }, + 'latest_human_version': { + 'type': 'string', + }, + 'description': { + 'type': ['string', 'null'], + }, + 'title': { + 'type': 'string', + }, + 'icon_url': { + 'type': ['string', 'null'], + }, + 'maintainers': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'name': {'type': 'string'}, + 'url': {'type': ['string', 'null']}, + 'email': {'type': 'string'} + }, + 'required': ['name', 'email'], + } + }, + 'home': { + 'type': 'string', + }, + 'tags': { + 'type': 'array', + 'items': { + 'type': 'string', + } + }, + 'screenshots': { + 'type': 'array', + 'items': { + 'type': 'string', + } + }, + 'sources': { + 'type': 'array', + 'items': { + 'type': 'string', + } + }, + }, + 'required': [ + 'name', 'categories', 'location', 'healthy', 'icon_url', + 'latest_version', 'latest_app_version', 'latest_human_version', + 'last_update', 'recommended', 'healthy_error', 'maintainers', + 'home', 'tags', 'sources', 'screenshots', + ], + } + } + + } + } + } + + +def get_last_updated_date(repo_path: str, folder_path: str) -> Optional[str]: + with contextlib.suppress(Exception): + # We don't want to fail querying items if for whatever reason this fails + output = subprocess.check_output( + ['git', 'log', '-n', '1', '--pretty=format:%ct', f'{folder_path}'], + cwd=repo_path, + stderr=subprocess.DEVNULL + ) + if output: + timestamp = datetime.fromtimestamp(int(output)) + return timestamp.strftime('%Y-%m-%d %H:%M:%S') + + +def valid_train(train_name: str, train_location: str) -> bool: + return VALID_TRAIN_REGEX.match( + train_name + ) and not train_name.startswith('.') and train_name not in TRAIN_IGNORE_DIRS and os.path.isdir(train_location) diff --git a/catalog_validation/ix-source/catalog_validation/items/validate_utils.py b/catalog_validation/ix-source/catalog_validation/items/validate_utils.py new file mode 100644 index 00000000..3cecf3ba --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/items/validate_utils.py @@ -0,0 +1,9 @@ +from catalog_validation.validation import validate_catalog_item, validate_catalog_item_version + + +def validate_item(path: str, schema: str, validate_versions: bool = True): + validate_catalog_item(path, schema, validate_versions) + + +def validate_item_version(path: str, schema: str): + validate_catalog_item_version(path, schema) diff --git a/catalog_validation/ix-source/catalog_validation/k8s/__init__.py b/catalog_validation/ix-source/catalog_validation/k8s/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/catalog_validation/ix-source/catalog_validation/k8s/api_client.py b/catalog_validation/ix-source/catalog_validation/k8s/api_client.py new file mode 100644 index 00000000..cec1da78 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/k8s/api_client.py @@ -0,0 +1,15 @@ +from contextlib import contextmanager + +from kubernetes import client, config + +from .utils import KUBECONFIG_FILE + + +@contextmanager +def api_client(): + config.load_kube_config(config_file=KUBECONFIG_FILE) + api_cl = client.api_client.ApiClient() + try: + yield client.CoreV1Api(api_cl) + finally: + api_cl.close() diff --git 
a/catalog_validation/ix-source/catalog_validation/k8s/utils.py b/catalog_validation/ix-source/catalog_validation/k8s/utils.py new file mode 100644 index 00000000..c973e77e --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/k8s/utils.py @@ -0,0 +1 @@ +KUBECONFIG_FILE = '/etc/rancher/k3s/k3s.yaml' diff --git a/catalog_validation/ix-source/catalog_validation/pytest/requirements.txt b/catalog_validation/ix-source/catalog_validation/pytest/requirements.txt new file mode 100644 index 00000000..1d6ed5ca --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/pytest/requirements.txt @@ -0,0 +1,2 @@ +pytest +pytest-mock \ No newline at end of file diff --git a/catalog_validation/ix-source/catalog_validation/pytest/unit/test_catalog_validate.py b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_catalog_validate.py new file mode 100644 index 00000000..d22b2ac5 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_catalog_validate.py @@ -0,0 +1,368 @@ +import pytest + +from catalog_validation.exceptions import ValidationErrors +from catalog_validation.utils import WANTED_FILES_IN_ITEM_VERSION +from catalog_validation.validation import ( + validate_train_structure, validate_questions_yaml, validate_catalog_item, + validate_catalog_item_version, validate_variable_uniqueness, +) + + +@pytest.mark.parametrize('train_path,should_work', [ + ('/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts', True), + ('/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/', False), + +]) +def test_validate_train_structure(train_path, should_work): + if should_work: + assert validate_train_structure(train_path) is None + else: + with pytest.raises(ValidationErrors): + validate_train_structure(train_path) + + +@pytest.mark.parametrize('test_yaml,should_work', [ + ( + ''' + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + portals: + web_portal: + protocols: + - "http" + host: + - "$node_ip" + ports: + - "$variable-machinaris_ui_port" + + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + True + ), + ( + ''' + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + portals: + web_portal: + protocols: {} + host: {} + ports: {} + + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + False + ), + ( + ''' + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + False + ), + ( + ''' + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + True + ), + ( + ''' + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + questions: + - variable: timezone + label: "Network" + group: "Machinaris Network Configuration" + description: "Configure timezone for machianaris" + + ''', + False + ), + ( + ''' + enableIXPortals: true + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + questions: + - 
variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + False + ), + ( + ''' + enableIXPortals: true + iXPortalsGroupName: "Machinaris Configuration" + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + True + ), + ( + ''' + enableIXPortals: true + iXPortalsGroupName: "Invalid Group name" + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + False + ), + +]) +def test_validate_questions_yaml(mocker, test_yaml, should_work): + open_file_data = mocker.mock_open(read_data=test_yaml) + mocker.patch('builtins.open', open_file_data) + mocker.patch('catalog_validation.validation.validate_question', return_value=None) + if should_work: + assert validate_questions_yaml(None, 'charts.machinaris.versions.1.1.13.questions_configuration') is None + else: + with pytest.raises(ValidationErrors): + validate_questions_yaml(None, 'charts.machinaris.versions.1.1.13.questions_configuration') + + +@pytest.mark.parametrize('catalog_item_path,test_yaml,should_work', [ + ( + '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/machinaris', + ''' + categories: + - storage + - crypto + icon_url: https://raw.githubusercontent.com/guydavis/machinaris/main/web/static/machinaris.png + ''', + True + ), + ( + '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/machinaris', + ''' + icon_url: https://raw.githubusercontent.com/guydavis/machinaris/main/web/static/machinaris.png + ''', + False + ), +]) +def test_validate_catalog_item(mocker, catalog_item_path, test_yaml, should_work): + mocker.patch('os.path.isdir', side_effect=[True, True, False]) + mocker.patch('os.listdir', return_value=['1.1.13', 'item.yaml']) + open_file_data = mocker.mock_open(read_data=test_yaml) + mocker.patch('builtins.open', open_file_data) + mocker.patch('catalog_validation.validation.validate_catalog_item_version', return_value=None) + if not should_work: + with pytest.raises(ValidationErrors): + validate_catalog_item(catalog_item_path, 'charts.machinaris') + else: + assert validate_catalog_item(catalog_item_path, 'charts.machinaris') is None + + +@pytest.mark.parametrize('chart_yaml,should_work', [ + ( + ''' + name: storj + version: 1.0.4 + ''', + True + ), + ( + ''' + name: storj + version: 1.0.0 + ''', + False + ), + ( + ''' + name: storj_s + version: 1.0.0 + ''', + False + ) +]) +def test_validate_catalog_item_version(mocker, chart_yaml, should_work): + mocker.patch('os.listdir', return_value=WANTED_FILES_IN_ITEM_VERSION) + mocker.patch('os.path.exists', return_value=True) + open_file = mocker.mock_open(read_data=chart_yaml) + mocker.patch('builtins.open', open_file) + mocker.patch('catalog_validation.validation.validate_questions_yaml', return_value=None) + mocker.patch('catalog_validation.validation.validate_ix_values_yaml', return_value=None) + mocker.patch('catalog_validation.validation.validate_app_migrations', return_value=None) + if should_work: + assert validate_catalog_item_version( + 
'/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/storj/1.0.4', + 'charts.storj.versions.1.0.4') is None + else: + with pytest.raises(ValidationErrors): + validate_catalog_item_version( + '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/storj/1.0.4', + 'charts.storj.versions.1.0.4' + ) + + +@pytest.mark.parametrize('data,schema,should_work', [ + ([ + { + 'variable': 'enablePlexPass', + 'label': 'Use PlexPass', + 'group': 'Plex Configuration', + 'schema': { + 'type': 'boolean', + 'default': False + } + }, + { + 'variable': 'dnsConfig', + 'label': 'DNS Configuration', + 'group': 'Advanced DNS Settings', + 'schema': { + 'type': 'dict', + 'attrs': [] + } + }, + ], 'plex.questions', True), + ([ + { + 'variable': 'enablePlexPass', + 'label': 'Use PlexPass', + 'group': 'Plex Configuration', + 'schema': { + 'type': 'boolean', + 'default': False + } + }, + { + 'variable': 'enablePlexPass', + 'label': 'Use PlexPass', + 'group': 'Plex Configuration', + 'schema': { + 'type': 'boolean', + 'default': False + } + }, + ], 'plex.questions', False), + ([ + { + 'variable': 'enablePlexPass', + 'label': 'Use PlexPass', + 'group': 'Plex Configuration', + 'schema': { + 'type': 'boolean', + 'default': False + } + }, + { + 'variable': 'hostPathEnabled', + 'label': 'Enable Host Path for Plex Transcode Volume', + 'type': 'boolean', + 'default': False, + 'show_subquestions_if': False, + 'subquestions': [ + { + 'variable': 'hostPath', + 'label': 'Host Path for Plex Transcode Volume', + 'schema': { + 'type': 'hostpath', + 'required': True, + '$ref': [ + 'validations/lockedHostPath' + ] + } + }, + ] + } + ], 'plex.questions', True), + ([ + { + 'variable': 'enablePlexPass', + 'label': 'Use PlexPass', + 'group': 'Plex Configuration', + 'schema': { + 'type': 'boolean', + 'default': False + } + }, + { + 'variable': 'mountPath', + 'label': 'Plex Transcode Mount Path', + 'description': 'Path where the volume will be mounted inside the pod', + 'schema': { + 'type': 'path', + } + }, + { + 'variable': 'hostPathEnabled', + 'label': 'Enable Host Path for Plex Transcode Volume', + 'type': 'boolean', + 'default': False, + 'show_subquestions_if': False, + 'subquestions': [ + { + 'variable': 'hostPath', + 'label': 'Host Path for Plex Transcode Volume', + 'schema': { + 'type': 'hostpath', + 'required': True, + '$ref': [ + 'validations/lockedHostPath' + ] + } + }, + { + 'variable': 'mountPath', + 'label': 'Plex Transcode Mount Path', + 'description': 'Path where the volume will be mounted inside the pod', + 'schema': { + 'type': 'path', + } + }, + ] + } + ], 'plex.questions', False), +]) +def test_validate_variable_uniqueness(data, schema, should_work): + verrors = ValidationErrors() + if should_work: + assert validate_variable_uniqueness(data, schema, verrors) is None + else: + with pytest.raises(ValidationErrors): + validate_variable_uniqueness(data, schema, verrors) diff --git a/catalog_validation/ix-source/catalog_validation/pytest/unit/test_items_util.py b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_items_util.py new file mode 100644 index 00000000..aab41eb5 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_items_util.py @@ -0,0 +1,107 @@ +import pytest + +from catalog_validation.items.items_util import get_item_details, get_item_details_impl + + +QUESTION_CONTEXT = { + 'nic_choices': [], + 'gpus': {}, + 'timezones': {'Asia/Saigon': 'Asia/Saigon', 'Asia/Damascus': 'Asia/Damascus'}, + 'node_ip': '192.168.0.10', + 
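+ # empty certificate/CA lists below model a system with none configured; $ref normalisation is expected to fall back to the "No Certificate" enum entries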
'certificates': [], + 'certificate_authorities': [], + 'system.general.config': {'timezone': 'America/Los_Angeles'}, +} + + +@pytest.mark.parametrize('item_location,options,items_data', [ + ('/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/chia', + {'retrieve_versions': True}, + { + 'name': 'chia', + 'categories': [], + 'app_readme': None, + 'location': '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/chia', + 'healthy': True, + 'healthy_error': None, + 'home': None, + 'last_update': None, + 'versions': {}, + 'maintainers': [], + 'latest_version': None, + 'latest_app_version': None, + 'latest_human_version': None, + 'recommended': False, + 'title': 'Chia', + 'description': None, + 'tags': [], + 'screenshots': [], + 'sources': [], + } + ), +]) +def test_get_item_details(mocker, item_location, options, items_data): + mocker.patch('catalog_validation.items.items_util.validate_item', return_value=None) + mocker.patch('catalog_validation.items.items_util.get_item_details_impl', return_value={}) + assert get_item_details(item_location, QUESTION_CONTEXT, options) == items_data + + +@pytest.mark.parametrize('item_path,schema,options,yaml_data,item_data_impl,open_yaml', [ + ( + '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/chia', + 'charts.chia', + {'retrieve_latest_version': True}, { + 'variable': 'web_port', + 'label': 'Web Port for Diskover', + 'group': 'Networking', + 'schema': { + 'type': 'int', + 'min': 8000, + 'max': 65535, + 'default': 22510, + 'required': True + } + }, { + 'versions': { + '1.3.37': { + 'healthy': True, + 'supported': False, + 'healthy_error': None, + 'last_update': None, + 'location': '/mnt/mypool/ix-applications/catalogs/github_com_truenas_' + 'charts_git_master/charts/chia/1.3.37', + 'required_features': [], + 'human_version': '1.3.37', + 'version': '1.3.37' + } + }, + 'categories': ['storage', 'crypto'], + 'icon_url': 'https://www.chia.net/wp-content/uploads/2022/09/chia-logo.svg', + 'tags': ['finance'], + 'screenshots': ['https://www.chia.net/wp-content/uploads/2022/09/chia-logo.svg'], + 'sources': ['https://hub.docker.com/r/emby/embyserver'], + }, + ''' + screenshots: + - 'https://www.chia.net/wp-content/uploads/2022/09/chia-logo.svg' + tags: + - finance + categories: + - storage + - crypto + icon_url: https://www.chia.net/wp-content/uploads/2022/09/chia-logo.svg + sources: + - https://hub.docker.com/r/emby/embyserver + ''' + ), +]) +def test_get_item_details_impl( + mocker, item_path, schema, options, yaml_data, item_data_impl, open_yaml, +): + open_file_data = mocker.mock_open(read_data=open_yaml) + mocker.patch('builtins.open', open_file_data) + mocker.patch('os.path.isdir', return_value=True) + mocker.patch('os.listdir', return_value=['1.3.37']) + mocker.patch('catalog_validation.items.items_util.validate_item_version', return_value=None) + mocker.patch('catalog_validation.items.items_util.get_item_version_details', return_value={}) + assert get_item_details_impl(item_path, schema, QUESTION_CONTEXT, options) == item_data_impl diff --git a/catalog_validation/ix-source/catalog_validation/pytest/unit/test_normalise_questions.py b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_normalise_questions.py new file mode 100644 index 00000000..5372d6f6 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_normalise_questions.py @@ -0,0 +1,321 @@ +from catalog_validation.items.questions_utils import normalise_question +import 
pytest + + +VERSION_DATA = { + 'location': '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/syncthing/1.0.14', + 'required_features': { + 'normalize/ixVolume', + 'validations/lockedHostPath', + }, + 'chart_metadata': {}, + 'schema': { + 'variable': 'hostNetwork', + 'label': 'Host Network', + 'group': 'Networking', + }, + 'app_readme': 'there is not any', + 'detailed_readme': 'there is not any', + 'changelog': None, +} + + +@pytest.mark.parametrize('question,normalise_data,context', [ + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/interface'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/interface'], + 'enum': [], + } + }, { + 'nic_choices': [], + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/interface'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/interface'], + 'enum': [{ + 'value': 'ens0', + 'description': "'ens0' Interface" + }], + } + }, { + 'nic_choices': ['ens0'] + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/gpuConfiguration'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/gpuConfiguration'], + 'attrs': [{ + 'variable': 'test@gpu', + 'label': 'GPU Resource (test@gpu)', + 'description': 'Please enter the number of GPUs to allocate', + 'schema': { + 'type': 'int', + 'max': 3, + 'enum': [ + {'value': i, 'description': f'Allocate {i!r} test@gpu GPU'} + for i in range(4) + ], + 'default': 0, + } + }], + } + }, { + 'gpus': { + 'test@gpu': 3 + } + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/gpuConfiguration'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/gpuConfiguration'], + 'attrs': [], + } + }, { + 'gpus': {} + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/timezone'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/timezone'], + 'enum': [{ + 'value': 'Asia/Damascus', + 'description': "'Asia/Damascus' timezone", + }, { + 'value': 'Asia/Saigon', + 'description': "'Asia/Saigon' timezone", + }], + 'default': 'America/Los_Angeles', + } + }, + { + 'timezones': { + 'Asia/Saigon': 'Asia/Saigon', + 'Asia/Damascus': 'Asia/Damascus', + }, + 'system.general.config': { + 'timezone': 'America/Los_Angeles', + } + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/nodeIP'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/nodeIP'], + 'default': '192.168.0.10', + } + }, + { + 'node_ip': '192.168.0.10' + } + ), + ( + { + 'variable': 'datasetName', + 'label': 
'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificate'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificate'], + 'enum': [{ + 'value': None, + 'description': 'No Certificate' + }, + { + 'value': '1', + 'description': "'testcert' Certificate" + } + ], + 'default': None, + 'null': True + } + }, {'certificates': [{ + 'id': '1', + 'name': 'testcert' + }], + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificate'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificate'], + 'enum': [{ + 'value': None, + 'description': 'No Certificate' + }], + 'default': None, + 'null': True + } + }, { + 'certificates': [] + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificateAuthority'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificateAuthority'], + 'enum': [{ + 'value': None, + 'description': 'No Certificate Authority' + }, { + 'value': None, + 'description': 'No Certificate Authority' + }], + 'default': None, + 'null': True + } + }, { + 'certificate_authorities': [] + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificateAuthority'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificateAuthority'], + 'enum': [{ + 'value': None, + 'description': 'No Certificate Authority' + }, { + 'value': None, + 'description': 'No Certificate Authority' + }, + { + 'value': '1', + 'description': "'testca' Certificate Authority" + } + ], + 'default': None, + 'null': True + } + }, { + 'certificate_authorities': [{ + 'id': '1', + 'name': 'testca' + }], + } + ) +]) +def test_normalise_question(question, normalise_data, context): + normalise_question(question, VERSION_DATA, context) + assert question == normalise_data diff --git a/catalog_validation/ix-source/catalog_validation/pytest/unit/test_schema.py b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_schema.py new file mode 100644 index 00000000..a741a0fa --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_schema.py @@ -0,0 +1,577 @@ +import pytest + +from catalog_validation.schema.schema_gen import get_schema +from catalog_validation.exceptions import ValidationErrors +from catalog_validation.validation import validate_question + + +@pytest.mark.parametrize('schema,should_work', [ + ( + { + 'type': 'dict', + 'attrs': [] + }, + True + ), + ( + { + 'type': 'dict', + 'attrs': {} + }, + False + ), + ( + { + 'type': 'list' + }, + False + ), + ( + { + 'type': 'list', + 'items': [] + }, + True + ), + ( + { + 'type': 'list', + 'items': {} + }, + False + ), + ( + { + 'type': 'string', + 'editable': True + }, + True + ), + ( + { + 'type': 'string', + 'default': 'hello' + }, + True + ), + ( + { + 'type': 'string', + 'default': 1 + }, + False + ), + ( + { + 'type': 'string', + 'editable': 'true' + }, + False + ), + ( + { + 'type': 'string', 
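+ # flags such as 'private' must be real booleans; the string 'true' variant in the case after this one is expected to fail validation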
+ 'private': True + }, + True + ), + ( + { + 'type': 'string', + 'private': 'true' + }, + False + ), + ( + { + 'type': 'string', + 'max_length': 233 + }, + True + ), + ( + { + 'type': 'string', + 'max_length': '233' + }, + False + ), + ( + { + 'type': 'string', + 'min_length': 233 + }, + True + ), + ( + { + 'type': 'string', + 'min_length': '233' + }, + False + ), + ( + { + 'type': 'string', + 'valid_chars': '[a-z]*' + }, + True + ), + ( + { + 'type': 'string', + 'valid_chars': ['a-z'] + }, + False + ), + ( + { + 'type': 'string', 'null': True + }, + True + ), + ( + { + 'type': 'string', + 'null': 'true' + }, False + ), + ( + { + 'type': 'string', + 'immutable': True + }, + True + ), + ( + { + 'type': 'string', + 'immutable': 'true' + }, + False + ), + ( + { + 'type': 'string', + 'required': True + }, + True + ), + ( + { + 'type': 'string', + 'required': 'true' + }, + False + ), + ( + { + 'type': 'string', + 'hidden': True + }, + True + ), + ( + { + 'type': 'string', + 'hidden': 'true' + }, + False + ), + ( + { + 'type': 'string', + 'show_if': 'true' + }, + False + ), + ( + { + 'type': 'string', + 'show_if': [['hello', '=', 'world']] + }, + True + ), + ( + { + 'type': 'string', + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + 'subquestions': {} + }, + False + ), + ( + { + 'type': 'string', + 'show_subquestions_if': None + }, False + ), + ( + { + 'type': 'string', + 'show_subquestions_if': None, + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + 'show_subquestions_if': 1, + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + 'show_subquestions_if': 'test', + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + 'show_subquestions_if': {}, + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + 'show_subquestions_if': [], + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + '$ui-ref': [] + }, + True + ), + ( + { + 'type': 'string', + '$ui-ref': {} + }, + False + ), + ( + { + 'type': 'string', + '$ref': [] + }, + True + ), + ( + { + 'type': 'string', + '$ref': {} + }, + False + ), + ( + { + 'type': 'int', + 'min': 233, + 'max': 2311 + }, + True + ), + ( + { + 'type': 'int', + 'min': '233', + 'max': 2311 + }, + False + ), + ( + { + 'type': 'int', + 'min': 233, + 'max': '2311' + }, + False + ), + ( + { + 'type': 'int', + 'default': 23 + }, + True + ), + ( + { + 'type': 'int', + 'default': '23' + }, + False + ), + ( + { + 'type': 'ipaddr', + 'ipv4': True, + 'ipv6': False, + 'cidr': True + }, + True + ), + ( + { + 'type': 'ipaddr', + 'ipv4': True, + 'ipv6': False, + 'cidr': 'true' + }, + False + ), + ( + { + 'type': 'ipaddr', + 'ipv4': True, + 'ipv6': 'False', + 'cidr': True + }, + False + ), + ( + { + 'type': 'ipaddr', + 'ipv4': 'True', + 'ipv6': False, + 'cidr': True + }, + False + ), + ( + { + 'type': 'string', + 'enum': [{ + 'value': 'test', + 'description': 'test' + }] + }, + True + ), + ( + { + 'type': 'string', + 'enum': [{ + 'value': 'test', + 'description': 'test', + 'obj': {} + }] + }, + False + ), + ( + { + 'type': 'string', + 'enum': [{ + 'value': 'test' + }] + }, + False + ), + ( + { + 'type': 'string', + 'enum': [{ + 'key': 'value' + }] + }, + False + ), + ( + { + 'type': 'string', + 'enum': [{}] + }, + False + ), + ( + { + 'type': 'hostpath' + }, + True + ), + ( + { + 'type': 'hostpath', + 'default': '/root/' + }, + True + ), + ( + { + 'type': 'hostpath', + 'default': 231 + }, + False + ), + ( + { + 'type': 'path' + }, + True + ), + ( + { + 'type': 'path', + 'default': '/root/' + }, + True + ), 
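+ # a non-string default for a 'path' field must be rejected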
+ ( + { + 'type': 'path', + 'default': 231 + }, + False + ), + ( + { + 'type': 'boolean' + }, + True + ), + ( + { + 'type': 'boolean', + 'default': True + }, + True + ), + ( + { + 'type': 'boolean', + 'default': 'true' + }, + False + ), + ( + { + 'type': 'cron' + }, + True + ), + ( + { + 'type': 'cron', + 'default': {} + }, + True + ), + ( + { + 'type': 'cron', + 'default': [] + }, + False + ), + ( + { + 'type': 'uri' + }, + True + ), + ( + { + 'type': 'uri', + 'default': 'http://www.google.com' + }, + True + ), + ( + { + 'type': 'uri', + 'default': 2133 + }, + False + ), +]) +def test_schema_validation(schema, should_work): + if not should_work: + with pytest.raises(ValidationErrors): + get_schema(schema).validate('') + else: + assert get_schema(schema).validate('') is None + + +@pytest.mark.parametrize('variable,should_work', [ + ( + { + 'variable': 'testing', + 'label': 'Testing', + 'description': 'for testing', + 'group': 'testing', + 'schema': { + 'type': 'boolean', + 'default': True + } + }, + True + ), + ( + { + 'variable': 'testing', + 'description': 'for testing', + 'group': 'testing', + 'schema': { + 'type': 'boolean', + 'default': True + } + }, + False + ), + ( + { + 'variable': 'testing', + 'label': 'Testing', + 'description': 'for testing' + }, + False + ), + ( + { + 'variable': 'testing', + 'label': 'Testing', + 'description': 'for testing', + 'schema': { + 'type': 'boolean', + 'default': True + } + }, + True + ), + ( + { + 'variable': 'testing', + 'label': 'Testing', + 'description': 'for testing', + 'schema': { + 'default': True + } + }, + False + ), +]) +def test_question_variable_validation(variable, should_work): + verrors = ValidationErrors() + validate_question(variable, '', verrors) + if not should_work: + with pytest.raises(ValidationErrors): + verrors.check() + else: + verrors.check() diff --git a/catalog_validation/ix-source/catalog_validation/pytest/unit/test_util.py b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_util.py new file mode 100644 index 00000000..c4e8b75c --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_util.py @@ -0,0 +1,17 @@ +import pytest + +from catalog_validation.utils import RE_VERSION_PATTERN + + +@pytest.mark.parametrize('version,result', [ + ('22.04-MASTER-12345678', '22.04'), + ('24.10.1', '24.10.1'), + ('23.12', '23.12'), + ('22.02.0.1', '22.02.0.1'), + ('22.02-ALPHA', '22.02'), + ('24.04-MASTER', '24.04'), + ('20.23-INTERNAL', '20.23'), +]) +def test_version_regex_match(version, result): + match = RE_VERSION_PATTERN.findall(version) + assert len(match) == 1 and match[0] == result diff --git a/catalog_validation/ix-source/catalog_validation/pytest/unit/test_validate_ix_values.py b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_validate_ix_values.py new file mode 100644 index 00000000..f634a927 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_validate_ix_values.py @@ -0,0 +1,94 @@ +import pytest +from catalog_validation.validation import validate_ix_values_yaml +from catalog_validation.exceptions import ValidationErrors + + +@pytest.mark.parametrize('schema, ix_values_yaml_path, test_yaml, should_work', [ + + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + ''' + image: + pullPolicy: IfNotPresent + repository: ixsystems/chia-docker + tag: v1.6.2 + updateStrategy: Recreate + iXPortals: [{portalName: 'web portal', protocol: 
'http', useNodeIP: false, host: '192.168.0.18', port: 9898}] + ''', + True + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + ''' + image: + pullPolicy: IfNotPresent + repository: ixsystems/chia-docker + tag: v1.6.2 + updateStrategy: Recreate + iXPortals: [{portalName: 'web portal', protocol: 'http', useNodeIP: true, port: 9898}] + ''', + True + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + ''' + image: + pullPolicy: IfNotPresent + repository: ixsystems/chia-docker + tag: v1.6.2 + updateStrategy: Recreate + iXPortals: [{portalName: 'web portal', protocol: 'htts', useNodeIP: true, port: 9898}] + ''', + False + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + ''' + image: + pullPolicy: IfNotPresent + repository: ixsystems/chia-docker + tag: v1.6.2 + updateStrategy: Recreate + iXPortals: [{portalName: 'web portal', protocol: 09088, useNodeIP: true, port: '9898'}] + ''', + False + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + ''' + image: + pullPolicy: IfNotPresent + repository: ixsystems/chia-docker + tag: v1.6.2 + updateStrategy: Recreate + iXPortals: [{portalName: 'web portal', useNodeIP: true, port: '9898'}] + ''', + False + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + '', + False, + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + 'image pullPolicy ifNotPresent', + False, + ) +]) +def test_validate_ix_values(mocker, schema, ix_values_yaml_path, test_yaml, should_work): + open_file = mocker.mock_open(read_data=test_yaml) + mocker.patch('builtins.open', open_file) + + if should_work: + assert validate_ix_values_yaml(ix_values_yaml_path, schema) is None + else: + with pytest.raises(ValidationErrors): + validate_ix_values_yaml(ix_values_yaml_path, schema) diff --git a/catalog_validation/ix-source/catalog_validation/pytest/unit/test_validation_utils.py b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_validation_utils.py new file mode 100644 index 00000000..3f7eb815 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/pytest/unit/test_validation_utils.py @@ -0,0 +1,104 @@ +import pytest + +from catalog_validation.exceptions import ValidationErrors +from catalog_validation.validation_utils import validate_min_max_version_values + + +@pytest.mark.parametrize( + 'annotations_dict,schema,expected_error', + [ + ( + { + 'min_scale_version': '23.04', + 'max_scale_version': '24.04' + }, + 'charts.plex.versions.1.7.56', + None + ), + ( + { + 'min_scale_version': '22.02-RC.2', + 'max_scale_version': '24.04' + }, + 'charts.plex.versions.1.7.56', + None + ), + ( + { + 'min_scale_version': '24.04', + 'max_scale_version': '23.04' + }, + 'charts.plex.versions.1.7.56', + 'Provided min_scale_version is greater than provided max_scale_version' + ), + ( + { + 'min_scale_version': '12', + 'max_scale_version': '24.04' + }, + 'charts.plex.versions.1.7.56', + 'Format of provided min_scale_version value is not 
correct' + ), + ( + { + 'min_scale_version': '24.04-MASTER-20230928-144829', + 'max_scale_version': '24.04' + }, + 'charts.plex.versions.1.7.56', + 'Format of provided min_scale_version value is not correct' + ), + ( + { + 'min_scale_version': '22.12.2-INTERNAL.9', + 'max_scale_version': '24.04' + }, + 'charts.plex.versions.1.7.56', + 'Format of provided min_scale_version value is not correct' + ), + ( + { + 'min_scale_version': '23.04', + }, + 'charts.plex.versions.1.7.56', + None + ), + ( + { + 'min_scale_version': 24.04, + }, + 'charts.plex.versions.1.7.56', + '\'min_scale_version\' value should be a \'str\'' + ), + ( + { + 'min_scale_version': None + }, + 'charts.plex.versions.1.7.56', + '\'min_scale_version\' value should be a \'str\'' + ), + ( + { + 'min_scale_version': '22.02.CUSTOM', + }, + 'charts.plex.versions.1.7.56', + 'Format of provided min_scale_version value is not correct' + ), + ( + { + 'min_scale_version': 'TrueNAS-SCALE-22.02-RC.1', + 'max_scale_version': '24.04' + }, + 'charts.plex.versions.1.7.56', + 'Format of provided min_scale_version value is not correct' + ), + ] +) +def test_validate_min_max_version_values(annotations_dict, schema, expected_error): + verrors = ValidationErrors() + if expected_error: + with pytest.raises(ValidationErrors) as ve: + validate_min_max_version_values(annotations_dict, verrors, schema) + verrors.check() + assert ve.value.errors[0].errmsg == expected_error + else: + assert validate_min_max_version_values(annotations_dict, verrors, schema) is None diff --git a/catalog_validation/ix-source/catalog_validation/schema/__init__.py b/catalog_validation/ix-source/catalog_validation/schema/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/catalog_validation/ix-source/catalog_validation/schema/attrs.py b/catalog_validation/ix-source/catalog_validation/schema/attrs.py new file mode 100644 index 00000000..dbf525f1 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/schema/attrs.py @@ -0,0 +1,282 @@ +from jsonschema import validate as json_schema_validate, ValidationError as JsonValidationError + +from catalog_validation.exceptions import ValidationErrors + +from .feature_gen import get_feature +from .variable_gen import generate_variable + + +class Schema: + + DEFAULT_TYPE = NotImplementedError + + def __init__(self, include_subquestions_attrs=True, data=None): + self.required = self.null = self.show_if = self.ref = self.ui_ref = self.type =\ + self.editable = self.hidden = self.default = self._schema_data = None + self._skip_data_values = [] + if include_subquestions_attrs: + self.subquestions = self.show_subquestions_if = None + if data: + self.initialize_values(data) + + def initialize_values(self, data): + self._schema_data = data + for key, value in filter( + lambda k: hasattr(self, k[0]) and k[0] not in self._skip_data_values, data.items() + ): + setattr(self, key, value) + + def get_schema_str(self, schema): + if schema: + return f'{schema}.' 
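+ # no parent schema supplied; return an empty prefix so dotted key paths join cleanly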
+ return '' + + def validate(self, schema, data=None): + if data: + self.initialize_values(data) + + if not self._schema_data: + raise Exception('Schema data must be initialized before validating schema') + + verrors = ValidationErrors() + try: + json_schema_validate(self._schema_data, self.json_schema()) + except JsonValidationError as e: + verrors.add(schema, f'Failed to validate schema: {e}') + + verrors.check() + + if '$ref' in self._schema_data: + for index, ref in enumerate(self._schema_data['$ref']): + if not isinstance(ref, str): + verrors.add(f'{schema}.$ref.{index}', 'Must be a string') + continue + + feature_obj = get_feature(ref) + if not feature_obj: + continue + try: + feature_obj.validate(self, f'{schema}.$ref.{index}') + except ValidationErrors as e: + verrors.extend(e) + + verrors.check() + + def json_schema(self): + schema = { + 'type': 'object', + 'properties': { + 'required': { + 'type': 'boolean', + }, + 'null': { + 'type': 'boolean', + }, + 'show_if': { + 'type': 'array', + }, + '$ref': { + 'type': 'array', + }, + '$ui-ref': { + 'type': 'array', + }, + 'subquestions': { + 'type': 'array', + }, + 'show_subquestions_if': { + 'type': ['string', 'integer', 'boolean', 'object', 'array', 'null'], + }, + 'type': { + 'type': 'string', + }, + 'editable': { + 'type': 'boolean', + }, + 'immutable': { + 'type': 'boolean', + }, + 'hidden': { + 'type': 'boolean', + }, + }, + 'required': ['type'], + 'dependentRequired': { + 'show_subquestions_if': ['subquestions'] + } + } + if self.DEFAULT_TYPE: + schema['properties']['default'] = { + 'type': [self.DEFAULT_TYPE] + (['null'] if self.null else []) + } + if hasattr(self, 'enum'): + schema['properties']['enum'] = { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'value': {'type': [self.DEFAULT_TYPE] + (['null'] if self.null else [])}, + 'description': {'type': ['string', 'null']}, + }, + 'additionalProperties': False, + 'required': ['value', 'description'] + }, + } + return schema + + +class BooleanSchema(Schema): + DEFAULT_TYPE = 'boolean' + + +class StringSchema(Schema): + DEFAULT_TYPE = 'string' + + def __init__(self, data): + self.min_length = self.max_length = self.enum = self.private = self.valid_chars = self.valid_chars_error = None + super().__init__(data=data) + + def json_schema(self): + schema = super().json_schema() + schema['properties'].update({ + 'min_length': { + 'type': 'integer', + }, + 'max_length': { + 'type': 'integer', + }, + 'private': { + 'type': 'boolean', + }, + 'valid_chars': { + 'type': 'string', + }, + 'valid_chars_error': { + 'type': 'string' + }, + }) + return schema + + +class TextFieldSchema(StringSchema): + def __init__(self, data): + super().__init__(data) + self.max_length = 1024 * 1024 + + def json_schema(self): + schema = super().json_schema() + schema['properties'].update({ + 'max_length': { + 'type': 'integer', + 'const': 1024 * 1024 + }, + 'language': { + 'type': 'string', + 'enum': ['yaml', 'json', 'toml', 'text'], + } + }) + return schema + + +class IntegerSchema(Schema): + DEFAULT_TYPE = 'integer' + + def __init__(self, data): + self.min = self.max = self.enum = None + super().__init__(data=data) + + def json_schema(self): + schema = super().json_schema() + schema['properties'].update({ + 'min': { + 'type': 'integer', + }, + 'max': { + 'type': 'integer', + }, + }) + return schema + + +class PathSchema(Schema): + DEFAULT_TYPE = 'string' + + +class HostPathSchema(Schema): + DEFAULT_TYPE = 'string' + + +class HostPathDirSchema(Schema): + DEFAULT_TYPE = 'string' + + +class 
HostPathFileSchema(Schema): + DEFAULT_TYPE = 'string' + + +class URISchema(Schema): + DEFAULT_TYPE = 'string' + + +class IPAddrSchema(Schema): + DEFAULT_TYPE = 'string' + + def __init__(self, data): + self.ipv4 = self.ipv6 = self.cidr = None + super().__init__(data=data) + + def json_schema(self): + schema = super().json_schema() + schema['properties'].update({ + 'ipv4': {'type': 'boolean'}, + 'ipv6': {'type': 'boolean'}, + 'cidr': {'type': 'boolean'}, + }) + return schema + + +class CronSchema(Schema): + DEFAULT_TYPE = 'object' + + +class DictSchema(Schema): + DEFAULT_TYPE = 'object' + + def __init__(self, data): + self.attrs = [] + self.additional_attrs = None + super().__init__(data=data) + self._skip_data_values = ['attrs'] + + def initialize_values(self, data): + super().initialize_values(data) + self.attrs = [generate_variable(d) for d in (data.get('attrs') or [])] + + def json_schema(self): + schema = super().json_schema() + schema['additionalProperties'] = bool(self.additional_attrs) + schema['properties']['attrs'] = {'type': 'array'} + schema['required'].append('attrs') + # We do not validate nested children and hence do not add it in the + # json schema as it makes it very complex to handle all the possibilities + return schema + + +class ListSchema(Schema): + + DEFAULT_TYPE = 'array' + + def __init__(self, data): + self.items = [] + super().__init__(False, data=data) + self._skip_data_values = ['items'] + + def initialize_values(self, data): + super().initialize_values(data) + self.items = [generate_variable(d) for d in (data.get('items') or [])] + + def json_schema(self): + schema = super().json_schema() + schema['properties']['items'] = {'type': 'array'} + schema['required'].append('items') + return schema diff --git a/catalog_validation/ix-source/catalog_validation/schema/feature_gen.py b/catalog_validation/ix-source/catalog_validation/schema/feature_gen.py new file mode 100644 index 00000000..62dd657f --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/schema/feature_gen.py @@ -0,0 +1,4 @@ +def get_feature(feature): + from .features import FEATURES + if feature in FEATURES: + return FEATURES[FEATURES.index(feature)] diff --git a/catalog_validation/ix-source/catalog_validation/schema/features.py b/catalog_validation/ix-source/catalog_validation/schema/features.py new file mode 100644 index 00000000..c69c7b44 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/schema/features.py @@ -0,0 +1,158 @@ +from catalog_validation.exceptions import ValidationErrors + +from .schema_gen import DictSchema, IntegerSchema, StringSchema + + +class Feature: + + NAME = NotImplementedError + VALID_SCHEMAS = [] + + def __str__(self): + return self.NAME + + def validate(self, schema_obj, schema_str): + verrors = ValidationErrors() + if not isinstance(schema_obj, tuple(self.VALID_SCHEMAS)): + verrors.add( + f'{schema_str}.type', + f'Schema must be one of {", ".join(str(v) for v in self.VALID_SCHEMAS)} schema types' + ) + + if not verrors: + self._validate(verrors, schema_obj, schema_str) + verrors.check() + + def _validate(self, verrors, schema_obj, schema_str): + pass + + def __eq__(self, other): + return self.NAME == (other if isinstance(other, str) else other.NAME) + + +class IXVolumeFeature(Feature): + + NAME = 'normalize/ixVolume' + VALID_SCHEMAS = [DictSchema, StringSchema] + + def _validate(self, verrors, schema_obj, schema_str): + if isinstance(schema_obj, StringSchema): + return + + attrs = schema_obj.attrs + if 'datasetName' not in attrs: + 
verrors.add(f'{schema_str}.attrs', 'Variable "datasetName" must be specified.') + elif not isinstance(attrs[attrs.index('datasetName')].schema, StringSchema): + verrors.add(f'{schema_str}.attrs', 'Variable "datasetName" must be of string type.') + + if 'aclEntries' in attrs and not isinstance(attrs[attrs.index('aclEntries')].schema, DictSchema): + verrors.add(f'{schema_str}.attrs', 'Variable "aclEntries" must be of dict type.') + + if 'properties' in attrs: + index = attrs.index('properties') + properties = attrs[index] + properties_schema = properties.schema + supported_props = { + 'recordsize': { + 'valid_schema_type': [StringSchema], + }, + } + not_supported = set([str(v) for v in properties_schema.attrs]) - set(supported_props) + if not_supported: + verrors.add( + f'{schema_str}.attrs.{index}.attrs', f'{", ".join(not_supported)} properties are not supported' + ) + + for prop_index, prop in enumerate(properties_schema.attrs): + if prop.name not in supported_props: + continue + + prop_schema = prop.schema + check_prop = supported_props[prop.name] + if not isinstance(prop_schema, tuple(check_prop['valid_schema_type'])): + verrors.add( + f'{schema_str}.attrs.{index}.attrs.{prop_index}', + f'{prop.name!r} must be of ' + f'{", ".join([str(s) for s in check_prop["valid_schema_type"]])} type(s)' + ) + + +class NormalizeInterfaceConfiguration(Feature): + NAME = 'normalize/interfaceConfiguration' + VALID_SCHEMAS = [DictSchema] + + +class DefinitionInterfaceFeature(Feature): + + NAME = 'definitions/interface' + VALID_SCHEMAS = [StringSchema] + + +class DefinitionGPUConfigurationFeature(Feature): + + NAME = 'definitions/gpuConfiguration' + VALID_SCHEMAS = [DictSchema] + + +class DefinitionTimezoneFeature(Feature): + + NAME = 'definitions/timezone' + VALID_SCHEMAS = [StringSchema] + + +class DefinitionNodeIPFeature(Feature): + + NAME = 'definitions/nodeIP' + VALID_SCHEMAS = [StringSchema] + + +class ValidationNodePortFeature(Feature): + + NAME = 'validations/nodePort' + VALID_SCHEMAS = [IntegerSchema] + + +class CertificateFeature(Feature): + + NAME = 'definitions/certificate' + VALID_SCHEMAS = [IntegerSchema] + + +class CertificateAuthorityFeature(Feature): + + NAME = 'definitions/certificateAuthority' + VALID_SCHEMAS = [IntegerSchema] + + +class ContainerImageFeature(Feature): + + NAME = 'validations/containerImage' + VALID_SCHEMAS = [DictSchema] + + def _validate(self, verrors, schema_obj, schema_str): + attrs = schema_obj.attrs + for check_attr in ('repository', 'tag'): + if check_attr not in attrs: + verrors.add(f'{schema_str}.attrs', f'Variable {check_attr!r} must be specified.') + elif not isinstance(attrs[attrs.index(check_attr)].schema, StringSchema): + verrors.add(f'{schema_str}.attrs', f'Variable {check_attr!r} must be of string type.') + + +class ACLFeature(Feature): + + NAME = 'normalize/acl' + VALID_SCHEMAS = [DictSchema] + + +FEATURES = [ + ACLFeature(), + IXVolumeFeature(), + DefinitionInterfaceFeature(), + DefinitionGPUConfigurationFeature(), + DefinitionTimezoneFeature(), + DefinitionNodeIPFeature(), + ValidationNodePortFeature(), + CertificateFeature(), + CertificateAuthorityFeature(), + ContainerImageFeature(), +] diff --git a/catalog_validation/ix-source/catalog_validation/schema/migration_schema.py b/catalog_validation/ix-source/catalog_validation/schema/migration_schema.py new file mode 100644 index 00000000..9b535d9a --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/schema/migration_schema.py @@ -0,0 +1,42 @@ +import re + + +APP_MIGRATION_DIR = 'migrations' 
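+# For illustration (hypothetical app and train names, not from these sources): a migration file named +# '0001_move_app.json' satisfying APP_MIGRATION_SCHEMA below could contain: +# [{"app_name": "myapp", "action": "move", "old_train": "test", "new_train": "stable"}]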
+APP_MIGRATION_SCHEMA = { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'app_name': {'type': 'string'}, + 'action': {'type': 'string', 'enum': ['move']}, + }, + 'required': [ + 'app_name', + 'action' + ], + 'allOf': [ + { + 'if': { + 'properties': { + 'action': { + 'const': 'move', + }, + }, + }, + 'then': { + 'properties': { + 'old_train': {'type': 'string'}, + 'new_train': {'type': 'string'}, + }, + 'required': [ + 'new_train', + 'old_train', + ], + }, + }, + ], + }, +} +MIGRATION_DIRS = ['.migrations', 'ix-migrations'] +RE_MIGRATION_NAME_STR = r'^\d+\w+\.json' +RE_MIGRATION_NAME = re.compile(RE_MIGRATION_NAME_STR) diff --git a/catalog_validation/ix-source/catalog_validation/schema/schema_gen.py b/catalog_validation/ix-source/catalog_validation/schema/schema_gen.py new file mode 100644 index 00000000..34d7ba29 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/schema/schema_gen.py @@ -0,0 +1,43 @@ +from .attrs import ( + BooleanSchema, StringSchema, TextFieldSchema, IntegerSchema, PathSchema, HostPathSchema, HostPathDirSchema, + HostPathFileSchema, ListSchema, DictSchema, IPAddrSchema, CronSchema, URISchema +) + + +def get_schema(schema_data): + schema = None + if not isinstance(schema_data, dict): + return schema + + s_type = schema_data.get('type') + if s_type == 'boolean': + schema = BooleanSchema + elif s_type == 'string': + schema = StringSchema + elif s_type == 'text': + schema = TextFieldSchema + elif s_type == 'int': + schema = IntegerSchema + elif s_type == 'path': + schema = PathSchema + elif s_type == 'hostpath': + schema = HostPathSchema + elif s_type == 'hostpathdirectory': + schema = HostPathDirSchema + elif s_type == 'hostpathfile': + schema = HostPathFileSchema + elif s_type == 'list': + schema = ListSchema + elif s_type == 'dict': + schema = DictSchema + elif s_type == 'ipaddr': + schema = IPAddrSchema + elif s_type == 'cron': + schema = CronSchema + elif s_type == 'uri': + schema = URISchema + + if schema: + schema = schema(data=schema_data) + + return schema diff --git a/catalog_validation/ix-source/catalog_validation/schema/variable.py b/catalog_validation/ix-source/catalog_validation/schema/variable.py new file mode 100644 index 00000000..e11d522a --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/schema/variable.py @@ -0,0 +1,37 @@ +from catalog_validation.exceptions import ValidationErrors + +from .schema_gen import get_schema + + +class Variable: + def __init__(self, data): + self.name = self.label = self.description = self.group = None + self.schema = None + self.update_from_data(data) + + def update_from_data(self, data): + self.name = data.get('variable') + self.label = data.get('label') + self.description = data.get('description') + self.schema = get_schema(data.get('schema')) + + def validate(self, schema): + verrors = ValidationErrors() + if not self.name: + verrors.add(f'{schema}.variable', 'Variable value must be specified') + + if not self.schema: + verrors.add(f'{schema}.schema', 'Schema must be specified for variable') + else: + try: + self.schema.validate(f'{schema}.schema') + except ValidationErrors as ve: + verrors.extend(ve) + + verrors.check() + + def __str__(self): + return self.name + + def __eq__(self, other): + return (other if isinstance(other, str) else other.name) == self.name diff --git a/catalog_validation/ix-source/catalog_validation/schema/variable_gen.py b/catalog_validation/ix-source/catalog_validation/schema/variable_gen.py new file mode 100644 index 00000000..8784e4f9 ---
/dev/null +++ b/catalog_validation/ix-source/catalog_validation/schema/variable_gen.py @@ -0,0 +1,3 @@ +def generate_variable(variable_data): + from .variable import Variable + return Variable(variable_data) diff --git a/catalog_validation/ix-source/catalog_validation/scripts/__init__.py b/catalog_validation/ix-source/catalog_validation/scripts/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/catalog_validation/ix-source/catalog_validation/scripts/catalog_update.py b/catalog_validation/ix-source/catalog_validation/scripts/catalog_update.py new file mode 100644 index 00000000..0d52a8b6 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/scripts/catalog_update.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python +import argparse +import contextlib +import json +import os +import shutil +import typing + +from jsonschema import validate as json_schema_validate, ValidationError as JsonValidationError + +from catalog_validation.ci.utils import ( + get_app_version, get_ci_development_directory, OPTIONAL_METADATA_FILES, + REQUIRED_METADATA_FILES, version_has_been_bumped, get_to_keep_versions +) +from catalog_validation.exceptions import ValidationErrors +from catalog_validation.items.catalog import get_items_in_trains, retrieve_train_names, retrieve_trains_data +from catalog_validation.items.utils import get_catalog_json_schema +from catalog_validation.utils import CACHED_CATALOG_FILE_NAME, CACHED_VERSION_FILE_NAME +from catalog_validation.validation import validate_catalog_item_version_data +from collections import defaultdict + + +def get_trains(location: str) -> typing.Tuple[dict, dict]: + preferred_trains: list = [] + trains_to_traverse = retrieve_train_names(location) + catalog_data = {} + versions_data = {} + for train_name, train_data in retrieve_trains_data( + get_items_in_trains(trains_to_traverse, location), location, preferred_trains, trains_to_traverse + )[0].items(): + catalog_data[train_name] = {} + versions_data[train_name] = {} + for app_name, app_data in train_data.items(): + catalog_data[train_name][app_name] = {} + versions_data[train_name][app_name] = {} + for k, v in app_data.items(): + if k == 'versions': + versions_data[train_name][app_name][k] = v + else: + catalog_data[train_name][app_name][k] = v + + return catalog_data, versions_data + + +def validate_train_data(train_data): + verrors = ValidationErrors() + try: + json_schema_validate(train_data, get_catalog_json_schema()) + except (json.JSONDecodeError, JsonValidationError) as e: + verrors.add( + 'catalog_json', + f'Failed to validate contents of train data: {e!r}' + ) + verrors.check() + + +def validate_versions_data(versions_data): + verrors = ValidationErrors() + for train_name, train_data in versions_data.items(): + for app_name, app_version_data in train_data.items(): + validate_catalog_item_version_data(app_version_data['versions'], f'{train_name}.{app_name}', verrors) + verrors.check() + + +def get_apps_to_publish(catalog_path: str) -> dict: + ci_dev_dir = get_ci_development_directory(catalog_path) + to_publish_apps = defaultdict(list) + for train_name in os.listdir(ci_dev_dir): + train_path = os.path.join(ci_dev_dir, train_name) + if not os.path.isdir(train_path): + continue + + for app_name in os.listdir(train_path): + app_path = os.path.join(train_path, app_name) + if not os.path.isdir(app_path): + continue + + app_current_version = get_app_version(app_path) + if version_has_been_bumped(os.path.join(catalog_path, train_name, app_name), app_current_version): + 
to_publish_apps[train_name].append({'name': app_name, 'version': app_current_version}) + + return to_publish_apps + + +def publish_updated_apps(catalog_path: str) -> None: + ci_dev_directory = get_ci_development_directory(catalog_path) + if not os.path.isdir(ci_dev_directory): + return + + for train_name, apps in get_apps_to_publish(catalog_path).items(): + dev_train_path = os.path.join(ci_dev_directory, train_name) + publish_train_path = os.path.join(catalog_path, train_name) + os.makedirs(publish_train_path, exist_ok=True) + + for app in apps: + app_name, app_version = app['name'], app['version'] + dev_app_path = os.path.join(dev_train_path, app_name) + publish_app_path = os.path.join(publish_train_path, app_name) + publish_app_version_path = os.path.join(publish_app_path, app_version) + required_versions = get_to_keep_versions(dev_app_path) + os.makedirs(publish_app_path, exist_ok=True) + + dev_item_yaml_path = os.path.join(dev_app_path, 'item.yaml') + publish_item_yaml_path = os.path.join(publish_app_path, 'item.yaml') + shutil.copy(dev_item_yaml_path, publish_item_yaml_path) + shutil.copytree(dev_app_path, publish_app_version_path) + + for file_name in OPTIONAL_METADATA_FILES + REQUIRED_METADATA_FILES: + with contextlib.suppress(OSError): + os.unlink(os.path.join(publish_app_version_path, file_name)) + + ix_values_path = os.path.join(publish_app_version_path, 'ix_values.yaml') + values_path = os.path.join(publish_app_version_path, 'values.yaml') + if not os.path.exists(ix_values_path) and os.path.exists(values_path): + shutil.move(values_path, ix_values_path) + + for version in os.listdir(publish_app_path): + version_path = os.path.join(publish_app_path, version) + if not os.path.isdir(version_path) or version in required_versions: + continue + + if version != app_version: + shutil.rmtree(version_path) + + print( + f'[\033[92mOK\x1B[0m]\tPublished {app_name!r} having {app_version!r} version ' + f'to {train_name!r} train successfully!' 
+ ) + + +def update_catalog_file(location: str) -> None: + catalog_file_path = os.path.join(location, CACHED_CATALOG_FILE_NAME) + catalog_data, versions_data = get_trains(location) + validate_train_data(catalog_data) + validate_versions_data(versions_data) + + with open(catalog_file_path, 'w') as f: + f.write(json.dumps(catalog_data, indent=4)) + + print(f'[\033[92mOK\x1B[0m]\tUpdated {catalog_file_path!r} successfully!') + + for train_name, train_data in versions_data.items(): + for app_name, app_data in train_data.items(): + version_path = os.path.join(location, train_name, app_name, CACHED_VERSION_FILE_NAME) + with open(version_path, 'w') as f: + f.write(json.dumps(app_data['versions'], indent=4)) + + print(f'[\033[92mOK\x1B[0m]\tUpdated {version_path!r} successfully!') + + +def main(): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(help='sub-command help', dest='action') + + publish_setup = subparsers.add_parser('publish', help='Publish apps of TrueNAS catalog') + publish_setup.add_argument('--path', help='Specify path of TrueNAS catalog') + + parser_setup = subparsers.add_parser('update', help='Update TrueNAS catalog') + parser_setup.add_argument('--path', help='Specify path of TrueNAS catalog') + + args = parser.parse_args() + if args.action == 'publish': + publish_updated_apps(args.path) + elif args.action == 'update': + update_catalog_file(args.path) + else: + parser.print_help() + + +if __name__ == '__main__': + main() diff --git a/catalog_validation/ix-source/catalog_validation/scripts/catalog_validate.py b/catalog_validation/ix-source/catalog_validation/scripts/catalog_validate.py new file mode 100644 index 00000000..6d4b8d3f --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/scripts/catalog_validate.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +import argparse + +from catalog_validation.exceptions import CatalogDoesNotExist, ValidationErrors +from catalog_validation.validation import validate_catalog + + +def validate(catalog_path): + + try: + validate_catalog(catalog_path) + except CatalogDoesNotExist: + print(f'[\033[91mFAILED\x1B[0m]\tSpecified {catalog_path!r} path does not exist') + exit(1) + except ValidationErrors as verrors: + print('[\033[91mFAILED\x1B[0m]\tFollowing validation failures were found:') + for index, verror in enumerate(verrors.errors): + print(f'[\033[91m{index}\x1B[0m]\t{verror}') + exit(1) + else: + print('[\033[92mOK\x1B[0m]\tPASSED VALIDATION CHECKS') + + +def main(): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(help='sub-command help', dest='action') + + parser_setup = subparsers.add_parser('validate', help='Validate TrueNAS catalog') + parser_setup.add_argument('--path', help='Specify path of TrueNAS catalog') + + args = parser.parse_args() + if args.action == 'validate': + validate(args.path) + else: + parser.print_help() + + +if __name__ == '__main__': + main() diff --git a/catalog_validation/ix-source/catalog_validation/scripts/dev_apps_validate.py b/catalog_validation/ix-source/catalog_validation/scripts/dev_apps_validate.py new file mode 100644 index 00000000..1ce514be --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/scripts/dev_apps_validate.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +import argparse + +from catalog_validation.ci.validate import validate_dev_directory_structure +from catalog_validation.git_utils import get_changed_apps + + +def main(): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(help='sub-command help', 
dest='action') + + parser_setup = subparsers.add_parser( + 'validate', help='Validate TrueNAS dev catalog items' + ) + parser_setup.add_argument('--path', help='Specify path of TrueNAS dev catalog', required=True) + parser_setup.add_argument( + '--base_branch', help='Specify base branch to find changed catalog items', default='master' + ) + + args = parser.parse_args() + if args.action == 'validate': + validate_dev_directory_structure(args.path, get_changed_apps(args.path, args.base_branch)) + else: + parser.print_help() + + +if __name__ == '__main__': + main() diff --git a/catalog_validation/ix-source/catalog_validation/utils.py b/catalog_validation/ix-source/catalog_validation/utils.py new file mode 100644 index 00000000..42874798 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/utils.py @@ -0,0 +1,225 @@ +import re + + +CACHED_CATALOG_FILE_NAME = 'catalog.json' +CACHED_VERSION_FILE_NAME = 'app_versions.json' +METADATA_JSON_SCHEMA = { + 'type': 'object', + 'properties': { + 'runAsContext': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'description': {'type': 'string'}, + 'gid': {'type': 'integer'}, + 'groupName': {'type': 'string'}, + 'userName': {'type': 'string'}, + 'uid': {'type': 'integer'}, + }, + 'required': ['description'], + }, + }, + 'capabilities': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'description': {'type': 'string'}, + 'name': {'type': 'string'}, + }, + 'required': ['description', 'name'], + }, + }, + 'hostMounts': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'description': {'type': 'string'}, + 'hostPath': {'type': 'string'}, + }, + 'required': ['description', 'hostPath'], + }, + }, + }, +} +RE_SCALE_VERSION = re.compile(r'^(\d{2}\.\d{2}(?:\.\d)*(?:-?(?:RC|BETA)\.?\d?)?)$') # 24.04 / 24.04.1 / 24.04-RC.1 +RE_VERSION_PATTERN = re.compile(r'(\d{2}\.\d{2}(?:\.\d)*)') # We are only interested in XX.XX here +VALID_TRAIN_REGEX = re.compile(r'^\w+[\w.-]*$') +VERSION_VALIDATION_SCHEMA = { + 'type': 'object', + 'title': 'Versions', + 'patternProperties': { + '[0-9]+.[0-9]+.[0-9]+': { + 'type': 'object', + 'properties': { + 'healthy': { + 'type': 'boolean', + }, + 'supported': { + 'type': 'boolean', + }, + 'healthy_error': { + 'type': ['string', 'null'] + }, + 'location': { + 'type': 'string', + 'pattern': r'^(\/[a-zA-Z0-9_.-]+)+$' + }, + 'last_update': { + 'type': 'string', + 'pattern': '^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$' + }, + 'required_features': { + 'type': 'array', + 'items': { + 'type': 'string' + } + }, + 'human_version': { + 'type': 'string' + }, + 'version': { + 'type': 'string', + 'pattern': '[0-9]+.[0-9]+.[0-9]+' + }, + 'chart_metadata': { + 'type': 'object', + 'properties': { + 'name': { + 'type': 'string' + }, + 'description': { + 'type': 'string' + }, + 'annotations': { + 'type': 'object' + }, + 'type': { + 'type': 'string' + }, + 'version': { + 'type': 'string', + 'pattern': '[0-9]+.[0-9]+.[0-9]+' + }, + 'apiVersion': { + 'type': 'string', + }, + 'appVersion': { + 'type': 'string' + }, + 'kubeVersion': { + 'type': 'string' + }, + 'app_readme': {'type': 'string'}, + 'detailed_readme': {'type': 'string'}, + 'changelog': {'type': ['string', 'null']}, + 'maintainers': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'name': {'type': 'string'}, + 'url': {'type': ['string', 'null']}, + 'email': {'type': 'string'}, + }, + 'required': ['name', 'email'], + } + }, + 'dependencies': { + 'type': 'array', + 'items': { + 
'type': 'object', + 'properties': { + 'name': {'type': 'string'}, + 'repository': {'type': 'string'}, + 'version': {'type': 'string'} + } + } + }, + 'home': {'type': 'string'}, + 'icon': {'type': 'string'}, + 'sources': { + 'type': 'array', + 'items': { + 'type': 'string' + } + }, + 'keywords': { + 'type': 'array', + 'items': { + 'type': 'string' + } + }, + } + }, + 'app_metadata': { + **METADATA_JSON_SCHEMA, + 'type': ['object', 'null'], + }, + 'schema': { + 'type': 'object', + 'properties': { + 'groups': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'name': { + 'type': 'string' + }, + 'description': { + 'type': 'string' + }, + }, + 'required': ['description', 'name'], + } + }, + 'portals': { + 'type': 'object' + }, + 'questions': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'variable': {'type': 'string'}, + 'label': {'type': 'string'}, + 'group': {'type': 'string'}, + 'schema': { + 'type': 'object', + 'properties': { + 'type': {'type': 'string'} + }, + 'required': ['type'] + } + } + } + } + }, + 'required': ['groups', 'questions'] + }, + }, + 'required': [ + 'healthy', 'supported', 'healthy_error', 'location', 'last_update', 'required_features', + 'human_version', 'version', 'chart_metadata', 'app_metadata', 'schema', + ], + }, + }, + 'additionalProperties': False +} +WANTED_FILES_IN_ITEM_VERSION = {'questions.yaml', 'app-readme.md', 'Chart.yaml', 'README.md'} + + +def validate_key_value_types(data_to_check, mapping, verrors, schema): + for key_mapping in mapping: + if len(key_mapping) == 2: + key, value_type, required = *key_mapping, True + else: + key, value_type, required = key_mapping + + if required and key not in data_to_check: + verrors.add(f'{schema}.{key}', f'Missing required {key!r} key.') + elif key in data_to_check and not isinstance(data_to_check[key], value_type): + verrors.add(f'{schema}.{key}', f'{key!r} value should be a {value_type.__name__!r}') diff --git a/catalog_validation/ix-source/catalog_validation/validation.py b/catalog_validation/ix-source/catalog_validation/validation.py new file mode 100644 index 00000000..5d348f39 --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/validation.py @@ -0,0 +1,459 @@ +import concurrent.futures +import json +import jsonschema +import os +import yaml + +from jsonschema import validate as json_schema_validate, ValidationError as JsonValidationError +from middlewared.validators import validate_filters +from semantic_version import Version +from typing import Optional + +from .exceptions import CatalogDoesNotExist, ValidationErrors +from .items.ix_values_utils import validate_ix_values_schema +from .items.questions_utils import ( + CUSTOM_PORTALS_KEY, CUSTOM_PORTALS_ENABLE_KEY, CUSTOM_PORTAL_GROUP_KEY, +) +from .items.utils import get_catalog_json_schema, RECOMMENDED_APPS_FILENAME, RECOMMENDED_APPS_SCHEMA, TRAIN_IGNORE_DIRS +from .schema.migration_schema import ( + APP_MIGRATION_SCHEMA, MIGRATION_DIRS, RE_MIGRATION_NAME, RE_MIGRATION_NAME_STR, APP_MIGRATION_DIR, +) +from .schema.variable import Variable +from .validation_utils import validate_chart_version +from .utils import ( + CACHED_CATALOG_FILE_NAME, CACHED_VERSION_FILE_NAME, METADATA_JSON_SCHEMA, validate_key_value_types, + VALID_TRAIN_REGEX, VERSION_VALIDATION_SCHEMA, WANTED_FILES_IN_ITEM_VERSION +) + + +def validate_catalog(catalog_path): + if not os.path.exists(catalog_path): + raise CatalogDoesNotExist(catalog_path) + + verrors = ValidationErrors() + items = [] + item_futures = [] + 
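# The pre-generated 'catalog.json' (CACHED_CATALOG_FILE_NAME) is mandatory; it is validated against the catalog json schema before any trains are walked. +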
cached_catalog_file_path = os.path.join(catalog_path, CACHED_CATALOG_FILE_NAME) + if not os.path.exists(cached_catalog_file_path): + verrors.add( + 'cached_catalog_file', + f'{CACHED_CATALOG_FILE_NAME!r} metadata file must be specified for a valid catalog' + ) + else: + try: + with open(cached_catalog_file_path, 'r') as f: + json_schema_validate(json.loads(f.read()), get_catalog_json_schema()) + + except (json.JSONDecodeError, JsonValidationError) as e: + verrors.add( + 'cached_catalog_file', + f'Failed to validate contents of {cached_catalog_file_path!r}: {e!r}' + ) + + verrors.check() + + validate_recommended_apps_file(catalog_path) + + for file_dir in os.listdir(catalog_path): + complete_path = os.path.join(catalog_path, file_dir) + if file_dir not in MIGRATION_DIRS and ( + file_dir.startswith('.') or not os.path.isdir(complete_path) or file_dir in TRAIN_IGNORE_DIRS + ): + continue + if file_dir in MIGRATION_DIRS: + if all(os.path.exists(migration_dir) for migration_dir in map( + lambda d: os.path.join(catalog_path, d), MIGRATION_DIRS + )): + verrors.add( + 'app_migrations', f'Both {", ".join(MIGRATION_DIRS)!r} cannot be used to specify app migrations' + ) + else: + for directory in MIGRATION_DIRS: + migration_dir = os.path.join(catalog_path, directory) + if not os.path.exists(migration_dir): + continue + if os.path.isdir(migration_dir): + try: + validate_migrations(migration_dir) + except ValidationErrors as e: + verrors.extend(e) + else: + verrors.add('app_migrations', f'{directory!r} is not a directory') + else: + try: + validate_train_structure(complete_path) + except ValidationErrors as e: + verrors.extend(e) + else: + items.extend(get_train_items(complete_path)) + + with concurrent.futures.ProcessPoolExecutor(max_workers=5 if len(items) > 10 else 2) as exc: + for item in items: + item_futures.append(exc.submit(validate_catalog_item, item[0], item[1])) + + for future in item_futures: + try: + future.result() + except ValidationErrors as e: + verrors.extend(e) + + verrors.check() + + +def validate_recommended_apps_file(catalog_location: str) -> None: + verrors = ValidationErrors() + try: + with open(os.path.join(catalog_location, RECOMMENDED_APPS_FILENAME), 'r') as f: + data = yaml.safe_load(f.read()) + json_schema_validate(data, RECOMMENDED_APPS_SCHEMA) + except FileNotFoundError: + return + except yaml.YAMLError: + verrors.add(RECOMMENDED_APPS_FILENAME, 'Must be a valid yaml file') + except JsonValidationError as e: + verrors.add(RECOMMENDED_APPS_FILENAME, f'Invalid format specified: {e}') + + verrors.check() + + +def validate_migrations(migration_dir): + verrors = ValidationErrors() + for migration_file in os.listdir(migration_dir): + if not RE_MIGRATION_NAME.findall(migration_file): + verrors.add( + f'app_migrations.{migration_file}', + 'Invalid naming scheme used for migration file name. ' + f'It should be conforming to {RE_MIGRATION_NAME_STR!r} pattern.' 
+ ) + else: + try: + with open(os.path.join(migration_dir, migration_file), 'r') as f: + data = json.loads(f.read()) + jsonschema.validate(data, APP_MIGRATION_SCHEMA) + except (json.JSONDecodeError, jsonschema.ValidationError) as e: + verrors.add( + f'app_migrations.{migration_file}', + f'Failed to validate migration file structure: {e}' + ) + verrors.check() + + +def validate_train_structure(train_path): + train = os.path.basename(train_path) + verrors = ValidationErrors() + if not VALID_TRAIN_REGEX.match(train): + verrors.add(train, 'Train name is invalid.') + + verrors.check() + + +def get_train_items(train_path): + train = os.path.basename(train_path) + items = [] + for catalog_item in os.listdir(train_path): + item_path = os.path.join(train_path, catalog_item) + if not os.path.isdir(item_path): + continue + items.append((item_path, f'{train}.{catalog_item}')) + return items + + +def validate_catalog_item(catalog_item_path, schema, validate_versions=True): + # We should ensure that each catalog item has at least 1 version available + # Also that we have item.yaml present + verrors = ValidationErrors() + item_name = os.path.join(catalog_item_path) + files = [] + versions = [] + + if not os.path.isdir(catalog_item_path): + verrors.add(schema, 'Catalog item must be a directory') + verrors.check() + + for file_dir in os.listdir(catalog_item_path): + complete_path = os.path.join(catalog_item_path, file_dir) + if os.path.isdir(complete_path): + versions.append(complete_path) + else: + files.append(file_dir) + + if not versions: + verrors.add(f'{schema}.versions', f'No versions found for {item_name} item.') + + if 'item.yaml' not in files: + verrors.add(f'{schema}.item', 'Item configuration (item.yaml) not found') + else: + with open(os.path.join(catalog_item_path, 'item.yaml'), 'r') as f: + item_config = yaml.safe_load(f.read()) + + validate_key_value_types( + item_config, ( + ('categories', list), ('tags', list, False), ('screenshots', list, False), + ), verrors, f'{schema}.item_config' + ) + + cached_version_file_path = os.path.join(catalog_item_path, CACHED_VERSION_FILE_NAME) + if os.path.exists(cached_version_file_path): + try: + with open(cached_version_file_path, 'r') as f: + validate_catalog_item_version_data( + json.loads(f.read()), f'{schema}.{CACHED_VERSION_FILE_NAME}', verrors + ) + except json.JSONDecodeError: + verrors.add( + f'{schema}.{CACHED_VERSION_FILE_NAME}', f'{CACHED_VERSION_FILE_NAME!r} is not a valid json file' + ) + + for version_path in (versions if validate_versions else []): + try: + validate_catalog_item_version(version_path, f'{schema}.versions.{os.path.basename(version_path)}') + except ValidationErrors as e: + verrors.extend(e) + + verrors.check() + + +def validate_app_migrations(verrors, version_path, schema): + app_migration_path = os.path.join(version_path, APP_MIGRATION_DIR) + + if not os.path.exists(app_migration_path): + return verrors + + for migration_file in os.listdir(app_migration_path): + migration_file_path = os.path.join(app_migration_path, migration_file) + if not os.access(migration_file_path, os.X_OK): + verrors.add(schema, f'{migration_file!r} is not executable') + return verrors + + +def validate_catalog_item_version_data(version_data: dict, schema: str, verrors: ValidationErrors) -> ValidationErrors: + try: + json_schema_validate(version_data, VERSION_VALIDATION_SCHEMA) + except JsonValidationError as e: + verrors.add(schema, f'Invalid format specified for application versions: {e}') + return verrors + + +def validate_catalog_item_version( + 
version_path: str, schema: str, version_name: Optional[str] = None, item_name: Optional[str] = None, + validate_values: bool = False, +): + verrors = ValidationErrors() + version_name = version_name or os.path.basename(version_path) + item_name = item_name or version_path.split('/')[-2] + try: + Version(version_name) + except ValueError: + verrors.add(f'{schema}.name', f'{version_name!r} is not a valid version name.') + + files_diff = WANTED_FILES_IN_ITEM_VERSION ^ set( + f for f in os.listdir(version_path) if f in WANTED_FILES_IN_ITEM_VERSION + ) + if files_diff: + verrors.add(f'{schema}.required_files', f'Missing {", ".join(files_diff)} required configuration files.') + + chart_version_path = os.path.join(version_path, 'Chart.yaml') + validate_chart_version(verrors, chart_version_path, schema, item_name, version_name) + + questions_path = os.path.join(version_path, 'questions.yaml') + if os.path.exists(questions_path): + try: + validate_questions_yaml(questions_path, f'{schema}.questions_configuration') + except ValidationErrors as v: + verrors.extend(v) + + for values_file in ['ix_values.yaml'] + (['values.yaml'] if validate_values else []): + values_path = os.path.join(version_path, values_file) + if os.path.exists(values_path): + try: + validate_ix_values_yaml(values_path, f'{schema}.values_configuration') + except ValidationErrors as v: + verrors.extend(v) + + metadata_path = os.path.join(version_path, 'metadata.yaml') + if os.path.exists(metadata_path): + try: + validate_metadata_yaml(metadata_path, f'{schema}.metadata_configuration') + except ValidationErrors as v: + verrors.extend(v) + + validate_app_migrations(verrors, version_path, f'{schema}.app_migrations') + + verrors.check() + + +def validate_ix_values_yaml(ix_values_yaml_path, schema): + verrors = ValidationErrors() + + with open(ix_values_yaml_path, 'r') as f: + try: + ix_values = yaml.safe_load(f.read()) + except yaml.YAMLError: + verrors.add(schema, 'Must be a valid yaml file') + + verrors.check() + + if isinstance(ix_values, dict): + portals = ix_values.get(CUSTOM_PORTALS_KEY) + if portals: + try: + validate_ix_values_schema(schema, portals) + except ValidationErrors as ve: + verrors.extend(ve) + else: + verrors.add(schema, 'Must be a dictionary') + + verrors.check() + + +def validate_metadata_yaml(metadata_yaml_path, schema): + verrors = ValidationErrors() + with open(metadata_yaml_path, 'r') as f: + try: + metadata = yaml.safe_load(f.read()) + except yaml.YAMLError: + verrors.add(schema, 'Must be a valid yaml file') + else: + try: + json_schema_validate(metadata, METADATA_JSON_SCHEMA) + except JsonValidationError as e: + verrors.add(schema, f'Invalid format specified for application metadata: {e}') + + verrors.check() + + +def validate_questions_yaml(questions_yaml_path, schema): + verrors = ValidationErrors() + + with open(questions_yaml_path, 'r') as f: + try: + questions_config = yaml.safe_load(f.read()) + except yaml.YAMLError: + verrors.add(schema, 'Must be a valid yaml file') + else: + if not isinstance(questions_config, dict): + verrors.add(schema, 'Must be a dictionary') + + verrors.check() + + validate_key_value_types( + questions_config, ( + ('groups', list), ('questions', list), ('portals', dict, False), (CUSTOM_PORTALS_ENABLE_KEY, bool, False), + (CUSTOM_PORTAL_GROUP_KEY, str, False), + ), verrors, schema + ) + + verrors.check() + + groups = [] + for index, group in enumerate(questions_config['groups']): + if not isinstance(group, dict): + verrors.add(f'{schema}.groups.{index}', 'Type of group should be a 
dictionary.') + continue + + if group.get('name'): + groups.append(group['name']) + + validate_key_value_types(group, (('name', str), ('description', str)), verrors, f'{schema}.group.{index}') + + for index, portal_details in enumerate((questions_config.get('portals') or {}).items()): + portal_type, portal_schema = portal_details + error_schema = f'{schema}.portals.{index}' + if not isinstance(portal_type, str): + verrors.add(error_schema, 'Portal type must be a string') + if not isinstance(portal_schema, dict): + verrors.add(error_schema, 'Portal schema must be a dictionary') + else: + validate_key_value_types( + portal_schema, (('protocols', list), ('host', list), ('ports', list), ('path', str, False)), + verrors, error_schema + ) + + validate_variable_uniqueness(questions_config['questions'], f'{schema}.questions', verrors) + for index, question in enumerate(questions_config['questions']): + validate_question(question, f'{schema}.questions.{index}', verrors, (('group', str),)) + if question.get('group') and question['group'] not in groups: + verrors.add(f'{schema}.questions.{index}.group', f'Please specify a group declared in "{schema}.groups"') + + if questions_config.get(CUSTOM_PORTALS_ENABLE_KEY): + if not questions_config.get(CUSTOM_PORTAL_GROUP_KEY): + verrors.add( + f'{schema}.{CUSTOM_PORTALS_ENABLE_KEY}', + f'{CUSTOM_PORTAL_GROUP_KEY!r} must be specified when user specified portals are desired' + ) + elif questions_config[CUSTOM_PORTAL_GROUP_KEY] not in groups: + verrors.add( + f'{schema}.{CUSTOM_PORTAL_GROUP_KEY}', + 'Specified group not declared under "groups"' + ) + + verrors.check() + + +def validate_variable_uniqueness(data, schema, verrors): + variables = [] + for index, question in enumerate(data): + if question['variable'] in variables: + verrors.add( + f'{schema}.{index}', f'Variable name {question["variable"]!r} has been used again which is not allowed' + ) + else: + variables.append(question['variable']) + sub_questions = question.get('subquestions') or [] + for sub_index, sub_question in enumerate(sub_questions): + if sub_question['variable'] in variables: + verrors.add( + f'{schema}.{index}.subquestions.{sub_index}', + f'Variable name {sub_question["variable"]!r} has been used again which is not allowed' + ) + else: + variables.append(sub_question['variable']) + + verrors.check() + + +def validate_question(question_data, schema, verrors, validate_top_level_attrs=None): + if not isinstance(question_data, dict): + verrors.add(schema, 'Question must be a valid dictionary.') + return + + validate_top_level_attrs = validate_top_level_attrs or tuple() + validate_key_value_types( + question_data, (('variable', str), ('label', str), ('schema', dict)) + validate_top_level_attrs, verrors, schema + ) + if type(question_data.get('schema')) != dict: + return + + if question_data['variable'] == CUSTOM_PORTALS_KEY: + verrors.add( + f'{schema}.variable', + f'{CUSTOM_PORTALS_KEY!r} is a reserved variable name and cannot be specified by app developer' + ) + # No need to validate the question data etc here + return + + try: + Variable(question_data).validate(schema) + except ValidationErrors as ve: + verrors.extend(ve) + return + + schema_data = question_data['schema'] + variable_type = schema_data['type'] + + if filters := schema_data.get('show_if'): + validate_filters(filters) + + for condition, key, schema_str in ( + (variable_type != 'list', 'subquestions', f'{schema}.schema.subquestions'), + (variable_type == 'list', 'items', f'{schema}.schema.items'), + (variable_type == 'dict', 
'attrs', f'{schema}.schema.attrs'), + ): + if not (condition and type(schema_data.get(key)) == list): + continue + + if variable_type == 'dict': + validate_variable_uniqueness(schema_data[key], f'{schema}.{schema_str}', verrors) + + for index, item in enumerate(schema_data[key]): + validate_question(item, f'{schema_str}.{index}', verrors) diff --git a/catalog_validation/ix-source/catalog_validation/validation_utils.py b/catalog_validation/ix-source/catalog_validation/validation_utils.py new file mode 100644 index 00000000..c6ad270f --- /dev/null +++ b/catalog_validation/ix-source/catalog_validation/validation_utils.py @@ -0,0 +1,94 @@ +import os +import yaml + +from middlewared.plugins.update_.utils import can_update +from semantic_version import Version +from typing import Optional + +from .exceptions import ValidationErrors +from .utils import validate_key_value_types, RE_SCALE_VERSION + + +def validate_min_max_version_values(annotations_dict, verrors, schema): + validate_key_value_types( + annotations_dict, (('min_scale_version', str, False), ('max_scale_version', str, False)), verrors, schema + ) + + if verrors: + # No point in proceeding further + return + + for version in filter(lambda v: v in annotations_dict, ['min_scale_version', 'max_scale_version']): + if not RE_SCALE_VERSION.match(annotations_dict[version]): + verrors.add( + f'{schema}.{version}', + f'Format of provided {version} value is not correct' + ) + + if ( + not verrors and all(version in annotations_dict for version in ['min_scale_version', 'max_scale_version']) and + annotations_dict['min_scale_version'] != annotations_dict['max_scale_version'] and + not can_update(annotations_dict['min_scale_version'], annotations_dict['max_scale_version']) + ): + verrors.add(schema, 'Provided min_scale_version is greater than provided max_scale_version') + + +def validate_chart_version( + verrors: ValidationErrors, chart_version_path: str, schema: str, item_name: str, version_name: Optional[str] = None, +) -> ValidationErrors: + if os.path.exists(chart_version_path): + with open(chart_version_path, 'r') as f: + try: + chart_config = yaml.safe_load(f.read()) + except yaml.YAMLError: + verrors.add(schema, 'Must be a valid yaml file') + else: + if not isinstance(chart_config, dict): + verrors.add(schema, 'Must be a dictionary') + else: + if chart_config.get('name') != item_name: + verrors.add(f'{schema}.item_name', 'Item name not correctly set in "Chart.yaml".') + + if not isinstance(chart_config.get('annotations', {}), dict): + verrors.add(f'{schema}.annotations', 'Annotations must be a dictionary') + elif chart_config.get('annotations'): + validate_min_max_version_values(chart_config['annotations'], verrors, schema) + + if not isinstance(chart_config.get('sources', []), list): + verrors.add(f'{schema}.sources', 'Sources must be a list') + else: + for index, source in enumerate(chart_config.get('sources', [])): + if not isinstance(source, str): + verrors.add(f'{schema}.sources.{index}', 'Source must be a string') + + if not isinstance(chart_config.get('maintainers', []), list): + verrors.add(f'{schema}.maintainers', 'Maintainers must be a list') + else: + for index, maintainer in enumerate(chart_config.get('maintainers', [])): + if not isinstance(maintainer, dict): + verrors.add(f'{schema}.maintainers.{index}', 'Maintainer must be a dictionary') + elif not all(k in maintainer and isinstance(maintainer[k], str) for k in ('name', 'email')): + verrors.add( + f'{schema}.maintainers.{index}', + 'Maintainer must have name and email 
attributes defined and be strings.' + ) + + chart_version = chart_config.get('version') + if chart_version is None: + verrors.add(f'{schema}.version', 'Version must be configured in "Chart.yaml"') + else: + try: + Version(chart_version) + except ValueError: + verrors.add(f'{schema}.version', f'{chart_version!r} is not a valid version name') + + if version_name is not None and chart_version != version_name: + verrors.add( + f'{schema}.version', + 'Configured version in "Chart.yaml" does not match version directory name.' + ) + + else: + verrors.add(schema, 'Missing chart version file') + + return verrors diff --git a/catalog_validation/ix-source/debian/changelog b/catalog_validation/ix-source/debian/changelog new file mode 100644 index 00000000..0019cb79 --- /dev/null +++ b/catalog_validation/ix-source/debian/changelog @@ -0,0 +1,5 @@ +catalog-validation (0.1-0~truenas+1) bullseye-truenas-unstable; urgency=medium + + * Initial release + + -- Waqar Ahmed Fri, 27 Nov 2020 00:26:21 +0500 diff --git a/catalog_validation/ix-source/debian/control b/catalog_validation/ix-source/debian/control new file mode 100644 index 00000000..3034a70d --- /dev/null +++ b/catalog_validation/ix-source/debian/control @@ -0,0 +1,27 @@ +Source: catalog-validation +Section: contrib/python +Priority: optional +Maintainer: Waqar Ahmed +Build-Depends: debhelper-compat (= 12), + dh-python, + python3-dev, + python3-jsonschema, + python3-semantic-version, + python3-kubernetes, + python3-yaml, + python3-setuptools +Standards-Version: 4.4.1 +Homepage: https://github.com/truenas/catalog_validation +Testsuite: autopkgtest-pkg-python + +Package: python3-catalog-validation +Architecture: any +Depends: python3-semantic-version, + python3-jsonschema, + python3-kubernetes, + python3-yaml, + ${shlibs:Depends}, + ${misc:Depends}, + ${python3:Depends} +Description: Validate TrueNAS Catalogs + This package helps validate truenas catalogs. 
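For orientation, a minimal sketch of driving the validators above directly from Python rather than through the packaged console scripts; the catalog path, item name and version here are assumed values for illustration, not taken from these sources:

from catalog_validation.exceptions import CatalogDoesNotExist, ValidationErrors
from catalog_validation.validation import validate_catalog
from catalog_validation.validation_utils import validate_chart_version

try:
    # Walks trains, items and versions, aggregating failures into ValidationErrors.
    validate_catalog('/mnt/catalogs/my-catalog')  # assumed path
except (CatalogDoesNotExist, ValidationErrors) as e:
    print(f'catalog failed validation: {e}')

# Single-chart check: Chart.yaml must carry the item name and a semantic version
# matching the version directory name (see validate_chart_version above).
verrors = ValidationErrors()
validate_chart_version(
    verrors, 'charts/myapp/1.0.0/Chart.yaml', 'charts.myapp.versions.1.0.0', 'myapp', '1.0.0',
)
verrors.check()  # raises ValidationErrors if the chart metadata is malformed

The console scripts declared in setup.py (catalog_validate, catalog_update, dev_charts_validate) wrap these same entry points.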
diff --git a/catalog_validation/ix-source/debian/rules b/catalog_validation/ix-source/debian/rules new file mode 100644 index 00000000..3bb0ffc0 --- /dev/null +++ b/catalog_validation/ix-source/debian/rules @@ -0,0 +1,7 @@ +#!/usr/bin/make -f +export DH_VERBOSE = 1 + +export PYBUILD_NAME=catalog_validation + +%: + dh $@ --with python3 --buildsystem=pybuild diff --git a/catalog_validation/ix-source/debian/source/format b/catalog_validation/ix-source/debian/source/format new file mode 100644 index 00000000..163aaf8d --- /dev/null +++ b/catalog_validation/ix-source/debian/source/format @@ -0,0 +1 @@ +3.0 (quilt) diff --git a/catalog_validation/ix-source/debian/source/options b/catalog_validation/ix-source/debian/source/options new file mode 100644 index 00000000..cb61fa52 --- /dev/null +++ b/catalog_validation/ix-source/debian/source/options @@ -0,0 +1 @@ +extend-diff-ignore = "^[^/]*[.]egg-info/" diff --git a/catalog_validation/ix-source/requirements.txt b/catalog_validation/ix-source/requirements.txt new file mode 100644 index 00000000..ac13017d --- /dev/null +++ b/catalog_validation/ix-source/requirements.txt @@ -0,0 +1,6 @@ +gitpython +jsonschema==4.10.3 +kubernetes +markdown +pyyaml +semantic_version diff --git a/catalog_validation/ix-source/setup.cfg b/catalog_validation/ix-source/setup.cfg new file mode 100644 index 00000000..aa079ec5 --- /dev/null +++ b/catalog_validation/ix-source/setup.cfg @@ -0,0 +1,2 @@ +[flake8] +max-line-length=120 diff --git a/catalog_validation/ix-source/setup.py b/catalog_validation/ix-source/setup.py new file mode 100644 index 00000000..a64ad680 --- /dev/null +++ b/catalog_validation/ix-source/setup.py @@ -0,0 +1,21 @@ +from distutils.core import setup +from setuptools import find_packages + +VERSION = '0.1' + +setup( + name='catalog_validation', + description='Validate TrueNAS Catalog(s)', + version=VERSION, + include_package_data=True, + packages=find_packages(), + license='GNU3', + platforms='any', + entry_points={ + 'console_scripts': [ + 'catalog_validate = catalog_validation.scripts.catalog_validate:main', + 'catalog_update = catalog_validation.scripts.catalog_update:main', + 'dev_charts_validate = catalog_validation.scripts.dev_apps_validate:main', + ], + }, +) diff --git a/catalog_validation/truecharts-fork/.github/workflows/ci.yml b/catalog_validation/truecharts-fork/.github/workflows/ci.yml new file mode 100644 index 00000000..2d48c77a --- /dev/null +++ b/catalog_validation/truecharts-fork/.github/workflows/ci.yml @@ -0,0 +1,34 @@ +name: CI + +on: [push] + +jobs: + build-deb: + runs-on: ubuntu-latest + container: + image: tccr.io/truecharts/catalog_validation:latest + + steps: + - name: Checkout + uses: actions/checkout@v1 + + - name: Build deb package + run: > + dpkg-buildpackage + -B + --no-sign + -jauto + + - name: Create artifacts dir + run: mkdir artifacts + if: success() + + - name: Move artifacts + run: mv ../*.deb artifacts + if: success() + + - uses: actions/upload-artifact@v1 + with: + name: py-catalog-validation + path: artifacts + if: success() diff --git a/catalog_validation/truecharts-fork/.github/workflows/containers.build.yaml b/catalog_validation/truecharts-fork/.github/workflows/containers.build.yaml new file mode 100644 index 00000000..ee67f87f --- /dev/null +++ b/catalog_validation/truecharts-fork/.github/workflows/containers.build.yaml @@ -0,0 +1,180 @@ +name: build_image + +on: + push: + branches: + - 'master' + +jobs: + build: + permissions: + actions: read # for detecting the Github Actions environment. 
+ id-token: write # for creating OIDC tokens for signing. + packages: write # for uploading attestations. + name: Build + runs-on: ubuntu-22.04 + if: "!contains(github.event.head_commit.message, '[ci-skip]')" + steps: + - name: Checkout + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + + # Define if tests and push should be run against which versions/platforms + - name: Prepare + id: prep + run: | + PLATFORM="linux/amd64" + echo ::set-output name=platform::${PLATFORM} + + if [ "${{github.event_name}}" == "pull_request" ]; then + echo ::set-output name=push::false + echo ::set-output name=cache_from::"type=registry,ref=ghcr.io/${{ github.repository_owner }}/${{ matrix.container }}:buildcache" || echo ::set-output name=cache_from::"" + echo ::set-output name=cache_to::"" + else + echo ::set-output name=push::true + echo ::set-output name=cache_from::"type=registry,ref=ghcr.io/${{ github.repository_owner }}/${{ matrix.container }}:buildcache" + echo ::set-output name=cache_to::"type=registry,ref=ghcr.io/${{ github.repository_owner }}/${{ matrix.container }}:buildcache,mode=max" + fi + - name: Get Time + id: time + uses: nanzm/get-time-action@887e4db9af58ebae64998b7105921b816af77977 # v2.0 + with: + timeZone: 1 + format: 'YYYYMMDDHHmmss' + + - name: Set up QEMU + uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3 + with: + platforms: amd64 + + - uses: sigstore/cosign-installer@main + + - name: Install Syft + uses: anchore/sbom-action/download-syft@78fc58e266e87a38d4194b2137a3d4e9bcaf7ca1 # v0.14.3 + + - name: Login to Quay + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3 + if: github.event_name != 'pull_request' + with: + registry: quay.io + username: ${{ secrets.QUAY_USER }} + password: ${{ secrets.QUAY_SECRET }} + + - name: Login to GHCR + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3 + if: github.event_name != 'pull_request' + with: + registry: ghcr.io + username: ${{ secrets.GHCR_USERNAME }} + password: ${{ secrets.GHCR_TOKEN }} + + - name: Docker meta + id: meta + uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5 + with: + # list of Docker images to use as base name for tags + images: | + quay.io/truecharts/catalog_validation + ghcr.io/truecharts/catalog_validation + tccr.io/truecharts/catalog_validation + + # Install and configure Buildx + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3 + with: + install: true + version: latest + driver-opts: image=moby/buildkit:latest + + # Creates a local build to run tests on + - name: Build and Load local test-container + if: ${{ steps.prep.outputs.goss == 'true' }} + uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5 + with: + build-args: | + CONTAINER_NAME=catalog_validation + context: . + file: ./Dockerfile + load: true + tags: | + ghcr.io/${{ github.repository_owner }}/catalog_validation:test + + # Push if not a PR, otherwise just test the build process for all requested platforms + - name: Build and Push + id: push + uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5 + with: + build-args: | + CONTAINER_NAME=catalog_validation + context: . 
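+          # prep.outputs.push is false for pull request builds, so those only exercise the image build without publishing.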
+ platforms: ${{ steps.prep.outputs.platform }} + file: ./Dockerfile + push: ${{ steps.prep.outputs.push }} + labels: ${{ steps.meta.outputs.labels }} + tags: | + quay.io/truecharts/catalog_validation:latest + quay.io/truecharts/catalog_validation:build${{ steps.time.outputs.time }} + + - name: Set quay to Public + if: github.event_name != 'pull_request' + run: | + curl -X POST -H "Content-Type: application/json" -d '{"visibility": "public"}' -H "Authorization: Bearer ${{ secrets.QUAY_TOKEN }}" "https://quay.io/api/v1/repository/truecharts/catalog_validation/changevisibility" + + - name: Sign the images + if: github.event_name != 'pull_request' + run: | + cosign sign quay.io/truecharts/catalog_validation@${{ steps.push.outputs.digest }} -y -a "repo=${{ github.repository }}" -a "workflow=${{ github.workflow }}" -a "ref=${{ github.sha }}" + + - name: Verify the pushed tags + if: github.event_name != 'pull_request' + run: | + cosign verify quay.io/truecharts/catalog_validation@${{ steps.push.outputs.digest }} --certificate-oidc-issuer=https://token.actions.githubusercontent.com --certificate-identity=https://github.com/truecharts/catalog_validation/.github/workflows/containers.build.yaml@refs/heads/master + + - name: Generate SBOM + if: github.event_name != 'pull_request' + run: | + syft "quay.io/truecharts/catalog_validation:build${{ steps.time.outputs.time }}@${{ steps.push.outputs.digest }}" -o spdx-json=catalog_validation-sbom-spdx.json + + - name: Attach SBOM to image + if: github.event_name != 'pull_request' + run: | + cosign attest --predicate catalog_validation-sbom-spdx.json --type spdx "quay.io/truecharts/catalog_validation@${{ steps.push.outputs.digest }}" -y + + + - name: Verify SBOM attestation + if: github.event_name != 'pull_request' + run: | + cosign verify-attestation quay.io/truecharts/catalog_validation@${{ steps.push.outputs.digest }} --type https://spdx.dev/Document --certificate-oidc-issuer=https://token.actions.githubusercontent.com --certificate-identity=https://github.com/truecharts/catalog_validation/.github/workflows/containers.build.yaml@refs/heads/master | jq '.payload |= @base64d | .payload | fromjson' + + - name: Generate provenance + id: gen-prov + if: github.event_name != 'pull_request' + run: | + wget https://github.com/slsa-framework/slsa-github-generator/releases/download/v1.5.0/slsa-generator-container-linux-amd64 + chmod +x slsa-generator-container-linux-amd64 + # Generate a predicate only. 
+ ./slsa-generator-container-linux-amd64 generate --predicate="catalog_validation-predicate.json" + env: + UNTRUSTED_IMAGE: "quay.io/truecharts/catalog_validation" + UNTRUSTED_DIGEST: "${{ steps.push.outputs.digest }}" + GITHUB_CONTEXT: "${{ toJSON(github) }}" + + - name: Sign provenance + id: sign-prov + if: github.event_name != 'pull_request' + run: | + cosign attest --predicate="catalog_validation-predicate.json" \ + --type slsaprovenance \ + --yes \ + "quay.io/truecharts/catalog_validation@${{ steps.push.outputs.digest }}" + env: + COSIGN_EXPERIMENTAL: 1 + + - name: Verify provenance attestation + if: github.event_name != 'pull_request' + id: verf-prov + run: | + cosign verify-attestation quay.io/truecharts/catalog_validation@${{ steps.push.outputs.digest }} --type slsaprovenance --certificate-oidc-issuer=https://token.actions.githubusercontent.com --certificate-identity=https://github.com/truecharts/catalog_validation/.github/workflows/containers.build.yaml@refs/heads/master | jq '.payload |= @base64d | .payload | fromjson' + env: + COSIGN_EXPERIMENTAL: 1 + diff --git a/catalog_validation/truecharts-fork/.github/workflows/lint.yml b/catalog_validation/truecharts-fork/.github/workflows/lint.yml new file mode 100644 index 00000000..8cd12cb7 --- /dev/null +++ b/catalog_validation/truecharts-fork/.github/workflows/lint.yml @@ -0,0 +1,21 @@ +name: flake8 + +on: [push] + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v1 + with: + python-version: 3.8 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install flake8 + - name: Analysing the code with flake8 + run: flake8 . diff --git a/catalog_validation/truecharts-fork/.github/workflows/test.yaml b/catalog_validation/truecharts-fork/.github/workflows/test.yaml new file mode 100644 index 00000000..74fc0f44 --- /dev/null +++ b/catalog_validation/truecharts-fork/.github/workflows/test.yaml @@ -0,0 +1,24 @@ +name: catalog_validation_test + +on: [push] + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v1 + with: + python-version: 3.8 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r catalog_validation/pytest/requirements.txt + pip install -r requirements.txt + - name: Installing catalog validation + run: python setup.py install + - name: Running test + run: pytest catalog_validation/pytest/ diff --git a/catalog_validation/truecharts-fork/.gitignore b/catalog_validation/truecharts-fork/.gitignore new file mode 100644 index 00000000..99ac9116 --- /dev/null +++ b/catalog_validation/truecharts-fork/.gitignore @@ -0,0 +1,2 @@ +catalog_validation/__pycache__ +catalog_validation/schema/__pycache__ diff --git a/catalog_validation/truecharts-fork/Dockerfile b/catalog_validation/truecharts-fork/Dockerfile new file mode 100644 index 00000000..d5e9bc46 --- /dev/null +++ b/catalog_validation/truecharts-fork/Dockerfile @@ -0,0 +1,24 @@ +FROM debian:bookworm + +RUN apt-get update + +RUN apt-get install -y \ + debhelper-compat \ + dh-python \ + python3-dev \ + python3-setuptools \ + devscripts \ + python3-jsonschema \ + python3-semantic-version \ + python3-kubernetes \ + python3-yaml \ + python3-pip + +ENV PYTHONUNBUFFERED 1 +ENV WORK_DIR /app +RUN mkdir -p ${WORK_DIR} +WORKDIR ${WORK_DIR} + +ADD . 
${WORK_DIR}/ +RUN pip install --break-system-packages -r requirements.txt +RUN pip install --break-system-packages -U . diff --git a/catalog_validation/truecharts-fork/LICENSE b/catalog_validation/truecharts-fork/LICENSE new file mode 100644 index 00000000..f288702d --- /dev/null +++ b/catalog_validation/truecharts-fork/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. 
+States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/catalog_validation/truecharts-fork/README.md b/catalog_validation/truecharts-fork/README.md
new file mode 100644
index 00000000..e142d111
--- /dev/null
+++ b/catalog_validation/truecharts-fork/README.md
@@ -0,0 +1,20 @@
+# catalogs_validation
+Validate TrueNAS-compliant catalog structure / format.
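+
+A minimal sketch of driving the validation from Python (assumptions: the
+package is installed, and `validate_catalog` in `catalog_validation.validation`
+is the top-level entry point, raising the exceptions defined in
+`catalog_validation.exceptions`; the path below is a placeholder):
+
+```python
+from catalog_validation.exceptions import CatalogDoesNotExist, ValidationErrors
+from catalog_validation.validation import validate_catalog
+
+try:
+    validate_catalog('/path/to/catalog')  # placeholder path
+except CatalogDoesNotExist:
+    print('No catalog exists at the given path')
+except ValidationErrors as errors:
+    # ValidationErrors yields (attribute, message, errno) tuples when iterated
+    for attribute, message, _errno in errors:
+        print(f'{attribute}: {message}')
+```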
diff --git a/catalog_validation/truecharts-fork/catalog_validation/__init__.py b/catalog_validation/truecharts-fork/catalog_validation/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/catalog_validation/truecharts-fork/catalog_validation/ci/__init__.py b/catalog_validation/truecharts-fork/catalog_validation/ci/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/catalog_validation/truecharts-fork/catalog_validation/ci/utils.py b/catalog_validation/truecharts-fork/catalog_validation/ci/utils.py
new file mode 100644
index 00000000..2b8f0914
--- /dev/null
+++ b/catalog_validation/truecharts-fork/catalog_validation/ci/utils.py
@@ -0,0 +1,56 @@
+import os
+import yaml
+
+from catalog_validation.items.utils import DEVELOPMENT_DIR
+from jsonschema import validate as json_schema_validate
+from semantic_version import Version
+
+
+DEV_DIRECTORY_RELATIVE_PATH: str = os.path.join('library', DEVELOPMENT_DIR)
+TO_KEEP_VERSIONS = 'to_keep_versions.yaml'
+OPTIONAL_METADATA_FILES = ['upgrade_info.json', 'upgrade_strategy', TO_KEEP_VERSIONS]
+REQUIRED_METADATA_FILES = ['item.yaml']
+UPDATE_STRATEGY_FILE = 'upgrade_strategy'
+
+
+REQUIRED_VERSIONS_SCHEMA = {
+    'type': 'array',
+    'items': {
+        'type': 'string',
+        # Anchored pattern with escaped dots so that only 'x.y.z' version strings match
+        'pattern': r'^[0-9]+\.[0-9]+\.[0-9]+$'
+    }
+}
+
+
+def get_app_version(app_path: str) -> str:
+    # This assumes that file exists and version is specified and is good
+    with open(os.path.join(app_path, 'Chart.yaml'), 'r') as f:
+        return yaml.safe_load(f.read())['version']
+
+
+def get_ci_development_directory(catalog_path: str) -> str:
+    return os.path.join(catalog_path, DEV_DIRECTORY_RELATIVE_PATH)
+
+
+def get_to_keep_versions(app_dir_path: str) -> list:
+    required_version_path = os.path.join(app_dir_path, TO_KEEP_VERSIONS)
+    if not os.path.exists(required_version_path):
+        return []
+
+    with open(required_version_path, 'r') as f:
+        data = yaml.safe_load(f.read())
+        json_schema_validate(data, REQUIRED_VERSIONS_SCHEMA)
+        return data
+
+
+def version_has_been_bumped(app_path: str, new_version: str) -> bool:
+    if not os.path.isdir(app_path):
+        return True
+
+    # The new version must be greater than the highest version already published for the app
+    versions = [
+        Version(version) for version in filter(lambda v: os.path.isdir(os.path.join(app_path, v)), os.listdir(app_path))
+    ]
+    versions.sort()
+    return not versions or Version(new_version) > versions[-1]
diff --git a/catalog_validation/truecharts-fork/catalog_validation/ci/validate.py b/catalog_validation/truecharts-fork/catalog_validation/ci/validate.py
new file mode 100644
index 00000000..6de3a360
--- /dev/null
+++ b/catalog_validation/truecharts-fork/catalog_validation/ci/validate.py
@@ -0,0 +1,97 @@
+import os
+import yaml
+
+from catalog_validation.exceptions import ValidationErrors
+from catalog_validation.validation import validate_catalog_item_version, validate_chart_version
+from jsonschema import ValidationError as JsonValidationError
+
+from .utils import (
+    get_app_version, get_ci_development_directory, REQUIRED_METADATA_FILES, version_has_been_bumped,
+    TO_KEEP_VERSIONS, get_to_keep_versions, UPDATE_STRATEGY_FILE
+)
+
+
+def validate_dev_directory_structure(catalog_path: str, to_check_apps: dict) -> None:
+    verrors = ValidationErrors()
+    dev_directory = get_ci_development_directory(catalog_path)
+    if not os.path.exists(dev_directory):
+        return
+
+    for train_name in filter(
+        lambda name: name in to_check_apps and os.path.isdir(os.path.join(dev_directory, name)),
+        os.listdir(dev_directory)
+    ):
+        validate_train(
+            catalog_path, os.path.join(dev_directory, train_name),
+            f'dev.{train_name}', to_check_apps[train_name]
+        )
+    verrors.check()
+
+
+def validate_train(catalog_path: str, train_path: str, schema: str, to_check_apps: list) -> None:
+    verrors = ValidationErrors()
+    train_name = os.path.basename(train_path)
+    for app_name in filter(
+        lambda name: os.path.isdir(os.path.join(train_path, name)), os.listdir(train_path)
+    ):
+        if app_name not in to_check_apps:
+            continue
+
+        app_path = os.path.join(train_path, app_name)
+        try:
+            validate_app(app_path, f'{schema}.{app_name}')
+        except ValidationErrors as ve:
+            verrors.extend(ve)
+        else:
+            published_train_app_path = os.path.join(catalog_path, train_name, app_name)
+            if not os.path.exists(published_train_app_path):
+                # The application is new and we are good
+                continue
+
+            if not version_has_been_bumped(published_train_app_path, get_app_version(app_path)):
+                verrors.add(
+                    f'{schema}.{app_name}.version',
+                    'Version must be bumped as app has been changed but version has not been updated'
+                )
+
+    verrors.check()
+
+
+def validate_keep_versions(app_dir_path: str, schema: str, verrors: ValidationErrors) -> None:
+    try:
+        get_to_keep_versions(app_dir_path)
+    except yaml.YAMLError:
+        verrors.add(f'{schema}.{TO_KEEP_VERSIONS}', 'Invalid yaml format')
+    except JsonValidationError:
+        verrors.add(
+            f'{schema}.{TO_KEEP_VERSIONS}',
+            f'Invalid json schema: {TO_KEEP_VERSIONS} must contain a list of required versions'
+        )
+
+
+def validate_upgrade_strategy(app_path, schema, verrors):
+    # The upgrade strategy file is executed by the CI, so it must have the executable bit set
+    upgrade_strategy_path = os.path.join(app_path, UPDATE_STRATEGY_FILE)
+    if os.path.exists(upgrade_strategy_path) and not os.access(upgrade_strategy_path, os.X_OK):
+        verrors.add(schema, f'{upgrade_strategy_path!r} is not executable')
+
+
+def validate_app(app_dir_path: str, schema: str) -> None:
+    app_name = os.path.basename(app_dir_path)
+    chart_version_path = os.path.join(app_dir_path, 'Chart.yaml')
+    verrors = validate_chart_version(ValidationErrors(), chart_version_path, schema, app_name)
+    validate_keep_versions(app_dir_path, app_name, verrors)
+    verrors.check()
+
+    validate_catalog_item_version(app_dir_path, schema, get_app_version(app_dir_path), app_name, True)
+
+    required_files = set(REQUIRED_METADATA_FILES)
+    available_files = set(
+        f for f in filter(lambda f: os.path.isfile(os.path.join(app_dir_path, f)), os.listdir(app_dir_path))
+    )
+    if missing_files := required_files - available_files:
+        verrors.add(
+            f'{schema}.required_files',
+            f'{", ".join(missing_files)!r} file(s) must be specified'
+        )
+    validate_upgrade_strategy(app_dir_path, f'{schema}.{UPDATE_STRATEGY_FILE}', verrors)
+    verrors.check()
diff --git a/catalog_validation/truecharts-fork/catalog_validation/exceptions.py b/catalog_validation/truecharts-fork/catalog_validation/exceptions.py
new file mode 100644
index 00000000..5fba74a2
--- /dev/null
+++ b/catalog_validation/truecharts-fork/catalog_validation/exceptions.py
@@ -0,0 +1,67 @@
+import errno
+
+
+class ValidationException(Exception):
+    def __init__(self, error_msg, error_no=errno.EFAULT):
+        self.errmsg = error_msg
+        self.errno = error_no
+
+    def get_error_name(self):
+        return errno.errorcode.get(self.errno) or 'EUNKNOWN'
+
+    def __str__(self):
+        return f'[{self.get_error_name()}] {self.errmsg}'
+
+
+class ValidationError(ValidationException):
+    def __init__(self, attribute, errmsg, errno=errno.EFAULT):
+        self.attribute = attribute
+        self.errmsg = errmsg
+        self.errno = errno
+
+    def __str__(self):
+        return f'[{self.get_error_name()}] {self.attribute}: {self.errmsg}'
+
+
+class ValidationErrors(ValidationException):
+    def __init__(self, errors=None):
+        self.errors = errors or []
+
+    def add(self, attribute, errmsg, errno=errno.EINVAL):
+        self.errors.append(ValidationError(attribute, errmsg, errno))
+
+    def add_validation_error(self, validation_error):
+        self.errors.append(validation_error)
+
+    def add_child(self, attribute, child):
+        for e in child.errors:
+            self.add(f'{attribute}.{e.attribute}', e.errmsg, e.errno)
+
+    def check(self):
+        if self:
+            raise self
+
+    def extend(self, errors):
+        for e in errors.errors:
+            self.add(e.attribute, e.errmsg, e.errno)
+
+    def __iter__(self):
+        for e in self.errors:
+            yield e.attribute, e.errmsg, e.errno
+
+    def __bool__(self):
+        return bool(self.errors)
+
+    def __str__(self):
+        output = ''
+        for e in self.errors:
+            output += str(e) + '\n'
+        return output
+
+    def __contains__(self, item):
+        return item in [e.attribute for e in self.errors]
+
+
+class CatalogDoesNotExist(ValidationException):
+    def __init__(self, path):
+        super().__init__(f'Failed to find a catalog at {path}', errno.ENOENT)
diff --git a/catalog_validation/truecharts-fork/catalog_validation/git_utils.py b/catalog_validation/truecharts-fork/catalog_validation/git_utils.py
new file mode 100644
index 00000000..adbcf5fb
--- /dev/null
+++ b/catalog_validation/truecharts-fork/catalog_validation/git_utils.py
@@ -0,0 +1,43 @@
+import os
+import subprocess
+
+from catalog_validation.ci.utils import DEV_DIRECTORY_RELATIVE_PATH, get_ci_development_directory
+from catalog_validation.items.utils import valid_train
+from collections import defaultdict
+
+from .ci.utils import OPTIONAL_METADATA_FILES
+from .exceptions import CatalogDoesNotExist
+
+
+def get_changed_apps(catalog_path: str, base_branch: str = 'master') -> dict:
+    if not os.path.exists(catalog_path):
+        raise CatalogDoesNotExist(catalog_path)
+
+    cp = subprocess.run(
+        ['git', '-C', catalog_path, '--no-pager', 'diff', '--name-only', base_branch],
+        capture_output=True, check=True,
+    )
+    dev_directory_path = get_ci_development_directory(catalog_path)
+    to_check_apps = defaultdict(list)
+    for file_path in filter(
+        lambda path: path and path.startswith(f'{DEV_DIRECTORY_RELATIVE_PATH}/'),
+        map(str.strip, cp.stdout.decode().split('\n'))
+    ):
+        # str.strip() would remove a set of characters, not a prefix; removeprefix() is what is needed here
+        dev_dir_relative_path = file_path.removeprefix(f'{DEV_DIRECTORY_RELATIVE_PATH}/')
+        train_name = dev_dir_relative_path.split('/', 1)[0]
+        if not valid_train(train_name, os.path.join(dev_directory_path, train_name)):
+            continue
+
+        app_name = dev_dir_relative_path.split('/')[1]
+        base_name = os.path.basename(file_path)
+
+        if base_name in OPTIONAL_METADATA_FILES:
+            continue
+        if not os.path.isdir(os.path.join(dev_directory_path, train_name, app_name)):
+            continue
+
+        if app_name not in to_check_apps[train_name]:
+            to_check_apps[train_name].append(app_name)
+
+    return to_check_apps
diff --git a/catalog_validation/truecharts-fork/catalog_validation/items/__init__.py b/catalog_validation/truecharts-fork/catalog_validation/items/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/catalog_validation/truecharts-fork/catalog_validation/items/catalog.py b/catalog_validation/truecharts-fork/catalog_validation/items/catalog.py
new file mode 100644
index 00000000..2fcdea53
--- /dev/null
+++ b/catalog_validation/truecharts-fork/catalog_validation/items/catalog.py
@@ -0,0 +1,83 @@
+import concurrent.futures
+import functools
+import os
+import typing
+import yaml
+
+from jsonschema import validate as json_schema_validate, ValidationError as JsonValidationError
+
+from
.items_util import get_item_details, get_default_questions_context +from .utils import RECOMMENDED_APPS_FILENAME, RECOMMENDED_APPS_SCHEMA, valid_train + + +def item_details(items: dict, location: str, questions_context: typing.Optional[dict], item_key: str) -> dict: + train = items[item_key] + item = item_key.removesuffix(f'_{train}') + item_location = os.path.join(location, train, item) + return get_item_details(item_location, questions_context, {'retrieve_versions': True}) + + +def retrieve_train_names(location: str, all_trains=True, trains_filter=None) -> list: + train_names = [] + trains_filter = trains_filter or [] + for train in os.listdir(location): + if not (all_trains or train in trains_filter) or not valid_train(train, os.path.join(location, train)): + continue + train_names.append(train) + return train_names + + +def get_items_in_trains(trains_to_traverse: list, catalog_location: str) -> dict: + items = {} + for train in trains_to_traverse: + items.update({ + f'{i}_{train}': train for i in os.listdir(os.path.join(catalog_location, train)) + if os.path.isdir(os.path.join(catalog_location, train, i)) + }) + + return items + + +def retrieve_trains_data( + items: dict, catalog_location: str, preferred_trains: list, + trains_to_traverse: list, job: typing.Any = None, questions_context: typing.Optional[dict] = None +) -> typing.Tuple[dict, set]: + questions_context = questions_context or get_default_questions_context() + trains = { + 'charts': {}, + 'test': {}, + **{k: {} for k in trains_to_traverse}, + } + unhealthy_apps = set() + + total_items = len(items) + with concurrent.futures.ProcessPoolExecutor(max_workers=(20 if total_items > 10 else 5)) as exc: + for index, result in enumerate(zip(items, exc.map( + functools.partial(item_details, items, catalog_location, questions_context), + items, chunksize=(10 if total_items > 10 else 5) + ))): + item_key = result[0] + item_info = result[1] + train = items[item_key] + item = item_key.removesuffix(f'_{train}') + if job: + job.set_progress( + int((index / total_items) * 80) + 10, + f'Retrieved information of {item!r} item from {train!r} train' + ) + trains[train][item] = item_info + if train in preferred_trains and not trains[train][item]['healthy']: + unhealthy_apps.add(f'{item} ({train} train)') + + return trains, unhealthy_apps + + +def retrieve_recommended_apps(catalog_location: str) -> typing.Dict[str, list]: + try: + with open(os.path.join(catalog_location, RECOMMENDED_APPS_FILENAME), 'r') as f: + data = yaml.safe_load(f.read()) + json_schema_validate(data, RECOMMENDED_APPS_SCHEMA) + except (FileNotFoundError, JsonValidationError, yaml.YAMLError): + return {} + else: + return data diff --git a/catalog_validation/truecharts-fork/catalog_validation/items/features.py b/catalog_validation/truecharts-fork/catalog_validation/items/features.py new file mode 100644 index 00000000..3ec64c8f --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/items/features.py @@ -0,0 +1,19 @@ +SUPPORTED_FEATURES = { + 'normalize/interfaceConfiguration', + 'normalize/ixVolume', + 'definitions/certificate', + 'definitions/certificateAuthority', + 'definitions/interface', + 'definitions/gpuConfiguration', + 'definitions/timezone', + 'definitions/nodeIP', + 'validations/containerImage', + 'validations/nodePort', + 'validations/hostPath', + 'validations/lockedHostPath', + 'validations/hostPathAttachments', +} + + +def version_supported(version_details: dict) -> bool: + return not bool(set(version_details['required_features']) - 
SUPPORTED_FEATURES) diff --git a/catalog_validation/truecharts-fork/catalog_validation/items/items_util.py b/catalog_validation/truecharts-fork/catalog_validation/items/items_util.py new file mode 100644 index 00000000..466bf284 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/items/items_util.py @@ -0,0 +1,214 @@ +import markdown +import os +import typing +import yaml + +from pkg_resources import parse_version + +from catalog_validation.exceptions import ValidationErrors + +from .features import version_supported +from .questions_utils import normalise_questions +from .utils import get_last_updated_date +from .validate_utils import validate_item, validate_item_version + + +ITEM_KEYS = ['icon_url'] + + +def get_item_details_base() -> dict: + return { + 'app_readme': None, + 'categories': [], + 'description': None, + 'healthy': False, # healthy means that each version the item hosts is valid and healthy + 'healthy_error': None, # An error string explaining why the item is not healthy + 'home': None, + 'location': None, + 'latest_version': None, + 'latest_app_version': None, + 'latest_human_version': None, + 'last_update': None, + 'name': None, + 'recommended': False, + 'title': None, + 'versions': {}, + 'maintainers': [], + 'tags': [], + 'screenshots': [], + 'sources': [], + } + + +def get_item_details( + item_location: str, questions_context: typing.Optional[dict] = None, options: typing.Optional[dict] = None +) -> dict: + catalog_path = item_location.rstrip('/').rsplit('/', 2)[0] + item = item_location.rsplit('/', 1)[-1] + train = item_location.rsplit('/', 2)[-2] + + options = options or {} + retrieve_versions = options.get('retrieve_versions', True) + item_data = get_item_details_base() + item_data.update({ + 'location': item_location, + 'last_update': get_last_updated_date(catalog_path, item_location), + 'name': item, + 'title': item.capitalize(), + }) + + schema = f'{train}.{item}' + try: + validate_item(item_location, schema, False) + except ValidationErrors as verrors: + item_data['healthy_error'] = f'Following error(s) were found with {item!r}:\n' + for verror in verrors: + item_data['healthy_error'] += f'{verror[0]}: {verror[1]}' + + # If the item format is not valid - there is no point descending any further into versions + if not retrieve_versions: + item_data.pop('versions') + return item_data + + item_data.update(get_item_details_impl(item_location, schema, questions_context, { + 'retrieve_latest_version': not retrieve_versions, + 'default_values_callable': options.get('default_values_callable'), + })) + unhealthy_versions = [] + for k, v in sorted(item_data['versions'].items(), key=lambda v: parse_version(v[0]), reverse=True): + if not v['healthy']: + unhealthy_versions.append(k) + else: + chart_metadata = v['chart_metadata'] + if not item_data['app_readme']: + item_data['app_readme'] = v['app_readme'] + if not item_data['maintainers'] and chart_metadata.get('maintainers'): + item_data['maintainers'] = chart_metadata['maintainers'] + if not item_data['latest_version']: + item_data['latest_version'] = k + item_data['latest_app_version'] = chart_metadata.get('appVersion') + item_data['latest_human_version'] = '' + if item_data['latest_app_version']: + item_data['latest_human_version'] = f'{item_data["latest_app_version"]}_' + item_data['latest_human_version'] += k + if not item_data['description'] and chart_metadata.get('description'): + item_data['description'] = v['chart_metadata']['description'] + if item_data['title'] == 
item_data['name'].capitalize() and chart_metadata.get( + 'annotations', {} + ).get('title'): + item_data['title'] = chart_metadata['annotations']['title'] + if item_data['home'] is None and chart_metadata.get('home'): + item_data['home'] = chart_metadata['home'] + if not item_data['sources'] and chart_metadata.get('sources'): + item_data['sources'] = chart_metadata['sources'] + + if unhealthy_versions: + item_data['healthy_error'] = f'Errors were found with {", ".join(unhealthy_versions)} version(s)' + else: + item_data['healthy'] = True + if not retrieve_versions: + item_data.pop('versions') + + return item_data + + +def get_item_details_impl( + item_path: str, schema: str, questions_context: typing.Optional[dict], options: typing.Optional[dict] +) -> dict: + # Each directory under item path represents a version of the item and we need to retrieve details + # for each version available under the item + retrieve_latest_version = options.get('retrieve_latest_version') + item_data = { + 'categories': [], + 'icon_url': None, + 'screenshots': [], + 'tags': [], + 'versions': {}, + } + with open(os.path.join(item_path, 'item.yaml'), 'r') as f: + item_data.update(yaml.safe_load(f.read())) + + item_data.update({k: item_data.get(k) for k in ITEM_KEYS}) + + for version in sorted( + filter(lambda p: os.path.isdir(os.path.join(item_path, p)), os.listdir(item_path)), + reverse=True, key=parse_version, + ): + catalog_path = item_path.rstrip('/').rsplit('/', 2)[0] + version_path = os.path.join(item_path, version) + item_data['versions'][version] = version_details = { + 'healthy': False, + 'supported': False, + 'healthy_error': None, + 'location': version_path, + 'last_update': get_last_updated_date(catalog_path, version_path), + 'required_features': [], + 'human_version': version, + 'version': version, + } + try: + validate_item_version(version_details['location'], f'{schema}.{version}') + except ValidationErrors as verrors: + version_details['healthy_error'] = f'Following error(s) were found with {schema}.{version!r}:\n' + for verror in verrors: + version_details['healthy_error'] += f'{verror[0]}: {verror[1]}' + + # There is no point in trying to see what questions etc the version has as it's invalid + continue + + version_details.update({ + 'healthy': True, + **get_item_version_details(version_details['location'], questions_context) + }) + if retrieve_latest_version: + break + + return item_data + + +def get_item_version_details( + version_path: str, questions_context: typing.Optional[dict], options: typing.Optional[dict] = None +) -> dict: + version_data = {'location': version_path, 'required_features': set()} + for key, filename, parser in ( + ('chart_metadata', 'Chart.yaml', yaml.safe_load), + ('app_metadata', 'metadata.yaml', yaml.safe_load), + ('schema', 'questions.yaml', yaml.safe_load), + ('app_readme', 'app-readme.md', markdown.markdown), + ('detailed_readme', 'README.md', markdown.markdown), + ('changelog', 'CHANGELOG.md', markdown.markdown), + ): + if os.path.exists(os.path.join(version_path, filename)): + with open(os.path.join(version_path, filename), 'r') as f: + version_data[key] = parser(f.read()) + else: + version_data[key] = None + + # We will normalise questions now so that if they have any references, we render them accordingly + # like a field referring to available interfaces on the system + normalise_questions(version_data, questions_context or get_default_questions_context()) + + version_data.update({ + 'supported': version_supported(version_data), + 'required_features': 
list(version_data['required_features']), + }) + if options and options.get('default_values_callable'): + version_data['values'] = options['default_values_callable'](version_data) + chart_metadata = version_data['chart_metadata'] + if chart_metadata['name'] != 'ix-chart' and chart_metadata.get('appVersion'): + version_data['human_version'] = f'{chart_metadata["appVersion"]}_{chart_metadata["version"]}' + + return version_data + + +def get_default_questions_context() -> dict: + return { + 'nic_choices': [], + 'gpus': {}, + 'timezones': {'Asia/Saigon': 'Asia/Saigon', 'Asia/Damascus': 'Asia/Damascus'}, + 'node_ip': '192.168.0.10', + 'certificates': [], + 'certificate_authorities': [], + 'system.general.config': {'timezone': 'America/Los_Angeles'}, + 'unused_ports': [i for i in range(1025, 65535)], + } diff --git a/catalog_validation/truecharts-fork/catalog_validation/items/ix_values_utils.py b/catalog_validation/truecharts-fork/catalog_validation/items/ix_values_utils.py new file mode 100644 index 00000000..0fcc046b --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/items/ix_values_utils.py @@ -0,0 +1,58 @@ +from jsonschema import validate as json_schema_validate, ValidationError as JsonValidationError + +from catalog_validation.exceptions import ValidationErrors + + +CUSTOM_PORTALS_JSON_SCHEMA = { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'portalName': { + 'type': 'string', + }, + 'protocol': { + 'type': 'string', 'enum': ['http', 'https'], + }, + 'useNodeIP': { + 'type': 'boolean', + }, + 'port': { + 'type': 'integer', + }, + 'path': { + 'type': 'string', + }, + }, + 'allOf': [ + { + 'if': { + 'properties': { + 'useNodeIP': { + 'const': False, + }, + }, + }, + 'then': { + 'required': ['host'], + 'properties': { + 'host': { + 'type': 'string', + }, + }, + }, + }], + 'required': ['portalName', 'protocol', 'useNodeIP', 'port'], + }, +} + + +def validate_ix_values_schema(schema, data): + verrors = ValidationErrors() + + try: + json_schema_validate(data, CUSTOM_PORTALS_JSON_SCHEMA) + except JsonValidationError as e: + verrors.add(schema, f'Failed to validate schema: {e}') + + verrors.check() diff --git a/catalog_validation/truecharts-fork/catalog_validation/items/questions_utils.py b/catalog_validation/truecharts-fork/catalog_validation/items/questions_utils.py new file mode 100644 index 00000000..d0255d2a --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/items/questions_utils.py @@ -0,0 +1,186 @@ +import itertools + +from .utils import ACL_QUESTION, IX_VOLUMES_ACL_QUESTION + + +CUSTOM_PORTALS_KEY = 'iXPortals' +CUSTOM_PORTALS_ENABLE_KEY = 'enableIXPortals' +CUSTOM_PORTAL_GROUP_KEY = 'iXPortalsGroupName' + + +def get_custom_portal_question(group_name: str) -> dict: + return { + 'variable': CUSTOM_PORTALS_KEY, + 'label': 'User Specified Web Portals', + 'description': 'User(s) can specify custom webUI portals', + 'group': group_name, + 'schema': { + 'type': 'list', + 'items': [{ + 'variable': 'portalConfiguration', + 'label': 'Portal Configuration', + 'description': 'Configure WebUI Portal', + 'schema': { + 'type': 'dict', + 'attrs': [ + { + 'variable': 'portalName', + 'label': 'Portal Name', + 'description': 'Specify a UI Portal name to use which would be displayed in the UI', + 'schema': { + 'type': 'string', + 'default': 'Web Portal', + 'empty': False, + }, + }, + { + 'variable': 'protocol', + 'label': 'Protocol for Portal', + 'description': 'Specify protocol for Portal', + 'schema': { + 'type': 'string', + 'default': 
'http', + 'enum': [ + {'value': 'http', 'description': 'HTTP Protocol'}, + {'value': 'https', 'description': 'HTTPS Protocol'}, + ], + }, + }, + { + 'variable': 'useNodeIP', + 'label': 'Use Node IP for Portal IP/Domain', + 'schema': { + 'type': 'boolean', + 'default': True, + }, + }, + { + 'variable': 'host', + 'label': 'Portal IP/Domain', + 'schema': { + 'type': 'string', + 'show_if': [['useNodeIP', '=', False]], + '$ref': ['definitions/nodeIP'], + }, + }, + { + 'variable': 'port', + 'label': 'Port', + 'description': 'Specify port to be used for Portal access', + 'schema': { + 'type': 'int', + 'max': 65535, + 'default': 15000, + }, + }, + { + 'variable': 'path', + 'label': 'Path (optional - leave empty if not required)', + 'description': 'Some app(s) might have a sub path i.e http://192.168.0.10:9000/api/', + 'schema': { + 'type': 'string', + }, + }, + ], + }, + }], + }, + } + + +def normalise_questions(version_data: dict, context: dict) -> None: + version_data['required_features'] = set() + version_data['schema']['questions'].extend( + [ + get_custom_portal_question(version_data['schema'][CUSTOM_PORTAL_GROUP_KEY]) + ] if version_data['schema'].get(CUSTOM_PORTALS_ENABLE_KEY) else [] + ) + for question in version_data['schema']['questions']: + normalise_question(question, version_data, context) + version_data['required_features'] = list(version_data['required_features']) + + +def normalise_question(question: dict, version_data: dict, context: dict) -> None: + schema = question['schema'] + for attr in itertools.chain(*[schema.get(k, []) for k in ('attrs', 'items', 'subquestions')]): + normalise_question(attr, version_data, context) + + if '$ref' not in schema: + return + + data = {} + for ref in schema['$ref']: + version_data['required_features'].add(ref) + if ref == 'definitions/interface': + data['enum'] = [ + {'value': i, 'description': f'{i!r} Interface'} for i in context['nic_choices'] + ] + elif ref == 'definitions/gpuConfiguration': + data['attrs'] = [ + { + 'variable': gpu, + 'label': f'GPU Resource ({gpu})', + 'description': 'Please enter the number of GPUs to allocate', + 'schema': { + 'type': 'int', + 'max': int(quantity), + 'enum': [ + {'value': i, 'description': f'Allocate {i!r} {gpu} GPU'} + for i in range(int(quantity) + 1) + ], + 'default': 0, + } + } for gpu, quantity in context['gpus'].items() + ] + elif ref == 'definitions/timezone': + data.update({ + 'enum': [{'value': t, 'description': f'{t!r} timezone'} for t in sorted(context['timezones'])], + 'default': context['system.general.config']['timezone'] + }) + elif ref == 'definitions/nodeIP': + data['default'] = context['node_ip'] + elif ref == 'definitions/certificate': + get_cert_ca_options(schema, data, {'value': None, 'description': 'No Certificate'}) + data['enum'] += [ + {'value': i['id'], 'description': f'{i["name"]!r} Certificate'} + for i in context['certificates'] + ] + elif ref == 'definitions/certificateAuthority': + get_cert_ca_options(schema, data, {'value': None, 'description': 'No Certificate Authority'}) + data['enum'] += [{'value': None, 'description': 'No Certificate Authority'}] + [ + {'value': i['id'], 'description': f'{i["name"]!r} Certificate Authority'} + for i in context['certificate_authorities'] + ] + elif ref == 'definitions/port': + data['enum'] = [{'value': None, 'description': 'No Port Selected'}] if schema.get('null') else [] + data['enum'] += [ + {'value': i, 'description': f'{i!r} Port'} + for i in filter( + lambda p: schema.get('min', 9000) <= p <= schema.get('max', 65534), + 
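+                    # fall back to the 9000-65534 port window when the question
+                    # does not define its own min/max bounds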
context['unused_ports'] + ) + ] + elif ref == 'normalize/acl': + data['attrs'] = ACL_QUESTION + elif ref == 'normalize/ixVolume': + if schema['type'] == 'dict' and any(i['variable'] == 'aclEntries' for i in schema['attrs']): + # get index of aclEntries from attrs + acl_index = next(i for i, v in enumerate(schema['attrs']) if v['variable'] == 'aclEntries') + # insert acl question before aclEntries + schema['attrs'][acl_index]['schema']['attrs'] = IX_VOLUMES_ACL_QUESTION + + schema.update(data) + + +def get_cert_ca_options(schema: dict, data: dict, default_entry: dict): + if schema.get('null', True): + data.update({ + 'enum': [default_entry], + 'default': None, + 'null': True, + }) + else: + data.update({ + 'enum': [], + 'required': True, + }) diff --git a/catalog_validation/truecharts-fork/catalog_validation/items/utils.py b/catalog_validation/truecharts-fork/catalog_validation/items/utils.py new file mode 100644 index 00000000..e2a4e16b --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/items/utils.py @@ -0,0 +1,223 @@ +import contextlib +import os +import subprocess + +from datetime import datetime +from typing import Optional + +from catalog_validation.schema.migration_schema import MIGRATION_DIRS +from catalog_validation.utils import VALID_TRAIN_REGEX + + +DEVELOPMENT_DIR = 'ix-dev' +RECOMMENDED_APPS_FILENAME = 'recommended_apps.yaml' +RECOMMENDED_APPS_SCHEMA = { + 'type': 'object', + 'patternProperties': { + '.*': { + 'type': 'array', + 'items': {'type': 'string'}, + } + }, +} +TRAIN_IGNORE_DIRS = ['library', 'docs', DEVELOPMENT_DIR] + MIGRATION_DIRS + + +ACL_QUESTION = [ + { + 'variable': 'path', + 'label': 'Host Path', + 'description': 'Host Path to perform ACL', + 'schema': { + 'type': 'hostpath', + 'required': True, + 'empty': False, + } + }, + { + 'variable': 'entries', + 'label': 'ACL Entries', + 'description': 'ACL Entries', + 'schema': { + 'type': 'list', + 'items': [{ + 'variable': 'aclEntry', + 'label': 'ACL Entry', + 'schema': { + 'type': 'dict', + 'attrs': [ + { + 'variable': 'id_type', + 'label': 'ID Type', + 'schema': { + 'type': 'string', + 'enum': [ + {'value': 'USER', 'description': 'Entry is for a USER'}, + {'value': 'GROUP', 'description': 'Entry is for a GROUP'}, + ], + 'default': 'USER', + } + }, + { + 'variable': 'id', + 'label': 'ID', + 'schema': { + 'type': 'int', + 'required': True, + 'min': 0, + } + }, + { + 'variable': 'access', + 'label': 'Access', + 'schema': { + 'type': 'string', + 'enum': [ + {'value': 'READ', 'description': 'Read Access'}, + {'value': 'MODIFY', 'description': 'Modify Access'}, + {'value': 'FULL_CONTROL', 'description': 'FULL_CONTROL Access'}, + ], + } + } + ], + } + }] + } + } +] + +IX_VOLUMES_ACL_QUESTION = [ + { + 'variable': 'path', + 'label': 'Path', + 'description': 'Path to perform ACL', + 'schema': { + 'type': 'string', + 'hidden': True + } + }, + ACL_QUESTION[1] +] + + +def get_catalog_json_schema() -> dict: + return { + 'type': 'object', + 'patternProperties': { + '.*': { + 'type': 'object', + 'title': 'Train', + 'patternProperties': { + '.*': { + 'type': 'object', + 'title': 'Item', + 'properties': { + 'name': { + 'type': 'string', + 'title': 'Name', + }, + 'categories': { + 'type': 'array', + 'items': { + 'type': 'string' + }, + }, + 'app_readme': { + 'type': 'string', + }, + 'location': { + 'type': 'string', + }, + 'healthy': { + 'type': 'boolean', + }, + 'healthy_error': { + 'type': ['string', 'null'], + }, + 'last_update': { + 'type': 'string', + 'pattern': r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$', + 
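+                            # the same 'YYYY-MM-DD HH:MM:SS' layout that
+                            # get_last_updated_date() below emits via strftime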
}, + 'latest_version': { + 'type': 'string', + }, + 'latest_app_version': { + 'type': 'string', + }, + 'latest_human_version': { + 'type': 'string', + }, + 'description': { + 'type': ['string', 'null'], + }, + 'title': { + 'type': 'string', + }, + 'icon_url': { + 'type': ['string', 'null'], + }, + 'maintainers': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'name': {'type': 'string'}, + 'url': {'type': ['string', 'null']}, + 'email': {'type': 'string'} + }, + 'required': ['name', 'email'], + } + }, + 'home': { + 'type': 'string', + }, + 'tags': { + 'type': 'array', + 'items': { + 'type': 'string', + } + }, + 'screenshots': { + 'type': 'array', + 'items': { + 'type': 'string', + } + }, + 'sources': { + 'type': 'array', + 'items': { + 'type': 'string', + } + }, + }, + 'required': [ + 'name', 'categories', 'location', 'healthy', 'icon_url', + 'latest_version', 'latest_app_version', 'latest_human_version', + 'last_update', 'recommended', 'healthy_error', 'maintainers', + 'home', 'tags', 'sources', 'screenshots', + ], + } + } + + } + } + } + + +def get_last_updated_date(repo_path: str, folder_path: str) -> Optional[str]: + with contextlib.suppress(Exception): + # We don't want to fail querying items if for whatever reason this fails + output = subprocess.check_output( + ['git', 'log', '-n', '1', '--pretty=format:%ct', f'{folder_path}'], + cwd=repo_path, + stderr=subprocess.DEVNULL + ) + if output: + timestamp = datetime.fromtimestamp(int(output)) + return timestamp.strftime('%Y-%m-%d %H:%M:%S') + + +def valid_train(train_name: str, train_location: str) -> bool: + return VALID_TRAIN_REGEX.match( + train_name + ) and not train_name.startswith('.') and train_name not in TRAIN_IGNORE_DIRS and os.path.isdir(train_location) diff --git a/catalog_validation/truecharts-fork/catalog_validation/items/validate_utils.py b/catalog_validation/truecharts-fork/catalog_validation/items/validate_utils.py new file mode 100644 index 00000000..3cecf3ba --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/items/validate_utils.py @@ -0,0 +1,9 @@ +from catalog_validation.validation import validate_catalog_item, validate_catalog_item_version + + +def validate_item(path: str, schema: str, validate_versions: bool = True): + validate_catalog_item(path, schema, validate_versions) + + +def validate_item_version(path: str, schema: str): + validate_catalog_item_version(path, schema) diff --git a/catalog_validation/truecharts-fork/catalog_validation/k8s/__init__.py b/catalog_validation/truecharts-fork/catalog_validation/k8s/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/catalog_validation/truecharts-fork/catalog_validation/k8s/api_client.py b/catalog_validation/truecharts-fork/catalog_validation/k8s/api_client.py new file mode 100644 index 00000000..cec1da78 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/k8s/api_client.py @@ -0,0 +1,15 @@ +from contextlib import contextmanager + +from kubernetes import client, config + +from .utils import KUBECONFIG_FILE + + +@contextmanager +def api_client(): + config.load_kube_config(config_file=KUBECONFIG_FILE) + api_cl = client.api_client.ApiClient() + try: + yield client.CoreV1Api(api_cl) + finally: + api_cl.close() diff --git a/catalog_validation/truecharts-fork/catalog_validation/k8s/utils.py b/catalog_validation/truecharts-fork/catalog_validation/k8s/utils.py new file mode 100644 index 00000000..c973e77e --- /dev/null +++ 
b/catalog_validation/truecharts-fork/catalog_validation/k8s/utils.py @@ -0,0 +1 @@ +KUBECONFIG_FILE = '/etc/rancher/k3s/k3s.yaml' diff --git a/catalog_validation/truecharts-fork/catalog_validation/pytest/requirements.txt b/catalog_validation/truecharts-fork/catalog_validation/pytest/requirements.txt new file mode 100644 index 00000000..1d6ed5ca --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/pytest/requirements.txt @@ -0,0 +1,2 @@ +pytest +pytest-mock \ No newline at end of file diff --git a/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_catalog_validate.py b/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_catalog_validate.py new file mode 100644 index 00000000..85fc5ff7 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_catalog_validate.py @@ -0,0 +1,367 @@ +import pytest + +from catalog_validation.exceptions import ValidationErrors +from catalog_validation.utils import WANTED_FILES_IN_ITEM_VERSION +from catalog_validation.validation import ( + validate_train_structure, validate_questions_yaml, validate_catalog_item, + validate_catalog_item_version, validate_variable_uniqueness, +) + + +@pytest.mark.parametrize('train_path,should_work', [ + ('/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts', True), + ('/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/', False), + +]) +def test_validate_train_structure(train_path, should_work): + if should_work: + assert validate_train_structure(train_path) is None + else: + with pytest.raises(ValidationErrors): + validate_train_structure(train_path) + + +@pytest.mark.parametrize('test_yaml,should_work', [ + ( + ''' + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + portals: + web_portal: + protocols: + - "http" + host: + - "$node_ip" + ports: + - "$variable-machinaris_ui_port" + + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + True + ), + ( + ''' + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + portals: + web_portal: + protocols: {} + host: {} + ports: {} + + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + False + ), + ( + ''' + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + False + ), + ( + ''' + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + True + ), + ( + ''' + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + questions: + - variable: timezone + label: "Network" + group: "Machinaris Network Configuration" + description: "Configure timezone for machianaris" + + ''', + False + ), + ( + ''' + enableIXPortals: true + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + False + 
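+        # enableIXPortals set without an iXPortalsGroupName naming an existing
+        # group is expected to fail validation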
), + ( + ''' + enableIXPortals: true + iXPortalsGroupName: "Machinaris Configuration" + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + True + ), + ( + ''' + enableIXPortals: true + iXPortalsGroupName: "Invalid Group name" + groups: + - name: "Machinaris Configuration" + description: "Configure timezone for machianaris" + + questions: + - variable: timezone + label: "Configure timezone" + group: "Machinaris Configuration" + description: "Configure timezone for machianaris" + ''', + False + ), + +]) +def test_validate_questions_yaml(mocker, test_yaml, should_work): + open_file_data = mocker.mock_open(read_data=test_yaml) + mocker.patch('builtins.open', open_file_data) + mocker.patch('catalog_validation.validation.validate_question', return_value=None) + if should_work: + assert validate_questions_yaml(None, 'charts.machinaris.versions.1.1.13.questions_configuration') is None + else: + with pytest.raises(ValidationErrors): + validate_questions_yaml(None, 'charts.machinaris.versions.1.1.13.questions_configuration') + + +@pytest.mark.parametrize('catalog_item_path,test_yaml,should_work', [ + ( + '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/machinaris', + ''' + categories: + - storage + - crypto + icon_url: https://raw.githubusercontent.com/guydavis/machinaris/main/web/static/machinaris.png + ''', + True + ), + ( + '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/machinaris', + ''' + icon_url: https://raw.githubusercontent.com/guydavis/machinaris/main/web/static/machinaris.png + ''', + False + ), +]) +def test_validate_catalog_item(mocker, catalog_item_path, test_yaml, should_work): + mocker.patch('os.path.isdir', side_effect=[True, True, False]) + mocker.patch('os.listdir', return_value=['1.1.13', 'item.yaml']) + open_file_data = mocker.mock_open(read_data=test_yaml) + mocker.patch('builtins.open', open_file_data) + mocker.patch('catalog_validation.validation.validate_catalog_item_version', return_value=None) + if not should_work: + with pytest.raises(ValidationErrors): + validate_catalog_item(catalog_item_path, 'charts.machinaris') + else: + assert validate_catalog_item(catalog_item_path, 'charts.machinaris') is None + + +@pytest.mark.parametrize('chart_yaml,should_work', [ + ( + ''' + name: storj + version: 1.0.4 + ''', + True + ), + ( + ''' + name: storj + version: 1.0.0 + ''', + False + ), + ( + ''' + name: storj_s + version: 1.0.0 + ''', + False + ) +]) +def test_validate_catalog_item_version(mocker, chart_yaml, should_work): + mocker.patch('os.listdir', return_value=WANTED_FILES_IN_ITEM_VERSION) + mocker.patch('os.path.exists', return_value=True) + open_file = mocker.mock_open(read_data=chart_yaml) + mocker.patch('builtins.open', open_file) + mocker.patch('catalog_validation.validation.validate_questions_yaml', return_value=None) + mocker.patch('catalog_validation.validation.validate_ix_values_yaml', return_value=None) + if should_work: + assert validate_catalog_item_version( + '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/storj/1.0.4', + 'charts.storj.versions.1.0.4') is None + else: + with pytest.raises(ValidationErrors): + validate_catalog_item_version( + '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/storj/1.0.4', + 
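+                # reached by both failing fixtures: Chart.yaml's version (1.0.0) or
+                # name ('storj_s') disagrees with the storj/1.0.4 item path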
'charts.storj.versions.1.0.4' + ) + + +@pytest.mark.parametrize('data,schema,should_work', [ + ([ + { + 'variable': 'enablePlexPass', + 'label': 'Use PlexPass', + 'group': 'Plex Configuration', + 'schema': { + 'type': 'boolean', + 'default': False + } + }, + { + 'variable': 'dnsConfig', + 'label': 'DNS Configuration', + 'group': 'Advanced DNS Settings', + 'schema': { + 'type': 'dict', + 'attrs': [] + } + }, + ], 'plex.questions', True), + ([ + { + 'variable': 'enablePlexPass', + 'label': 'Use PlexPass', + 'group': 'Plex Configuration', + 'schema': { + 'type': 'boolean', + 'default': False + } + }, + { + 'variable': 'enablePlexPass', + 'label': 'Use PlexPass', + 'group': 'Plex Configuration', + 'schema': { + 'type': 'boolean', + 'default': False + } + }, + ], 'plex.questions', False), + ([ + { + 'variable': 'enablePlexPass', + 'label': 'Use PlexPass', + 'group': 'Plex Configuration', + 'schema': { + 'type': 'boolean', + 'default': False + } + }, + { + 'variable': 'hostPathEnabled', + 'label': 'Enable Host Path for Plex Transcode Volume', + 'type': 'boolean', + 'default': False, + 'show_subquestions_if': False, + 'subquestions': [ + { + 'variable': 'hostPath', + 'label': 'Host Path for Plex Transcode Volume', + 'schema': { + 'type': 'hostpath', + 'required': True, + '$ref': [ + 'validations/lockedHostPath' + ] + } + }, + ] + } + ], 'plex.questions', True), + ([ + { + 'variable': 'enablePlexPass', + 'label': 'Use PlexPass', + 'group': 'Plex Configuration', + 'schema': { + 'type': 'boolean', + 'default': False + } + }, + { + 'variable': 'mountPath', + 'label': 'Plex Transcode Mount Path', + 'description': 'Path where the volume will be mounted inside the pod', + 'schema': { + 'type': 'path', + } + }, + { + 'variable': 'hostPathEnabled', + 'label': 'Enable Host Path for Plex Transcode Volume', + 'type': 'boolean', + 'default': False, + 'show_subquestions_if': False, + 'subquestions': [ + { + 'variable': 'hostPath', + 'label': 'Host Path for Plex Transcode Volume', + 'schema': { + 'type': 'hostpath', + 'required': True, + '$ref': [ + 'validations/lockedHostPath' + ] + } + }, + { + 'variable': 'mountPath', + 'label': 'Plex Transcode Mount Path', + 'description': 'Path where the volume will be mounted inside the pod', + 'schema': { + 'type': 'path', + } + }, + ] + } + ], 'plex.questions', False), +]) +def test_validate_variable_uniqueness(data, schema, should_work): + verrors = ValidationErrors() + if should_work: + assert validate_variable_uniqueness(data, schema, verrors) is None + else: + with pytest.raises(ValidationErrors): + validate_variable_uniqueness(data, schema, verrors) diff --git a/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_items_util.py b/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_items_util.py new file mode 100644 index 00000000..aab41eb5 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_items_util.py @@ -0,0 +1,107 @@ +import pytest + +from catalog_validation.items.items_util import get_item_details, get_item_details_impl + + +QUESTION_CONTEXT = { + 'nic_choices': [], + 'gpus': {}, + 'timezones': {'Asia/Saigon': 'Asia/Saigon', 'Asia/Damascus': 'Asia/Damascus'}, + 'node_ip': '192.168.0.10', + 'certificates': [], + 'certificate_authorities': [], + 'system.general.config': {'timezone': 'America/Los_Angeles'}, +} + + +@pytest.mark.parametrize('item_location,options,items_data', [ + ('/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/chia', + 
{'retrieve_versions': True}, + { + 'name': 'chia', + 'categories': [], + 'app_readme': None, + 'location': '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/chia', + 'healthy': True, + 'healthy_error': None, + 'home': None, + 'last_update': None, + 'versions': {}, + 'maintainers': [], + 'latest_version': None, + 'latest_app_version': None, + 'latest_human_version': None, + 'recommended': False, + 'title': 'Chia', + 'description': None, + 'tags': [], + 'screenshots': [], + 'sources': [], + } + ), +]) +def test_get_item_details(mocker, item_location, options, items_data): + mocker.patch('catalog_validation.items.items_util.validate_item', return_value=None) + mocker.patch('catalog_validation.items.items_util.get_item_details_impl', return_value={}) + assert get_item_details(item_location, QUESTION_CONTEXT, options) == items_data + + +@pytest.mark.parametrize('item_path,schema,options,yaml_data,item_data_impl,open_yaml', [ + ( + '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/chia', + 'charts.chia', + {'retrieve_latest_version': True}, { + 'variable': 'web_port', + 'label': 'Web Port for Diskover', + 'group': 'Networking', + 'schema': { + 'type': 'int', + 'min': 8000, + 'max': 65535, + 'default': 22510, + 'required': True + } + }, { + 'versions': { + '1.3.37': { + 'healthy': True, + 'supported': False, + 'healthy_error': None, + 'last_update': None, + 'location': '/mnt/mypool/ix-applications/catalogs/github_com_truenas_' + 'charts_git_master/charts/chia/1.3.37', + 'required_features': [], + 'human_version': '1.3.37', + 'version': '1.3.37' + } + }, + 'categories': ['storage', 'crypto'], + 'icon_url': 'https://www.chia.net/wp-content/uploads/2022/09/chia-logo.svg', + 'tags': ['finance'], + 'screenshots': ['https://www.chia.net/wp-content/uploads/2022/09/chia-logo.svg'], + 'sources': ['https://hub.docker.com/r/emby/embyserver'], + }, + ''' + screenshots: + - 'https://www.chia.net/wp-content/uploads/2022/09/chia-logo.svg' + tags: + - finance + categories: + - storage + - crypto + icon_url: https://www.chia.net/wp-content/uploads/2022/09/chia-logo.svg + sources: + - https://hub.docker.com/r/emby/embyserver + ''' + ), +]) +def test_get_item_details_impl( + mocker, item_path, schema, options, yaml_data, item_data_impl, open_yaml, +): + open_file_data = mocker.mock_open(read_data=open_yaml) + mocker.patch('builtins.open', open_file_data) + mocker.patch('os.path.isdir', return_value=True) + mocker.patch('os.listdir', return_value=['1.3.37']) + mocker.patch('catalog_validation.items.items_util.validate_item_version', return_value=None) + mocker.patch('catalog_validation.items.items_util.get_item_version_details', return_value={}) + assert get_item_details_impl(item_path, schema, QUESTION_CONTEXT, options) == item_data_impl diff --git a/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_normalise_questions.py b/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_normalise_questions.py new file mode 100644 index 00000000..5372d6f6 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_normalise_questions.py @@ -0,0 +1,321 @@ +from catalog_validation.items.questions_utils import normalise_question +import pytest + + +VERSION_DATA = { + 'location': '/mnt/mypool/ix-applications/catalogs/github_com_truenas_charts_git_master/charts/syncthing/1.0.14', + 'required_features': { + 'normalize/ixVolume', + 'validations/lockedHostPath', + }, + 'chart_metadata': {}, + 'schema': 
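+    # largely inert fixture data: normalise_question only mutates
+    # required_features above and the individual question passed to it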
{ + 'variable': 'hostNetwork', + 'label': 'Host Network', + 'group': 'Networking', + }, + 'app_readme': 'there is not any', + 'detailed_readme': 'there is not any', + 'changelog': None, +} + + +@pytest.mark.parametrize('question,normalise_data,context', [ + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/interface'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/interface'], + 'enum': [], + } + }, { + 'nic_choices': [], + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/interface'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/interface'], + 'enum': [{ + 'value': 'ens0', + 'description': "'ens0' Interface" + }], + } + }, { + 'nic_choices': ['ens0'] + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/gpuConfiguration'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/gpuConfiguration'], + 'attrs': [{ + 'variable': 'test@gpu', + 'label': 'GPU Resource (test@gpu)', + 'description': 'Please enter the number of GPUs to allocate', + 'schema': { + 'type': 'int', + 'max': 3, + 'enum': [ + {'value': i, 'description': f'Allocate {i!r} test@gpu GPU'} + for i in range(4) + ], + 'default': 0, + } + }], + } + }, { + 'gpus': { + 'test@gpu': 3 + } + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/gpuConfiguration'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/gpuConfiguration'], + 'attrs': [], + } + }, { + 'gpus': {} + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/timezone'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/timezone'], + 'enum': [{ + 'value': 'Asia/Damascus', + 'description': "'Asia/Damascus' timezone", + }, { + 'value': 'Asia/Saigon', + 'description': "'Asia/Saigon' timezone", + }], + 'default': 'America/Los_Angeles', + } + }, + { + 'timezones': { + 'Asia/Saigon': 'Asia/Saigon', + 'Asia/Damascus': 'Asia/Damascus', + }, + 'system.general.config': { + 'timezone': 'America/Los_Angeles', + } + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/nodeIP'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/nodeIP'], + 'default': '192.168.0.10', + } + }, + { + 'node_ip': '192.168.0.10' + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificate'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': 
['definitions/certificate'], + 'enum': [{ + 'value': None, + 'description': 'No Certificate' + }, + { + 'value': '1', + 'description': "'testcert' Certificate" + } + ], + 'default': None, + 'null': True + } + }, {'certificates': [{ + 'id': '1', + 'name': 'testcert' + }], + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificate'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificate'], + 'enum': [{ + 'value': None, + 'description': 'No Certificate' + }], + 'default': None, + 'null': True + } + }, { + 'certificates': [] + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificateAuthority'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificateAuthority'], + 'enum': [{ + 'value': None, + 'description': 'No Certificate Authority' + }, { + 'value': None, + 'description': 'No Certificate Authority' + }], + 'default': None, + 'null': True + } + }, { + 'certificate_authorities': [] + } + ), + ( + { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificateAuthority'], + } + }, { + 'variable': 'datasetName', + 'label': 'Plots Volume Name', + 'schema': { + 'type': 'string', + 'hidden': True, + '$ref': ['definitions/certificateAuthority'], + 'enum': [{ + 'value': None, + 'description': 'No Certificate Authority' + }, { + 'value': None, + 'description': 'No Certificate Authority' + }, + { + 'value': '1', + 'description': "'testca' Certificate Authority" + } + ], + 'default': None, + 'null': True + } + }, { + 'certificate_authorities': [{ + 'id': '1', + 'name': 'testca' + }], + } + ) +]) +def test_normalise_question(question, normalise_data, context): + normalise_question(question, VERSION_DATA, context) + assert question == normalise_data diff --git a/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_schema.py b/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_schema.py new file mode 100644 index 00000000..a741a0fa --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_schema.py @@ -0,0 +1,577 @@ +import pytest + +from catalog_validation.schema.schema_gen import get_schema +from catalog_validation.exceptions import ValidationErrors +from catalog_validation.validation import validate_question + + +@pytest.mark.parametrize('schema,should_work', [ + ( + { + 'type': 'dict', + 'attrs': [] + }, + True + ), + ( + { + 'type': 'dict', + 'attrs': {} + }, + False + ), + ( + { + 'type': 'list' + }, + False + ), + ( + { + 'type': 'list', + 'items': [] + }, + True + ), + ( + { + 'type': 'list', + 'items': {} + }, + False + ), + ( + { + 'type': 'string', + 'editable': True + }, + True + ), + ( + { + 'type': 'string', + 'default': 'hello' + }, + True + ), + ( + { + 'type': 'string', + 'default': 1 + }, + False + ), + ( + { + 'type': 'string', + 'editable': 'true' + }, + False + ), + ( + { + 'type': 'string', + 'private': True + }, + True + ), + ( + { + 'type': 'string', + 'private': 'true' + }, + False + ), + ( + { + 'type': 'string', + 'max_length': 233 + }, + True + ), + ( + { + 'type': 'string', + 'max_length': '233' + }, + 
False + ), + ( + { + 'type': 'string', + 'min_length': 233 + }, + True + ), + ( + { + 'type': 'string', + 'min_length': '233' + }, + False + ), + ( + { + 'type': 'string', + 'valid_chars': '[a-z]*' + }, + True + ), + ( + { + 'type': 'string', + 'valid_chars': ['a-z'] + }, + False + ), + ( + { + 'type': 'string', 'null': True + }, + True + ), + ( + { + 'type': 'string', + 'null': 'true' + }, False + ), + ( + { + 'type': 'string', + 'immutable': True + }, + True + ), + ( + { + 'type': 'string', + 'immutable': 'true' + }, + False + ), + ( + { + 'type': 'string', + 'required': True + }, + True + ), + ( + { + 'type': 'string', + 'required': 'true' + }, + False + ), + ( + { + 'type': 'string', + 'hidden': True + }, + True + ), + ( + { + 'type': 'string', + 'hidden': 'true' + }, + False + ), + ( + { + 'type': 'string', + 'show_if': 'true' + }, + False + ), + ( + { + 'type': 'string', + 'show_if': [['hello', '=', 'world']] + }, + True + ), + ( + { + 'type': 'string', + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + 'subquestions': {} + }, + False + ), + ( + { + 'type': 'string', + 'show_subquestions_if': None + }, False + ), + ( + { + 'type': 'string', + 'show_subquestions_if': None, + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + 'show_subquestions_if': 1, + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + 'show_subquestions_if': 'test', + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + 'show_subquestions_if': {}, + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + 'show_subquestions_if': [], + 'subquestions': [] + }, + True + ), + ( + { + 'type': 'string', + '$ui-ref': [] + }, + True + ), + ( + { + 'type': 'string', + '$ui-ref': {} + }, + False + ), + ( + { + 'type': 'string', + '$ref': [] + }, + True + ), + ( + { + 'type': 'string', + '$ref': {} + }, + False + ), + ( + { + 'type': 'int', + 'min': 233, + 'max': 2311 + }, + True + ), + ( + { + 'type': 'int', + 'min': '233', + 'max': 2311 + }, + False + ), + ( + { + 'type': 'int', + 'min': 233, + 'max': '2311' + }, + False + ), + ( + { + 'type': 'int', + 'default': 23 + }, + True + ), + ( + { + 'type': 'int', + 'default': '23' + }, + False + ), + ( + { + 'type': 'ipaddr', + 'ipv4': True, + 'ipv6': False, + 'cidr': True + }, + True + ), + ( + { + 'type': 'ipaddr', + 'ipv4': True, + 'ipv6': False, + 'cidr': 'true' + }, + False + ), + ( + { + 'type': 'ipaddr', + 'ipv4': True, + 'ipv6': 'False', + 'cidr': True + }, + False + ), + ( + { + 'type': 'ipaddr', + 'ipv4': 'True', + 'ipv6': False, + 'cidr': True + }, + False + ), + ( + { + 'type': 'string', + 'enum': [{ + 'value': 'test', + 'description': 'test' + }] + }, + True + ), + ( + { + 'type': 'string', + 'enum': [{ + 'value': 'test', + 'description': 'test', + 'obj': {} + }] + }, + False + ), + ( + { + 'type': 'string', + 'enum': [{ + 'value': 'test' + }] + }, + False + ), + ( + { + 'type': 'string', + 'enum': [{ + 'key': 'value' + }] + }, + False + ), + ( + { + 'type': 'string', + 'enum': [{}] + }, + False + ), + ( + { + 'type': 'hostpath' + }, + True + ), + ( + { + 'type': 'hostpath', + 'default': '/root/' + }, + True + ), + ( + { + 'type': 'hostpath', + 'default': 231 + }, + False + ), + ( + { + 'type': 'path' + }, + True + ), + ( + { + 'type': 'path', + 'default': '/root/' + }, + True + ), + ( + { + 'type': 'path', + 'default': 231 + }, + False + ), + ( + { + 'type': 'boolean' + }, + True + ), + ( + { + 'type': 'boolean', + 'default': True + }, + True + ), + ( + { + 'type': 'boolean', + 'default': 'true' + 
}, + False + ), + ( + { + 'type': 'cron' + }, + True + ), + ( + { + 'type': 'cron', + 'default': {} + }, + True + ), + ( + { + 'type': 'cron', + 'default': [] + }, + False + ), + ( + { + 'type': 'uri' + }, + True + ), + ( + { + 'type': 'uri', + 'default': 'http://www.google.com' + }, + True + ), + ( + { + 'type': 'uri', + 'default': 2133 + }, + False + ), +]) +def test_schema_validation(schema, should_work): + if not should_work: + with pytest.raises(ValidationErrors): + get_schema(schema).validate('') + else: + assert get_schema(schema).validate('') is None + + +@pytest.mark.parametrize('variable,should_work', [ + ( + { + 'variable': 'testing', + 'label': 'Testing', + 'description': 'for testing', + 'group': 'testing', + 'schema': { + 'type': 'boolean', + 'default': True + } + }, + True + ), + ( + { + 'variable': 'testing', + 'description': 'for testing', + 'group': 'testing', + 'schema': { + 'type': 'boolean', + 'default': True + } + }, + False + ), + ( + { + 'variable': 'testing', + 'label': 'Testing', + 'description': 'for testing' + }, + False + ), + ( + { + 'variable': 'testing', + 'label': 'Testing', + 'description': 'for testing', + 'schema': { + 'type': 'boolean', + 'default': True + } + }, + True + ), + ( + { + 'variable': 'testing', + 'label': 'Testing', + 'description': 'for testing', + 'schema': { + 'default': True + } + }, + False + ), +]) +def test_question_variable_validation(variable, should_work): + verrors = ValidationErrors() + validate_question(variable, '', verrors) + if not should_work: + with pytest.raises(ValidationErrors): + verrors.check() + else: + verrors.check() diff --git a/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_validate_ix_values.py b/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_validate_ix_values.py new file mode 100644 index 00000000..f634a927 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/pytest/unit/test_validate_ix_values.py @@ -0,0 +1,94 @@ +import pytest +from catalog_validation.validation import validate_ix_values_yaml +from catalog_validation.exceptions import ValidationErrors + + +@pytest.mark.parametrize('schema, ix_values_yaml_path, test_yaml, should_work', [ + + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + ''' + image: + pullPolicy: IfNotPresent + repository: ixsystems/chia-docker + tag: v1.6.2 + updateStrategy: Recreate + iXPortals: [{portalName: 'web portal', protocol: 'http', useNodeIP: false, host: '192.168.0.18', port: 9898}] + ''', + True + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + ''' + image: + pullPolicy: IfNotPresent + repository: ixsystems/chia-docker + tag: v1.6.2 + updateStrategy: Recreate + iXPortals: [{portalName: 'web portal', protocol: 'http', useNodeIP: true, port: 9898}] + ''', + True + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + ''' + image: + pullPolicy: IfNotPresent + repository: ixsystems/chia-docker + tag: v1.6.2 + updateStrategy: Recreate + iXPortals: [{portalName: 'web portal', protocol: 'htts', useNodeIP: true, port: 9898}] + ''', + False + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + 
'/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + ''' + image: + pullPolicy: IfNotPresent + repository: ixsystems/chia-docker + tag: v1.6.2 + updateStrategy: Recreate + iXPortals: [{portalName: 'web portal', protocol: 09088, useNodeIP: true, port: '9898'}] + ''', + False + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + ''' + image: + pullPolicy: IfNotPresent + repository: ixsystems/chia-docker + tag: v1.6.2 + updateStrategy: Recreate + iXPortals: [{portalName: 'web portal', useNodeIP: true, port: '9898'}] + ''', + False + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + '', + False, + ), + ( + 'charts.chia.versions.1.3.38.ix_values', + '/mnt/crave/ix-applications/catalogs/github_com_truenas_charts_git_master/test/chia/1.3.38/ix_values.yaml', + 'image pullPolicy ifNotPresent', + False, + ) +]) +def test_validate_ix_values(mocker, schema, ix_values_yaml_path, test_yaml, should_work): + open_file = mocker.mock_open(read_data=test_yaml) + mocker.patch('builtins.open', open_file) + + if should_work: + assert validate_ix_values_yaml(ix_values_yaml_path, schema) is None + else: + with pytest.raises(ValidationErrors): + validate_ix_values_yaml(ix_values_yaml_path, schema) diff --git a/catalog_validation/truecharts-fork/catalog_validation/schema/__init__.py b/catalog_validation/truecharts-fork/catalog_validation/schema/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/catalog_validation/truecharts-fork/catalog_validation/schema/attrs.py b/catalog_validation/truecharts-fork/catalog_validation/schema/attrs.py new file mode 100644 index 00000000..6e65d676 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/schema/attrs.py @@ -0,0 +1,262 @@ +from jsonschema import validate as json_schema_validate, ValidationError as JsonValidationError + +from catalog_validation.exceptions import ValidationErrors + +from .feature_gen import get_feature +from .variable_gen import generate_variable + + +class Schema: + + DEFAULT_TYPE = NotImplementedError + + def __init__(self, include_subquestions_attrs=True, data=None): + self.required = self.null = self.show_if = self.ref = self.ui_ref = self.type =\ + self.editable = self.hidden = self.default = self._schema_data = None + self._skip_data_values = [] + if include_subquestions_attrs: + self.subquestions = self.show_subquestions_if = None + if data: + self.initialize_values(data) + + def initialize_values(self, data): + self._schema_data = data + for key, value in filter( + lambda k: hasattr(self, k[0]) and k[0] not in self._skip_data_values, data.items() + ): + setattr(self, key, value) + + def get_schema_str(self, schema): + if schema: + return f'{schema}.' 
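+        # e.g. 'charts.chia' -> 'charts.chia.'; the trailing dot lets callers
+        # append nested attribute names when building dotted schema paths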
+ return '' + + def validate(self, schema, data=None): + if data: + self.initialize_values(data) + + if not self._schema_data: + raise Exception('Schema data must be initialized before validating schema') + + verrors = ValidationErrors() + try: + json_schema_validate(self._schema_data, self.json_schema()) + except JsonValidationError as e: + verrors.add(schema, f'Failed to validate schema: {e}') + + verrors.check() + + if '$ref' in self._schema_data: + for index, ref in enumerate(self._schema_data['$ref']): + if not isinstance(ref, str): + verrors.add(f'{schema}.$ref.{index}', 'Must be a string') + continue + + feature_obj = get_feature(ref) + if not feature_obj: + continue + try: + feature_obj.validate(self, f'{schema}.$ref.{index}') + except ValidationErrors as e: + verrors.extend(e) + + verrors.check() + + def json_schema(self): + schema = { + 'type': 'object', + 'properties': { + 'required': { + 'type': 'boolean', + }, + 'null': { + 'type': 'boolean', + }, + 'show_if': { + 'type': 'array', + }, + '$ref': { + 'type': 'array', + }, + '$ui-ref': { + 'type': 'array', + }, + 'subquestions': { + 'type': 'array', + }, + 'show_subquestions_if': { + 'type': ['string', 'integer', 'boolean', 'object', 'array', 'null'], + }, + 'type': { + 'type': 'string', + }, + 'editable': { + 'type': 'boolean', + }, + 'immutable': { + 'type': 'boolean', + }, + 'hidden': { + 'type': 'boolean', + }, + }, + 'required': ['type'], + 'dependentRequired': { + 'show_subquestions_if': ['subquestions'] + } + } + if self.DEFAULT_TYPE: + schema['properties']['default'] = { + 'type': [self.DEFAULT_TYPE] + (['null'] if self.null else []) + } + if hasattr(self, 'enum'): + schema['properties']['enum'] = { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'value': {'type': [self.DEFAULT_TYPE] + (['null'] if self.null else [])}, + 'description': {'type': ['string', 'null']}, + }, + 'additionalProperties': False, + 'required': ['value', 'description'] + }, + } + return schema + + +class BooleanSchema(Schema): + DEFAULT_TYPE = 'boolean' + + +class StringSchema(Schema): + DEFAULT_TYPE = 'string' + + def __init__(self, data): + self.min_length = self.max_length = self.enum = self.private = self.valid_chars = self.valid_chars_error = None + super().__init__(data=data) + + def json_schema(self): + schema = super().json_schema() + schema['properties'].update({ + 'min_length': { + 'type': 'integer', + }, + 'max_length': { + 'type': 'integer', + }, + 'private': { + 'type': 'boolean', + }, + 'valid_chars': { + 'type': 'string', + }, + 'valid_chars_error': { + 'type': 'string' + }, + }) + return schema + + +class IntegerSchema(Schema): + DEFAULT_TYPE = 'integer' + + def __init__(self, data): + self.min = self.max = self.enum = None + super().__init__(data=data) + + def json_schema(self): + schema = super().json_schema() + schema['properties'].update({ + 'min': { + 'type': 'integer', + }, + 'max': { + 'type': 'integer', + }, + }) + return schema + + +class PathSchema(Schema): + DEFAULT_TYPE = 'string' + + +class HostPathSchema(Schema): + DEFAULT_TYPE = 'string' + + +class HostPathDirSchema(Schema): + DEFAULT_TYPE = 'string' + + +class HostPathFileSchema(Schema): + DEFAULT_TYPE = 'string' + + +class URISchema(Schema): + DEFAULT_TYPE = 'string' + + +class IPAddrSchema(Schema): + DEFAULT_TYPE = 'string' + + def __init__(self, data): + self.ipv4 = self.ipv6 = self.cidr = None + super().__init__(data=data) + + def json_schema(self): + schema = super().json_schema() + schema['properties'].update({ + 'ipv4': {'type': 
'boolean'}, + 'ipv6': {'type': 'boolean'}, + 'cidr': {'type': 'boolean'}, + }) + return schema + + +class CronSchema(Schema): + DEFAULT_TYPE = 'object' + + +class DictSchema(Schema): + DEFAULT_TYPE = 'object' + + def __init__(self, data): + self.attrs = [] + self.additional_attrs = None + super().__init__(data=data) + self._skip_data_values = ['attrs'] + + def initialize_values(self, data): + super().initialize_values(data) + self.attrs = [generate_variable(d) for d in (data.get('attrs') or [])] + + def json_schema(self): + schema = super().json_schema() + schema['additionalProperties'] = bool(self.additional_attrs) + schema['properties']['attrs'] = {'type': 'array'} + schema['required'].append('attrs') + # We do not validate nested children and hence do not add it in the + # json schema as it makes it very complex to handle all the possibilities + return schema + + +class ListSchema(Schema): + + DEFAULT_TYPE = 'array' + + def __init__(self, data): + self.items = [] + super().__init__(False, data=data) + self._skip_data_values = ['items'] + + def initialize_values(self, data): + super().initialize_values(data) + self.items = [generate_variable(d) for d in (data.get('items') or [])] + + def json_schema(self): + schema = super().json_schema() + schema['properties']['items'] = {'type': 'array'} + schema['required'].append('items') + return schema diff --git a/catalog_validation/truecharts-fork/catalog_validation/schema/feature_gen.py b/catalog_validation/truecharts-fork/catalog_validation/schema/feature_gen.py new file mode 100644 index 00000000..62dd657f --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/schema/feature_gen.py @@ -0,0 +1,4 @@ +def get_feature(feature): + from .features import FEATURES + if feature in FEATURES: + return FEATURES[FEATURES.index(feature)] diff --git a/catalog_validation/truecharts-fork/catalog_validation/schema/features.py b/catalog_validation/truecharts-fork/catalog_validation/schema/features.py new file mode 100644 index 00000000..c69c7b44 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/schema/features.py @@ -0,0 +1,158 @@ +from catalog_validation.exceptions import ValidationErrors + +from .schema_gen import DictSchema, IntegerSchema, StringSchema + + +class Feature: + + NAME = NotImplementedError + VALID_SCHEMAS = [] + + def __str__(self): + return self.NAME + + def validate(self, schema_obj, schema_str): + verrors = ValidationErrors() + if not isinstance(schema_obj, tuple(self.VALID_SCHEMAS)): + verrors.add( + f'{schema_str}.type', + f'Schema must be one of {", ".join(str(v) for v in self.VALID_SCHEMAS)} schema types' + ) + + if not verrors: + self._validate(verrors, schema_obj, schema_str) + verrors.check() + + def _validate(self, verrors, schema_obj, schema_str): + pass + + def __eq__(self, other): + return self.NAME == (other if isinstance(other, str) else other.NAME) + + +class IXVolumeFeature(Feature): + + NAME = 'normalize/ixVolume' + VALID_SCHEMAS = [DictSchema, StringSchema] + + def _validate(self, verrors, schema_obj, schema_str): + if isinstance(schema_obj, StringSchema): + return + + attrs = schema_obj.attrs + if 'datasetName' not in attrs: + verrors.add(f'{schema_str}.attrs', 'Variable "datasetName" must be specified.') + elif not isinstance(attrs[attrs.index('datasetName')].schema, StringSchema): + verrors.add(f'{schema_str}.attrs', 'Variable "datasetName" must be of string type.') + + if 'aclEntries' in attrs and not isinstance(attrs[attrs.index('aclEntries')].schema, DictSchema): + 
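+            # attrs holds Variable objects whose __eq__ also accepts plain strings,
+            # so 'in' checks and .index() lookups work with the bare variable name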
verrors.add(f'{schema_str}.attrs', 'Variable "aclEntries" must be of dict type.') + + if 'properties' in attrs: + index = attrs.index('properties') + properties = attrs[index] + properties_schema = properties.schema + supported_props = { + 'recordsize': { + 'valid_schema_type': [StringSchema], + }, + } + not_supported = set([str(v) for v in properties_schema.attrs]) - set(supported_props) + if not_supported: + verrors.add( + f'{schema_str}.attrs.{index}.attrs', f'{", ".join(not_supported)} properties are not supported' + ) + + for prop_index, prop in enumerate(properties_schema.attrs): + if prop.name not in supported_props: + continue + + prop_schema = prop.schema + check_prop = supported_props[prop.name] + if not isinstance(prop_schema, tuple(check_prop['valid_schema_type'])): + verrors.add( + f'{schema_str}.attrs.{index}.attrs.{prop_index}', + f'{prop.name!r} must be of ' + f'{", ".join([str(s) for s in check_prop["valid_schema_type"]])} type(s)' + ) + + +class NormalizeInterfaceConfiguration(Feature): + NAME = 'normalize/interfaceConfiguration' + VALID_SCHEMAS = [DictSchema] + + +class DefinitionInterfaceFeature(Feature): + + NAME = 'definitions/interface' + VALID_SCHEMAS = [StringSchema] + + +class DefinitionGPUConfigurationFeature(Feature): + + NAME = 'definitions/gpuConfiguration' + VALID_SCHEMAS = [DictSchema] + + +class DefinitionTimezoneFeature(Feature): + + NAME = 'definitions/timezone' + VALID_SCHEMAS = [StringSchema] + + +class DefinitionNodeIPFeature(Feature): + + NAME = 'definitions/nodeIP' + VALID_SCHEMAS = [StringSchema] + + +class ValidationNodePortFeature(Feature): + + NAME = 'validations/nodePort' + VALID_SCHEMAS = [IntegerSchema] + + +class CertificateFeature(Feature): + + NAME = 'definitions/certificate' + VALID_SCHEMAS = [IntegerSchema] + + +class CertificateAuthorityFeature(Feature): + + NAME = 'definitions/certificateAuthority' + VALID_SCHEMAS = [IntegerSchema] + + +class ContainerImageFeature(Feature): + + NAME = 'validations/containerImage' + VALID_SCHEMAS = [DictSchema] + + def _validate(self, verrors, schema_obj, schema_str): + attrs = schema_obj.attrs + for check_attr in ('repository', 'tag'): + if check_attr not in attrs: + verrors.add(f'{schema_str}.attrs', f'Variable {check_attr!r} must be specified.') + elif not isinstance(attrs[attrs.index(check_attr)].schema, StringSchema): + verrors.add(f'{schema_str}.attrs', f'Variable {check_attr!r} must be of string type.') + + +class ACLFeature(Feature): + + NAME = 'normalize/acl' + VALID_SCHEMAS = [DictSchema] + + +FEATURES = [ + ACLFeature(), + IXVolumeFeature(), + DefinitionInterfaceFeature(), + DefinitionGPUConfigurationFeature(), + DefinitionTimezoneFeature(), + DefinitionNodeIPFeature(), + ValidationNodePortFeature(), + CertificateFeature(), + CertificateAuthorityFeature(), + ContainerImageFeature(), +] diff --git a/catalog_validation/truecharts-fork/catalog_validation/schema/migration_schema.py b/catalog_validation/truecharts-fork/catalog_validation/schema/migration_schema.py new file mode 100644 index 00000000..9b535d9a --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/schema/migration_schema.py @@ -0,0 +1,42 @@ +import re + + +APP_MIGRATION_DIR = 'migrations' +APP_MIGRATION_SCHEMA = { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'app_name': {'type': 'string'}, + 'action': {'type': 'string', 'enum': ['move']}, + }, + 'required': [ + 'app_name', + 'action' + ], + 'allOf': [ + { + 'if': { + 'properties': { + 'action': { + 'const': 'move', + }, + }, + }, + 
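+                # a 'move' migration must name both its source and destination train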
'then': { + 'properties': { + 'old_train': {'type': 'string'}, + 'new_train': {'type': 'string'}, + }, + 'required': [ + 'new_train', + 'old_train', + ], + }, + }, + ], + }, +} +MIGRATION_DIRS = ['.migrations', 'ix-migrations'] +RE_MIGRATION_NAME_STR = r'^\d+\w+.json' +RE_MIGRATION_NAME = re.compile(RE_MIGRATION_NAME_STR) diff --git a/catalog_validation/truecharts-fork/catalog_validation/schema/schema_gen.py b/catalog_validation/truecharts-fork/catalog_validation/schema/schema_gen.py new file mode 100644 index 00000000..25042573 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/schema/schema_gen.py @@ -0,0 +1,41 @@ +from .attrs import ( + BooleanSchema, StringSchema, IntegerSchema, PathSchema, HostPathSchema, HostPathDirSchema, + HostPathFileSchema, ListSchema, DictSchema, IPAddrSchema, CronSchema, URISchema +) + + +def get_schema(schema_data): + schema = None + if not isinstance(schema_data, dict): + return schema + + s_type = schema_data.get('type') + if s_type == 'boolean': + schema = BooleanSchema + elif s_type == 'string': + schema = StringSchema + elif s_type == 'int': + schema = IntegerSchema + elif s_type == 'path': + schema = PathSchema + elif s_type == 'hostpath': + schema = HostPathSchema + elif s_type == 'hostpathdirectory': + schema = HostPathDirSchema + elif s_type == 'hostpathfile': + schema = HostPathFileSchema + elif s_type == 'list': + schema = ListSchema + elif s_type == 'dict': + schema = DictSchema + elif s_type == 'ipaddr': + schema = IPAddrSchema + elif s_type == 'cron': + schema = CronSchema + elif s_type == 'uri': + schema = URISchema + + if schema: + schema = schema(data=schema_data) + + return schema diff --git a/catalog_validation/truecharts-fork/catalog_validation/schema/variable.py b/catalog_validation/truecharts-fork/catalog_validation/schema/variable.py new file mode 100644 index 00000000..e11d522a --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/schema/variable.py @@ -0,0 +1,37 @@ +from catalog_validation.exceptions import ValidationErrors + +from .schema_gen import get_schema + + +class Variable: + def __init__(self, data): + self.name = self.label = self.description = self.group = None + self.schema = None + self.update_from_data(data) + + def update_from_data(self, data): + self.name = data.get('variable') + self.label = data.get('label') + self.description = data.get('description') + self.schema = get_schema(data.get('schema')) + + def validate(self, schema): + verrors = ValidationErrors() + if not self.name: + verrors.add(f'{schema}.variable', 'Variable value must be specified') + + if not self.schema: + verrors.add(f'{schema}.schema', 'Schema must be specified for variable') + else: + try: + self.schema.validate(f'{schema}.schema') + except ValidationErrors as ve: + verrors.extend(ve) + + verrors.check() + + def __str__(self): + return self.name + + def __eq__(self, other): + return (other if isinstance(other, str) else other.name) == self.name diff --git a/catalog_validation/truecharts-fork/catalog_validation/schema/variable_gen.py b/catalog_validation/truecharts-fork/catalog_validation/schema/variable_gen.py new file mode 100644 index 00000000..8784e4f9 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/schema/variable_gen.py @@ -0,0 +1,3 @@ +def generate_variable(variable_data): + from .variable import Variable + return Variable(variable_data) diff --git a/catalog_validation/truecharts-fork/catalog_validation/scripts/__init__.py 
b/catalog_validation/truecharts-fork/catalog_validation/scripts/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/catalog_validation/truecharts-fork/catalog_validation/scripts/catalog_update.py b/catalog_validation/truecharts-fork/catalog_validation/scripts/catalog_update.py new file mode 100644 index 00000000..0d52a8b6 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/scripts/catalog_update.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python +import argparse +import contextlib +import json +import os +import shutil +import typing + +from jsonschema import validate as json_schema_validate, ValidationError as JsonValidationError + +from catalog_validation.ci.utils import ( + get_app_version, get_ci_development_directory, OPTIONAL_METADATA_FILES, + REQUIRED_METADATA_FILES, version_has_been_bumped, get_to_keep_versions +) +from catalog_validation.exceptions import ValidationErrors +from catalog_validation.items.catalog import get_items_in_trains, retrieve_train_names, retrieve_trains_data +from catalog_validation.items.utils import get_catalog_json_schema +from catalog_validation.utils import CACHED_CATALOG_FILE_NAME, CACHED_VERSION_FILE_NAME +from catalog_validation.validation import validate_catalog_item_version_data +from collections import defaultdict + + +def get_trains(location: str) -> typing.Tuple[dict, dict]: + preferred_trains: list = [] + trains_to_traverse = retrieve_train_names(location) + catalog_data = {} + versions_data = {} + for train_name, train_data in retrieve_trains_data( + get_items_in_trains(trains_to_traverse, location), location, preferred_trains, trains_to_traverse + )[0].items(): + catalog_data[train_name] = {} + versions_data[train_name] = {} + for app_name, app_data in train_data.items(): + catalog_data[train_name][app_name] = {} + versions_data[train_name][app_name] = {} + for k, v in app_data.items(): + if k == 'versions': + versions_data[train_name][app_name][k] = v + else: + catalog_data[train_name][app_name][k] = v + + return catalog_data, versions_data + + +def validate_train_data(train_data): + verrors = ValidationErrors() + try: + json_schema_validate(train_data, get_catalog_json_schema()) + except (json.JSONDecodeError, JsonValidationError) as e: + verrors.add( + 'catalog_json', + f'Failed to validate contents of train data: {e!r}' + ) + verrors.check() + + +def validate_versions_data(versions_data): + verrors = ValidationErrors() + for train_name, train_data in versions_data.items(): + for app_name, app_version_data in train_data.items(): + validate_catalog_item_version_data(app_version_data['versions'], f'{train_name}.{app_name}', verrors) + verrors.check() + + +def get_apps_to_publish(catalog_path: str) -> dict: + ci_dev_dir = get_ci_development_directory(catalog_path) + to_publish_apps = defaultdict(list) + for train_name in os.listdir(ci_dev_dir): + train_path = os.path.join(ci_dev_dir, train_name) + if not os.path.isdir(train_path): + continue + + for app_name in os.listdir(train_path): + app_path = os.path.join(train_path, app_name) + if not os.path.isdir(app_path): + continue + + app_current_version = get_app_version(app_path) + if version_has_been_bumped(os.path.join(catalog_path, train_name, app_name), app_current_version): + to_publish_apps[train_name].append({'name': app_name, 'version': app_current_version}) + + return to_publish_apps + + +def publish_updated_apps(catalog_path: str) -> None: + ci_dev_directory = get_ci_development_directory(catalog_path) + if not os.path.isdir(ci_dev_directory): + return 
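+    # for each app whose version was bumped under the ix-dev tree, publish the dev
+    # contents as <train>/<app>/<version> and prune versions that are neither the
+    # one just published nor pinned via get_to_keep_versions()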
+ + for train_name, apps in get_apps_to_publish(catalog_path).items(): + dev_train_path = os.path.join(ci_dev_directory, train_name) + publish_train_path = os.path.join(catalog_path, train_name) + os.makedirs(publish_train_path, exist_ok=True) + + for app in apps: + app_name, app_version = app['name'], app['version'] + dev_app_path = os.path.join(dev_train_path, app_name) + publish_app_path = os.path.join(publish_train_path, app_name) + publish_app_version_path = os.path.join(publish_app_path, app_version) + required_versions = get_to_keep_versions(dev_app_path) + os.makedirs(publish_app_path, exist_ok=True) + + dev_item_yaml_path = os.path.join(dev_app_path, 'item.yaml') + publish_item_yaml_path = os.path.join(publish_app_path, 'item.yaml') + shutil.copy(dev_item_yaml_path, publish_item_yaml_path) + shutil.copytree(dev_app_path, publish_app_version_path) + + for file_name in OPTIONAL_METADATA_FILES + REQUIRED_METADATA_FILES: + with contextlib.suppress(OSError): + os.unlink(os.path.join(publish_app_version_path, file_name)) + + ix_values_path = os.path.join(publish_app_version_path, 'ix_values.yaml') + values_path = os.path.join(publish_app_version_path, 'values.yaml') + if not os.path.exists(ix_values_path) and os.path.exists(values_path): + shutil.move(values_path, ix_values_path) + + for version in os.listdir(publish_app_path): + version_path = os.path.join(publish_app_path, version) + if not os.path.isdir(version_path) or version in required_versions: + continue + + if version != app_version: + shutil.rmtree(version_path) + + print( + f'[\033[92mOK\033[0m]\tPublished version {app_version!r} of {app_name!r} ' + f'to the {train_name!r} train successfully!' + ) + + +def update_catalog_file(location: str) -> None: + catalog_file_path = os.path.join(location, CACHED_CATALOG_FILE_NAME) + catalog_data, versions_data = get_trains(location) + validate_train_data(catalog_data) + validate_versions_data(versions_data) + + with open(catalog_file_path, 'w') as f: + f.write(json.dumps(catalog_data, indent=4)) + + print(f'[\033[92mOK\033[0m]\tUpdated {catalog_file_path!r} successfully!') + + for train_name, train_data in versions_data.items(): + for app_name, app_data in train_data.items(): + version_path = os.path.join(location, train_name, app_name, CACHED_VERSION_FILE_NAME) + with open(version_path, 'w') as f: + f.write(json.dumps(app_data['versions'], indent=4)) + + print(f'[\033[92mOK\033[0m]\tUpdated {version_path!r} successfully!') + + +def main(): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(help='sub-command help', dest='action') + + publish_setup = subparsers.add_parser('publish', help='Publish apps of TrueNAS catalog') + publish_setup.add_argument('--path', help='Specify path of TrueNAS catalog') + + parser_setup = subparsers.add_parser('update', help='Update TrueNAS catalog') + parser_setup.add_argument('--path', help='Specify path of TrueNAS catalog') + + args = parser.parse_args() + if args.action == 'publish': + publish_updated_apps(args.path) + elif args.action == 'update': + update_catalog_file(args.path) + else: + parser.print_help() + + +if __name__ == '__main__': + main() diff --git a/catalog_validation/truecharts-fork/catalog_validation/scripts/catalog_validate.py b/catalog_validation/truecharts-fork/catalog_validation/scripts/catalog_validate.py new file mode 100644 index 00000000..1094bdf1 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/scripts/catalog_validate.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python +import argparse +
+from catalog_validation.exceptions import CatalogDoesNotExist, ValidationErrors +from catalog_validation.validation import validate_catalog + + +def validate(catalog_path, ignore_catalog_json=False): + try: + validate_catalog(catalog_path, ignore_catalog_json) + except CatalogDoesNotExist: + print(f'[\033[91mFAILED\033[0m]\tSpecified {catalog_path!r} path does not exist') + exit(1) + except ValidationErrors as verrors: + print('[\033[91mFAILED\033[0m]\tThe following validation failures were found:') + for index, verror in enumerate(verrors.errors): + print(f'[\033[91m{index}\033[0m]\t{verror}') + exit(1) + else: + print('[\033[92mOK\033[0m]\tPASSED VALIDATION CHECKS') + + +def main(): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(help='sub-command help', dest='action') + + parser_setup = subparsers.add_parser('validate', help='Validate TrueNAS catalog') + parser_setup.add_argument('--path', help='Specify path of TrueNAS catalog') + parser_setup.add_argument('--ignore-catalog-json', action=argparse.BooleanOptionalAction, + help='Skip validating the catalog.json file') + + args = parser.parse_args() + if args.action == 'validate': + validate(args.path, args.ignore_catalog_json) + else: + parser.print_help() + + +if __name__ == '__main__': + main() diff --git a/catalog_validation/truecharts-fork/catalog_validation/scripts/dev_apps_validate.py b/catalog_validation/truecharts-fork/catalog_validation/scripts/dev_apps_validate.py new file mode 100644 index 00000000..1ce514be --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/scripts/dev_apps_validate.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +import argparse + +from catalog_validation.ci.validate import validate_dev_directory_structure +from catalog_validation.git_utils import get_changed_apps + + +def main(): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(help='sub-command help', dest='action') + + parser_setup = subparsers.add_parser( + 'validate', help='Validate TrueNAS dev catalog items' + ) + parser_setup.add_argument('--path', help='Specify path of TrueNAS dev catalog', required=True) + parser_setup.add_argument( + '--base_branch', help='Specify base branch to find changed catalog items', default='master' + ) + + args = parser.parse_args() + if args.action == 'validate': + validate_dev_directory_structure(args.path, get_changed_apps(args.path, args.base_branch)) + else: + parser.print_help() + + +if __name__ == '__main__': + main() diff --git a/catalog_validation/truecharts-fork/catalog_validation/utils.py b/catalog_validation/truecharts-fork/catalog_validation/utils.py new file mode 100644 index 00000000..3a99c38c --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/utils.py @@ -0,0 +1,223 @@ +import re + + +CACHED_CATALOG_FILE_NAME = 'catalog.json' +CACHED_VERSION_FILE_NAME = 'app_versions.json' +METADATA_JSON_SCHEMA = { + 'type': 'object', + 'properties': { + 'runAsContext': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'description': {'type': 'string'}, + 'gid': {'type': 'integer'}, + 'groupName': {'type': 'string'}, + 'userName': {'type': 'string'}, + 'uid': {'type': 'integer'}, + }, + 'required': ['description'], + }, + }, + 'capabilities': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'description': {'type': 'string'}, + 'name': {'type': 'string'}, + }, + 'required': ['description', 'name'], + }, + }, + 'hostMounts': { + 'type': 'array', + 'items': { + 'type': 'object', +
'properties': { + 'description': {'type': 'string'}, + 'hostPath': {'type': 'string'}, + }, + 'required': ['description', 'hostPath'], + }, + }, + }, +} +VALID_TRAIN_REGEX = re.compile(r'^\w+[\w.-]*$') +VERSION_VALIDATION_SCHEMA = { + 'type': 'object', + 'title': 'Versions', + 'patternProperties': { + r'[0-9]+\.[0-9]+\.[0-9]+': { + 'type': 'object', + 'properties': { + 'healthy': { + 'type': 'boolean', + }, + 'supported': { + 'type': 'boolean', + }, + 'healthy_error': { + 'type': ['string', 'null'] + }, + 'location': { + 'type': 'string', + 'pattern': r'^(\/[a-zA-Z0-9_.-]+)+$' + }, + 'last_update': { + 'type': 'string', + 'pattern': '^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$' + }, + 'required_features': { + 'type': 'array', + 'items': { + 'type': 'string' + } + }, + 'human_version': { + 'type': 'string' + }, + 'version': { + 'type': 'string', + 'pattern': r'[0-9]+\.[0-9]+\.[0-9]+' + }, + 'chart_metadata': { + 'type': 'object', + 'properties': { + 'name': { + 'type': 'string' + }, + 'description': { + 'type': 'string' + }, + 'annotations': { + 'type': 'object' + }, + 'type': { + 'type': 'string' + }, + 'version': { + 'type': 'string', + 'pattern': r'[0-9]+\.[0-9]+\.[0-9]+' + }, + 'apiVersion': { + 'type': 'string', + }, + 'appVersion': { + 'type': 'string' + }, + 'kubeVersion': { + 'type': 'string' + }, + 'app_readme': {'type': 'string'}, + 'detailed_readme': {'type': 'string'}, + 'changelog': {'type': ['string', 'null']}, + 'maintainers': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'name': {'type': 'string'}, + 'url': {'type': ['string', 'null']}, + 'email': {'type': 'string'}, + }, + 'required': ['name', 'email'], + } + }, + 'dependencies': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'name': {'type': 'string'}, + 'repository': {'type': 'string'}, + 'version': {'type': 'string'} + } + } + }, + 'home': {'type': 'string'}, + 'icon': {'type': 'string'}, + 'sources': { + 'type': 'array', + 'items': { + 'type': 'string' + } + }, + 'keywords': { + 'type': 'array', + 'items': { + 'type': 'string' + } + }, + } + }, + 'app_metadata': { + **METADATA_JSON_SCHEMA, + 'type': ['object', 'null'], + }, + 'schema': { + 'type': 'object', + 'properties': { + 'groups': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'name': { + 'type': 'string' + }, + 'description': { + 'type': 'string' + }, + }, + 'required': ['description', 'name'], + } + }, + 'portals': { + 'type': 'object' + }, + 'questions': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'variable': {'type': 'string'}, + 'label': {'type': 'string'}, + 'group': {'type': 'string'}, + 'schema': { + 'type': 'object', + 'properties': { + 'type': {'type': 'string'} + }, + 'required': ['type'] + } + } + } + } + }, + 'required': ['groups', 'questions'] + }, + }, + 'required': [ + 'healthy', 'supported', 'healthy_error', 'location', 'last_update', 'required_features', + 'human_version', 'version', 'chart_metadata', 'app_metadata', 'schema', + ], + }, + }, + 'additionalProperties': False +} +WANTED_FILES_IN_ITEM_VERSION = {'questions.yaml', 'app-readme.md', 'Chart.yaml', 'README.md'} + + +def validate_key_value_types(data_to_check, mapping, verrors, schema): + for key_mapping in mapping: + if len(key_mapping) == 2: + key, value_type, required = *key_mapping, True + else: + key, value_type, required = key_mapping + + if required and key not in data_to_check: + verrors.add(f'{schema}.{key}', f'Missing required {key!r} key.') + elif key in
data_to_check and not isinstance(data_to_check[key], value_type): + verrors.add(f'{schema}.{key}', f'{key!r} value should be a {value_type.__name__!r}') diff --git a/catalog_validation/truecharts-fork/catalog_validation/validation.py b/catalog_validation/truecharts-fork/catalog_validation/validation.py new file mode 100644 index 00000000..cd3cfdcf --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/validation.py @@ -0,0 +1,459 @@ +import concurrent.futures +import json +import jsonschema +import os +import yaml + +from jsonschema import validate as json_schema_validate, ValidationError as JsonValidationError +from semantic_version import Version +from typing import Optional + +from .exceptions import CatalogDoesNotExist, ValidationErrors +from .items.ix_values_utils import validate_ix_values_schema +from .items.questions_utils import ( + CUSTOM_PORTALS_KEY, CUSTOM_PORTALS_ENABLE_KEY, CUSTOM_PORTAL_GROUP_KEY, +) +from .items.utils import get_catalog_json_schema, RECOMMENDED_APPS_FILENAME, RECOMMENDED_APPS_SCHEMA, TRAIN_IGNORE_DIRS +from .schema.migration_schema import ( + APP_MIGRATION_SCHEMA, MIGRATION_DIRS, RE_MIGRATION_NAME, RE_MIGRATION_NAME_STR, APP_MIGRATION_DIR, +) +from .schema.variable import Variable +from .validation_utils import validate_chart_version +from .utils import ( + CACHED_CATALOG_FILE_NAME, CACHED_VERSION_FILE_NAME, METADATA_JSON_SCHEMA, validate_key_value_types, + VALID_TRAIN_REGEX, VERSION_VALIDATION_SCHEMA, WANTED_FILES_IN_ITEM_VERSION +) + + +def validate_catalog(catalog_path, ignore_catalog_json=False): + if not os.path.exists(catalog_path): + raise CatalogDoesNotExist(catalog_path) + + verrors = ValidationErrors() + items = [] + item_futures = [] + + if not ignore_catalog_json: + cached_catalog_file_path = os.path.join(catalog_path, CACHED_CATALOG_FILE_NAME) + if not os.path.exists(cached_catalog_file_path): + verrors.add( + 'cached_catalog_file', + f'{CACHED_CATALOG_FILE_NAME!r} metadata file must be specified for a valid catalog' + ) + else: + try: + with open(cached_catalog_file_path, 'r') as f: + json_schema_validate(json.loads(f.read()), get_catalog_json_schema()) + + except (json.JSONDecodeError, JsonValidationError) as e: + verrors.add( + 'cached_catalog_file', + f'Failed to validate contents of {cached_catalog_file_path!r}: {e!r}' + ) + + verrors.check() + + validate_recommended_apps_file(catalog_path) + + for file_dir in os.listdir(catalog_path): + complete_path = os.path.join(catalog_path, file_dir) + if file_dir not in MIGRATION_DIRS and ( + file_dir.startswith('.') or not os.path.isdir(complete_path) or file_dir in TRAIN_IGNORE_DIRS + ): + continue + if file_dir in MIGRATION_DIRS: + if all(os.path.exists(migration_dir) for migration_dir in map( + lambda d: os.path.join(catalog_path, d), MIGRATION_DIRS + )): + verrors.add( + 'app_migrations', f'Only one of {", ".join(MIGRATION_DIRS)} can be used to specify app migrations' + ) + else: + for directory in MIGRATION_DIRS: + migration_dir = os.path.join(catalog_path, directory) + if not os.path.exists(migration_dir): + continue + if os.path.isdir(migration_dir): + try: + validate_migrations(migration_dir) + except ValidationErrors as e: + verrors.extend(e) + else: + verrors.add('app_migrations', f'{directory!r} is not a directory') + else: + try: + validate_train_structure(complete_path) + except ValidationErrors as e: + verrors.extend(e) + else: + items.extend(get_train_items(complete_path)) + + with concurrent.futures.ProcessPoolExecutor(max_workers=20 if len(items) > 10 else 5) as
exc: + for item in items: + item_futures.append(exc.submit(validate_catalog_item, item[0], item[1])) + + for future in item_futures: + try: + future.result() + except ValidationErrors as e: + verrors.extend(e) + + verrors.check() + + +def validate_recommended_apps_file(catalog_location: str) -> None: + verrors = ValidationErrors() + try: + with open(os.path.join(catalog_location, RECOMMENDED_APPS_FILENAME), 'r') as f: + data = yaml.safe_load(f.read()) + json_schema_validate(data, RECOMMENDED_APPS_SCHEMA) + except FileNotFoundError: + return + except yaml.YAMLError: + verrors.add(RECOMMENDED_APPS_FILENAME, 'Must be a valid yaml file') + except JsonValidationError as e: + verrors.add(RECOMMENDED_APPS_FILENAME, f'Invalid format specified: {e}') + + verrors.check() + + +def validate_migrations(migration_dir): + verrors = ValidationErrors() + for migration_file in os.listdir(migration_dir): + if not RE_MIGRATION_NAME.findall(migration_file): + verrors.add( + f'app_migrations.{migration_file}', + 'Invalid naming scheme used for migration file name. ' + f'It should conform to the {RE_MIGRATION_NAME_STR!r} pattern.' + ) + else: + try: + with open(os.path.join(migration_dir, migration_file), 'r') as f: + data = json.loads(f.read()) + jsonschema.validate(data, APP_MIGRATION_SCHEMA) + except (json.JSONDecodeError, jsonschema.ValidationError) as e: + verrors.add( + f'app_migrations.{migration_file}', + f'Failed to validate migration file structure: {e}' + ) + verrors.check() + + +def validate_train_structure(train_path): + train = os.path.basename(train_path) + verrors = ValidationErrors() + if not VALID_TRAIN_REGEX.match(train): + verrors.add(train, 'Train name is invalid.') + print(f'processing train: {train}') + verrors.check() + + +def get_train_items(train_path): + train = os.path.basename(train_path) + items = [] + for catalog_item in os.listdir(train_path): + item_path = os.path.join(train_path, catalog_item) + if not os.path.isdir(item_path): + continue + items.append((item_path, f'{train}.{catalog_item}')) + return items + + +def validate_catalog_item(catalog_item_path, schema, validate_versions=True): + # We should ensure that each catalog item has at least 1 version available + # Also that we have item.yaml present + verrors = ValidationErrors() + item_name = catalog_item_path + files = [] + versions = [] + print(f'processing catalog item: {item_name}') + if not os.path.isdir(catalog_item_path): + verrors.add(schema, 'Catalog item must be a directory') + verrors.check() + + for file_dir in os.listdir(catalog_item_path): + complete_path = os.path.join(catalog_item_path, file_dir) + if os.path.isdir(complete_path): + versions.append(complete_path) + else: + files.append(file_dir) + + if not versions: + verrors.add(f'{schema}.versions', f'No versions found for {item_name} item.') + + if 'item.yaml' not in files: + verrors.add(f'{schema}.item', 'Item configuration (item.yaml) not found') + else: + with open(os.path.join(catalog_item_path, 'item.yaml'), 'r') as f: + item_config = yaml.safe_load(f.read()) + + validate_key_value_types( + item_config, ( + ('categories', list), ('tags', list, False), ('screenshots', list, False), + ), verrors, f'{schema}.item_config' + ) + + cached_version_file_path = os.path.join(catalog_item_path, CACHED_VERSION_FILE_NAME) + if os.path.exists(cached_version_file_path): + try: + with open(cached_version_file_path, 'r') as f: + validate_catalog_item_version_data( + json.loads(f.read()), f'{schema}.{CACHED_VERSION_FILE_NAME}', verrors + ) + except
json.JSONDecodeError: + verrors.add( + f'{schema}.{CACHED_VERSION_FILE_NAME}', f'{CACHED_VERSION_FILE_NAME!r} is not a valid json file' + ) + + for version_path in (versions if validate_versions else []): + try: + validate_catalog_item_version(version_path, f'{schema}.versions.{os.path.basename(version_path)}') + except ValidationErrors as e: + verrors.extend(e) + + verrors.check() + + +def validate_app_migrations(version_path, schema): + verrors = ValidationErrors() + app_migration_path = os.path.join(version_path, APP_MIGRATION_DIR) + + if not os.path.exists(app_migration_path): + return verrors + + for migration_file in os.listdir(app_migration_path): + migration_file_path = os.path.join(app_migration_path, migration_file) + if not os.access(migration_file_path, os.X_OK): + verrors.add(schema, f'{migration_file!r} is not executable') + return verrors + + +def validate_catalog_item_version_data(version_data: dict, schema: str, verrors: ValidationErrors) -> ValidationErrors: + try: + json_schema_validate(version_data, VERSION_VALIDATION_SCHEMA) + except JsonValidationError as e: + verrors.add(schema, f'Invalid format specified for application versions: {e}') + return verrors + + +def validate_catalog_item_version( + version_path: str, schema: str, version_name: Optional[str] = None, item_name: Optional[str] = None, + validate_values: bool = False, +): + verrors = ValidationErrors() + version_name = version_name or os.path.basename(version_path) + item_name = item_name or version_path.split('/')[-2] + try: + Version(version_name) + except ValueError: + verrors.add(f'{schema}.name', f'{version_name!r} is not a valid version name.') + print(f'processing catalog item version: {version_name}') + + files_diff = WANTED_FILES_IN_ITEM_VERSION ^ set( + f for f in os.listdir(version_path) if f in WANTED_FILES_IN_ITEM_VERSION + ) + if files_diff: + verrors.add(f'{schema}.required_files', f'Missing {", ".join(files_diff)} required configuration files.') + + chart_version_path = os.path.join(version_path, 'Chart.yaml') + validate_chart_version(verrors, chart_version_path, schema, item_name, version_name) + + questions_path = os.path.join(version_path, 'questions.yaml') + if os.path.exists(questions_path): + try: + validate_questions_yaml(questions_path, f'{schema}.questions_configuration') + except ValidationErrors as v: + verrors.extend(v) + + for values_file in ['ix_values.yaml'] + (['values.yaml'] if validate_values else []): + values_path = os.path.join(version_path, values_file) + if os.path.exists(values_path): + try: + validate_ix_values_yaml(values_path, f'{schema}.values_configuration') + except ValidationErrors as v: + verrors.extend(v) + + metadata_path = os.path.join(version_path, 'metadata.yaml') + if os.path.exists(metadata_path): + try: + validate_metadata_yaml(metadata_path, f'{schema}.metadata_configuration') + except ValidationErrors as v: + verrors.extend(v) + + verrors.extend(validate_app_migrations(version_path, f'{schema}.app_migrations')) + + verrors.check() + + +def validate_ix_values_yaml(ix_values_yaml_path, schema): + verrors = ValidationErrors() + + with open(ix_values_yaml_path, 'r') as f: + try: + ix_values = yaml.safe_load(f.read()) + except yaml.YAMLError: + verrors.add(schema, 'Must be a valid yaml file') + + verrors.check() + + if isinstance(ix_values, dict): + portals = ix_values.get(CUSTOM_PORTALS_KEY) + if portals: + try: + validate_ix_values_schema(schema, portals) + except ValidationErrors as ve: + verrors.extend(ve) + else: + verrors.add(schema, 'Must be a dictionary') + 
verrors.check() + + +def validate_metadata_yaml(metadata_yaml_path, schema): + verrors = ValidationErrors() + with open(metadata_yaml_path, 'r') as f: + try: + metadata = yaml.safe_load(f.read()) + except yaml.YAMLError: + verrors.add(schema, 'Must be a valid yaml file') + else: + try: + json_schema_validate(metadata, METADATA_JSON_SCHEMA) + except JsonValidationError as e: + verrors.add(schema, f'Invalid format specified for application metadata: {e}') + + verrors.check() + + +def validate_questions_yaml(questions_yaml_path, schema): + verrors = ValidationErrors() + + with open(questions_yaml_path, 'r') as f: + try: + questions_config = yaml.safe_load(f.read()) + except yaml.YAMLError: + verrors.add(schema, 'Must be a valid yaml file') + else: + if not isinstance(questions_config, dict): + verrors.add(schema, 'Must be a dictionary') + + verrors.check() + + validate_key_value_types( + questions_config, ( + ('groups', list), ('questions', list), ('portals', dict, False), (CUSTOM_PORTALS_ENABLE_KEY, bool, False), + (CUSTOM_PORTAL_GROUP_KEY, str, False), + ), verrors, schema + ) + + verrors.check() + + groups = [] + for index, group in enumerate(questions_config['groups']): + if not isinstance(group, dict): + verrors.add(f'{schema}.groups.{index}', 'Type of group should be a dictionary.') + continue + + if group.get('name'): + groups.append(group['name']) + + validate_key_value_types(group, (('name', str), ('description', str)), verrors, f'{schema}.group.{index}') + + for index, portal_details in enumerate((questions_config.get('portals') or {}).items()): + portal_type, portal_schema = portal_details + error_schema = f'{schema}.portals.{index}' + if not isinstance(portal_type, str): + verrors.add(error_schema, 'Portal type must be a string') + if not isinstance(portal_schema, dict): + verrors.add(error_schema, 'Portal schema must be a dictionary') + else: + validate_key_value_types( + portal_schema, (('protocols', list), ('host', list), ('ports', list), ('path', str, False)), + verrors, error_schema + ) + + validate_variable_uniqueness(questions_config['questions'], f'{schema}.questions', verrors) + for index, question in enumerate(questions_config['questions']): + validate_question(question, f'{schema}.questions.{index}', verrors, (('group', str),)) + if question.get('group') and question['group'] not in groups: + verrors.add(f'{schema}.questions.{index}.group', f'Please specify a group declared in "{schema}.groups"') + + if questions_config.get(CUSTOM_PORTALS_ENABLE_KEY): + if not questions_config.get(CUSTOM_PORTAL_GROUP_KEY): + verrors.add( + f'{schema}.{CUSTOM_PORTALS_ENABLE_KEY}', + f'{CUSTOM_PORTAL_GROUP_KEY!r} must be specified when user-specified portals are desired' + ) + elif questions_config[CUSTOM_PORTAL_GROUP_KEY] not in groups: + verrors.add( + f'{schema}.{CUSTOM_PORTAL_GROUP_KEY}', + 'Specified group not declared under "groups"' + ) + + verrors.check() + + +def validate_variable_uniqueness(data, schema, verrors): + variables = [] + for index, question in enumerate(data): + if question['variable'] in variables: + verrors.add( + f'{schema}.{index}', f'Variable name {question["variable"]!r} has been used again, which is not allowed' + ) + else: + variables.append(question['variable']) + sub_questions = question.get('subquestions') or [] + for sub_index, sub_question in enumerate(sub_questions): + if sub_question['variable'] in variables: + verrors.add( + f'{schema}.{index}.subquestions.{sub_index}', + f'Variable name {sub_question["variable"]!r} has been used again, which is not
allowed' + ) + else: + variables.append(sub_question['variable']) + + verrors.check() + + +def validate_question(question_data, schema, verrors, validate_top_level_attrs=None): + if not isinstance(question_data, dict): + verrors.add(schema, 'Question must be a valid dictionary.') + return + + validate_top_level_attrs = validate_top_level_attrs or tuple() + validate_key_value_types( + question_data, (('variable', str), ('label', str), ('schema', dict)) + validate_top_level_attrs, verrors, schema + ) + if not isinstance(question_data.get('schema'), dict): + return + + if question_data['variable'] == CUSTOM_PORTALS_KEY: + verrors.add( + f'{schema}.variable', + f'{CUSTOM_PORTALS_KEY!r} is a reserved variable name and cannot be specified by app developer' + ) + # No need to validate the question data etc here + return + + try: + Variable(question_data).validate(schema) + except ValidationErrors as ve: + verrors.extend(ve) + return + + schema_data = question_data['schema'] + variable_type = schema_data['type'] + + for condition, key, schema_str in ( + (variable_type != 'list', 'subquestions', f'{schema}.schema.subquestions'), + (variable_type == 'list', 'items', f'{schema}.schema.items'), + (variable_type == 'dict', 'attrs', f'{schema}.schema.attrs'), + ): + if not (condition and isinstance(schema_data.get(key), list)): + continue + + if variable_type == 'dict': + validate_variable_uniqueness(schema_data[key], schema_str, verrors) + + for index, item in enumerate(schema_data[key]): + validate_question(item, f'{schema_str}.{index}', verrors) diff --git a/catalog_validation/truecharts-fork/catalog_validation/validation_utils.py b/catalog_validation/truecharts-fork/catalog_validation/validation_utils.py new file mode 100644 index 00000000..e8ba4851 --- /dev/null +++ b/catalog_validation/truecharts-fork/catalog_validation/validation_utils.py @@ -0,0 +1,66 @@ +import os +import yaml + +from semantic_version import Version +from typing import Optional + +from .exceptions import ValidationErrors + + +def validate_chart_version( + verrors: ValidationErrors, chart_version_path: str, schema: str, item_name: str, version_name: Optional[str] = None, +) -> ValidationErrors: + if os.path.exists(chart_version_path): + with open(chart_version_path, 'r') as f: + try: + chart_config = yaml.safe_load(f.read()) + except yaml.YAMLError: + verrors.add(schema, 'Must be a valid yaml file') + else: + if not isinstance(chart_config, dict): + verrors.add(schema, 'Must be a dictionary') + else: + if chart_config.get('name') != item_name: + verrors.add(f'{schema}.item_name', 'Item name not correctly set in "Chart.yaml".') + + if not isinstance(chart_config.get('annotations', {}), dict): + verrors.add(f'{schema}.annotations', 'Annotations must be a dictionary') + + if not isinstance(chart_config.get('sources', []), list): + verrors.add(f'{schema}.sources', 'Sources must be a list') + else: + for index, source in enumerate(chart_config.get('sources', [])): + if not isinstance(source, str): + verrors.add(f'{schema}.sources.{index}', 'Source must be a string') + + if not isinstance(chart_config.get('maintainers', []), list): + verrors.add(f'{schema}.maintainers', 'Maintainers must be a list') + else: + for index, maintainer in enumerate(chart_config.get('maintainers', [])): + if not isinstance(maintainer, dict): + verrors.add(f'{schema}.maintainers.{index}', 'Maintainer must be a dictionary') + elif not all(k in maintainer and isinstance(maintainer[k], str) for k in ('name', 'email')): + verrors.add( +
f'{schema}.maintainers.{index}', + 'Maintainer must have name and email attributes defined, and both must be strings.' + ) + + chart_version = chart_config.get('version') + if chart_version is None: + verrors.add(f'{schema}.version', 'Version must be configured in "Chart.yaml"') + else: + try: + Version(chart_version) + except ValueError: + verrors.add(f'{schema}.version', f'{chart_version!r} is not a valid version name') + + if version_name is not None and chart_version != version_name: + verrors.add( + f'{schema}.version', + 'Configured version in "Chart.yaml" does not match version directory name.' + ) + + else: + verrors.add(schema, 'Missing chart version file') + + return verrors diff --git a/catalog_validation/truecharts-fork/debian/changelog b/catalog_validation/truecharts-fork/debian/changelog new file mode 100644 index 00000000..0019cb79 --- /dev/null +++ b/catalog_validation/truecharts-fork/debian/changelog @@ -0,0 +1,5 @@ +catalog-validation (0.1-0~truenas+1) bullseye-truenas-unstable; urgency=medium + + * Initial release + + -- Waqar Ahmed Fri, 27 Nov 2020 00:26:21 +0500 diff --git a/catalog_validation/truecharts-fork/debian/control b/catalog_validation/truecharts-fork/debian/control new file mode 100644 index 00000000..3034a70d --- /dev/null +++ b/catalog_validation/truecharts-fork/debian/control @@ -0,0 +1,27 @@ +Source: catalog-validation +Section: contrib/python +Priority: optional +Maintainer: Waqar Ahmed +Build-Depends: debhelper-compat (= 12), + dh-python, + python3-dev, + python3-jsonschema, + python3-semantic-version, + python3-kubernetes, + python3-yaml, + python3-setuptools +Standards-Version: 4.4.1 +Homepage: https://github.com/truenas/catalog_validation +Testsuite: autopkgtest-pkg-python + +Package: python3-catalog-validation +Architecture: any +Depends: python3-semantic-version, + python3-jsonschema, + python3-kubernetes, + python3-yaml, + ${shlibs:Depends}, + ${misc:Depends}, + ${python3:Depends} +Description: Validate TrueNAS Catalogs + This package helps validate TrueNAS catalogs. 
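As a quick illustration of how the validation helpers in validation_utils.py above are meant to be driven, here is a minimal usage sketch for validate_chart_version. It is not part of the diff: it assumes the package shown here is installed, that ValidationErrors exposes the add()/check()/errors API used throughout these files, and the 'myapp' name, schema string, and temporary paths are placeholders.

# Usage sketch for validate_chart_version (illustrative only; see assumptions above).
import os
import tempfile

from catalog_validation.exceptions import ValidationErrors
from catalog_validation.validation_utils import validate_chart_version

with tempfile.TemporaryDirectory() as version_dir:
    # A minimal Chart.yaml whose name and version agree with the item and directory name.
    chart_path = os.path.join(version_dir, 'Chart.yaml')
    with open(chart_path, 'w') as f:
        f.write('name: myapp\nversion: 1.0.0\n')

    verrors = ValidationErrors()
    validate_chart_version(verrors, chart_path, 'charts.myapp.versions.1.0.0', 'myapp', '1.0.0')
    try:
        verrors.check()  # raises ValidationErrors if any problems were collected above
        print('Chart.yaml passed validation')
    except ValidationErrors as e:
        for error in e.errors:
            print(error)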
diff --git a/catalog_validation/truecharts-fork/debian/rules b/catalog_validation/truecharts-fork/debian/rules new file mode 100644 index 00000000..3bb0ffc0 --- /dev/null +++ b/catalog_validation/truecharts-fork/debian/rules @@ -0,0 +1,7 @@ +#!/usr/bin/make -f +export DH_VERBOSE = 1 + +export PYBUILD_NAME=catalog_validation + +%: + dh $@ --with python3 --buildsystem=pybuild diff --git a/catalog_validation/truecharts-fork/debian/source/format b/catalog_validation/truecharts-fork/debian/source/format new file mode 100644 index 00000000..163aaf8d --- /dev/null +++ b/catalog_validation/truecharts-fork/debian/source/format @@ -0,0 +1 @@ +3.0 (quilt) diff --git a/catalog_validation/truecharts-fork/debian/source/options b/catalog_validation/truecharts-fork/debian/source/options new file mode 100644 index 00000000..cb61fa52 --- /dev/null +++ b/catalog_validation/truecharts-fork/debian/source/options @@ -0,0 +1 @@ +extend-diff-ignore = "^[^/]*[.]egg-info/" diff --git a/catalog_validation/truecharts-fork/requirements.txt b/catalog_validation/truecharts-fork/requirements.txt new file mode 100644 index 00000000..ac13017d --- /dev/null +++ b/catalog_validation/truecharts-fork/requirements.txt @@ -0,0 +1,6 @@ +gitpython +jsonschema==4.10.3 +kubernetes +markdown +pyyaml +semantic_version diff --git a/catalog_validation/truecharts-fork/setup.cfg b/catalog_validation/truecharts-fork/setup.cfg new file mode 100644 index 00000000..aa079ec5 --- /dev/null +++ b/catalog_validation/truecharts-fork/setup.cfg @@ -0,0 +1,2 @@ +[flake8] +max-line-length=120 diff --git a/catalog_validation/truecharts-fork/setup.py b/catalog_validation/truecharts-fork/setup.py new file mode 100644 index 00000000..a64ad680 --- /dev/null +++ b/catalog_validation/truecharts-fork/setup.py @@ -0,0 +1,20 @@ +from setuptools import find_packages, setup + +VERSION = '0.1' + +setup( + name='catalog_validation', + description='Validate TrueNAS Catalog(s)', + version=VERSION, + include_package_data=True, + packages=find_packages(), + license='GNU3', + platforms='any', + entry_points={ + 'console_scripts': [ + 'catalog_validate = catalog_validation.scripts.catalog_validate:main', + 'catalog_update = catalog_validation.scripts.catalog_update:main', + 'dev_charts_validate = catalog_validation.scripts.dev_apps_validate:main', + ], + }, +)
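The entry_points block above wires three console scripts to the main() functions defined earlier in this diff, so running 'catalog_validate validate --path /path/to/catalog' from a shell is equivalent to the programmatic call below. A minimal sketch, assuming the package has been installed (for example with pip install .) and using a placeholder catalog path:

# Programmatic equivalent of the catalog_validate console script (illustrative only).
import sys

from catalog_validation.scripts import catalog_validate

# argparse inside catalog_validate.main() reads sys.argv; '/path/to/catalog' is a placeholder.
sys.argv = ['catalog_validate', 'validate', '--path', '/path/to/catalog']
catalog_validate.main()  # prints OK/FAILED lines and calls exit(1) on validation failure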