diff --git a/.dirschema.yml b/.dirschema.yml index 8f611342e..e3375015d 100644 --- a/.dirschema.yml +++ b/.dirschema.yml @@ -26,6 +26,7 @@ apps: - /shipitscript - /signingscript - /treescript + - /notarization_poller tools: schema: https://raw.githubusercontent.com/mozilla-releng/releng-dirschemas/master/python-tool.yaml projects: diff --git a/.gitignore b/.gitignore index aac37a7df..01a91bee7 100644 --- a/.gitignore +++ b/.gitignore @@ -32,6 +32,7 @@ var/ # Installer logs pip-log.txt pip-delete-this-directory.txt +pip-wheel-metadata # Unit test / coverage reports htmlcov/ diff --git a/.taskcluster.yml b/.taskcluster.yml index 0c5343d01..d8b88ef61 100644 --- a/.taskcluster.yml +++ b/.taskcluster.yml @@ -29,6 +29,8 @@ tasks: - ['beetmoverscript', '38', '', 'mozilla/releng-beetmoverscript'] - ['bouncerscript', '37', '', ''] - ['bouncerscript', '38', '', 'mozilla/releng-bouncerscript'] + - ['notarization_poller', '37', '', ''] + - ['notarization_poller', '38', '', ''] - ['pushapkscript', '37', '${setup_pushapkscript}', ''] - ['pushapkscript', '38', '${setup_pushapkscript}', 'mozilla/releng-pushapkscript'] - ['pushsnapscript', '37', '${setup_pushsnapscript}', ''] diff --git a/maintenance/pin.sh b/maintenance/pin.sh index a1503986c..55d45782c 100755 --- a/maintenance/pin.sh +++ b/maintenance/pin.sh @@ -13,6 +13,7 @@ else bouncerscript configloader iscript + notarization_poller pushapkscript pushsnapscript scriptworker_client diff --git a/notarization_poller/.coveragerc b/notarization_poller/.coveragerc new file mode 100644 index 000000000..5c0cb8ec6 --- /dev/null +++ b/notarization_poller/.coveragerc @@ -0,0 +1,5 @@ +[run] +branch = True +omit = + tests/* + .tox/* diff --git a/notarization_poller/.pyup.yml b/notarization_poller/.pyup.yml new file mode 100644 index 000000000..4c5fb88be --- /dev/null +++ b/notarization_poller/.pyup.yml @@ -0,0 +1,3 @@ +update: all +pin: True +schedule: "every week on wednesday" diff --git a/notarization_poller/Dockerfile b/notarization_poller/Dockerfile new file mode 100644 index 000000000..7226dffe9 --- /dev/null +++ b/notarization_poller/Dockerfile @@ -0,0 +1,24 @@ +FROM python:3.7 + +RUN groupadd --gid 10001 app && \ + useradd -g app --uid 10001 --shell /usr/sbin/nologin --create-home --home-dir /app app + +RUN apt-get update \ + && ln -s /app/docker.d/healthcheck /bin/healthcheck + +USER app +WORKDIR /app + +COPY . /app + +RUN python -m venv /app \ + && cd notarization_poller \ + && /app/bin/pip install -r requirements/base.txt \ + && /app/bin/pip install . \ + && python -m venv /app/configloader_venv \ + && cd /app/configloader \ + && /app/configloader_venv/bin/pip install -r requirements/base.txt \ + && /app/configloader_venv/bin/pip install . \ + && cd /app + +CMD ["/app/docker.d/init.sh"] diff --git a/notarization_poller/Dockerfile.test b/notarization_poller/Dockerfile.test new file mode 100644 index 000000000..4bcf4de13 --- /dev/null +++ b/notarization_poller/Dockerfile.test @@ -0,0 +1,14 @@ +ARG PYTHON_VERSION + +FROM python:${PYTHON_VERSION} + +WORKDIR /app + +COPY MANIFEST.in setup.py tox.ini /app/ +COPY requirements/ /app/requirements/ + +RUN pip install -r requirements/local.txt + +COPY src/ /app/src/ + +ENTRYPOINT ["/usr/local/bin/tox", "-e"] diff --git a/notarization_poller/HISTORY.rst b/notarization_poller/HISTORY.rst new file mode 100644 index 000000000..2abc09b1d --- /dev/null +++ b/notarization_poller/HISTORY.rst @@ -0,0 +1,13 @@ +Change Log +========== + +All notable changes to this project will be documented in this file. 
+This project adheres to `Semantic Versioning <http://semver.org/>`__. + +[Unreleased] - 2019-12-xx +------------------------- + +Added +~~~~~ + +- Initial ``notarization_poller`` deployment diff --git a/notarization_poller/LICENSE b/notarization_poller/LICENSE new file mode 100644 index 000000000..e87a115e4 --- /dev/null +++ b/notarization_poller/LICENSE @@ -0,0 +1,363 @@ +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a. any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1.
Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. 
You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. 
Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. 
Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. + diff --git a/notarization_poller/MANIFEST.in b/notarization_poller/MANIFEST.in new file mode 100644 index 000000000..2d65c4eb9 --- /dev/null +++ b/notarization_poller/MANIFEST.in @@ -0,0 +1,12 @@ +recursive-include requirements * +recursive-include src * + +include LICENSE +include README.md +include pyproject.toml +include setup.py +include version.txt + +recursive-exclude * __pycache__ +recursive-exclude * *.py[co] +recursive-exclude tests * diff --git a/notarization_poller/README.md b/notarization_poller/README.md new file mode 100644 index 000000000..1d9d4c9d9 --- /dev/null +++ b/notarization_poller/README.md @@ -0,0 +1,15 @@ +# `notarization_poller` + +This is a worker in its own right, and does not run under scriptworker. + +Essentially, sometimes Apple's notarization service will take a long time to return with successful notarization. Sometimes it won't even acknowledge that the UUID is in the queue until hours later. Instead of having failing mac signing tasks, we came up with a three-task workflow: + +1. Task 1 signs the app, creates and signs the pkg, and submits the app and pkg for notarization. It uploads the signed app and pkg, as well as a manifest with the notarization UUID(s) to poll. + +2. Task 2 downloads the manifest with the notarization UUID(s) and polls Apple for status. 
When Apple returns with a successful notarization (which may take hours), it resolves the task. + +3. Task 3 downloads the signed app and pkg, staples the notarization, and uploads the notarized bits. + +Tasks (1) and (3) run in `iscript`, using scriptworker. + +Task (2) runs under the `notarization_poller`. Because the bulk of the time spent is waiting, we can claim many many tasks in a single worker. diff --git a/notarization_poller/pyproject.toml b/notarization_poller/pyproject.toml new file mode 100644 index 000000000..22a6b0390 --- /dev/null +++ b/notarization_poller/pyproject.toml @@ -0,0 +1,25 @@ +[tool.black] +line-length = 160 +target-version = ["py37", "py38"] +include = '\.(wsgi|pyi?)$' +exclude = ''' +/( + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | \.cache + | \.cache_py3 + | _build + | buck-out + | build + | dist + | ui +)/ +''' + +[tool.isort] +line_length = 160 +known_first_party = 'notarization_poller' diff --git a/notarization_poller/requirements/base.in b/notarization_poller/requirements/base.in new file mode 100644 index 000000000..2eacd545e --- /dev/null +++ b/notarization_poller/requirements/base.in @@ -0,0 +1,5 @@ +aiohttp +arrow +async_timeout +immutabledict +taskcluster diff --git a/notarization_poller/requirements/base.py37.txt b/notarization_poller/requirements/base.py37.txt new file mode 100644 index 000000000..9a950157f --- /dev/null +++ b/notarization_poller/requirements/base.py37.txt @@ -0,0 +1,120 @@ +# SHA1:2335448dcaf35adf539527c2b8909f4e36918ad9 +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +aiohttp==3.6.2 \ + --hash=sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e \ + --hash=sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326 \ + --hash=sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a \ + --hash=sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654 \ + --hash=sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a \ + --hash=sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4 \ + --hash=sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17 \ + --hash=sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec \ + --hash=sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd \ + --hash=sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48 \ + --hash=sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59 \ + --hash=sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965 \ + # via -r requirements/base.in (line 1), taskcluster +arrow==0.15.5 \ + --hash=sha256:5390e464e2c5f76971b60ffa7ee29c598c7501a294bc9f5e6dadcb251a5d027b \ + --hash=sha256:70729bcc831da496ca3cb4b7e89472c8e2d27d398908155e0796179f6d2d41ee \ + # via -r requirements/base.in (line 2) +async-timeout==3.0.1 \ + --hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \ + --hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3 \ + # via -r requirements/base.in (line 3), aiohttp, taskcluster +attrs==19.3.0 \ + --hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \ + --hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72 \ + # via aiohttp +certifi==2019.11.28 \ + --hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \ + 
--hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f \ + # via requests +chardet==3.0.4 \ + --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ + --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \ + # via aiohttp, requests +idna==2.9 \ + --hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \ + --hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa \ + # via requests, yarl +immutabledict==0.2.0 \ + --hash=sha256:43dde3e55dcb539537ae6189fb6b09a1d01e94db304e4506e94ca2d45ec14c47 \ + --hash=sha256:7881e44098f13dd12d6fec00551d564433cb46776e8b2f3453128f715df4376a \ + # via -r requirements/base.in (line 4) +mohawk==1.1.0 \ + --hash=sha256:3ed296a30453d0b724679e0fd41e4e940497f8e461a9a9c3b7f36e43bab0fa09 \ + --hash=sha256:d2a0e3ab10a209cc79e95e28f2dd54bd4a73fd1998ffe27b7ba0f962b6be9723 \ + # via taskcluster +multidict==4.7.5 \ + --hash=sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1 \ + --hash=sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35 \ + --hash=sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928 \ + --hash=sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969 \ + --hash=sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e \ + --hash=sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78 \ + --hash=sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1 \ + --hash=sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136 \ + --hash=sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8 \ + --hash=sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2 \ + --hash=sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e \ + --hash=sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4 \ + --hash=sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5 \ + --hash=sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd \ + --hash=sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab \ + --hash=sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20 \ + --hash=sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3 \ + # via aiohttp, yarl +python-dateutil==2.8.1 \ + --hash=sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c \ + --hash=sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a \ + # via arrow +requests==2.23.0 \ + --hash=sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee \ + --hash=sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6 \ + # via taskcluster +six==1.14.0 \ + --hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \ + --hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c \ + # via mohawk, python-dateutil, taskcluster +slugid==2.0.0 \ + --hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \ + --hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c \ + # via taskcluster +taskcluster-urls==12.1.0 \ + --hash=sha256:1dc740c32c7beb31e11ed7ccf9da2d47a504acdb3170c8900649433b0fd16fb2 \ + --hash=sha256:4a62c776aeba6d45044789a8845ec4d8521bc1bb6ebfc86d79ee759bcdd4f2f7 \ + # via taskcluster +taskcluster==25.3.0 \ + 
--hash=sha256:7ed49431c68a562cd421dfdb2aa3e3a99e759bffbc208fcfe8eee27e219014e0 \ + --hash=sha256:9f7ad8de338c894a5b916523b4e1150fb210b821dcd93c1fadf1016cc737844b \ + --hash=sha256:c1108a9611c540529f08c9c2a15d104fe8853e0fa6b2d97f030e69c0f3460925 \ + # via -r requirements/base.in (line 5) +urllib3==1.25.8 \ + --hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \ + --hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc \ + # via requests +yarl==1.4.2 \ + --hash=sha256:0c2ab325d33f1b824734b3ef51d4d54a54e0e7a23d13b86974507602334c2cce \ + --hash=sha256:0ca2f395591bbd85ddd50a82eb1fde9c1066fafe888c5c7cc1d810cf03fd3cc6 \ + --hash=sha256:2098a4b4b9d75ee352807a95cdf5f10180db903bc5b7270715c6bbe2551f64ce \ + --hash=sha256:25e66e5e2007c7a39541ca13b559cd8ebc2ad8fe00ea94a2aad28a9b1e44e5ae \ + --hash=sha256:26d7c90cb04dee1665282a5d1a998defc1a9e012fdca0f33396f81508f49696d \ + --hash=sha256:308b98b0c8cd1dfef1a0311dc5e38ae8f9b58349226aa0533f15a16717ad702f \ + --hash=sha256:3ce3d4f7c6b69c4e4f0704b32eca8123b9c58ae91af740481aa57d7857b5e41b \ + --hash=sha256:58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b \ + --hash=sha256:5b10eb0e7f044cf0b035112446b26a3a2946bca9d7d7edb5e54a2ad2f6652abb \ + --hash=sha256:6faa19d3824c21bcbfdfce5171e193c8b4ddafdf0ac3f129ccf0cdfcb083e462 \ + --hash=sha256:944494be42fa630134bf907714d40207e646fd5a94423c90d5b514f7b0713fea \ + --hash=sha256:a161de7e50224e8e3de6e184707476b5a989037dcb24292b391a3d66ff158e70 \ + --hash=sha256:a4844ebb2be14768f7994f2017f70aca39d658a96c786211be5ddbe1c68794c1 \ + --hash=sha256:c2b509ac3d4b988ae8769901c66345425e361d518aecbe4acbfc2567e416626a \ + --hash=sha256:c9959d49a77b0e07559e579f38b2f3711c2b8716b8410b320bf9713013215a1b \ + --hash=sha256:d8cdee92bc930d8b09d8bd2043cedd544d9c8bd7436a77678dd602467a993080 \ + --hash=sha256:e15199cdb423316e15f108f51249e44eb156ae5dba232cb73be555324a1d49c2 \ + # via aiohttp diff --git a/notarization_poller/requirements/base.txt b/notarization_poller/requirements/base.txt new file mode 100644 index 000000000..9a950157f --- /dev/null +++ b/notarization_poller/requirements/base.txt @@ -0,0 +1,120 @@ +# SHA1:2335448dcaf35adf539527c2b8909f4e36918ad9 +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +aiohttp==3.6.2 \ + --hash=sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e \ + --hash=sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326 \ + --hash=sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a \ + --hash=sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654 \ + --hash=sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a \ + --hash=sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4 \ + --hash=sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17 \ + --hash=sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec \ + --hash=sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd \ + --hash=sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48 \ + --hash=sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59 \ + --hash=sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965 \ + # via -r requirements/base.in (line 1), taskcluster +arrow==0.15.5 \ + --hash=sha256:5390e464e2c5f76971b60ffa7ee29c598c7501a294bc9f5e6dadcb251a5d027b \ + 
--hash=sha256:70729bcc831da496ca3cb4b7e89472c8e2d27d398908155e0796179f6d2d41ee \ + # via -r requirements/base.in (line 2) +async-timeout==3.0.1 \ + --hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \ + --hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3 \ + # via -r requirements/base.in (line 3), aiohttp, taskcluster +attrs==19.3.0 \ + --hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \ + --hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72 \ + # via aiohttp +certifi==2019.11.28 \ + --hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \ + --hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f \ + # via requests +chardet==3.0.4 \ + --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ + --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \ + # via aiohttp, requests +idna==2.9 \ + --hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \ + --hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa \ + # via requests, yarl +immutabledict==0.2.0 \ + --hash=sha256:43dde3e55dcb539537ae6189fb6b09a1d01e94db304e4506e94ca2d45ec14c47 \ + --hash=sha256:7881e44098f13dd12d6fec00551d564433cb46776e8b2f3453128f715df4376a \ + # via -r requirements/base.in (line 4) +mohawk==1.1.0 \ + --hash=sha256:3ed296a30453d0b724679e0fd41e4e940497f8e461a9a9c3b7f36e43bab0fa09 \ + --hash=sha256:d2a0e3ab10a209cc79e95e28f2dd54bd4a73fd1998ffe27b7ba0f962b6be9723 \ + # via taskcluster +multidict==4.7.5 \ + --hash=sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1 \ + --hash=sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35 \ + --hash=sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928 \ + --hash=sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969 \ + --hash=sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e \ + --hash=sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78 \ + --hash=sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1 \ + --hash=sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136 \ + --hash=sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8 \ + --hash=sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2 \ + --hash=sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e \ + --hash=sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4 \ + --hash=sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5 \ + --hash=sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd \ + --hash=sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab \ + --hash=sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20 \ + --hash=sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3 \ + # via aiohttp, yarl +python-dateutil==2.8.1 \ + --hash=sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c \ + --hash=sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a \ + # via arrow +requests==2.23.0 \ + --hash=sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee \ + --hash=sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6 \ + # via taskcluster +six==1.14.0 \ + 
--hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \ + --hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c \ + # via mohawk, python-dateutil, taskcluster +slugid==2.0.0 \ + --hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \ + --hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c \ + # via taskcluster +taskcluster-urls==12.1.0 \ + --hash=sha256:1dc740c32c7beb31e11ed7ccf9da2d47a504acdb3170c8900649433b0fd16fb2 \ + --hash=sha256:4a62c776aeba6d45044789a8845ec4d8521bc1bb6ebfc86d79ee759bcdd4f2f7 \ + # via taskcluster +taskcluster==25.3.0 \ + --hash=sha256:7ed49431c68a562cd421dfdb2aa3e3a99e759bffbc208fcfe8eee27e219014e0 \ + --hash=sha256:9f7ad8de338c894a5b916523b4e1150fb210b821dcd93c1fadf1016cc737844b \ + --hash=sha256:c1108a9611c540529f08c9c2a15d104fe8853e0fa6b2d97f030e69c0f3460925 \ + # via -r requirements/base.in (line 5) +urllib3==1.25.8 \ + --hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \ + --hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc \ + # via requests +yarl==1.4.2 \ + --hash=sha256:0c2ab325d33f1b824734b3ef51d4d54a54e0e7a23d13b86974507602334c2cce \ + --hash=sha256:0ca2f395591bbd85ddd50a82eb1fde9c1066fafe888c5c7cc1d810cf03fd3cc6 \ + --hash=sha256:2098a4b4b9d75ee352807a95cdf5f10180db903bc5b7270715c6bbe2551f64ce \ + --hash=sha256:25e66e5e2007c7a39541ca13b559cd8ebc2ad8fe00ea94a2aad28a9b1e44e5ae \ + --hash=sha256:26d7c90cb04dee1665282a5d1a998defc1a9e012fdca0f33396f81508f49696d \ + --hash=sha256:308b98b0c8cd1dfef1a0311dc5e38ae8f9b58349226aa0533f15a16717ad702f \ + --hash=sha256:3ce3d4f7c6b69c4e4f0704b32eca8123b9c58ae91af740481aa57d7857b5e41b \ + --hash=sha256:58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b \ + --hash=sha256:5b10eb0e7f044cf0b035112446b26a3a2946bca9d7d7edb5e54a2ad2f6652abb \ + --hash=sha256:6faa19d3824c21bcbfdfce5171e193c8b4ddafdf0ac3f129ccf0cdfcb083e462 \ + --hash=sha256:944494be42fa630134bf907714d40207e646fd5a94423c90d5b514f7b0713fea \ + --hash=sha256:a161de7e50224e8e3de6e184707476b5a989037dcb24292b391a3d66ff158e70 \ + --hash=sha256:a4844ebb2be14768f7994f2017f70aca39d658a96c786211be5ddbe1c68794c1 \ + --hash=sha256:c2b509ac3d4b988ae8769901c66345425e361d518aecbe4acbfc2567e416626a \ + --hash=sha256:c9959d49a77b0e07559e579f38b2f3711c2b8716b8410b320bf9713013215a1b \ + --hash=sha256:d8cdee92bc930d8b09d8bd2043cedd544d9c8bd7436a77678dd602467a993080 \ + --hash=sha256:e15199cdb423316e15f108f51249e44eb156ae5dba232cb73be555324a1d49c2 \ + # via aiohttp diff --git a/notarization_poller/requirements/local.in b/notarization_poller/requirements/local.in new file mode 100644 index 000000000..cc5f959de --- /dev/null +++ b/notarization_poller/requirements/local.in @@ -0,0 +1,2 @@ +-r test.in +tox diff --git a/notarization_poller/requirements/local.py37.txt b/notarization_poller/requirements/local.py37.txt new file mode 100644 index 000000000..68c9faa7d --- /dev/null +++ b/notarization_poller/requirements/local.py37.txt @@ -0,0 +1,23 @@ +# SHA1:d7dccd96a3223da5804b040410df26ab0cd97d32 +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +-r test.py37.txt +distlib==0.3.0 \ + --hash=sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21 \ + # via virtualenv +filelock==3.0.12 \ + --hash=sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59 \ + --hash=sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836 \ + # via tox, 
virtualenv +tox==3.14.5 \ + --hash=sha256:0cbe98369081fa16bd6f1163d3d0b2a62afa29d402ccfad2bd09fb2668be0956 \ + --hash=sha256:676f1e3e7de245ad870f956436b84ea226210587d1f72c8dfb8cd5ac7b6f0e70 \ + # via -r requirements/local.in (line 2) +virtualenv==20.0.5 \ + --hash=sha256:531b142e300d405bb9faedad4adbeb82b4098b918e35209af2adef3129274aae \ + --hash=sha256:5dd42a9f56307542bddc446cfd10ef6576f11910366a07609fe8d0d88fa8fb7e \ + # via tox diff --git a/notarization_poller/requirements/local.txt b/notarization_poller/requirements/local.txt new file mode 100644 index 000000000..329f6d20b --- /dev/null +++ b/notarization_poller/requirements/local.txt @@ -0,0 +1,23 @@ +# SHA1:d7dccd96a3223da5804b040410df26ab0cd97d32 +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +-r test.txt +distlib==0.3.0 \ + --hash=sha256:2e166e231a26b36d6dfe35a48c4464346620f8645ed0ace01ee31822b288de21 \ + # via virtualenv +filelock==3.0.12 \ + --hash=sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59 \ + --hash=sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836 \ + # via tox, virtualenv +tox==3.14.5 \ + --hash=sha256:0cbe98369081fa16bd6f1163d3d0b2a62afa29d402ccfad2bd09fb2668be0956 \ + --hash=sha256:676f1e3e7de245ad870f956436b84ea226210587d1f72c8dfb8cd5ac7b6f0e70 \ + # via -r requirements/local.in (line 2) +virtualenv==20.0.5 \ + --hash=sha256:531b142e300d405bb9faedad4adbeb82b4098b918e35209af2adef3129274aae \ + --hash=sha256:5dd42a9f56307542bddc446cfd10ef6576f11910366a07609fe8d0d88fa8fb7e \ + # via tox diff --git a/notarization_poller/requirements/test.in b/notarization_poller/requirements/test.in new file mode 100644 index 000000000..a230714fe --- /dev/null +++ b/notarization_poller/requirements/test.in @@ -0,0 +1,14 @@ +-r base.in +check-manifest +coverage +flake8 +flake8_docstrings +isort +mock +pip-compile-multi +pytest +pytest-asyncio +pytest-black +pytest-cov +pytest-mock +python-coveralls diff --git a/notarization_poller/requirements/test.py37.txt b/notarization_poller/requirements/test.py37.txt new file mode 100644 index 000000000..5611d1e5c --- /dev/null +++ b/notarization_poller/requirements/test.py37.txt @@ -0,0 +1,235 @@ +# SHA1:99f75bbc73c108e0ae15956c7f1d6bc297e6b311 +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +-r base.py37.txt +appdirs==1.4.3 \ + --hash=sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92 \ + --hash=sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e \ + # via black +black==19.10b0 \ + --hash=sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b \ + --hash=sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539 \ + # via pytest-black +check-manifest==0.41 \ + --hash=sha256:4046b1260e63c139be6441fe8db8d9221f495ff39b81add2a55e5ca35dde7a6a \ + --hash=sha256:88afe85b751717688f8bc3b63d9543d0d962da98f1f420c554eaeb8d76c571a8 \ + # via -r requirements/test.in (line 2) +click==7.0 \ + --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ + --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ + # via black, pip-compile-multi, pip-tools +coverage==5.0.3 \ + --hash=sha256:15cf13a6896048d6d947bf7d222f36e4809ab926894beb748fc9caa14605d9c3 \ + --hash=sha256:1daa3eceed220f9fdb80d5ff950dd95112cd27f70d004c7918ca6dfc6c47054c \ + --hash=sha256:1e44a022500d944d42f94df76727ba3fc0a5c0b672c358b61067abb88caee7a0 \ + 
--hash=sha256:25dbf1110d70bab68a74b4b9d74f30e99b177cde3388e07cc7272f2168bd1477 \ + --hash=sha256:3230d1003eec018ad4a472d254991e34241e0bbd513e97a29727c7c2f637bd2a \ + --hash=sha256:3dbb72eaeea5763676a1a1efd9b427a048c97c39ed92e13336e726117d0b72bf \ + --hash=sha256:5012d3b8d5a500834783689a5d2292fe06ec75dc86ee1ccdad04b6f5bf231691 \ + --hash=sha256:51bc7710b13a2ae0c726f69756cf7ffd4362f4ac36546e243136187cfcc8aa73 \ + --hash=sha256:527b4f316e6bf7755082a783726da20671a0cc388b786a64417780b90565b987 \ + --hash=sha256:722e4557c8039aad9592c6a4213db75da08c2cd9945320220634f637251c3894 \ + --hash=sha256:76e2057e8ffba5472fd28a3a010431fd9e928885ff480cb278877c6e9943cc2e \ + --hash=sha256:77afca04240c40450c331fa796b3eab6f1e15c5ecf8bf2b8bee9706cd5452fef \ + --hash=sha256:7afad9835e7a651d3551eab18cbc0fdb888f0a6136169fbef0662d9cdc9987cf \ + --hash=sha256:9bea19ac2f08672636350f203db89382121c9c2ade85d945953ef3c8cf9d2a68 \ + --hash=sha256:a8b8ac7876bc3598e43e2603f772d2353d9931709345ad6c1149009fd1bc81b8 \ + --hash=sha256:b0840b45187699affd4c6588286d429cd79a99d509fe3de0f209594669bb0954 \ + --hash=sha256:b26aaf69713e5674efbde4d728fb7124e429c9466aeaf5f4a7e9e699b12c9fe2 \ + --hash=sha256:b63dd43f455ba878e5e9f80ba4f748c0a2156dde6e0e6e690310e24d6e8caf40 \ + --hash=sha256:be18f4ae5a9e46edae3f329de2191747966a34a3d93046dbdf897319923923bc \ + --hash=sha256:c312e57847db2526bc92b9bfa78266bfbaabac3fdcd751df4d062cd4c23e46dc \ + --hash=sha256:c60097190fe9dc2b329a0eb03393e2e0829156a589bd732e70794c0dd804258e \ + --hash=sha256:c62a2143e1313944bf4a5ab34fd3b4be15367a02e9478b0ce800cb510e3bbb9d \ + --hash=sha256:cc1109f54a14d940b8512ee9f1c3975c181bbb200306c6d8b87d93376538782f \ + --hash=sha256:cd60f507c125ac0ad83f05803063bed27e50fa903b9c2cfee3f8a6867ca600fc \ + --hash=sha256:d513cc3db248e566e07a0da99c230aca3556d9b09ed02f420664e2da97eac301 \ + --hash=sha256:d649dc0bcace6fcdb446ae02b98798a856593b19b637c1b9af8edadf2b150bea \ + --hash=sha256:d7008a6796095a79544f4da1ee49418901961c97ca9e9d44904205ff7d6aa8cb \ + --hash=sha256:da93027835164b8223e8e5af2cf902a4c80ed93cb0909417234f4a9df3bcd9af \ + --hash=sha256:e69215621707119c6baf99bda014a45b999d37602cb7043d943c76a59b05bf52 \ + --hash=sha256:ea9525e0fef2de9208250d6c5aeeee0138921057cd67fcef90fbed49c4d62d37 \ + --hash=sha256:fca1669d464f0c9831fd10be2eef6b86f5ebd76c724d1e0706ebdff86bb4adf0 \ + # via -r requirements/test.in (line 3), pytest-cov, python-coveralls +entrypoints==0.3 \ + --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \ + --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \ + # via flake8 +flake8-docstrings==1.5.0 \ + --hash=sha256:3d5a31c7ec6b7367ea6506a87ec293b94a0a46c0bce2bb4975b7f1d09b6f3717 \ + --hash=sha256:a256ba91bc52307bef1de59e2a009c3cf61c3d0952dbe035d6ff7208940c2edc \ + # via -r requirements/test.in (line 5) +flake8==3.7.9 \ + --hash=sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb \ + --hash=sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca \ + # via -r requirements/test.in (line 4), flake8-docstrings +importlib-metadata==1.5.0 \ + --hash=sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302 \ + --hash=sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b \ + # via pep517, pluggy, pytest +isort==4.3.21 \ + --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \ + --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd \ + # via -r requirements/test.in (line 6) +mccabe==0.6.1 \ + 
--hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ + --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \ + # via flake8 +mock==4.0.1 \ + --hash=sha256:2a572b715f09dd2f0a583d8aeb5bb67d7ed7a8fd31d193cf1227a99c16a67bc3 \ + --hash=sha256:5e48d216809f6f393987ed56920305d8f3c647e6ed35407c1ff2ecb88a9e1151 \ + # via -r requirements/test.in (line 7) +more-itertools==8.2.0 \ + --hash=sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c \ + --hash=sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507 \ + # via pytest +packaging==20.1 \ + --hash=sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73 \ + --hash=sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334 \ + # via pytest +pathspec==0.7.0 \ + --hash=sha256:163b0632d4e31cef212976cf57b43d9fd6b0bac6e67c26015d611a647d5e7424 \ + --hash=sha256:562aa70af2e0d434367d9790ad37aed893de47f1693e4201fd1d3dca15d19b96 \ + # via black +pep517==0.8.1 \ + --hash=sha256:5ce351f3be71d01bb094d63253854b6139931fcaba8e2f380c02102136c51e40 \ + --hash=sha256:882e2eeeffe39ccd6be6122d98300df18d80950cb5f449766d64149c94c5614a \ + # via check-manifest +pip-compile-multi==1.5.8 \ + --hash=sha256:6c77a2cdae62c28d6c151111e6a38ca9935ef37898f9766100ec2064326d74e9 \ + --hash=sha256:fd92e064e8b187ce919a9b1e22bc7ff41e630bbfba8a9ab0501c260a2580feda \ + # via -r requirements/test.in (line 8) +pip-tools==4.5.0 \ + --hash=sha256:144fbd764e88f761246f832370721dccabfefbbc4ce3aa8468f6802ac6519217 \ + --hash=sha256:61455cfdaa183930eefd8259f393812d94005fb9f8249edb640ed1b66f678116 \ + # via pip-compile-multi +pluggy==0.13.1 \ + --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ + --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d \ + # via pytest +py==1.8.1 \ + --hash=sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa \ + --hash=sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0 \ + # via pytest +pycodestyle==2.5.0 \ + --hash=sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56 \ + --hash=sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c \ + # via flake8 +pydocstyle==5.0.2 \ + --hash=sha256:da7831660b7355307b32778c4a0dbfb137d89254ef31a2b2978f50fc0b4d7586 \ + --hash=sha256:f4f5d210610c2d153fae39093d44224c17429e2ad7da12a8b419aba5c2f614b5 \ + # via flake8-docstrings +pyflakes==2.1.1 \ + --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \ + --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2 \ + # via flake8 +pyparsing==2.4.6 \ + --hash=sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f \ + --hash=sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec \ + # via packaging +pytest-asyncio==0.10.0 \ + --hash=sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf \ + --hash=sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b \ + # via -r requirements/test.in (line 10) +pytest-black==0.3.8 \ + --hash=sha256:01a9a7acc69e618ebf3f834932a4d7a81909f6911051d0871b0ed4de3cbe9712 \ + # via -r requirements/test.in (line 11) +pytest-cov==2.8.1 \ + --hash=sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b \ + --hash=sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626 \ + # via -r requirements/test.in (line 12) +pytest-mock==2.0.0 \ + 
--hash=sha256:b35eb281e93aafed138db25c8772b95d3756108b601947f89af503f8c629413f \ + --hash=sha256:cb67402d87d5f53c579263d37971a164743dc33c159dfb4fb4a86f37c5552307 \ + # via -r requirements/test.in (line 13) +pytest==5.3.5 \ + --hash=sha256:0d5fe9189a148acc3c3eb2ac8e1ac0742cb7618c084f3d228baaec0c254b318d \ + --hash=sha256:ff615c761e25eb25df19edddc0b970302d2a9091fbce0e7213298d85fb61fef6 \ + # via -r requirements/test.in (line 9), pytest-asyncio, pytest-black, pytest-cov, pytest-mock +python-coveralls==2.9.3 \ + --hash=sha256:bfaf7811e7dc5628e83b6b162962a4e2485dbff184b30e49f380374ed1bcee55 \ + --hash=sha256:fb0ff49bb1551dac10b06bd55e9790287d898a0f1e2c959802235cae08dd0bff \ + # via -r requirements/test.in (line 14) +pyyaml==5.3 \ + --hash=sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6 \ + --hash=sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf \ + --hash=sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5 \ + --hash=sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e \ + --hash=sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811 \ + --hash=sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e \ + --hash=sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d \ + --hash=sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20 \ + --hash=sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689 \ + --hash=sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994 \ + --hash=sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615 \ + # via python-coveralls +regex==2020.2.20 \ + --hash=sha256:01b2d70cbaed11f72e57c1cfbaca71b02e3b98f739ce33f5f26f71859ad90431 \ + --hash=sha256:046e83a8b160aff37e7034139a336b660b01dbfe58706f9d73f5cdc6b3460242 \ + --hash=sha256:113309e819634f499d0006f6200700c8209a2a8bf6bd1bdc863a4d9d6776a5d1 \ + --hash=sha256:200539b5124bc4721247a823a47d116a7a23e62cc6695744e3eb5454a8888e6d \ + --hash=sha256:25f4ce26b68425b80a233ce7b6218743c71cf7297dbe02feab1d711a2bf90045 \ + --hash=sha256:269f0c5ff23639316b29f31df199f401e4cb87529eafff0c76828071635d417b \ + --hash=sha256:5de40649d4f88a15c9489ed37f88f053c15400257eeb18425ac7ed0a4e119400 \ + --hash=sha256:7f78f963e62a61e294adb6ff5db901b629ef78cb2a1cfce3cf4eeba80c1c67aa \ + --hash=sha256:82469a0c1330a4beb3d42568f82dffa32226ced006e0b063719468dcd40ffdf0 \ + --hash=sha256:8c2b7fa4d72781577ac45ab658da44c7518e6d96e2a50d04ecb0fd8f28b21d69 \ + --hash=sha256:974535648f31c2b712a6b2595969f8ab370834080e00ab24e5dbb9d19b8bfb74 \ + --hash=sha256:99272d6b6a68c7ae4391908fc15f6b8c9a6c345a46b632d7fdb7ef6c883a2bbb \ + --hash=sha256:9b64a4cc825ec4df262050c17e18f60252cdd94742b4ba1286bcfe481f1c0f26 \ + --hash=sha256:9e9624440d754733eddbcd4614378c18713d2d9d0dc647cf9c72f64e39671be5 \ + --hash=sha256:9ff16d994309b26a1cdf666a6309c1ef51ad4f72f99d3392bcd7b7139577a1f2 \ + --hash=sha256:b33ebcd0222c1d77e61dbcd04a9fd139359bded86803063d3d2d197b796c63ce \ + --hash=sha256:bba52d72e16a554d1894a0cc74041da50eea99a8483e591a9edf1025a66843ab \ + --hash=sha256:bed7986547ce54d230fd8721aba6fd19459cdc6d315497b98686d0416efaff4e \ + --hash=sha256:c7f58a0e0e13fb44623b65b01052dae8e820ed9b8b654bb6296bc9c41f571b70 \ + --hash=sha256:d58a4fa7910102500722defbde6e2816b0372a4fcc85c7e239323767c74f5cbc \ + --hash=sha256:f1ac2dc65105a53c1c2d72b1d3e98c2464a133b4067a51a3d2477b28449709a0 \ + # via black +snowballstemmer==2.0.0 \ + 
--hash=sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0 \ + --hash=sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52 \ + # via pydocstyle +toml==0.10.0 \ + --hash=sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c \ + --hash=sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e \ + # via black, check-manifest, pep517, pytest-black +toposort==1.5 \ + --hash=sha256:d80128b83b411d503b0cdb4a8f172998bc1d3b434b6402a349b8ebd734d51a80 \ + --hash=sha256:dba5ae845296e3bf37b042c640870ffebcdeb8cd4df45adaa01d8c5476c557dd \ + # via pip-compile-multi +typed-ast==1.4.1 \ + --hash=sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355 \ + --hash=sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919 \ + --hash=sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa \ + --hash=sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652 \ + --hash=sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75 \ + --hash=sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01 \ + --hash=sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d \ + --hash=sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1 \ + --hash=sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907 \ + --hash=sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c \ + --hash=sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3 \ + --hash=sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b \ + --hash=sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614 \ + --hash=sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb \ + --hash=sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b \ + --hash=sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41 \ + --hash=sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6 \ + --hash=sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34 \ + --hash=sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe \ + --hash=sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4 \ + --hash=sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7 \ + # via black +wcwidth==0.1.8 \ + --hash=sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603 \ + --hash=sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8 \ + # via pytest +zipp==3.0.0 \ + --hash=sha256:12248a63bbdf7548f89cb4c7cda4681e537031eda29c02ea29674bc6854460c2 \ + --hash=sha256:7c0f8e91abc0dc07a5068f315c52cb30c66bfbc581e5b50704c8a2f6ebae794a \ + # via importlib-metadata, pep517 diff --git a/notarization_poller/requirements/test.txt b/notarization_poller/requirements/test.txt new file mode 100644 index 000000000..028fe66c5 --- /dev/null +++ b/notarization_poller/requirements/test.txt @@ -0,0 +1,227 @@ +# SHA1:99f75bbc73c108e0ae15956c7f1d6bc297e6b311 +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +-r base.txt +appdirs==1.4.3 \ + --hash=sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92 \ + --hash=sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e \ + # via black +black==19.10b0 \ + --hash=sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b \ + 
--hash=sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539 \ + # via pytest-black +check-manifest==0.41 \ + --hash=sha256:4046b1260e63c139be6441fe8db8d9221f495ff39b81add2a55e5ca35dde7a6a \ + --hash=sha256:88afe85b751717688f8bc3b63d9543d0d962da98f1f420c554eaeb8d76c571a8 \ + # via -r requirements/test.in (line 2) +click==7.0 \ + --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ + --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ + # via black, pip-compile-multi, pip-tools +coverage==5.0.3 \ + --hash=sha256:15cf13a6896048d6d947bf7d222f36e4809ab926894beb748fc9caa14605d9c3 \ + --hash=sha256:1daa3eceed220f9fdb80d5ff950dd95112cd27f70d004c7918ca6dfc6c47054c \ + --hash=sha256:1e44a022500d944d42f94df76727ba3fc0a5c0b672c358b61067abb88caee7a0 \ + --hash=sha256:25dbf1110d70bab68a74b4b9d74f30e99b177cde3388e07cc7272f2168bd1477 \ + --hash=sha256:3230d1003eec018ad4a472d254991e34241e0bbd513e97a29727c7c2f637bd2a \ + --hash=sha256:3dbb72eaeea5763676a1a1efd9b427a048c97c39ed92e13336e726117d0b72bf \ + --hash=sha256:5012d3b8d5a500834783689a5d2292fe06ec75dc86ee1ccdad04b6f5bf231691 \ + --hash=sha256:51bc7710b13a2ae0c726f69756cf7ffd4362f4ac36546e243136187cfcc8aa73 \ + --hash=sha256:527b4f316e6bf7755082a783726da20671a0cc388b786a64417780b90565b987 \ + --hash=sha256:722e4557c8039aad9592c6a4213db75da08c2cd9945320220634f637251c3894 \ + --hash=sha256:76e2057e8ffba5472fd28a3a010431fd9e928885ff480cb278877c6e9943cc2e \ + --hash=sha256:77afca04240c40450c331fa796b3eab6f1e15c5ecf8bf2b8bee9706cd5452fef \ + --hash=sha256:7afad9835e7a651d3551eab18cbc0fdb888f0a6136169fbef0662d9cdc9987cf \ + --hash=sha256:9bea19ac2f08672636350f203db89382121c9c2ade85d945953ef3c8cf9d2a68 \ + --hash=sha256:a8b8ac7876bc3598e43e2603f772d2353d9931709345ad6c1149009fd1bc81b8 \ + --hash=sha256:b0840b45187699affd4c6588286d429cd79a99d509fe3de0f209594669bb0954 \ + --hash=sha256:b26aaf69713e5674efbde4d728fb7124e429c9466aeaf5f4a7e9e699b12c9fe2 \ + --hash=sha256:b63dd43f455ba878e5e9f80ba4f748c0a2156dde6e0e6e690310e24d6e8caf40 \ + --hash=sha256:be18f4ae5a9e46edae3f329de2191747966a34a3d93046dbdf897319923923bc \ + --hash=sha256:c312e57847db2526bc92b9bfa78266bfbaabac3fdcd751df4d062cd4c23e46dc \ + --hash=sha256:c60097190fe9dc2b329a0eb03393e2e0829156a589bd732e70794c0dd804258e \ + --hash=sha256:c62a2143e1313944bf4a5ab34fd3b4be15367a02e9478b0ce800cb510e3bbb9d \ + --hash=sha256:cc1109f54a14d940b8512ee9f1c3975c181bbb200306c6d8b87d93376538782f \ + --hash=sha256:cd60f507c125ac0ad83f05803063bed27e50fa903b9c2cfee3f8a6867ca600fc \ + --hash=sha256:d513cc3db248e566e07a0da99c230aca3556d9b09ed02f420664e2da97eac301 \ + --hash=sha256:d649dc0bcace6fcdb446ae02b98798a856593b19b637c1b9af8edadf2b150bea \ + --hash=sha256:d7008a6796095a79544f4da1ee49418901961c97ca9e9d44904205ff7d6aa8cb \ + --hash=sha256:da93027835164b8223e8e5af2cf902a4c80ed93cb0909417234f4a9df3bcd9af \ + --hash=sha256:e69215621707119c6baf99bda014a45b999d37602cb7043d943c76a59b05bf52 \ + --hash=sha256:ea9525e0fef2de9208250d6c5aeeee0138921057cd67fcef90fbed49c4d62d37 \ + --hash=sha256:fca1669d464f0c9831fd10be2eef6b86f5ebd76c724d1e0706ebdff86bb4adf0 \ + # via -r requirements/test.in (line 3), pytest-cov, python-coveralls +entrypoints==0.3 \ + --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \ + --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \ + # via flake8 +flake8-docstrings==1.5.0 \ + --hash=sha256:3d5a31c7ec6b7367ea6506a87ec293b94a0a46c0bce2bb4975b7f1d09b6f3717 \ + 
--hash=sha256:a256ba91bc52307bef1de59e2a009c3cf61c3d0952dbe035d6ff7208940c2edc \ + # via -r requirements/test.in (line 5) +flake8==3.7.9 \ + --hash=sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb \ + --hash=sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca \ + # via -r requirements/test.in (line 4), flake8-docstrings +isort==4.3.21 \ + --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \ + --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd \ + # via -r requirements/test.in (line 6) +mccabe==0.6.1 \ + --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ + --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \ + # via flake8 +mock==4.0.1 \ + --hash=sha256:2a572b715f09dd2f0a583d8aeb5bb67d7ed7a8fd31d193cf1227a99c16a67bc3 \ + --hash=sha256:5e48d216809f6f393987ed56920305d8f3c647e6ed35407c1ff2ecb88a9e1151 \ + # via -r requirements/test.in (line 7) +more-itertools==8.2.0 \ + --hash=sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c \ + --hash=sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507 \ + # via pytest +packaging==20.1 \ + --hash=sha256:170748228214b70b672c581a3dd610ee51f733018650740e98c7df862a583f73 \ + --hash=sha256:e665345f9eef0c621aa0bf2f8d78cf6d21904eef16a93f020240b704a57f1334 \ + # via pytest +pathspec==0.7.0 \ + --hash=sha256:163b0632d4e31cef212976cf57b43d9fd6b0bac6e67c26015d611a647d5e7424 \ + --hash=sha256:562aa70af2e0d434367d9790ad37aed893de47f1693e4201fd1d3dca15d19b96 \ + # via black +pep517==0.8.1 \ + --hash=sha256:5ce351f3be71d01bb094d63253854b6139931fcaba8e2f380c02102136c51e40 \ + --hash=sha256:882e2eeeffe39ccd6be6122d98300df18d80950cb5f449766d64149c94c5614a \ + # via check-manifest +pip-compile-multi==1.5.8 \ + --hash=sha256:6c77a2cdae62c28d6c151111e6a38ca9935ef37898f9766100ec2064326d74e9 \ + --hash=sha256:fd92e064e8b187ce919a9b1e22bc7ff41e630bbfba8a9ab0501c260a2580feda \ + # via -r requirements/test.in (line 8) +pip-tools==4.5.0 \ + --hash=sha256:144fbd764e88f761246f832370721dccabfefbbc4ce3aa8468f6802ac6519217 \ + --hash=sha256:61455cfdaa183930eefd8259f393812d94005fb9f8249edb640ed1b66f678116 \ + # via pip-compile-multi +pluggy==0.13.1 \ + --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ + --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d \ + # via pytest +py==1.8.1 \ + --hash=sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa \ + --hash=sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0 \ + # via pytest +pycodestyle==2.5.0 \ + --hash=sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56 \ + --hash=sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c \ + # via flake8 +pydocstyle==5.0.2 \ + --hash=sha256:da7831660b7355307b32778c4a0dbfb137d89254ef31a2b2978f50fc0b4d7586 \ + --hash=sha256:f4f5d210610c2d153fae39093d44224c17429e2ad7da12a8b419aba5c2f614b5 \ + # via flake8-docstrings +pyflakes==2.1.1 \ + --hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \ + --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2 \ + # via flake8 +pyparsing==2.4.6 \ + --hash=sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f \ + --hash=sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec \ + # via packaging +pytest-asyncio==0.10.0 \ + 
--hash=sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf \ + --hash=sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b \ + # via -r requirements/test.in (line 10) +pytest-black==0.3.8 \ + --hash=sha256:01a9a7acc69e618ebf3f834932a4d7a81909f6911051d0871b0ed4de3cbe9712 \ + # via -r requirements/test.in (line 11) +pytest-cov==2.8.1 \ + --hash=sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b \ + --hash=sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626 \ + # via -r requirements/test.in (line 12) +pytest-mock==2.0.0 \ + --hash=sha256:b35eb281e93aafed138db25c8772b95d3756108b601947f89af503f8c629413f \ + --hash=sha256:cb67402d87d5f53c579263d37971a164743dc33c159dfb4fb4a86f37c5552307 \ + # via -r requirements/test.in (line 13) +pytest==5.3.5 \ + --hash=sha256:0d5fe9189a148acc3c3eb2ac8e1ac0742cb7618c084f3d228baaec0c254b318d \ + --hash=sha256:ff615c761e25eb25df19edddc0b970302d2a9091fbce0e7213298d85fb61fef6 \ + # via -r requirements/test.in (line 9), pytest-asyncio, pytest-black, pytest-cov, pytest-mock +python-coveralls==2.9.3 \ + --hash=sha256:bfaf7811e7dc5628e83b6b162962a4e2485dbff184b30e49f380374ed1bcee55 \ + --hash=sha256:fb0ff49bb1551dac10b06bd55e9790287d898a0f1e2c959802235cae08dd0bff \ + # via -r requirements/test.in (line 14) +pyyaml==5.3 \ + --hash=sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6 \ + --hash=sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf \ + --hash=sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5 \ + --hash=sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e \ + --hash=sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811 \ + --hash=sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e \ + --hash=sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d \ + --hash=sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20 \ + --hash=sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689 \ + --hash=sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994 \ + --hash=sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615 \ + # via python-coveralls +regex==2020.2.20 \ + --hash=sha256:01b2d70cbaed11f72e57c1cfbaca71b02e3b98f739ce33f5f26f71859ad90431 \ + --hash=sha256:046e83a8b160aff37e7034139a336b660b01dbfe58706f9d73f5cdc6b3460242 \ + --hash=sha256:113309e819634f499d0006f6200700c8209a2a8bf6bd1bdc863a4d9d6776a5d1 \ + --hash=sha256:200539b5124bc4721247a823a47d116a7a23e62cc6695744e3eb5454a8888e6d \ + --hash=sha256:25f4ce26b68425b80a233ce7b6218743c71cf7297dbe02feab1d711a2bf90045 \ + --hash=sha256:269f0c5ff23639316b29f31df199f401e4cb87529eafff0c76828071635d417b \ + --hash=sha256:5de40649d4f88a15c9489ed37f88f053c15400257eeb18425ac7ed0a4e119400 \ + --hash=sha256:7f78f963e62a61e294adb6ff5db901b629ef78cb2a1cfce3cf4eeba80c1c67aa \ + --hash=sha256:82469a0c1330a4beb3d42568f82dffa32226ced006e0b063719468dcd40ffdf0 \ + --hash=sha256:8c2b7fa4d72781577ac45ab658da44c7518e6d96e2a50d04ecb0fd8f28b21d69 \ + --hash=sha256:974535648f31c2b712a6b2595969f8ab370834080e00ab24e5dbb9d19b8bfb74 \ + --hash=sha256:99272d6b6a68c7ae4391908fc15f6b8c9a6c345a46b632d7fdb7ef6c883a2bbb \ + --hash=sha256:9b64a4cc825ec4df262050c17e18f60252cdd94742b4ba1286bcfe481f1c0f26 \ + --hash=sha256:9e9624440d754733eddbcd4614378c18713d2d9d0dc647cf9c72f64e39671be5 \ + --hash=sha256:9ff16d994309b26a1cdf666a6309c1ef51ad4f72f99d3392bcd7b7139577a1f2 \ + 
--hash=sha256:b33ebcd0222c1d77e61dbcd04a9fd139359bded86803063d3d2d197b796c63ce \ + --hash=sha256:bba52d72e16a554d1894a0cc74041da50eea99a8483e591a9edf1025a66843ab \ + --hash=sha256:bed7986547ce54d230fd8721aba6fd19459cdc6d315497b98686d0416efaff4e \ + --hash=sha256:c7f58a0e0e13fb44623b65b01052dae8e820ed9b8b654bb6296bc9c41f571b70 \ + --hash=sha256:d58a4fa7910102500722defbde6e2816b0372a4fcc85c7e239323767c74f5cbc \ + --hash=sha256:f1ac2dc65105a53c1c2d72b1d3e98c2464a133b4067a51a3d2477b28449709a0 \ + # via black +snowballstemmer==2.0.0 \ + --hash=sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0 \ + --hash=sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52 \ + # via pydocstyle +toml==0.10.0 \ + --hash=sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c \ + --hash=sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e \ + # via black, check-manifest, pep517, pytest-black +toposort==1.5 \ + --hash=sha256:d80128b83b411d503b0cdb4a8f172998bc1d3b434b6402a349b8ebd734d51a80 \ + --hash=sha256:dba5ae845296e3bf37b042c640870ffebcdeb8cd4df45adaa01d8c5476c557dd \ + # via pip-compile-multi +typed-ast==1.4.1 \ + --hash=sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355 \ + --hash=sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919 \ + --hash=sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa \ + --hash=sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652 \ + --hash=sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75 \ + --hash=sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01 \ + --hash=sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d \ + --hash=sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1 \ + --hash=sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907 \ + --hash=sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c \ + --hash=sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3 \ + --hash=sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b \ + --hash=sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614 \ + --hash=sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb \ + --hash=sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b \ + --hash=sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41 \ + --hash=sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6 \ + --hash=sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34 \ + --hash=sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe \ + --hash=sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4 \ + --hash=sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7 \ + # via black +wcwidth==0.1.8 \ + --hash=sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603 \ + --hash=sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8 \ + # via pytest diff --git a/notarization_poller/setup.cfg b/notarization_poller/setup.cfg new file mode 100644 index 000000000..36ab98e7e --- /dev/null +++ b/notarization_poller/setup.cfg @@ -0,0 +1,33 @@ +[bdist_wheel] +universal = 1 + +[flake8] +max-line-length = 140 +exclude = tests/* +max-complexity=10 + +[tool:pytest] +norecursedirs = + .git + .tox + .env + dist + build +python_files = + test_*.py + *_test.py + tests.py 
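+# The addopts below enable extra result summaries (-rxEfsw), strict marker
+# checking, doctest collection from modules and *.rst files, and short
+# tracebacks.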
+addopts = + -rxEfsw + --strict + --doctest-modules + --doctest-glob=\*.rst + --tb=short + +[options] +package_dir= + =src +packages=find: + +[options.packages.find] +where=src diff --git a/notarization_poller/setup.py b/notarization_poller/setup.py new file mode 100644 index 000000000..ffc8033ab --- /dev/null +++ b/notarization_poller/setup.py @@ -0,0 +1,25 @@ +import os + +from setuptools import find_packages, setup + +with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), "version.txt")) as f: + version = f.read().rstrip() + +with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), "requirements", "base.in")) as f: + install_requires = ["scriptworker_client"] + f.readlines() + +setup( + name="notarization_poller", + version=version, + description="TaskCluster Notarization Poller", + author="Mozilla Release Engineering", + author_email="release+python@mozilla.com", + url="https://github.com/mozilla-releng/scriptworker-scripts/tree/master/notarization_poller/", + packages=find_packages("src"), + package_data={"notarization_poller": ["data/*"]}, + include_package_data=True, + zip_safe=False, + entry_points={"console_scripts": ["notarization_poller = notarization_poller.worker:main"]}, + license="MPL2", + install_requires=install_requires, +) diff --git a/notarization_poller/src/notarization_poller/__init__.py b/notarization_poller/src/notarization_poller/__init__.py new file mode 100644 index 000000000..8994c3025 --- /dev/null +++ b/notarization_poller/src/notarization_poller/__init__.py @@ -0,0 +1 @@ +"""Initialize notarization_poller.""" diff --git a/notarization_poller/src/notarization_poller/config.py b/notarization_poller/src/notarization_poller/config.py new file mode 100644 index 000000000..c907904dc --- /dev/null +++ b/notarization_poller/src/notarization_poller/config.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python +"""Config for notarization poller. + +Attributes: + DEFAULT_CONFIG (immutabledict): the default configuration + log (logging.Logger): the log object for the module. + +""" +import argparse +import logging +import logging.handlers +import os + +from notarization_poller.constants import DEFAULT_CONFIG +from notarization_poller.exceptions import ConfigError +from scriptworker_client.client import _init_logging, init_config + +log = logging.getLogger(__name__) + + +def update_logging_config(config, log_name="", file_name="worker.log"): + """Update python logging settings from config. + + * Use formatting from config settings. + * Log to screen if ``verbose`` + * Add a rotating logfile from config settings. + + Args: + config (dict): the running config + log_name (str, optional): the logger name to use. Primarily for testing. + Defaults to ``""`` + file_name (str, optional): the log file path to use. 
Defaults to ``"worker.log"`` + + """ + _init_logging(config) + top_level_logger = logging.getLogger(log_name) + + datefmt = config["log_datefmt"] + fmt = config["log_fmt"] + formatter = logging.Formatter(fmt=fmt, datefmt=datefmt) + + if config.get("verbose"): + top_level_logger.setLevel(logging.DEBUG) + else: + top_level_logger.setLevel(logging.INFO) + + if len(top_level_logger.handlers) == 0: + handler = logging.StreamHandler() + handler.setFormatter(formatter) + top_level_logger.addHandler(handler) + + # Rotating log file + os.makedirs(config["log_dir"], exist_ok=True) + path = os.path.join(config["log_dir"], file_name) + if config["watch_log_file"]: + # If we rotate the log file via logrotate.d, let's watch the file + # so we can automatically close/reopen on move. + handler = logging.handlers.WatchedFileHandler(path) + else: + # Avoid using WatchedFileHandler during notarization poller unittests + handler = logging.FileHandler(path) + handler.setFormatter(formatter) + top_level_logger.addHandler(handler) + top_level_logger.addHandler(logging.NullHandler()) + + +# get_config_from_cmdln {{{1 +def _validate_config(config): + if "..." in config.values(): + raise ConfigError("Uninitialized value in config!") + + +def get_config_from_cmdln(args, desc="Run notarization poller"): + """Load config from the args. + + Args: + args (list): the commandline args. Generally ``sys.argv[1:]`` + + Returns: + immutabledict: the config + + """ + parser = argparse.ArgumentParser(description=desc) + parser.add_argument("config_path", type=str, nargs="?", default="poller.yaml", help="the path to the config file") + parsed_args = parser.parse_args(args) + config = init_config(config_path=parsed_args.config_path, default_config=DEFAULT_CONFIG, validator_callback=_validate_config) + update_logging_config(config) + return config diff --git a/notarization_poller/src/notarization_poller/constants.py b/notarization_poller/src/notarization_poller/constants.py new file mode 100644 index 000000000..f5aeaaec7 --- /dev/null +++ b/notarization_poller/src/notarization_poller/constants.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python +"""Notarization poller constants. + +Attributes: + DEFAULT_CONFIG (immutabledict): the default config for notarization poller. + Running configs are validated against this. + +""" +import os + +from immutabledict import immutabledict + +from scriptworker_client.constants import STATUSES + +MAX_CLAIM_WORK_TASKS = 32 + +DEFAULT_CONFIG = immutabledict( + { + "log_datefmt": "%Y-%m-%dT%H:%M:%S", + "task_log_datefmt": "YYYY-MM-DDTHH:mm:ss", + "log_fmt": "%(asctime)s %(levelname)s - %(message)s", + "log_dir": os.path.join(os.getcwd(), "logs"), + "work_dir": os.path.join(os.getcwd(), "work"), + "taskcluster_root_url": os.environ.get("TASKCLUSTER_ROOT_URL", "https://firefox-ci-tc.services.mozilla.com/"), + "taskcluster_access_token": "...", + "taskcluster_client_id": "...", + "claim_work_interval": 30, + "max_concurrent_tasks": 100, + "reclaim_interval": 300, + "artifact_upload_timeout": 120, + "provisioner_id": "...", + "worker_group": "...", + "worker_type": "...", + "worker_id": "...", + "watch_log_file": False, + "verbose": False, + "xcrun_cmd": ("xcrun",), + "notarization_username": "...", + "notarization_password": "...", + "poll_sleep_time": 30, + } +) + + +# get_reversed_statuses {{{1 +def get_reversed_statuses(): + """Return a mapping of exit codes to status strings. + + Returns: + dict: the mapping of exit codes to status strings. 
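+
+    The underlying mapping comes from ``scriptworker_client.constants.STATUSES``;
+    the extra ``-11``/``-15`` entries map negative (signal) return codes to
+    ``intermittent-task``.
+
+    Example (illustrative; values mirror ``tests/test_constants.py``)::
+
+        reversed_statuses = get_reversed_statuses()
+        reversed_statuses[4]    # "resource-unavailable"
+        reversed_statuses[-11]  # "intermittent-task"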
+ + """ + _rev = {v: k for k, v in STATUSES.items()} + _rev.update({-11: "intermittent-task", -15: "intermittent-task"}) + return immutabledict(_rev) diff --git a/notarization_poller/src/notarization_poller/exceptions.py b/notarization_poller/src/notarization_poller/exceptions.py new file mode 100644 index 000000000..90ec5d006 --- /dev/null +++ b/notarization_poller/src/notarization_poller/exceptions.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +"""Notarization poller exceptions.""" + +from scriptworker_client.constants import STATUSES + + +class WorkerError(Exception): + """The base exception in notarization poller. + + When raised inside of the run_loop loop, set the taskcluster task + status to at least ``self.exit_code``. + + Attributes: + exit_code (int): this is set to 5 (internal-error). + + """ + + exit_code = STATUSES["internal-error"] + + +class RetryError(WorkerError): + """worker retry error. + + Attributes: + exit_code (int): this is set to 4 (resource-unavailable) + + """ + + exit_code = STATUSES["resource-unavailable"] + + +class ConfigError(WorkerError): + """Invalid configuration provided to the worker. + + Attributes: + exit_code (int): this is set to 5 (internal-error). + + """ diff --git a/notarization_poller/src/notarization_poller/task.py b/notarization_poller/src/notarization_poller/task.py new file mode 100644 index 000000000..c2287eb2e --- /dev/null +++ b/notarization_poller/src/notarization_poller/task.py @@ -0,0 +1,269 @@ +#!/usr/bin/env python +"""Task execution. + +Attributes: + NOTARIZATION_POLL_REGEX: the regex to find the notarization status + Task (object): the task object + log (logging.Logger): the log object for the module + +""" + +import asyncio +import gzip +import logging +import os +import pprint +import re +import traceback + +import aiohttp +import arrow +import async_timeout +import taskcluster +import taskcluster.exceptions +from taskcluster.aio import Queue + +from notarization_poller.constants import get_reversed_statuses +from notarization_poller.exceptions import RetryError +from scriptworker_client.aio import download_file, retry_async +from scriptworker_client.constants import STATUSES +from scriptworker_client.exceptions import Download404, DownloadError, TaskError +from scriptworker_client.utils import load_json_or_yaml, makedirs, rm, run_command + +log = logging.getLogger(__name__) +NOTARIZATION_POLL_REGEX = re.compile(r"Status: (?Psuccess|invalid)") + + +class Task: + """Manages all information related to a single running task.""" + + reclaim_fut = None + task_fut = None + complete = False + uuids = None + + def __init__(self, config, claim_task, event_loop=None): + """Initialize Task.""" + self.config = config + self.task_id = claim_task["status"]["taskId"] + self.run_id = claim_task["runId"] + self.claim_task = claim_task + self.event_loop = event_loop or asyncio.get_event_loop() + self.task_dir = os.path.join(self.config["work_dir"], "{}-{}".format(self.task_id, self.run_id)) + self.log_path = os.path.join(self.task_dir, "live_backing.log") + self.poll_log_path = os.path.join(self.task_dir, "polling.log") + + def start(self): + """Start the task.""" + rm(self.task_dir) + makedirs(self.task_dir) + self._reclaim_task = {} + self.main_fut = self.event_loop.create_task(self.async_start()) + + async def async_start(self): + """Async start the task.""" + self.reclaim_fut = self.event_loop.create_task(self.reclaim_task()) + self.task_fut = self.event_loop.create_task(self.run_task()) + + try: + await self.task_fut + except Download404: + 
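+            # Each failure mode maps to a worker exit status from
+            # scriptworker_client.constants.STATUSES: a 404 on the uuid manifest
+            # download is resource-unavailable, download/retry errors are
+            # intermittent-task, and a bad payload is malformed-payload.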
self.status = STATUSES["resource-unavailable"] + self.task_log(traceback.format_exc(), level=logging.CRITICAL) + except (DownloadError, RetryError): + self.status = STATUSES["intermittent-task"] + self.task_log(traceback.format_exc(), level=logging.CRITICAL) + except TaskError: + self.status = STATUSES["malformed-payload"] + self.task_log(traceback.format_exc(), level=logging.CRITICAL) + except asyncio.CancelledError: + # We already dealt with self.status in reclaim_task + self.task_log(traceback.format_exc(), level=logging.CRITICAL) + log.info("Stopping task %s %s with status %s", self.task_id, self.run_id, self.status) + self.reclaim_fut.cancel() + await self.upload_task() + await self.complete_task() + rm(self.task_dir) + self.complete = True + + @property + def task_credentials(self): + """Return the temporary credentials returned from [re]claimWork.""" + return self._reclaim_task.get("credentials", self.claim_task["credentials"]) + + async def reclaim_task(self): + """Try to reclaim a task from the queue. + + This is a keepalive. Without it the task will expire and be re-queued. + + A 409 status means the task has been resolved. This generally means the + task has expired, reached its deadline, or has been cancelled. + + Raises: + TaskclusterRestFailure: on non-409 status_code from + `taskcluster.aio.Queue.reclaimTask` + + """ + while True: + log.debug("waiting %s seconds before reclaiming..." % self.config["reclaim_interval"]) + await asyncio.sleep(self.config["reclaim_interval"]) + log.debug("Reclaiming task %s %s", self.task_id, self.run_id) + try: + async with aiohttp.ClientSession() as session: + temp_queue = Queue(options={"credentials": self.task_credentials, "rootUrl": self.config["taskcluster_root_url"]}, session=session) + self._reclaim_task = await temp_queue.reclaimTask(self.task_id, self.run_id) + except taskcluster.exceptions.TaskclusterRestFailure as exc: + if exc.status_code == 409: + log.warning("Stopping task after receiving 409 response from reclaim_task: %s %s", self.task_id, self.run_id) + self.status = STATUSES["superseded"] + else: + log.exception("reclaim_task unexpected exception: %s %s", self.task_id, self.run_id) + self.status = STATUSES["internal-error"] + self.task_fut and self.task_fut.cancel() + break + + async def upload_task(self): + """Upload artifacts and return status. + + Returns the integer status of the upload. This only overrides + ``status`` if ``status`` is 0 (success) and the upload fails. 
+ + """ + try: + with open(self.log_path, "rb") as f_in: + text_content = f_in.read() + with gzip.open(self.log_path, "wb") as f_out: + f_out.write(text_content) + await retry_async(self._upload_log, retry_exceptions=(KeyError, RetryError, TypeError, aiohttp.ClientError)) + except aiohttp.ClientError as e: + self.status = self.status or STATUSES["intermittent-task"] + log.error("Hit aiohttp error: {}".format(e)) + except Exception as e: + self.status = self.status or STATUSES["intermittent-task"] + log.exception("WORKER_UNEXPECTED_EXCEPTION upload {}".format(e)) + + async def _upload_log(self): + payload = {"storageType": "s3", "expires": arrow.get(self.claim_task["task"]["expires"]).isoformat(), "contentType": "text/plain"} + args = [self.task_id, self.run_id, "public/logs/live_backing.log", payload] + async with aiohttp.ClientSession() as session: + temp_queue = Queue(options={"credentials": self.task_credentials, "rootUrl": self.config["taskcluster_root_url"]}, session=session) + tc_response = await temp_queue.createArtifact(*args) + headers = {aiohttp.hdrs.CONTENT_TYPE: "text/plain", aiohttp.hdrs.CONTENT_ENCODING: "gzip"} + skip_auto_headers = [aiohttp.hdrs.CONTENT_TYPE] + with open(self.log_path, "rb") as fh: + async with async_timeout.timeout(self.config["artifact_upload_timeout"]): + async with session.put(tc_response["putUrl"], data=fh, headers=headers, skip_auto_headers=skip_auto_headers, compress=False) as resp: + log.info("create_artifact public/logs/live_backing.log: {}".format(resp.status)) + response_text = await resp.text() + log.info(response_text) + if resp.status not in (200, 204): + raise RetryError("Bad status {}".format(resp.status)) + + async def complete_task(self): + """Submit task status to Taskcluster.""" + reversed_statuses = get_reversed_statuses() + args = [self.task_id, self.run_id] + try: + async with aiohttp.ClientSession() as session: + temp_queue = Queue(options={"credentials": self.task_credentials, "rootUrl": self.config["taskcluster_root_url"]}, session=session) + if self.status == STATUSES["success"]: + log.info("Reporting task complete...") + response = await temp_queue.reportCompleted(*args) + elif self.status != 1 and self.status in reversed_statuses: + reason = reversed_statuses[self.status] + log.info("Reporting task exception {}...".format(reason)) + payload = {"reason": reason} + response = await temp_queue.reportException(*args, payload) + else: + log.info("Reporting task failed...") + response = await temp_queue.reportFailed(*args) + log.debug("Task status response:\n{}".format(pprint.pformat(response))) + except taskcluster.exceptions.TaskclusterRestFailure as exc: + if exc.status_code == 409: + log.info("complete_task: 409: not reporting complete/failed for %s %s", self.task_id, self.run_id) + else: + log.exception("complete_task: unknown exception for %s %s", self.task_id, self.run_id) + + def task_log(self, msg, *args, level=logging.INFO, worker_log=True): + """Log to ``self.log_path``. + + The ``log`` object is the logger for the entire worker, and will log + information from ``n`` tasks. ``self.log_path`` should only contain + information from this specific task. + + Args: + msg (str): the message to log + *args (list): any additional args to pass on to ``log.log`` + level (int): the logging level to use. + worker_log (bool, optional): if True, also log to the worker log. + Defaults to ``True``. 
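+
+        Example (calls taken from ``run_task`` below)::
+
+            self.task_log("Polling %s", uuid)
+            self.task_log("Apple believes UUID %s is invalid!", uuid, level=logging.CRITICAL)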
+ + """ + with open(self.log_path, "a") as log_fh: + print( + "{} {} - {}".format(arrow.utcnow().format(self.config["task_log_datefmt"]), logging._levelToName.get(level, str(level)), msg % args), + file=log_fh, + ) + worker_log and log.log(level, "%s:%s - {}".format(msg), self.task_id, self.run_id, *args) + + async def download_uuids(self): + """Download the UUID manifest.""" + payload = self.claim_task["task"]["payload"] + if payload.get("uuids"): + # enable specifying uuids directly, for integration tests + uuids = payload["uuids"] + else: + url = self.claim_task["task"]["payload"]["uuid_manifest"] + path = os.path.join(self.task_dir, "uuids.json") + self.task_log("Downloading %s", url) + await retry_async(download_file, args=(url, path), retry_exceptions=(DownloadError,)) + uuids = load_json_or_yaml(path, is_path=True) + self.uuids = tuple(uuids) + self.task_log("UUIDs: %s", self.uuids) + + async def run_task(self): + """Run the task, creating a task-specific log file.""" + self.status = 0 + username = self.config["notarization_username"] + password = self.config["notarization_password"] + + await self.download_uuids() + self.pending_uuids = list(self.uuids) + while True: + self.task_log("pending uuids: %s", self.pending_uuids) + for uuid in sorted(self.pending_uuids): + self.task_log("Polling %s", uuid) + base_cmd = list(self.config["xcrun_cmd"]) + ["altool", "--notarization-info", uuid, "-u", username, "--password"] + log_cmd = base_cmd + ["********"] + rm(self.poll_log_path) + status = await retry_async( + run_command, + args=[base_cmd + [password]], + kwargs={"log_path": self.poll_log_path, "log_cmd": log_cmd, "exception": RetryError}, + retry_exceptions=(RetryError,), + attempts=10, + ) + with open(self.poll_log_path, "r") as fh: + contents = fh.read() + self.task_log("Polling response (status %d)", status, worker_log=False) + for line in contents.splitlines(): + self.task_log(" %s", line, worker_log=False) + if status == STATUSES["success"]: + m = NOTARIZATION_POLL_REGEX.search(contents) + if m is not None: + if m["status"] == "invalid": + self.status = STATUSES["failure"] + self.task_log("Apple believes UUID %s is invalid!", uuid, level=logging.CRITICAL) + raise TaskError("Apple believes UUID %s is invalid!" % uuid) + # There are only two possible matches with the regex + # Adding `pragma: no branch` to be explicit in our + # checks, but still avoid testing an unreachable code + # branch + if m["status"] == "success": # pragma: no branch + self.task_log("UUID %s is successful", uuid) + self.pending_uuids.remove(uuid) + if len(self.pending_uuids) == 0: + self.task_log("All UUIDs are successfully notarized: %s", self.uuids) + break + else: + await asyncio.sleep(self.config["poll_sleep_time"]) diff --git a/notarization_poller/src/notarization_poller/worker.py b/notarization_poller/src/notarization_poller/worker.py new file mode 100644 index 000000000..fec7940d1 --- /dev/null +++ b/notarization_poller/src/notarization_poller/worker.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python +"""Notarization poller worker functions. + +Attributes: + log (logging.Logger): the log object for the module. 
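+
+The worker is normally launched via the console-script entry point defined in
+``setup.py``, passing the path to a config file (see
+``notarization_poller.config``)::
+
+    notarization_poller poller.yaml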
+ +""" +import asyncio +import logging +import signal +import socket +import sys +import typing +from asyncio import sleep + +import aiohttp +import arrow +import taskcluster +from taskcluster.aio import Queue + +from notarization_poller.config import get_config_from_cmdln, update_logging_config +from notarization_poller.constants import MAX_CLAIM_WORK_TASKS +from notarization_poller.task import Task +from scriptworker_client.constants import STATUSES +from scriptworker_client.utils import makedirs, rm + +log = logging.getLogger(__name__) + + +# claim_work {{{1 +async def claim_work(config, worker_queue, num_tasks=1): + """Find and claim the next pending task(s) in the queue, if any. + + Args: + config (dict): the running config + + Returns: + dict: a dict containing a list of the task definitions of the tasks claimed. + + """ + log.debug("Calling claimWork for {}/{}...".format(config["worker_group"], config["worker_id"])) + payload = {"workerGroup": config["worker_group"], "workerId": config["worker_id"], "tasks": num_tasks} + try: + return await worker_queue.claimWork(config["provisioner_id"], config["worker_type"], payload) + except (taskcluster.exceptions.TaskclusterFailure, aiohttp.ClientError) as exc: + log.warning("{} {}".format(exc.__class__, exc)) + + +# RunTasks {{{1 +class RunTasks: + """Manages processing of Taskcluster tasks.""" + + def __init__(self, config): + """Initialize RunTasks.""" + self.config = config + self.running_tasks = [] + self.last_claim_work = arrow.get(0) + self.is_stopped = False + self.is_cancelled = False + self.future = None + + async def invoke(self): + """Claims and processes Taskcluster work.""" + while not self.is_cancelled and not self.is_stopped: + num_tasks_to_claim = min(self.config["max_concurrent_tasks"] - len(self.running_tasks), MAX_CLAIM_WORK_TASKS) + if num_tasks_to_claim > 0: + async with aiohttp.ClientSession() as session: + queue = Queue( + options={ + "credentials": {"accessToken": self.config["taskcluster_access_token"], "clientId": self.config["taskcluster_client_id"]}, + "rootUrl": self.config["taskcluster_root_url"], + }, + session=session, + ) + new_tasks = await self._run_cancellable(claim_work(self.config, queue, num_tasks=num_tasks_to_claim)) or {} + self.last_claim_work = arrow.utcnow() + for claim_task in new_tasks.get("tasks", []): + new_task = Task(self.config, claim_task) + new_task.start() + self.running_tasks.append(new_task) + await self.prune_running_tasks() + sleep_time = self.last_claim_work.timestamp + self.config["claim_work_interval"] - arrow.utcnow().timestamp + sleep_time > 0 and await self._run_cancellable(sleep(sleep_time)) + self.running_tasks and await asyncio.wait([task.main_fut for task in self.running_tasks if task.main_fut]) + + async def prune_running_tasks(self): + """Prune any complete tasks from ``self.running_tasks``.""" + for task in self.running_tasks: + if task.complete: + self.running_tasks.remove(task) + + async def _run_cancellable(self, coroutine: typing.Awaitable): + if not self.is_cancelled: + self.future = asyncio.ensure_future(coroutine) + try: + result = await self.future + finally: + self.future = None + return result + + async def cancel(self, status=STATUSES["worker-shutdown"]): + """Cancel any running tasks.""" + self.is_cancelled = True + self.future and self.future.cancel() + try: + for task in self.running_tasks: + task.task_fut and task.task_fut.cancel() + await asyncio.wait([task.main_fut for task in self.running_tasks if task.main_fut]) + except (asyncio.CancelledError, 
ValueError): + pass + + +# main {{{1 +def main(event_loop=None): + """Notarization poller entry point: get everything set up, then enter the main loop. + + Args: + event_loop (asyncio.BaseEventLoop, optional): the event loop to use. + If None, use ``asyncio.get_event_loop()``. Defaults to None. + + """ + event_loop = event_loop or asyncio.get_event_loop() + config = get_config_from_cmdln(sys.argv[1:]) + update_logging_config(config) + + log.info("Notarization poller starting up at {} UTC".format(arrow.utcnow().format())) + log.info("Worker FQDN: {}".format(socket.getfqdn())) + rm(config["work_dir"]) + makedirs(config["work_dir"]) + running_tasks = RunTasks(config) + + async def _handle_sigterm(): + log.info("SIGTERM received; shutting down") + await running_tasks.cancel() + + def _handle_sigusr1(): + """Stop accepting new tasks.""" + log.info("SIGUSR1 received; no more tasks will be taken") + running_tasks.is_stopped = True + + event_loop.add_signal_handler(signal.SIGTERM, lambda: asyncio.ensure_future(_handle_sigterm())) + event_loop.add_signal_handler(signal.SIGUSR1, _handle_sigusr1()) + + try: + event_loop.run_until_complete(running_tasks.invoke()) + except Exception: + log.critical("Fatal exception", exc_info=1) + raise + finally: + log.info("Notarization poller stopped at {} UTC".format(arrow.utcnow().format())) + log.info("Worker FQDN: {}".format(socket.getfqdn())) diff --git a/notarization_poller/tests/__init__.py b/notarization_poller/tests/__init__.py new file mode 100644 index 000000000..08b2f6f51 --- /dev/null +++ b/notarization_poller/tests/__init__.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python +# coding=utf-8 +"""Test base files +""" +import arrow + + +def integration_create_task_payload(config, task_group_id, scopes=None, task_payload=None, task_extra=None): + """For various integration tests, we need to call createTask for test tasks. + + This function creates a dummy payload for those createTask calls. 
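+
+    Example (illustrative; ``queue`` and ``task_id`` are created by the
+    integration test itself)::
+
+        payload = integration_create_task_payload(config, task_group_id)
+        await queue.createTask(task_id, payload)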
+ """ + now = arrow.utcnow() + deadline = now.shift(hours=1) + expires = now.shift(days=3) + scopes = scopes or [] + task_payload = task_payload or {} + task_extra = task_extra or {} + return { + "provisionerId": config["provisioner_id"], + "schedulerId": "test-dummy-scheduler", + "workerType": config["worker_type"], + "taskGroupId": task_group_id, + "dependencies": [], + "requires": "all-completed", + "routes": [], + "priority": "normal", + "retries": 5, + "created": now.isoformat(), + "deadline": deadline.isoformat(), + "expires": expires.isoformat(), + "scopes": scopes, + "payload": task_payload, + "metadata": { + "name": "Notarization Poller Integration Test", + "description": "Notarization Poller Integration Test", + "owner": "release+python@mozilla.com", + "source": "https://github.com/mozilla-releng/scriptworker-scripts/", + }, + "tags": {}, + "extra": task_extra, + } + + +async def noop_async(*args, **kwargs): + pass + + +def noop_sync(*args, **kwargs): + pass + + +def create_async(result=None): + async def fn(*args, **kwargs): + return result + + return fn diff --git a/notarization_poller/tests/conftest.py b/notarization_poller/tests/conftest.py new file mode 100644 index 000000000..0eddd87a3 --- /dev/null +++ b/notarization_poller/tests/conftest.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# coding=utf-8 +"""Test notarization_poller.config +""" +import json +import os +from copy import deepcopy + +import pytest + +from notarization_poller.constants import DEFAULT_CONFIG +from scriptworker_client.utils import makedirs + + +@pytest.fixture(scope="function") +def config(tmpdir): + _config = deepcopy(dict(DEFAULT_CONFIG)) + with open(os.path.join(os.path.dirname(__file__), "data", "good.json")) as fh: + _config.update(json.load(fh)) + _config["artifact_dir"] = os.path.join(str(tmpdir), "artifacts") + _config["log_dir"] = os.path.join(str(tmpdir), "logs") + _config["work_dir"] = os.path.join(str(tmpdir), "work") + for name in ("artifact_dir", "log_dir", "work_dir"): + makedirs(_config[name]) + yield _config diff --git a/notarization_poller/tests/data/bad.json b/notarization_poller/tests/data/bad.json new file mode 100644 index 000000000..2c63c0851 --- /dev/null +++ b/notarization_poller/tests/data/bad.json @@ -0,0 +1,2 @@ +{ +} diff --git a/notarization_poller/tests/data/good.json b/notarization_poller/tests/data/good.json new file mode 100644 index 000000000..14bdad08b --- /dev/null +++ b/notarization_poller/tests/data/good.json @@ -0,0 +1,10 @@ +{ + "taskcluster_access_token": "taskcluster_access_token", + "taskcluster_client_id": "taskcluster_client_id", + "provisioner_id": "provisioner_id", + "worker_group": "worker_group", + "worker_type": "worker_type", + "worker_id": "worker_id", + "notarization_username": "user", + "notarization_password": "pass" +} diff --git a/notarization_poller/tests/test_config.py b/notarization_poller/tests/test_config.py new file mode 100644 index 000000000..8f11ff45e --- /dev/null +++ b/notarization_poller/tests/test_config.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +# coding=utf-8 +"""Test notarization_poller.config +""" +import json +import logging +import os +from copy import deepcopy + +import pytest +from immutabledict import immutabledict + +import notarization_poller.config as npconfig +from notarization_poller.constants import DEFAULT_CONFIG +from notarization_poller.exceptions import ConfigError + + +# constants helpers and fixtures {{{1 +def close_handlers(log_name=None): + log_name = log_name or __name__.split(".")[0] + log = 
logging.getLogger(log_name) + handlers = log.handlers[:] + for handler in handlers: + handler.close() + log.removeHandler(handler) + log.addHandler(logging.NullHandler()) + + +# update_logging_config {{{1 +def test_update_logging_config_verbose(config): + config["verbose"] = True + npconfig.update_logging_config(config, log_name=config["log_dir"]) + log = logging.getLogger(config["log_dir"]) + assert log.level == logging.DEBUG + assert len(log.handlers) == 3 + close_handlers(log_name=config["log_dir"]) + + +def test_update_logging_config_verbose_existing_handler(config): + log = logging.getLogger(config["log_dir"]) + log.addHandler(logging.NullHandler()) + log.addHandler(logging.NullHandler()) + config["verbose"] = True + npconfig.update_logging_config(config, log_name=config["log_dir"]) + assert log.level == logging.DEBUG + assert len(log.handlers) == 4 + close_handlers(log_name=config["log_dir"]) + + +def test_update_logging_config_not_verbose(config): + config["verbose"] = False + npconfig.update_logging_config(config, log_name=config["log_dir"]) + log = logging.getLogger(config["log_dir"]) + assert log.level == logging.INFO + assert len(log.handlers) == 3 + close_handlers(log_name=config["log_dir"]) + + +def test_watched_log_file(config): + config["watch_log_file"] = True + config["log_fmt"] = "%(levelname)s - %(message)s" + npconfig.update_logging_config(config, log_name=config["log_dir"]) + path = os.path.join(config["log_dir"], "worker.log") + log = logging.getLogger(config["log_dir"]) + log.info("foo") + os.rename(path, "{}.1".format(path)) + log.info("bar") + with open(path, "r") as fh: + assert fh.read().rstrip() == "INFO - bar" + close_handlers(log_name=config["log_dir"]) + + +# get_config_from_cmdln {{{1 +def test_get_config_from_cmdln(): + path = os.path.join(os.path.dirname(__file__), "data", "good.json") + c = deepcopy(dict(DEFAULT_CONFIG)) + with open(path) as fh: + c.update(json.load(fh)) + expected_config = immutabledict(c) + + config = npconfig.get_config_from_cmdln([path]) + assert config == expected_config + + +@pytest.mark.parametrize( + "path,raises", + ((os.path.join(os.path.dirname(__file__), "data", "good.json"), None), (os.path.join(os.path.dirname(__file__), "data", "bad.json"), ConfigError)), +) +def test_validate_config(path, raises): + if raises: + with pytest.raises(raises): + npconfig.get_config_from_cmdln([path]) + else: + npconfig.get_config_from_cmdln([path]) diff --git a/notarization_poller/tests/test_constants.py b/notarization_poller/tests/test_constants.py new file mode 100644 index 000000000..ada8c7579 --- /dev/null +++ b/notarization_poller/tests/test_constants.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# coding=utf-8 +"""Test notarization_poller.constants +""" +from immutabledict import immutabledict + +import notarization_poller.constants as constants + + +def test_get_reversed_statuses(): + assert constants.get_reversed_statuses() == immutabledict( + { + 0: "success", + 1: "failure", + 2: "worker-shutdown", + 3: "malformed-payload", + 4: "resource-unavailable", + 5: "internal-error", + 6: "superseded", + 7: "intermittent-task", + -11: "intermittent-task", + -15: "intermittent-task", + } + ) diff --git a/notarization_poller/tests/test_task.py b/notarization_poller/tests/test_task.py new file mode 100644 index 000000000..64f5089f3 --- /dev/null +++ b/notarization_poller/tests/test_task.py @@ -0,0 +1,300 @@ +#!/usr/bin/env python +# coding=utf-8 +"""Test notarization_poller.task +""" +import asyncio +import logging +from contextlib import 
asynccontextmanager + +import aiohttp +import arrow +import pytest +from taskcluster.exceptions import TaskclusterRestFailure + +import notarization_poller.task as nptask +from notarization_poller.exceptions import RetryError +from scriptworker_client.constants import STATUSES +from scriptworker_client.exceptions import Download404, DownloadError, TaskError + +from . import noop_async + + +# Constants, fixtures, and helpers {{{1 +class NoOpTask(nptask.Task): + async_start = noop_async + + +class AsyncStartTask(nptask.Task): + reclaim_task = noop_async + run_task = noop_async + upload_task = noop_async + complete_task = noop_async + + +class NoReclaimTask(nptask.Task): + reclaim_task = noop_async + + +class NoRunTask(nptask.Task): + run_task = noop_async + + +@pytest.fixture(scope="function") +def claim_task(): + return { + "runId": "0", + "credentials": {}, + "status": {"taskId": "task_id"}, + "task": {"expires": arrow.get(0).isoformat(), "payload": {"uuid_manifest": "uuid_url"}}, + } + + +# task_credentials {{{1 +@pytest.mark.asyncio +async def test_task_credentials(mocker, claim_task, config, event_loop): + expected = {"foo": "bar"} + claim_task["credentials"] = expected + nooptask = NoOpTask(config, claim_task, event_loop=event_loop) + nooptask.start() + assert nooptask.task_credentials == expected + + +# async_start {{{1 +@pytest.mark.asyncio +@pytest.mark.parametrize( + "raises, expected_status", + ( + (None, 0), + (Download404, STATUSES["resource-unavailable"]), + (DownloadError, STATUSES["intermittent-task"]), + (RetryError, STATUSES["intermittent-task"]), + (TaskError, STATUSES["malformed-payload"]), + ), +) +async def test_async_start(mocker, claim_task, config, event_loop, raises, expected_status): + async def fake_run_task(*args): + if raises: + raise raises("foo") + + asynctask = AsyncStartTask(config, claim_task, event_loop=event_loop) + asynctask.status = 0 + asynctask.run_task = fake_run_task + asynctask.start() + await asynctask.main_fut + for fut in (asynctask.reclaim_fut, asynctask.task_fut): + assert fut.cancelled() or fut.done() + assert asynctask.complete + assert asynctask.status == expected_status + + +# reclaim_task {{{1 +@pytest.mark.asyncio +@pytest.mark.parametrize("status_code, expected_status", ((409, STATUSES["superseded"]), (500, STATUSES["internal-error"]))) +async def test_reclaim_task(mocker, claim_task, config, event_loop, status_code, expected_status): + reclaim_status_codes = [None, status_code] + + async def fake_reclaim_task(*args, **kwargs): + status = reclaim_status_codes.pop(0) + if status: + raise TaskclusterRestFailure("foo", None, status_code=status) + return {} + + fake_queue = mocker.MagicMock() + fake_queue.reclaimTask = fake_reclaim_task + mocker.patch.object(asyncio, "sleep", new=noop_async) + mocker.patch.object(nptask, "Queue", return_value=fake_queue) + noruntask = NoRunTask(config, claim_task, event_loop=event_loop) + noruntask.status = 0 + noruntask.start() + await noruntask.reclaim_task() + assert noruntask.status == expected_status + + +# upload_task {{{1 +@pytest.mark.asyncio +@pytest.mark.parametrize( + "status,raises,expected_status", + ((0, None, 0), (1, None, 1), (0, aiohttp.ClientError, STATUSES["intermittent-task"]), (0, KeyError, STATUSES["intermittent-task"]), (1, RetryError, 1)), +) +async def test_upload_task(mocker, config, claim_task, event_loop, status, raises, expected_status): + async def fake_retry(*args, **kwargs): + if raises: + raise raises("foo") + + nooptask = NoOpTask(config, claim_task, event_loop=event_loop) 
+ nooptask.status = status + nooptask.start() + with open(nooptask.log_path, "w") as fh: + print("foo", file=fh) + mocker.patch.object(nptask, "retry_async", new=fake_retry) + await nooptask.upload_task() + assert nooptask.status == expected_status + + +# _upload_log {{{1 +@pytest.mark.asyncio +@pytest.mark.parametrize("response_status, raises", ((200, None), (204, None), (500, RetryError))) +async def test_upload_log(mocker, config, claim_task, event_loop, response_status, raises): + nooptask = NoOpTask(config, claim_task, event_loop=event_loop) + nooptask.status = 0 + nooptask.start() + fake_resp = mocker.MagicMock() + fake_resp.text = noop_async + fake_resp.status = response_status + + async def fake_create_artifact(*args): + return {"putUrl": "putUrl"} + + @asynccontextmanager + async def fake_put(*args, **kwargs): + yield fake_resp + + session = mocker.MagicMock() + session.put = fake_put + + @asynccontextmanager + async def fake_session(*args, **kwargs): + yield session + + queue = mocker.MagicMock() + queue.createArtifact = fake_create_artifact + mocker.patch.object(nptask, "Queue", return_value=queue) + mocker.patch.object(aiohttp, "ClientSession", new=fake_session) + with open(nooptask.log_path, "w") as fh: + print("foo", file=fh) + if raises: + with pytest.raises(raises): + await nooptask._upload_log() + else: + await nooptask._upload_log() + + +# complete_task {{{1 +@pytest.mark.asyncio +@pytest.mark.parametrize( + "status, raises, result", + ( + (0, None, "completed"), + (1, None, "failed"), + (2, None, "worker-shutdown"), + (0, TaskclusterRestFailure("foo", None, status_code=409), None), + (0, TaskclusterRestFailure("foo", None, status_code=500), None), + ), +) +async def test_complete_task(mocker, config, claim_task, event_loop, status, raises, result): + + nooptask = NoOpTask(config, claim_task, event_loop=event_loop) + nooptask.status = status + nooptask._reclaim_task = {} + + async def fake_completed(*args): + if raises: + raise raises + assert result == "completed" + + async def fake_failed(*args): + assert result == "failed" + + async def fake_exception(task_id, run_id, payload): + assert payload["reason"] == result + + queue = mocker.MagicMock() + queue.reportCompleted = fake_completed + queue.reportFailed = fake_failed + queue.reportException = fake_exception + mocker.patch.object(nptask, "Queue", return_value=queue) + await nooptask.complete_task() + + +# task_log {{{1 +def test_task_log(mocker, config, claim_task, event_loop): + now = arrow.utcnow() + nooptask = NoOpTask(config, claim_task, event_loop=event_loop) + nooptask.start() + mocker.patch.object(arrow, "utcnow", return_value=now) + nooptask.task_log("foo") + nooptask.task_log("bar", level=logging.ERROR) + with open(nooptask.log_path, "r") as fh: + contents = fh.read() + assert ( + contents + == """{now} INFO - foo +{now} ERROR - bar +""".format( + now=now.format(config["task_log_datefmt"]) + ) + ) + + +# download_uuids {{{1 +@pytest.mark.asyncio +@pytest.mark.parametrize("download_raises,json_raises", ((None, None), (Download404, None), (DownloadError, None), (None, TaskError))) +async def test_download_uuids(mocker, config, claim_task, event_loop, download_raises, json_raises): + async def fake_download(*args, **kwargs): + if download_raises: + raise download_raises("foo") + + def fake_json(*args, **kwargs): + if json_raises: + raise json_raises("foo") + return ["one", "two"] + + nooptask = NoOpTask(config, claim_task, event_loop=event_loop) + mocker.patch.object(nptask, "retry_async", new=fake_download) + 
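+    # retry_async (used for the download) and load_json_or_yaml are patched so
+    # the test can exercise download and manifest-parsing failures without
+    # touching the network.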
mocker.patch.object(nptask, "load_json_or_yaml", new=fake_json) + nooptask.start() + if download_raises or json_raises: + raises = download_raises if download_raises else json_raises + with pytest.raises(raises): + await nooptask.download_uuids() + else: + await nooptask.download_uuids() + assert nooptask.uuids == ("one", "two") + + nooptask.claim_task["task"]["payload"] = {"uuids": ["asdf", "hjkl"]} + await nooptask.download_uuids() + assert nooptask.uuids == ("asdf", "hjkl") + + +# run_task {{{1 +@pytest.mark.asyncio +@pytest.mark.parametrize( + "uuids, responses, expected_status, raises", + ( + (["one", "two"], ["pending", "pending", "success", "success"], 0, False), + (["one", "two"], ["broken", "success", "success"], 0, False), + (["one", "two"], ["pending", "invalid"], STATUSES["failure"], TaskError), + ), +) +async def test_run_task(mocker, config, claim_task, event_loop, uuids, responses, expected_status, raises): + no_reclaim_task = NoReclaimTask(config, claim_task, event_loop=event_loop) + no_reclaim_task.uuids = {u: False for u in uuids} + no_reclaim_task.download_uuids = noop_async + + async def fake_run_command(*args, **kwargs): + status = responses.pop(0) + contents = """RequestUUID: feb8616e-e2e2-4621-bafc-3ef67fd86f6b +Date: 2019-12-13 18:05:41 +0000 +Status: {} +LogFileURL: (null) +""".format( + status + ) + if status == "broken": + contents = "" + with open(no_reclaim_task.poll_log_path, "w") as fh: + fh.write(contents) + if status == "broken": + return 1 + return 0 + + mocker.patch.object(nptask, "run_command", new=fake_run_command) + mocker.patch.object(asyncio, "sleep", new=noop_async) + no_reclaim_task.async_start = noop_async + no_reclaim_task.start() + if raises: + with pytest.raises(raises): + await no_reclaim_task.run_task() + else: + await no_reclaim_task.run_task() + assert no_reclaim_task.status == expected_status diff --git a/notarization_poller/tests/test_worker.py b/notarization_poller/tests/test_worker.py new file mode 100644 index 000000000..ade7a5560 --- /dev/null +++ b/notarization_poller/tests/test_worker.py @@ -0,0 +1,197 @@ +#!/usr/bin/env python +# coding=utf-8 +"""Test notarization_poller.worker +""" +import asyncio +import json +import os +import signal +import sys +from copy import deepcopy + +import arrow +import pytest +from taskcluster.exceptions import TaskclusterRestFailure + +import notarization_poller.worker as worker +from notarization_poller.exceptions import WorkerError +from notarization_poller.worker import RunTasks + +from . 
import noop_async + + +# claim_work {{{1 +@pytest.mark.asyncio +@pytest.mark.parametrize("raises", (True, False)) +async def test_claim_work(raises, config, mocker): + async def foo(*args): + raise TaskclusterRestFailure("foo", None, status_code=4) + + queue = mocker.MagicMock() + if raises: + queue.claimWork = foo + else: + queue.claimWork = noop_async + assert await worker.claim_work(config, queue) is None + + +# main {{{1 +def test_main(mocker, config, event_loop): + async def foo(*args): + raise WorkerError("foo") + + fake_run_tasks = mocker.MagicMock() + fake_run_tasks.invoke = foo + mocker.patch.object(worker, "RunTasks", return_value=fake_run_tasks) + + tmp = os.path.join(config["work_dir"], "foo") + with open(tmp, "w") as fh: + json.dump(config, fh) + mocker.patch.object(sys, "argv", new=["x", tmp]) + with pytest.raises(WorkerError): + worker.main(event_loop=event_loop) + + +@pytest.mark.parametrize("running", (True, False)) +def test_main_running_sigterm(mocker, config, event_loop, running): + """Test that sending SIGTERM causes the main loop to stop after the next + call to invoke.""" + run_tasks_cancelled = event_loop.create_future() + + class MockRunTasks: + async def cancel(*args): + run_tasks_cancelled.set_result(True) + + async def invoke(*args): + os.kill(os.getpid(), signal.SIGTERM) + + mrt = MockRunTasks() + + mocker.patch.object(worker, "RunTasks", return_value=mrt) + + tmp = os.path.join(config["work_dir"], "foo") + with open(tmp, "w") as fh: + json.dump(config, fh) + mocker.patch.object(sys, "argv", new=["x", tmp]) + worker.main(event_loop=event_loop) + + if running: + event_loop.run_until_complete(run_tasks_cancelled) + assert run_tasks_cancelled.result() + + +@pytest.mark.parametrize("running", (True, False)) +def test_main_running_sigusr1(mocker, config, event_loop, running): + """Test that sending SIGUSR1 causes the main loop to stop after the next + call to invoke without cancelling the task.""" + run_tasks_cancelled = event_loop.create_future() + + class MockRunTasks: + is_stopped = False + + async def cancel(*args): + run_tasks_cancelled.set_result(True) + + async def invoke(*args): + os.kill(os.getpid(), signal.SIGUSR1) + await asyncio.sleep(0.1) + + mrt = MockRunTasks() + mrt.running_tasks = [] + if running: + fake_task1 = mocker.MagicMock() + fake_task1.main_fut = noop_async() + fake_task2 = mocker.MagicMock() + fake_task2.main_fut = noop_async() + mrt.running_tasks = [fake_task1, fake_task2] + + tmp = os.path.join(config["work_dir"], "foo") + with open(tmp, "w") as fh: + json.dump(config, fh) + mocker.patch.object(worker, "RunTasks", return_value=mrt) + mocker.patch.object(sys, "argv", new=["x", tmp]) + worker.main(event_loop=event_loop) + + assert not run_tasks_cancelled.done() + assert mrt.is_stopped + + +# invoke {{{1 +@pytest.mark.asyncio +async def test_mocker_invoke(config, mocker): + task = {"foo": "bar", "credentials": {"a": "b"}, "task": {"task_defn": True}} + rt = worker.RunTasks(config) + + async def claim_work(*args, **kwargs): + return {"tasks": [deepcopy(task)]} + + async def fake_sleep(*args, **kwargs): + await asyncio.sleep(0.01) + await rt.cancel() + + fake_task = mocker.MagicMock() + fake_task.complete = False + fake_task.main_fut = asyncio.ensure_future(noop_async()) + + mocker.patch.object(worker, "claim_work", new=claim_work) + mocker.patch.object(worker, "Task", return_value=fake_task) + mocker.patch.object(worker, "Queue") + mocker.patch.object(worker, "sleep", new=fake_sleep) + await rt.invoke() + assert rt.is_cancelled + assert 
len(rt.running_tasks) == 1 + + +@pytest.mark.asyncio +async def test_mocker_invoke_noop(config, mocker): + config["max_concurrent_tasks"] = 0 + config["claim_work_interval"] = 30 + rt = RunTasks(config) + rt.running_tasks = [] + # This is needed, or we'll never sleep, and cancel_rt will never + # get a chance to run + rt.last_claim_work = arrow.utcnow() + + async def cancel_rt(): + await rt.cancel() + + tasks = [asyncio.ensure_future(rt.invoke()), asyncio.ensure_future(cancel_rt())] + await asyncio.wait(tasks) + assert rt.is_cancelled + assert len(rt.running_tasks) == 0 + + +# prune_running_tasks {{{1 +@pytest.mark.asyncio +async def test_prune_running_tasks(config, mocker): + task1 = mocker.MagicMock() + task1.complete = True + task2 = mocker.MagicMock() + task2.complete = False + task3 = mocker.MagicMock() + task3.complete = False + task4 = mocker.MagicMock() + task4.complete = True + rt = RunTasks(config) + rt.running_tasks = [task1, task2, task3, task4] + await rt.prune_running_tasks() + assert rt.running_tasks == [task2, task3] + + +# run_cancellable {{{1 +@pytest.mark.asyncio +async def test_run_cancellable(config): + async def return_true(): + return True + + rt = RunTasks(config) + future1 = return_true() + result = await rt._run_cancellable(future1) + assert result is True + + # noop if is_cancelled + rt.is_cancelled = True + future2 = return_true() + result = await rt._run_cancellable(future2) + assert result is None + await future2 # silence warnings diff --git a/notarization_poller/tox.ini b/notarization_poller/tox.ini new file mode 100644 index 000000000..8b4ad92a5 --- /dev/null +++ b/notarization_poller/tox.ini @@ -0,0 +1,62 @@ +[tox] +envlist = docker + +[testenv:docker] +whitelist_externals=docker +deps = +usedevelop = false +depends = +skip_install = true +commands = + docker build --build-arg PYTHON_VERSION=3.7 -t notarization_poller-{envname}-py37-test -f Dockerfile.test . + docker run --rm -v {toxinidir}:/app -v notarization_poller-{envname}-py37-tox:/app/.tox notarization_poller-{envname}-py37-test check,py37 + docker build --build-arg PYTHON_VERSION=3.8 -t notarization_poller-{envname}-py38-test -f Dockerfile.test . 
+ docker run --rm -v {toxinidir}:/app -v notarization_poller-{envname}-py38-tox:/app/.tox notarization_poller-{envname}-py38-test py38 + +[testenv] +depends = clean +setenv = + PYTHONDONTWRITEBYTECODE=1 + PYTHONPATH = {toxinidir}/tests +usedevelop = true +deps = + -r {toxinidir}/requirements/test.in + -e {toxinidir}/../scriptworker_client +commands = + {posargs:py.test --cov-config=tox.ini --cov-append --cov=notarization_poller --cov-report term-missing tests} + +[testenv:clean] +skip_install = true +commands = coverage erase +depends = + +[testenv:report] +skip_install = true +commands = coverage report -m +depends = py38 +parallel_show_output = true + +[testenv:check] +skip_install = true +commands = + black --diff --check {toxinidir} + isort --check -rc -df {toxinidir} + pip-compile-multi verify + flake8 {toxinidir} +# Doesn't work without a .git in the same directory +# check-manifest -v {toxinidir} + +[flake8] +max-line-length = 160 +exclude = .ropeproject,.tox,sandbox,setup.py,build/ +show-source = True +per-file-ignores = + # No docstring tests in tests + tests/*:D + tests/test_script.py:E + tests/test_script.py:F + src/notarization_poller/script.py:E + src/notarization_poller/script.py:D + +[coverage:run] +branch = true diff --git a/notarization_poller/version.txt b/notarization_poller/version.txt new file mode 100644 index 000000000..3eefcb9dd --- /dev/null +++ b/notarization_poller/version.txt @@ -0,0 +1 @@ +1.0.0 diff --git a/scriptworker_client/requirements/base.in b/scriptworker_client/requirements/base.in index 825ccb7dc..2716b2769 100644 --- a/scriptworker_client/requirements/base.in +++ b/scriptworker_client/requirements/base.in @@ -1,4 +1,4 @@ aiohttp -async_generator; python_version < '3.7' +immutabledict jsonschema PyYAML diff --git a/scriptworker_client/requirements/base.py37.txt b/scriptworker_client/requirements/base.py37.txt index 6fde995a1..a3aa277e9 100644 --- a/scriptworker_client/requirements/base.py37.txt +++ b/scriptworker_client/requirements/base.py37.txt @@ -1,4 +1,4 @@ -# SHA1:4394accc9a6785c41fad566f7901100c5f8dd7ed +# SHA1:5100419eb680e32df6178954e836e48892da42bf # # This file is autogenerated by pip-compile-multi # To update, run: @@ -35,6 +35,10 @@ idna==2.9 \ --hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \ --hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa \ # via yarl +immutabledict==0.2.0 \ + --hash=sha256:43dde3e55dcb539537ae6189fb6b09a1d01e94db304e4506e94ca2d45ec14c47 \ + --hash=sha256:7881e44098f13dd12d6fec00551d564433cb46776e8b2f3453128f715df4376a \ + # via -r requirements/base.in (line 2) importlib-metadata==1.5.0 \ --hash=sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302 \ --hash=sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b \ diff --git a/scriptworker_client/requirements/base.txt b/scriptworker_client/requirements/base.txt index b53d4903d..4d43b714c 100644 --- a/scriptworker_client/requirements/base.txt +++ b/scriptworker_client/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:4394accc9a6785c41fad566f7901100c5f8dd7ed +# SHA1:5100419eb680e32df6178954e836e48892da42bf # # This file is autogenerated by pip-compile-multi # To update, run: @@ -35,6 +35,10 @@ idna==2.9 \ --hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \ --hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa \ # via yarl +immutabledict==0.2.0 \ + --hash=sha256:43dde3e55dcb539537ae6189fb6b09a1d01e94db304e4506e94ca2d45ec14c47 \ + 
    --hash=sha256:7881e44098f13dd12d6fec00551d564433cb46776e8b2f3453128f715df4376a \
+    # via -r requirements/base.in (line 2)
 jsonschema==3.2.0 \
     --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \
     --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a \
diff --git a/scriptworker_client/src/scriptworker_client/aio.py b/scriptworker_client/src/scriptworker_client/aio.py
index 9a0b36c67..2ccf17a73 100644
--- a/scriptworker_client/src/scriptworker_client/aio.py
+++ b/scriptworker_client/src/scriptworker_client/aio.py
@@ -1,13 +1,15 @@
 #!/usr/bin/env python
 """Async helper functions."""
-import aiohttp
 import asyncio
-import async_timeout
 import fcntl
 import logging
 import os
 import random
 import sys
+
+import aiohttp
+import async_timeout
+
 from scriptworker_client.exceptions import (
     Download404,
     DownloadError,
diff --git a/scriptworker_client/src/scriptworker_client/client.py b/scriptworker_client/src/scriptworker_client/client.py
index 82d29aa46..f62e3ff7c 100644
--- a/scriptworker_client/src/scriptworker_client/client.py
+++ b/scriptworker_client/src/scriptworker_client/client.py
@@ -6,11 +6,13 @@
 """
 import asyncio
-import jsonschema
 import logging
 import os
 import sys
+
+import jsonschema
+from immutabledict import immutabledict
+
 from scriptworker_client.exceptions import ClientError, TaskVerificationError
 from scriptworker_client.utils import load_json_or_yaml

@@ -107,7 +109,7 @@ def sync_main(
         async_main (function): The function to call once everything is set up
         config_path (str, optional): The path to the file to load the config from.
             Loads from ``sys.argv[1]`` if ``None``. Defaults to None.
-        default_config (dict, optional): the default config to use for ``_init_config``.
+        default_config (dict, optional): the default config to use for ``init_config``.
            defaults to None.
        should_verify_task (bool, optional): whether we should verify the task
            schema. Defaults to True.
@@ -116,7 +118,7 @@ def sync_main(
            ``asyncio.get_event_loop``.

    """
-    config = _init_config(config_path, default_config)
+    config = init_config(config_path, default_config)
     _init_logging(config)
     task = get_task(config)
     if should_verify_task:
@@ -125,16 +127,40 @@ def sync_main(
     loop.run_until_complete(_handle_asyncio_loop(async_main, config, task))


-def _init_config(config_path=None, default_config=None):
+def init_config(config_path=None, default_config=None, validator_callback=None):
+    """Initialize the config.
+
+    First, read config overrides from ``config_path`` and apply them over
+    ``default_config``. Pass the merged config to ``validator_callback``, then
+    return an immutabledict of the config.
+
+    Args:
+        config_path (str, optional): the path to the config file. Defaults to
+            ``sys.argv[1]``.
+        default_config (dict, optional): the config defaults. These are the
+            config values if not overridden in ``config_path``. Defaults to
+            ``{}``.
+        validator_callback (function, optional): a function that takes a single
+            arg (``config``), and raises an exception if invalid. If ``None``,
+            don't validate the config. Defaults to ``None``.
+
+    Raises:
+        Exception: if the config doesn't pass the ``validator_callback``.
+
+    Returns:
+        immutabledict: the config.
+ + """ if config_path is None: if len(sys.argv) != 2: _usage() config_path = sys.argv[1] - config = {} if default_config is None else default_config + config = {} if default_config is None else dict(default_config) config.update(load_json_or_yaml(config_path, file_type="yaml", is_path=True)) + validator_callback and validator_callback(config) - return config + return immutabledict(config) def _usage(): diff --git a/scriptworker_client/src/scriptworker_client/exceptions.py b/scriptworker_client/src/scriptworker_client/exceptions.py index eed694996..3265a1905 100644 --- a/scriptworker_client/src/scriptworker_client/exceptions.py +++ b/scriptworker_client/src/scriptworker_client/exceptions.py @@ -2,6 +2,7 @@ """Scriptworker-client exceptions.""" import builtins + from scriptworker_client.constants import STATUSES diff --git a/scriptworker_client/src/scriptworker_client/utils.py b/scriptworker_client/src/scriptworker_client/utils.py index 31a5fc23e..88291711c 100644 --- a/scriptworker_client/src/scriptworker_client/utils.py +++ b/scriptworker_client/src/scriptworker_client/utils.py @@ -6,14 +6,16 @@ """ import asyncio -from asyncio.subprocess import PIPE -from contextlib import contextmanager import json import logging import os import shutil import tempfile +from asyncio.subprocess import PIPE +from contextlib import contextmanager + import yaml + from scriptworker_client.exceptions import TaskError log = logging.getLogger(__name__) diff --git a/scriptworker_client/tests/test_client.py b/scriptworker_client/tests/test_client.py index b3746865d..2ff46c1b4 100644 --- a/scriptworker_client/tests/test_client.py +++ b/scriptworker_client/tests/test_client.py @@ -203,12 +203,12 @@ async def async_error(*args, **kwargs): def test_init_config_cli(mocker, tmpdir): - """_init_config can get its config from the commandline if not specified. + """init_config can get its config from the commandline if not specified. """ mocker.patch.object(sys, "argv", new=["x"]) with pytest.raises(SystemExit): - client._init_config() + client.init_config() path = os.path.join(tmpdir, "foo.json") config = {"a": "b"} default_config = {"c": "d"} @@ -217,4 +217,4 @@ def test_init_config_cli(mocker, tmpdir): expected = deepcopy(default_config) expected.update(config) mocker.patch.object(sys, "argv", new=["x", path]) - assert client._init_config(default_config=default_config) == expected + assert client.init_config(default_config=default_config) == expected diff --git a/tox.ini b/tox.ini index 4f90c2c59..9e4a78958 100644 --- a/tox.ini +++ b/tox.ini @@ -12,6 +12,8 @@ envlist = beetmoverscript-py38 bouncerscript-py37 bouncerscript-py38 + notarization_poller-py37 + notarization_poller-py38 pushapkscript-py37 pushapkscript-py38 pushsnapscript-py37 @@ -105,6 +107,17 @@ changedir = {toxinidir}/bouncerscript commands = tox -e py37 +[testenv:notarization_poller-py38] +changedir = {toxinidir}/notarization_poller +commands = + tox -e py38 + +[testenv:notarization_poller-py37] +changedir = {toxinidir}/notarization_poller +commands = + tox -e py37 + - tox -e coveralls + [testenv:pushapkscript-py38] changedir = {toxinidir}/pushapkscript commands =