From f1433a126e7a399a9ab583685c3cf1b84752b389 Mon Sep 17 00:00:00 2001 From: Connor Sheehan Date: Mon, 30 Dec 2024 13:51:29 -0500 Subject: [PATCH] api: initial implementation of headless API (Bug 1941363) Build out the basic functionality of the headless API for Lando. Using django-ninja we define two API endpoints, to POST automation jobs and GET job statuses after submission. The API endpoints take a set of `actions` defined in the request body which are stored in the database for processing by a worker. Authentication is handled by an API key associated with a user profile. A single action, `add-commit` is implemented which can be used to test adding patches to the repo as commits. --- pyproject.toml | 1 + requirements.txt | 118 ++++++ .../api/legacy/workers/automation_worker.py | 295 +++++++++++++++ src/lando/main/api.py | 169 +++++++++ .../management/commands/landing_worker.py | 1 + ...ed_lando_api_key_automationjob_and_more.py | 99 +++++ src/lando/main/models/automation_job.py | 124 ++++++ src/lando/main/models/configuration.py | 2 + src/lando/main/models/profile.py | 21 ++ src/lando/main/tests/conftest.py | 100 +++++ src/lando/main/tests/test_automation_api.py | 353 ++++++++++++++++++ src/lando/urls.py | 7 + 12 files changed, 1290 insertions(+) create mode 100644 src/lando/api/legacy/workers/automation_worker.py create mode 100644 src/lando/main/api.py create mode 100644 src/lando/main/migrations/0014_profile_encrypted_lando_api_key_automationjob_and_more.py create mode 100644 src/lando/main/models/automation_job.py create mode 100644 src/lando/main/tests/test_automation_api.py diff --git a/pyproject.toml b/pyproject.toml index 72ee7c27..b3ba784e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,6 +16,7 @@ dependencies = [ "datadog", "django ~= 5.0", "django-libsass", + "django-ninja", "django-storages[google]", "django_compressor", "jinja2", diff --git a/requirements.txt b/requirements.txt index 5f5e07d0..8e3d02f2 100644 --- a/requirements.txt +++ 
b/requirements.txt @@ -104,6 +104,10 @@ anyio==4.8.0 \ --hash=sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a \ --hash=sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a # via gql +annotated-types==0.7.0 \ + --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \ + --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89 + # via pydantic asgiref==3.8.1 \ --hash=sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47 \ --hash=sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590 @@ -584,6 +588,7 @@ django==5.1.4 \ # via # django-appconf # django-compressor + # django-ninja # django-storages # lando (pyproject.toml) # mozilla-django-oidc @@ -601,6 +606,10 @@ django-libsass==0.9 \ --hash=sha256:5234d29100889cac79e36a0f44207ec6d275adfd2da1acb6a94b55c89fe2bd97 \ --hash=sha256:bfbbb55a8950bb40fa04dd416605f92da34ad1f303b10a41abc3232386ec27b5 # via lando (pyproject.toml) +django-ninja==1.3.0 \ + --hash=sha256:5b320e2dc0f41a6032bfa7e1ebc33559ae1e911a426f0c6be6674a50b20819be \ + --hash=sha256:f58096b6c767d1403dfd6c49743f82d780d7b9688d9302ecab316ac1fa6131bb + # via lando (pyproject.toml) django-storages[google]==1.14.4 \ --hash=sha256:69aca94d26e6714d14ad63f33d13619e697508ee33ede184e462ed766dc2a73f \ --hash=sha256:d61930acb4a25e3aebebc6addaf946a3b1df31c803a6bf1af2f31c9047febaa3 @@ -1284,6 +1293,112 @@ pyjwt[crypto]==2.10.1 \ --hash=sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953 \ --hash=sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb # via simple-github +pydantic==2.10.4 \ + --hash=sha256:597e135ea68be3a37552fb524bc7d0d66dcf93d395acd93a00682f1efcb8ee3d \ + --hash=sha256:82f12e9723da6de4fe2ba888b5971157b3be7ad914267dea8f05f82b28254f06 + # via django-ninja +pydantic-core==2.27.2 \ + --hash=sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278 \ + 
--hash=sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50 \ + --hash=sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9 \ + --hash=sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f \ + --hash=sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6 \ + --hash=sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc \ + --hash=sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54 \ + --hash=sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630 \ + --hash=sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9 \ + --hash=sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236 \ + --hash=sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7 \ + --hash=sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee \ + --hash=sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b \ + --hash=sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048 \ + --hash=sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc \ + --hash=sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130 \ + --hash=sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4 \ + --hash=sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd \ + --hash=sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4 \ + --hash=sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7 \ + --hash=sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7 \ + --hash=sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4 \ + --hash=sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e \ + --hash=sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa \ + --hash=sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6 \ + 
--hash=sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962 \ + --hash=sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b \ + --hash=sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f \ + --hash=sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474 \ + --hash=sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5 \ + --hash=sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459 \ + --hash=sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf \ + --hash=sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a \ + --hash=sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c \ + --hash=sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76 \ + --hash=sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362 \ + --hash=sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4 \ + --hash=sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934 \ + --hash=sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320 \ + --hash=sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118 \ + --hash=sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96 \ + --hash=sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306 \ + --hash=sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046 \ + --hash=sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3 \ + --hash=sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2 \ + --hash=sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af \ + --hash=sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9 \ + --hash=sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67 \ + --hash=sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a \ + 
--hash=sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27 \ + --hash=sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35 \ + --hash=sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b \ + --hash=sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151 \ + --hash=sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b \ + --hash=sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154 \ + --hash=sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133 \ + --hash=sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef \ + --hash=sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145 \ + --hash=sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15 \ + --hash=sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4 \ + --hash=sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc \ + --hash=sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee \ + --hash=sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c \ + --hash=sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0 \ + --hash=sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5 \ + --hash=sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57 \ + --hash=sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b \ + --hash=sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8 \ + --hash=sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1 \ + --hash=sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da \ + --hash=sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e \ + --hash=sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc \ + --hash=sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993 \ + 
--hash=sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656 \ + --hash=sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4 \ + --hash=sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c \ + --hash=sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb \ + --hash=sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d \ + --hash=sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9 \ + --hash=sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e \ + --hash=sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1 \ + --hash=sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc \ + --hash=sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a \ + --hash=sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9 \ + --hash=sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506 \ + --hash=sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b \ + --hash=sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1 \ + --hash=sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d \ + --hash=sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99 \ + --hash=sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3 \ + --hash=sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31 \ + --hash=sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c \ + --hash=sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39 \ + --hash=sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a \ + --hash=sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308 \ + --hash=sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2 \ + --hash=sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228 \ + 
--hash=sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b \ + --hash=sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9 \ + --hash=sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad + # via pydantic pyopenssl==24.3.0 \ --hash=sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36 \ --hash=sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a @@ -1567,6 +1682,9 @@ typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via anyio + # via + # pydantic + # pydantic-core tzdata==2024.2 \ --hash=sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc \ --hash=sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd diff --git a/src/lando/api/legacy/workers/automation_worker.py b/src/lando/api/legacy/workers/automation_worker.py new file mode 100644 index 00000000..9d035206 --- /dev/null +++ b/src/lando/api/legacy/workers/automation_worker.py @@ -0,0 +1,295 @@ +import logging +from contextlib import contextmanager +from datetime import datetime +from io import StringIO +from typing import Any + +import kombu +from django.db import transaction + +from lando.api.legacy.hgexports import HgPatchHelper +from lando.api.legacy.notifications import ( + notify_user_of_landing_failure, +) +from lando.api.legacy.workers.base import Worker +from lando.main.api import ( + Action, + AddBranchAction, + AddCommitAction, + MergeOntoAction, + TagAction, +) +from lando.main.models.automation_job import ( + AutomationJob, +) +from lando.main.models.configuration import ConfigurationKey +from lando.main.models.landing_job import LandingJobAction, LandingJobStatus +from lando.main.models.repo import Repo +from lando.main.scm.abstract_scm import AbstractSCM +from lando.main.scm.exceptions import ( + PatchConflict, + 
SCMInternalServerError, + SCMLostPushRace, + SCMPushTimeoutException, + TreeApprovalRequired, + TreeClosed, +) +from lando.utils.tasks import phab_trigger_repo_update + +logger = logging.getLogger(__name__) + + +def map_to_pydantic_action(action_type: str, action_data: dict[str, Any]) -> Action: + """Convert a dict to an `Action` object. + + TODO there must be a better way to do this? + """ + return { + "add-commit": AddCommitAction, + "merge-onto": MergeOntoAction, + "tag": TagAction, + "add-branch": AddBranchAction, + }[action_type](**action_data) + + +@contextmanager +def job_processing(job: AutomationJob): + """Mutex-like context manager that manages job processing miscellany. + + This context manager facilitates graceful worker shutdown, tracks the duration of + the current job, and commits changes to the DB at the very end. + + Args: + job: the job currently being processed + db: active database session + """ + start_time = datetime.now() + try: + yield + finally: + job.duration_seconds = (datetime.now() - start_time).seconds + + +class AutomationWorker(Worker): + """Worker to land headless API patches.""" + + @property + def STOP_KEY(self) -> ConfigurationKey: + """Return the configuration key that prevents the worker from starting.""" + return ConfigurationKey.AUTOMATION_WORKER_STOPPED + + @property + def PAUSE_KEY(self) -> ConfigurationKey: + """Return the configuration key that pauses the worker.""" + return ConfigurationKey.AUTOMATION_WORKER_PAUSED + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.last_job_finished = None + self.refresh_enabled_repos() + + def loop(self): + logger.debug( + f"{len(self.applicable_repos)} applicable repos: {self.applicable_repos}" + ) + + # Check if any closed trees reopened since the beginning of this iteration + if len(self.enabled_repos) != len(self.applicable_repos): + self.refresh_enabled_repos() + + if self.last_job_finished is False: + logger.info("Last job did not complete, 
sleeping.") + self.throttle(self.sleep_seconds) + self.refresh_enabled_repos() + + with transaction.atomic(): + job = AutomationJob.next_job(repositories=self.enabled_repos).first() + + if job is None: + self.throttle(self.sleep_seconds) + return + + with job_processing(job): + job.status = LandingJobStatus.IN_PROGRESS + job.attempts += 1 + job.save() + + # Make sure the status and attempt count are updated in the database + logger.info("Starting landing job", extra={"id": job.id}) + self.last_job_finished = self.run_automation_job(job) + logger.info("Finished processing landing job", extra={"id": job.id}) + + def add_commit_action( + self, job: AutomationJob, repo: Repo, scm: AbstractSCM, action: AddCommitAction + ) -> bool: + """Run the `add-commit` action.""" + patch_helper = HgPatchHelper(StringIO(action.content)) + + date = patch_helper.get_header("Date") + user = patch_helper.get_header("User") + + try: + scm.apply_patch( + patch_helper.get_diff(), + patch_helper.get_commit_description(), + user, + date, + ) + except PatchConflict as exc: + # TODO how to handle merge conflicts? + # TODO 999 here should be replaced, or perhaps revision ID becomes optional. + # breakdown = self.process_merge_conflict(exc, repo, scm, 999) + # job.error_breakdown = breakdown + + message = ( + # TODO some kind of ID for which patch failed to apply? + f"Problem while applying patch in revision.\n\n" + f"{str(exc)}" + ) + logger.exception(message) + job.transition_status(LandingJobAction.FAIL, message=message) + # TODO no notifications required? at least not initially? + # self.notify_user_of_landing_failure(job) + return True + except Exception as e: + message = ( + # TODO some kind of ID for which patch failed to apply? + f"Aborting, could not apply patch buffer." + f"\n{e}" + ) + logger.exception(message) + job.transition_status( + LandingJobAction.FAIL, + message=message, + ) + # TODO no notifications required? at least not initially? 
+ # self.notify_user_of_landing_failure(job) + return True + + return True + + def process_action( + self, job: AutomationJob, repo: Repo, scm: AbstractSCM, action: Action + ) -> bool: + """Process an automation action.""" + if action.action == "add-commit": + return self.add_commit_action(job, repo, scm, action) + + raise NotImplementedError( + f"Action type {action.action} is not yet implemented." + ) + + def run_automation_job(self, job: AutomationJob) -> bool: + """Run an automation job.""" + repo = job.target_repo + scm = repo.scm + + # TODO should we check treestatus? + + with scm.for_push(job.requester_email): + repo_pull_info = f"tree: {repo.tree}, pull path: {repo.pull_path}" + try: + # TODO should we always update to the latest pull_path for a repo? + # or perhaps we need to specify some commit SHA? + scm.update_repo(repo.pull_path) + except SCMInternalServerError as e: + message = ( + f"`Temporary error ({e.__class__}) " + f"encountered while pulling from {repo_pull_info}" + ) + logger.exception(message) + job.transition_status(LandingJobAction.DEFER, message=message) + + # Try again, this is a temporary failure. + return False + except Exception as e: + message = f"Unexpected error while fetching repo from {repo.pull_path}." + logger.exception(message) + job.transition_status( + LandingJobAction.FAIL, + message=message + f"\n{e}", + ) + # TODO no notifications required? at least not initially? + # self.notify_user_of_landing_failure(job) + return True + + actions = job.actions.all() + for action_row in actions: + # Turn the row action into a Pydantic action. + action = map_to_pydantic_action(action_row.action_type, action_row.data) + + # Execute the action locally. 
+ self.process_action(job, repo, scm, action) + + repo_push_info = f"tree: {repo.tree}, push path: {repo.push_path}" + try: + scm.push( + repo.push_path, + push_target=repo.push_target, + force_push=repo.force_push, + ) + except ( + TreeClosed, + TreeApprovalRequired, + SCMLostPushRace, + SCMPushTimeoutException, + SCMInternalServerError, + ) as e: + message = ( + f"`Temporary error ({e.__class__}) " + f"encountered while pushing to {repo_push_info}" + ) + logger.exception(message) + job.transition_status(LandingJobAction.DEFER, message=message) + return False # Try again, this is a temporary failure. + except Exception as e: + message = f"Unexpected error while pushing to {repo.push_path}.\n{e}" + logger.exception(message) + job.transition_status( + LandingJobAction.FAIL, + message=message, + ) + # TODO no notifications required? at least not initially? + # self.notify_user_of_landing_failure(job) + return True # Do not try again, this is a permanent failure. + + # Get the changeset hash of the first node. + commit_id = scm.head_ref() + + job.transition_status(LandingJobAction.LAND, commit_id=commit_id) + + # Trigger update of repo in Phabricator so patches are closed quicker. + # Especially useful on low-traffic repositories. + if repo.phab_identifier: + self.phab_trigger_repo_update(repo.phab_identifier) + + return True + + @staticmethod + def notify_user_of_landing_failure(job: AutomationJob): + """Wrapper around notify_user_of_landing_failure for convenience. + + Args: + job (LandingJob): A LandingJob instance to use when fetching the + notification parameters. + """ + notify_user_of_landing_failure( + job.requester_email, job.landing_job_identifier, job.error, job.id + ) + + @staticmethod + def phab_trigger_repo_update(phab_identifier: str): + """Wrapper around `phab_trigger_repo_update` for convenience. + + Args: + phab_identifier: `str` to be passed to Phabricator to identify + repo. + """ + try: + # Send a Phab repo update task to Celery. 
+ phab_trigger_repo_update.apply_async(args=(phab_identifier,)) + except kombu.exceptions.OperationalError as e: + # Log the exception but continue gracefully. + # The repo will eventually update. + logger.exception("Failed sending repo update task to Celery.") + logger.exception(e) diff --git a/src/lando/main/api.py b/src/lando/main/api.py new file mode 100644 index 00000000..16262495 --- /dev/null +++ b/src/lando/main/api.py @@ -0,0 +1,169 @@ +import datetime +import logging +from typing import Annotated, Literal, Union + +from django.contrib.auth.models import User +from django.db import transaction +from ninja import ( + NinjaAPI, + Schema, +) +from ninja.responses import codes_4xx +from ninja.security import HttpBearer +from pydantic import Field + +from lando.main.models.automation_job import ( + AutomationAction, + AutomationJob, +) +from lando.main.models.landing_job import LandingJobStatus +from lando.main.models.repo import Repo + +logger = logging.getLogger(__name__) + + +class APIPermissionDenied(Exception): + """Custom exception type to allow JSON responses for invalid auth.""" + + pass + + +class HeadlessAPIAuthentication(HttpBearer): + """Authentication class to verify API token.""" + + def authenticate(self, request, token: str) -> str: + user_agent = request.headers.get("User-Agent") + if not user_agent: + raise APIPermissionDenied("`User-Agent` header is required.") + + try: + user = User.objects.get(email=user_agent) + except User.DoesNotExist: + raise APIPermissionDenied(f"No user found for `User-Agent` {user_agent}") + + decrypted_token = user.profile.lando_api_key + if not decrypted_token or not token or decrypted_token != token: + raise APIPermissionDenied("API token is invalid.") + + # Django-Ninja sets `request.auth` to the verified token, since + # some APIs may have authentication without user management. Our + # API tokens always correspond to a specific user, so set that on + # the request here. 
+ request.user = user + + return token + + +api = NinjaAPI(auth=HeadlessAPIAuthentication()) + + +@api.exception_handler(APIPermissionDenied) +def on_invalid_token(request, exc): + """Create a JSON response when the API returns a 401.""" + return api.create_response(request, {"details": str(exc)}, status=401) + + +class AddCommitAction(Schema): + """Create a new commit the given patch content.""" + + action: Literal["add-commit"] + content: str + + +class MergeOntoAction(Schema): + """Merge the current branch into the target commit.""" + + action: Literal["merge-onto"] + target: str + message: str + + +class TagAction(Schema): + """Create a new tag with the given name.""" + + action: Literal["tag"] + name: str + + +class AddBranchAction(Schema): + """Create a new branch at the given commit.""" + + action: Literal["add-branch"] + name: str + commit: str + + +Action = Union[AddCommitAction, MergeOntoAction, AddBranchAction, TagAction] + + +class AutomationOperation(Schema): + """Represents the body of an automation API operation request.""" + + # `Annotated` here to specify `min_items=1`. + actions: Annotated[list[Action], Field(min_items=1)] + + +class ApiError(Schema): + """Response format for an error within the API.""" + + details: str + + +class JobStatus(Schema): + """Response format of a job status report.""" + + job_id: int + status_url: str + message: str + created_at: datetime.datetime + + +@api.post("/repo/{repo_name}/{branch}", response={202: JobStatus, codes_4xx: ApiError}) +def post_repo_actions( + request, repo_name: str, branch: str, operation: AutomationOperation +): + """API endpoint to handle submission of pushes.""" + # Get the repo object. + try: + repo = Repo.objects.get(name=repo_name) + except Repo.DoesNotExist: + error = f"Repo {repo_name} does not exist." 
+ logger.info(error) + return 404, {"details": error} + + with transaction.atomic(): + automation_job = AutomationJob.objects.create( + status=LandingJobStatus.SUBMITTED, + requester_email=request.user.email, + target_repo=repo, + ) + + for index, action in enumerate(operation.actions): + AutomationAction.objects.create( + job_id=automation_job, + action_type=action.action, + data=action.dict(), + order=index, + ) + + logger.info( + f"Created automation job {automation_job.id} with " + f"{len(operation.actions)} actions." + ) + + return 202, automation_job.to_api_status() + + +@api.get("/job/{int:job_id}", response={200: JobStatus, codes_4xx: ApiError}) +def get_job_status(request, job_id: int): + """Retrieve the status of a job by ID.""" + try: + automation_job = AutomationJob.objects.get(id=job_id) + except AutomationJob.DoesNotExist: + error = f"Automation job {job_id} does not exist." + logger.info(error) + return 404, {"details": error} + + logger.info(f"Retrieved status for job {automation_job.id}.") + + return 200, automation_job.to_api_status() diff --git a/src/lando/main/management/commands/landing_worker.py b/src/lando/main/management/commands/landing_worker.py index 805d6fb5..675aac9d 100644 --- a/src/lando/main/management/commands/landing_worker.py +++ b/src/lando/main/management/commands/landing_worker.py @@ -33,6 +33,7 @@ def job_processing(job: LandingJob): job.duration_seconds = (datetime.now() - start_time).seconds +# TODO what is this? should it be removed? class Command(BaseCommand, WorkerMixin): help = "Start the landing worker." 
name = "landing-worker" diff --git a/src/lando/main/migrations/0014_profile_encrypted_lando_api_key_automationjob_and_more.py b/src/lando/main/migrations/0014_profile_encrypted_lando_api_key_automationjob_and_more.py new file mode 100644 index 00000000..d42377d1 --- /dev/null +++ b/src/lando/main/migrations/0014_profile_encrypted_lando_api_key_automationjob_and_more.py @@ -0,0 +1,99 @@ +# Generated by Django 5.1.4 on 2025-01-13 16:27 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("main", "0013_alter_repo_scm_type_alter_worker_scm"), + ] + + operations = [ + migrations.AddField( + model_name="profile", + name="encrypted_lando_api_key", + field=models.BinaryField(blank=True, default=b""), + ), + migrations.CreateModel( + name="AutomationJob", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "status", + models.CharField( + choices=[ + ("SUBMITTED", "Submitted"), + ("IN_PROGRESS", "In progress"), + ("DEFERRED", "Deferred"), + ("FAILED", "Failed"), + ("LANDED", "Landed"), + ("CANCELLED", "Cancelled"), + ], + default=None, + max_length=32, + ), + ), + ( + "requester_email", + models.CharField(blank=True, default="", max_length=255), + ), + ("landed_commit_id", models.TextField(blank=True, default="")), + ("attempts", models.IntegerField(default=0)), + ("priority", models.IntegerField(default=0)), + ("duration_seconds", models.IntegerField(default=0)), + ( + "target_repo", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="main.repo", + ), + ), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="AutomationAction", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + 
serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ("action_type", models.CharField()), + ("data", models.JSONField()), + ("order", models.PositiveIntegerField()), + ( + "job_id", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="actions", + to="main.automationjob", + ), + ), + ], + options={ + "ordering": ["order"], + }, + ), + ] diff --git a/src/lando/main/models/automation_job.py b/src/lando/main/models/automation_job.py new file mode 100644 index 00000000..e026ccdc --- /dev/null +++ b/src/lando/main/models/automation_job.py @@ -0,0 +1,124 @@ +from typing import Any + +from django.db import models + +from lando.main.models.base import BaseModel +from lando.main.models.landing_job import ( + LandingJobAction, + LandingJobStatus, +) +from lando.main.models.repo import Repo + + +class AutomationJob(BaseModel): + """An automation job. + + TODO write better docstring + """ + + # Current status of the job. + status = models.CharField( + max_length=32, + choices=LandingJobStatus, + default=None, + ) + + # Email of the user who created the automation job. + requester_email = models.CharField(blank=True, default="", max_length=255) + + # Identifier for the most descendent commit created by this landing. + landed_commit_id = models.TextField(blank=True, default="") + + # Number of attempts made to complete the job. + attempts = models.IntegerField(default=0) + + # Priority of the job. Higher values are processed first. + priority = models.IntegerField(default=0) + + # Duration of job from start to finish + duration_seconds = models.IntegerField(default=0) + + # Reference to the target repo. + target_repo = models.ForeignKey(Repo, on_delete=models.SET_NULL, null=True) + + def to_api_status(self) -> dict[str, Any]: + """Return the job details as API status JSON. + + TODO make this better? 
+ """ + return { + "job_id": self.id, + "status_url": "TODO", + "message": f"Job is in the {self.status} state.", + "created_at": self.created_at, + } + + def transition_status( + self, + action: LandingJobAction, + **kwargs, + ): + """Change the status and other applicable fields according to actions. + + Args: + action (LandingJobAction): the action to take, e.g. "land" or "fail" + **kwargs: + Additional arguments required by each action, e.g. `message` or + `commit_id`. + """ + actions = { + LandingJobAction.LAND: { + "required_params": ["commit_id"], + "status": LandingJobStatus.LANDED, + }, + LandingJobAction.FAIL: { + "required_params": ["message"], + "status": LandingJobStatus.FAILED, + }, + LandingJobAction.DEFER: { + "required_params": ["message"], + "status": LandingJobStatus.DEFERRED, + }, + LandingJobAction.CANCEL: { + "required_params": [], + "status": LandingJobStatus.CANCELLED, + }, + } + + if action not in actions: + raise ValueError(f"{action} is not a valid action") + + required_params = actions[action]["required_params"] + if sorted(required_params) != sorted(kwargs.keys()): + missing_params = required_params - kwargs.keys() + raise ValueError(f"Missing {missing_params} params") + + self.status = actions[action]["status"] + + if action in (LandingJobAction.FAIL, LandingJobAction.DEFER): + self.error = kwargs["message"] + + if action == LandingJobAction.LAND: + self.landed_commit_id = kwargs["commit_id"] + + self.save() + + +class AutomationAction(BaseModel): + """An action in the automation API.""" + + # TODO should we have `on_delete=models.CASCADE` here? + job_id = models.ForeignKey( + AutomationJob, on_delete=models.CASCADE, related_name="actions" + ) + + action_type = models.CharField() + + # Data for each individual action. Data in these fields should be + # parsable into the appropriate Pydantic schema. 
+ data = models.JSONField() + + order = models.PositiveIntegerField() + + class Meta: + ordering = ["order"] diff --git a/src/lando/main/models/configuration.py b/src/lando/main/models/configuration.py index 148c0ad7..24a2362a 100644 --- a/src/lando/main/models/configuration.py +++ b/src/lando/main/models/configuration.py @@ -23,6 +23,8 @@ class ConfigurationKey(enum.Enum): LANDING_WORKER_STOPPED = "LANDING_WORKER_STOPPED" API_IN_MAINTENANCE = "API_IN_MAINTENANCE" WORKER_THROTTLE_SECONDS = "WORKER_THROTTLE_SECONDS" + AUTOMATION_WORKER_PAUSED = "AUTOMATION_WORKER_PAUSED" + AUTOMATION_WORKER_STOPPED = "AUTOMATION_WORKER_STOPPED" class VariableTypeChoices(models.TextChoices): diff --git a/src/lando/main/models/profile.py b/src/lando/main/models/profile.py index 07f5bc8b..36007367 100644 --- a/src/lando/main/models/profile.py +++ b/src/lando/main/models/profile.py @@ -77,6 +77,9 @@ class Meta: # Encrypted Phabricator API token. encrypted_phabricator_api_key = models.BinaryField(default=b"", blank=True) + # Encrypted API key. 
+ encrypted_lando_api_key = models.BinaryField(default=b"", blank=True) + def _encrypt_value(self, value: str) -> bytes: """Encrypt a given string value.""" return self.cryptography.encrypt(value.encode("utf-8")) @@ -127,6 +130,24 @@ def save_phabricator_api_key(self, key: str): self.encrypted_phabricator_api_key = self._encrypt_value(key) self.save() + @property + def lando_api_key(self) -> str: + """Decrypt and return the value of the Lando API key.""" + encrypted_key = bytes(self.encrypted_lando_api_key) + if encrypted_key: + return self._decrypt_value(encrypted_key) + + return "" + + def clear_lando_api_key(self): + """Set the Lando API key to an empty string and save.""" + self.save_lando_api_key("") + + def save_lando_api_key(self, key: str): + """Given a raw Lando API key, encrypt it and store it in the relevant field.""" + self.encrypted_lando_api_key = self._encrypt_value(key) + self.save() + def update_permissions(self): """Remove SCM permissions and re-add them based on userinfo.""" permissions = self.get_all_scm_permissions() diff --git a/src/lando/main/tests/conftest.py b/src/lando/main/tests/conftest.py index cc44bf02..ae421d6e 100644 --- a/src/lando/main/tests/conftest.py +++ b/src/lando/main/tests/conftest.py @@ -1,7 +1,13 @@ import pathlib import subprocess +import time +from pathlib import Path import pytest +import requests +from django.contrib.auth.models import User + +from lando.main.models import Profile @pytest.fixture @@ -39,3 +45,97 @@ def _git_setup_user(repo_dir): check=True, cwd=repo_dir, ) + + +@pytest.fixture +def hg_clone(hg_server, tmpdir): + clone_dir = tmpdir.join("hg_clone") + subprocess.run(["hg", "clone", hg_server, clone_dir.strpath], check=True) + return clone_dir + + +@pytest.fixture +def hg_test_bundle(request): + return Path(request.path.parent.parent.parent).joinpath( + "api", "tests", "data", "test-repo.bundle" + ) + + +@pytest.fixture +def hg_server(hg_test_bundle, tmpdir): + # TODO: Select open port. 
+ port = "8000" + hg_url = "http://localhost:" + port + + repo_dir = tmpdir.mkdir("hg_server") + subprocess.run(["hg", "clone", hg_test_bundle, repo_dir], check=True, cwd="/") + + serve = subprocess.Popen( + [ + "hg", + "serve", + "--config", + "web.push_ssl=False", + "--config", + "web.allow_push=*", + "-p", + port, + "-R", + repo_dir, + ] + ) + if serve.poll() is not None: + raise Exception("Failed to start the mercurial server.") + # Wait until the server is running. + for _i in range(10): + try: + requests.get(hg_url) + except Exception: + time.sleep(1) + break + + yield hg_url + serve.kill() + + +@pytest.fixture +def conduit_permissions(): + permissions = ( + "scm_level_1", + "scm_level_2", + "scm_level_3", + "scm_conduit", + ) + all_perms = Profile.get_all_scm_permissions() + + return [all_perms[p] for p in permissions] + + +@pytest.fixture +def user_plaintext_password(): + return "test_password" + + +@pytest.fixture +def user(user_plaintext_password, conduit_permissions): + user = User.objects.create_user( + username="test_user", + password=user_plaintext_password, + email="testuser@example.org", + ) + + user.profile = Profile(user=user, userinfo={"name": "test user"}) + + for permission in conduit_permissions: + user.user_permissions.add(permission) + + user.save() + user.profile.save() + + return user + + +@pytest.fixture +def headless_user(user): + user.profile.save_lando_api_key("api-dummy-key") + return user diff --git a/src/lando/main/tests/test_automation_api.py b/src/lando/main/tests/test_automation_api.py new file mode 100644 index 00000000..aeef7f44 --- /dev/null +++ b/src/lando/main/tests/test_automation_api.py @@ -0,0 +1,353 @@ +import datetime +import json +import unittest.mock as mock + +import pytest + +from lando.api.legacy.workers.automation_worker import AutomationWorker +from lando.main.api import AutomationAction, AutomationJob +from lando.main.models import SCM_LEVEL_3, Repo +from lando.main.models.landing_job import LandingJobStatus 
+from lando.main.scm import SCM_TYPE_HG
+
+
+@pytest.mark.django_db
+def test_auth_missing_user_agent(client, headless_user):
+    # Create a job and actions
+    job = AutomationJob.objects.create(status=LandingJobStatus.SUBMITTED)
+    AutomationAction.objects.create(
+        job_id=job, action_type="add-commit", data={"content": "test"}, order=0
+    )
+
+    # Fetch job status.
+    response = client.get(
+        f"/api/job/{job.id}",
+        headers={
+            "Authorization": "Bearer api-dummy-key",
+        },
+    )
+
+    assert response.status_code == 401, "Missing `User-Agent` should result in 401."
+    assert response.json() == {"details": "`User-Agent` header is required."}
+
+
+@pytest.mark.django_db
+def test_auth_missing_authorization_header(client, headless_user):
+    # Create a job and actions
+    job = AutomationJob.objects.create(status=LandingJobStatus.SUBMITTED)
+    AutomationAction.objects.create(
+        job_id=job, action_type="add-commit", data={"content": "test"}, order=0
+    )
+
+    # Fetch job status.
+    response = client.get(
+        f"/api/job/{job.id}",
+        headers={
+            "User-Agent": "testuser@example.org",
+        },
+    )
+
+    assert (
+        response.status_code == 401
+    ), "Missing `Authorization` header should result in 401."
+    assert response.json() == {"detail": "Unauthorized"}
+
+
+@pytest.mark.django_db
+def test_auth_unknown_user(client, headless_user):
+    # Create a job and actions
+    job = AutomationJob.objects.create(status=LandingJobStatus.SUBMITTED)
+    AutomationAction.objects.create(
+        job_id=job, action_type="add-commit", data={"content": "test"}, order=0
+    )
+
+    # Fetch job status.
+    response = client.get(
+        f"/api/job/{job.id}",
+        headers={
+            "Authorization": "Bearer api-dummy-key",
+            "User-Agent": "unknown-user@example.org",
+        },
+    )
+
+    assert response.status_code == 401, "Unknown user should result in 401 status code."
+    assert response.json() == {
+        "details": "No user found for `User-Agent` unknown-user@example.org"
+    }
+
+
+@pytest.mark.django_db
+def test_auth_invalid_token(client, headless_user):
+    # Create a job and actions
+    job = AutomationJob.objects.create(status=LandingJobStatus.SUBMITTED)
+    AutomationAction.objects.create(
+        job_id=job, action_type="add-commit", data={"content": "test"}, order=0
+    )
+
+    # Fetch job status.
+    response = client.get(
+        f"/api/job/{job.id}",
+        headers={
+            "Authorization": "Bearer api-bad-key",
+            "User-Agent": "testuser@example.org",
+        },
+    )
+
+    assert (
+        response.status_code == 401
+    ), "Invalid API key should result in 401 status code."
+    assert response.json() == {"details": "API token is invalid."}
+
+
+@pytest.mark.django_db
+def test_automation_job_create_bad_repo(client, headless_user):
+    body = {
+        "actions": [
+            {"action": "add-commit", "content": "TESTIN123"},
+        ],
+    }
+    response = client.post(
+        "/api/repo/blah/autoland",
+        data=json.dumps(body),
+        content_type="application/json",
+        headers={
+            "User-Agent": "testuser@example.org",
+            "Authorization": "Bearer api-dummy-key",
+        },
+    )
+
+    assert response.status_code == 404, "Unknown repo should respond with 404."
+    assert response.json() == {"details": "Repo blah does not exist."}
+
+
+@pytest.mark.django_db
+def test_automation_job_empty_actions(client, headless_user):
+    body = {
+        "actions": [],
+    }
+    response = client.post(
+        "/api/repo/blah/autoland",
+        data=json.dumps(body),
+        content_type="application/json",
+        headers={
+            "User-Agent": "testuser@example.org",
+            "Authorization": "Bearer api-dummy-key",
+        },
+    )
+
+    assert (
+        response.status_code == 422
+    ), "Empty `actions` should result in validation error."
+ + +@pytest.mark.parametrize( + "bad_action,reason", + ( + ( + {"action": "bad-action", "content": "TESTIN123"}, + "`bad-action` is an invalid action name.", + ), + ( + {"action": "add-commit", "content": {"test": 123}}, + "`content` should be a `str`.", + ), + ( + {"action": "add-commit", "content": 1}, + "`content` should be a `str`.", + ), + ), +) +@pytest.mark.django_db +def test_automation_job_create_bad_action(bad_action, reason, client, headless_user): + body = { + "actions": [bad_action], + } + response = client.post( + "/api/repo/blah/autoland", + data=json.dumps(body), + content_type="application/json", + headers={ + "User-Agent": "testuser@example.org", + "Authorization": "Bearer api-dummy-key", + }, + ) + + assert ( + response.status_code == 422 + ), f"Improper `actions` JSON schema should return 422 status: {reason}" + + +def is_isoformat_timestamp(date_string: str) -> bool: + """Return `True` if `date_string` is an ISO format datetime string.""" + try: + datetime.datetime.fromisoformat(date_string) + return True + except ValueError: + return False + + +@pytest.mark.django_db +def test_automation_job_create(client, hg_server, hg_clone, headless_user): + Repo.objects.create( + scm_type=SCM_TYPE_HG, + name="mozilla-central", + url=hg_server, + required_permission=SCM_LEVEL_3, + push_path=hg_server, + pull_path=hg_server, + system_path=hg_clone.strpath, + ) + + body = { + "actions": [ + # Set `content` to a string integer to test order is preserved. + {"action": "add-commit", "content": "0"}, + {"action": "add-commit", "content": "1"}, + ], + } + response = client.post( + "/api/repo/mozilla-central/autoland", + data=json.dumps(body), + content_type="application/json", + headers={ + "User-Agent": "testuser@example.org", + "Authorization": "Bearer api-dummy-key", + }, + ) + + assert ( + response.status_code == 202 + ), "Successful submission should result in `202 Accepted` status code." 
+ + response_json = response.json() + assert isinstance( + response_json["job_id"], int + ), "Job ID should be returned as an `int`." + assert response_json["status_url"] == "TODO" + assert response_json["message"] == "Job is in the SUBMITTED state." + assert is_isoformat_timestamp( + response_json["created_at"] + ), "Response should include an ISO formatted creation timestamp." + + job = AutomationJob.objects.get(id=response_json["job_id"]) + + for index, action in enumerate(job.actions.all()): + assert action.data["content"] == str( + index + ), "Actions should be retrieved in order of submission." + + +@pytest.mark.django_db +def test_get_job_status_not_found(client, headless_user): + response = client.get( + "/api/job/12345", + headers={ + "User-Agent": "testuser@example.org", + "Authorization": "Bearer api-dummy-key", + }, + ) + assert ( + response.status_code == 404 + ), "API should respond with a 404 for non-existent job ID." + + +@pytest.mark.parametrize( + "status,message", + ( + (LandingJobStatus.SUBMITTED, "Job is in the SUBMITTED state."), + (LandingJobStatus.IN_PROGRESS, "Job is in the IN_PROGRESS state."), + (LandingJobStatus.DEFERRED, "Job is in the DEFERRED state."), + (LandingJobStatus.FAILED, "Job is in the FAILED state."), + (LandingJobStatus.LANDED, "Job is in the LANDED state."), + (LandingJobStatus.CANCELLED, "Job is in the CANCELLED state."), + ), +) +@pytest.mark.django_db +def test_get_job_status(status, message, client, headless_user): + # Create a job and actions + job = AutomationJob.objects.create(status=status) + AutomationAction.objects.create( + job_id=job, action_type="add-commit", data={"content": "test"}, order=0 + ) + + # Fetch job status. + response = client.get( + f"/api/job/{job.id}", + headers={ + "User-Agent": "testuser@example.org", + "Authorization": "Bearer api-dummy-key", + }, + ) + + assert ( + response.status_code == 200 + ), "Response code should be 200 when status is retrieved successfully." 
+ + response_data = response.json() + + assert response_data["job_id"] == job.id + assert ( + response_data["message"] == message + ), "Response message should align with current job status." + # TODO test a few more things? formatting? + + +PATCH_NORMAL_1 = r""" +# HG changeset patch +# User Test User +# Date 0 0 +# Thu Jan 01 00:00:00 1970 +0000 +# Diff Start Line 7 +add another file. +diff --git a/test.txt b/test.txt +--- a/test.txt ++++ b/test.txt +@@ -1,1 +1,2 @@ + TEST ++adding another line +""".strip() + + +@pytest.mark.django_db +def test_automation_job_add_commit(hg_server, hg_clone, monkeypatch): + repo = Repo.objects.create( + scm_type=SCM_TYPE_HG, + name="mozilla-central", + url=hg_server, + required_permission=SCM_LEVEL_3, + push_path=hg_server, + pull_path=hg_server, + system_path=hg_clone.strpath, + ) + scm = repo.scm + + # Create a job and actions + job = AutomationJob.objects.create( + status=LandingJobStatus.SUBMITTED, + requester_email="example@example.com", + target_repo=repo, + ) + AutomationAction.objects.create( + job_id=job, + action_type="add-commit", + data={"action": "add-commit", "content": PATCH_NORMAL_1}, + order=0, + ) + + worker = AutomationWorker( + repos=Repo.objects.all(), + ) + + # Mock `phab_trigger_repo_update` so we can make sure that it was called. 
+ mock_trigger_update = mock.MagicMock() + monkeypatch.setattr( + "lando.api.legacy.workers.automation_worker.AutomationWorker.phab_trigger_repo_update", + mock_trigger_update, + ) + + scm.push = mock.MagicMock() + + assert worker.run_automation_job(job) + assert scm.push.call_count == 1 + assert len(scm.push.call_args) == 2 + assert len(scm.push.call_args[0]) == 1 + assert scm.push.call_args[0][0] == hg_server + assert scm.push.call_args[1] == {"push_target": "", "force_push": False} diff --git a/src/lando/urls.py b/src/lando/urls.py index d6c63b26..0145414c 100644 --- a/src/lando/urls.py +++ b/src/lando/urls.py @@ -19,6 +19,9 @@ from django.urls import include, path from lando.api.legacy.api import landing_jobs +from lando.main.api import ( + api as automation_api, +) from lando.ui.legacy import pages, revisions, user_settings urlpatterns = [ @@ -37,3 +40,7 @@ urlpatterns += [ path("landing_jobs//", landing_jobs.put, name="landing-jobs"), ] + +urlpatterns += [ + path("api/", automation_api.urls, name="automation-api"), +]