diff --git a/.github/workflows/custom_docker_builds.yml b/.github/workflows/custom_docker_builds.yml
index e053e9ef8..0393a9159 100644
--- a/.github/workflows/custom_docker_builds.yml
+++ b/.github/workflows/custom_docker_builds.yml
@@ -42,7 +42,7 @@ jobs:
           - docker-image: ./images/cache-indexer
             image-tags: ghcr.io/spack/cache-indexer:0.0.3
           - docker-image: ./analytics
-            image-tags: ghcr.io/spack/django:0.3.27
+            image-tags: ghcr.io/spack/django:0.3.28
           - docker-image: ./images/ci-prune-buildcache
             image-tags: ghcr.io/spack/ci-prune-buildcache:0.0.4
           - docker-image: ./images/protected-publish
diff --git a/analytics/analytics/job_processor/__init__.py b/analytics/analytics/job_processor/__init__.py
index 81878eb26..a01f21a6b 100644
--- a/analytics/analytics/job_processor/__init__.py
+++ b/analytics/analytics/job_processor/__init__.py
@@ -140,5 +140,6 @@ def process_job(job_input_data_json: str):
 
     # Create build timing facts in a separate transaction, in case this fails
     with transaction.atomic():
-        if job.job.job_type == JobDataDimension.JobType.BUILD:
+        job_data = job.job
+        if job_data.job_type == JobDataDimension.JobType.BUILD and job_data.status == 'success':
             create_build_timing_facts(job_fact=job, gljob=gl_job)
diff --git a/analytics/analytics/job_processor/artifacts.py b/analytics/analytics/job_processor/artifacts.py
index 2e3a42d15..4fc341ffd 100644
--- a/analytics/analytics/job_processor/artifacts.py
+++ b/analytics/analytics/job_processor/artifacts.py
@@ -8,6 +8,12 @@
 from gitlab.v4.objects import ProjectJob
 
 
+class JobArtifactDownloadFailed(Exception):
+    def __init__(self, job: ProjectJob) -> None:
+        message = f"Job {job.id} artifact download failed"
+        super().__init__(message)
+
+
 class JobArtifactFileNotFound(Exception):
     def __init__(self, job: ProjectJob, filename: str):
         message = f"File {filename} not found in job artifacts of job {job.id}"
@@ -37,7 +43,7 @@ def get_job_artifacts_file(job: ProjectJob, filename: str):
             with open(artifacts_file, "wb") as f:
                 job.artifacts(streamed=True, action=f.write)
         except GitlabGetError:
-            raise JobArtifactFileNotFound(job, filename)
+            raise JobArtifactDownloadFailed(job)
 
         # Open specific file within artifacts zip
         with zipfile.ZipFile(artifacts_file) as zfile:
diff --git a/analytics/analytics/job_processor/metadata.py b/analytics/analytics/job_processor/metadata.py
index e9958e65e..18fa01b02 100644
--- a/analytics/analytics/job_processor/metadata.py
+++ b/analytics/analytics/job_processor/metadata.py
@@ -8,6 +8,7 @@
 from gitlab.v4.objects import ProjectJob
 
 from analytics.job_processor.artifacts import (
+    JobArtifactDownloadFailed,
     JobArtifactFileNotFound,
     JobArtifactVariablesNotFound,
     get_job_artifacts_data,
@@ -180,7 +181,7 @@ def retrieve_job_info(gljob: ProjectJob, is_build: bool) -> JobInfo:
     # If the build is failed, this is not unexpected. Otherwise, raise the error
     try:
         artifacts = get_job_artifacts_data(gljob)
-    except (JobArtifactFileNotFound, JobArtifactVariablesNotFound):
+    except (JobArtifactDownloadFailed, JobArtifactFileNotFound, JobArtifactVariablesNotFound):
         if gljob.status == "failed":
             return JobInfo()
 
diff --git a/k8s/production/custom/webhook-handler/deployments.yaml b/k8s/production/custom/webhook-handler/deployments.yaml
index 235c3353e..25252895f 100644
--- a/k8s/production/custom/webhook-handler/deployments.yaml
+++ b/k8s/production/custom/webhook-handler/deployments.yaml
@@ -23,7 +23,7 @@ spec:
       serviceAccountName: webhook-handler
       containers:
         - name: webhook-handler
-          image: ghcr.io/spack/django:0.3.27
+          image: ghcr.io/spack/django:0.3.28
           imagePullPolicy: Always
           resources:
             requests:
@@ -146,7 +146,7 @@ spec:
       serviceAccountName: webhook-handler
      containers:
        - name: webhook-handler-worker
-          image: ghcr.io/spack/django:0.3.27
+          image: ghcr.io/spack/django:0.3.28
          command: [
            "celery",