Skip to content

Commit

Permalink
Merge pull request #1025 from spack/job-processor-keyerror
Browse files Browse the repository at this point in the history
Ignore all extra cache=False install times
  • Loading branch information
mvandenburgh authored Jan 7, 2025
2 parents 2c3da6a + fbfde22 commit db688aa
Show file tree
Hide file tree
Showing 3 changed files with 14 additions and 6 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/custom_docker_builds.yml
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ jobs:
- docker-image: ./images/cache-indexer
image-tags: ghcr.io/spack/cache-indexer:0.0.3
- docker-image: ./analytics
image-tags: ghcr.io/spack/django:0.4.0
image-tags: ghcr.io/spack/django:0.4.1
- docker-image: ./images/ci-prune-buildcache
image-tags: ghcr.io/spack/ci-prune-buildcache:0.0.4
- docker-image: ./images/protected-publish
Expand Down
14 changes: 11 additions & 3 deletions analytics/analytics/job_processor/build_timings.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,10 @@
TimerPhaseDimension,
)
from analytics.core.models.facts import JobFact, TimerFact, TimerPhaseFact
from analytics.job_processor.artifacts import JobArtifactFileNotFound, get_job_artifacts_file
from analytics.job_processor.artifacts import (
JobArtifactFileNotFound,
get_job_artifacts_file,
)


def get_timings_json(job: ProjectJob) -> list[dict]:
Expand Down Expand Up @@ -107,11 +110,16 @@ def create_build_timing_facts(job_fact: JobFact, gljob: ProjectJob):
# since `install_times.json` is created using `spack` itself, there's no guarantee
# that the file will be present.
try:
data = get_timings_json(gljob)
timings = [t for t in data if t.get("name") and t.get("hash")]
timing_data = get_timings_json(gljob)
except JobArtifactFileNotFound:
return

# For bootstrapped packages, install times with cache=False can be present, which don't have a
    # corresponding entry in the spec json. To filter these out, exclude all install times that are
# cache=False, except the package being built.
job_package_hash = job_fact.spec.hash
timings = [t for t in timing_data if t["cache"] or t["hash"] == job_package_hash]

# First, ensure that all packages and specs are entered into the db. Then, fetch all timing packages
create_packages_and_specs(gljob)
package_mapping = get_package_mapping(timings=timings)
Expand Down
4 changes: 2 additions & 2 deletions k8s/production/custom/webhook-handler/deployments.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ spec:
serviceAccountName: webhook-handler
containers:
- name: webhook-handler
image: ghcr.io/spack/django:0.4.0
image: ghcr.io/spack/django:0.4.1
imagePullPolicy: Always
resources:
requests:
Expand Down Expand Up @@ -146,7 +146,7 @@ spec:
serviceAccountName: webhook-handler
containers:
- name: webhook-handler-worker
image: ghcr.io/spack/django:0.4.0
image: ghcr.io/spack/django:0.4.1
command:
[
"celery",
Expand Down

0 comments on commit db688aa

Please sign in to comment.