diff --git a/hosts/models.py b/hosts/models.py index 0e477d1d..b0d23ba9 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -58,6 +58,9 @@ class Host(models.Model): tags = TagField() updated_at = models.DateTimeField(default=timezone.now) + from hosts.managers import HostManager + objects = HostManager() + class Meta: verbose_name = 'Host' verbose_name_plural = 'Hosts' diff --git a/hosts/serializers.py b/hosts/serializers.py index ab086801..713fc580 100644 --- a/hosts/serializers.py +++ b/hosts/serializers.py @@ -31,10 +31,10 @@ class Meta: 'updated_at', 'bugfix_update_count', 'security_update_count') def get_bugfix_update_count(self, obj): - return len([u for u in obj.updates.all() if not u.security]) + return obj.updates.filter(security=False).count() def get_security_update_count(self, obj): - return len([u for u in obj.updates.all() if u.security]) + return obj.updates.filter(security=True).count() class HostRepoSerializer(serializers.HyperlinkedModelSerializer): diff --git a/hosts/templatetags/report_alert.py b/hosts/templatetags/report_alert.py index 025e2cde..72cd2540 100644 --- a/hosts/templatetags/report_alert.py +++ b/hosts/templatetags/report_alert.py @@ -18,12 +18,13 @@ from datetime import timedelta from django.conf import settings - from django.template import Library from django.utils.html import format_html from django.templatetags.static import static from django.utils import timezone +from util import has_setting_of_type + register = Library() @@ -31,8 +32,7 @@ def report_alert(lastreport): html = '' alert_icon = static('img/icon-alert.gif') - if hasattr(settings, 'DAYS_WITHOUT_REPORT') and \ - isinstance(settings.DAYS_WITHOUT_REPORT, int): + if has_setting_of_type('DAYS_WITHOUT_REPORT', int): days = settings.DAYS_WITHOUT_REPORT else: days = 14 diff --git a/operatingsystems/fixtures/os.json b/operatingsystems/fixtures/os.json new file mode 100644 index 00000000..a581487f --- /dev/null +++ b/operatingsystems/fixtures/os.json @@ -0,0 +1,66 @@ +[ + 
{ + "model": "operatingsystems.os", + "fields": { + "name": "Rocky Linux 9.3", + "osgroup": [ + "Rocky Linux 9", + "Blue Onyx" + ] + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "Rocky Linux 8.9", + "osgroup": [ + "Rocky Linux 8", + "Green Obsidian" + ] + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "Debian 12.5", + "osgroup": [ + "Debian 12", + "bookworm" + ] + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "Arch Linux", + "osgroup": null + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "openSUSE Leap 15.5", + "osgroup": null + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "AlmaLinux 8.10", + "osgroup": [ + "AlmaLinux 8", + "Cerulean Leopard" + ] + } + }, + { + "model": "operatingsystems.os", + "fields": { + "name": "AlmaLinux 9.5", + "osgroup": [ + "AlmaLinux 9", + "Teal Serval" + ] + } + } +] diff --git a/operatingsystems/fixtures/osgroup.json b/operatingsystems/fixtures/osgroup.json new file mode 100644 index 00000000..e4b785ee --- /dev/null +++ b/operatingsystems/fixtures/osgroup.json @@ -0,0 +1,58 @@ +[ + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "CentOS 7", + "codename": "", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "CentOS 8", + "codename": "", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "Rocky Linux 8", + "codename": "Green Obsidian", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "Rocky Linux 9", + "codename": "Blue Onyx", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "AlmaLinux 8", + "codename": "Cerulean Leopard", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": "AlmaLinux 9", + "codename": "Teal Serval", + "repos": [] + } + }, + { + "model": "operatingsystems.osgroup", + "fields": { + "name": 
"Debian 12", + "codename": "bookworm", + "repos": [] + } + } +] diff --git a/operatingsystems/managers.py b/operatingsystems/managers.py new file mode 100644 index 00000000..99bdfa1f --- /dev/null +++ b/operatingsystems/managers.py @@ -0,0 +1,22 @@ +# Copyright 2024 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.db import models + + +class OSGroupManager(models.Manager): + def get_by_natural_key(self, name, codename): + return self.get(name=name, codename=codename) diff --git a/operatingsystems/migrations/0003_osgroup_codename.py b/operatingsystems/migrations/0003_osgroup_codename.py new file mode 100644 index 00000000..426c7a15 --- /dev/null +++ b/operatingsystems/migrations/0003_osgroup_codename.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.15 on 2025-01-13 18:55 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0002_initial'), + ] + + operations = [ + migrations.AddField( + model_name='osgroup', + name='codename', + field=models.CharField(blank=True, max_length=255), + ), + ] diff --git a/operatingsystems/migrations/0004_alter_osgroup_unique_together.py b/operatingsystems/migrations/0004_alter_osgroup_unique_together.py new file mode 100644 index 00000000..dbda97e6 --- /dev/null +++ b/operatingsystems/migrations/0004_alter_osgroup_unique_together.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2025-01-13 19:57 + +from 
django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0003_osgroup_codename'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='osgroup', + unique_together={('name', 'codename')}, + ), + ] diff --git a/operatingsystems/models.py b/operatingsystems/models.py index aed4d59f..24bfa83a 100644 --- a/operatingsystems/models.py +++ b/operatingsystems/models.py @@ -25,18 +25,29 @@ class OSGroup(models.Model): name = models.CharField(max_length=255, unique=True) repos = models.ManyToManyField(Repository, blank=True) + codename = models.CharField(max_length=255, blank=True) + + from operatingsystems.managers import OSGroupManager + objects = OSGroupManager() class Meta: verbose_name = 'Operating System Group' verbose_name_plural = 'Operating System Groups' + unique_together = ('name', 'codename') ordering = ('name',) def __str__(self): - return self.name + if self.codename: + return f'{self.name} ({self.codename})' + else: + return self.name def get_absolute_url(self): return reverse('operatingsystems:osgroup_detail', args=[str(self.id)]) + def natural_key(self): + return (self.name, self.codename) + class OS(models.Model): diff --git a/packages/admin.py b/packages/admin.py index 4a782eb9..1a5a2dfa 100644 --- a/packages/admin.py +++ b/packages/admin.py @@ -21,11 +21,19 @@ class ErratumAdmin(admin.ModelAdmin): - readonly_fields = ('packages',) + readonly_fields = ('packages', 'references') -admin.site.register(Package) +class PackageAdmin(admin.ModelAdmin): + readonly_fields = ('name',) + + +class PackageUpdateAdmin(admin.ModelAdmin): + readonly_fields = ('oldpackage', 'newpackage') + + +admin.site.register(Package, PackageAdmin) admin.site.register(PackageName) -admin.site.register(PackageUpdate) +admin.site.register(PackageUpdate, PackageUpdateAdmin) admin.site.register(Erratum, ErratumAdmin) admin.site.register(ErratumReference) diff --git a/packages/migrations/0002_erratumreference_er_type.py 
b/packages/migrations/0002_erratumreference_er_type.py new file mode 100644 index 00000000..52b205f7 --- /dev/null +++ b/packages/migrations/0002_erratumreference_er_type.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.15 on 2025-01-12 21:12 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0001_initial'), + ] + + operations = [ + migrations.AddField( + model_name='erratumreference', + name='er_type', + field=models.CharField(default=None, max_length=255), + preserve_default=False, + ), + ] diff --git a/packages/models.py b/packages/models.py index 560d0efb..6a03299f 100644 --- a/packages/models.py +++ b/packages/models.py @@ -214,6 +214,7 @@ def __str__(self): class ErratumReference(models.Model): + er_type = models.CharField(max_length=255) url = models.URLField(max_length=255) def __str__(self): diff --git a/packages/utils.py b/packages/utils.py index cb408993..fd588c6f 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -15,18 +15,22 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see +import json import re +from datetime import datetime +from debian.deb822 import Dsc from defusedxml.lxml import _etree as etree +from urllib.parse import urlparse from django.conf import settings from django.core.exceptions import MultipleObjectsReturned from django.db import IntegrityError, DatabaseError, transaction -from util import bunzip2, get_url, download_url, get_sha1 +from util import bunzip2, get_url, download_url, get_sha1, tz_aware_datetime, has_setting_of_type from packages.models import ErratumReference, PackageName, \ Package, PackageUpdate from arch.models import MachineArchitecture, PackageArchitecture -from patchman.signals import error_message, progress_info_s, progress_update_s +from patchman.signals import error_message, info_message, progress_info_s, progress_update_s def find_evr(s): @@ -78,7 +82,6 @@ def parse_package_string(pkg_str): """ Parse a package string and return name, epoch, ver, release, dist, arch """ - for suffix in ['rpm', 'deb']: pkg_str = re.sub(f'.{suffix}$', '', pkg_str) pkg_re = re.compile('(\S+)-(?:(\d*):)?(.*)-(~?\w+)[.+]?(~?\S+)?\.(\S+)$') # noqa @@ -95,36 +98,474 @@ def parse_package_string(pkg_str): def update_errata(force=False): + """ Update all distros errata + """ + update_rocky_errata(force) + update_alma_errata(force) + update_debian_errata(force) + update_ubuntu_errata(force) + update_arch_errata(force) + update_centos_errata(force) + + +def update_rocky_errata(force): + """ Update Rocky Linux errata + """ + rocky_errata_api_host = 'https://apollo.build.resf.org' + rocky_errata_api_url = '/api/v3/' + if check_rocky_errata_endpoint_health(rocky_errata_api_host): + advisories = download_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url) + process_rocky_errata(advisories, force) + + +def check_rocky_errata_endpoint_health(rocky_errata_api_host): + """ Check Rocky Linux errata endpoint health + """ + rocky_errata_healthcheck_path = '/_/healthz' + rocky_errata_healthcheck_url = 
rocky_errata_api_host + rocky_errata_healthcheck_path + headers = {'Accept': 'application/json'} + res = get_url(rocky_errata_healthcheck_url, headers=headers) + data = download_url(res, 'Rocky Linux Errata API healthcheck') + try: + health = json.loads(data) + if health.get('status') == 'ok': + s = f'Rocky Linux Errata API healthcheck OK: {rocky_errata_healthcheck_url}' + info_message.send(sender=None, text=s) + return True + else: + s = f'Rocky Linux Errata API healthcheck FAILED: {rocky_errata_healthcheck_url}' + error_message.send(sender=None, text=s) + return False + except Exception as e: + s = f'Rocky Linux Errata API healthcheck exception occured: {rocky_errata_healthcheck_url}\n' + s += str(e) + error_message.send(sender=None, text=s) + return False + + +def download_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url): + """ Download Rocky Linux advisories and return the list + """ + rocky_errata_advisories_url = rocky_errata_api_host + rocky_errata_api_url + 'advisories/' + headers = {'Accept': 'application/json'} + page = 1 + pages = None + advisories = [] + params = {'page': 1, 'size': 100} + while True: + res = get_url(rocky_errata_advisories_url, headers=headers, params=params) + data = download_url(res, f'Rocky Linux Advisories {page}{"/"+pages if pages else ""}') + advisories_dict = json.loads(data) + advisories += advisories_dict.get('advisories') + links = advisories_dict.get('links') + if page == 1: + last_link = links.get('last') + pages = last_link.split('=')[-1] + next_link = links.get('next') + if next_link: + rocky_errata_advisories_url = rocky_errata_api_host + next_link + params = {} + page += 1 + else: + break + return advisories + + +def process_rocky_errata(advisories, force): + """ Process Rocky Linux errata + """ + elen = len(advisories) + ptext = f'Processing {elen} Errata:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + for i, advisory in enumerate(advisories): + progress_update_s.send(sender=None, index=i + 
1) + erratum_name = advisory.get('name') + etype = advisory.get('kind').lower() + issue_date = advisory.get('published_at') + synopsis = advisory.get('synopsis') + e, created = get_or_create_erratum( + name=erratum_name, + etype=etype, + issue_date=issue_date, + synopsis=synopsis, + ) + if created or force: + add_rocky_errata_references(e, advisory) + add_rocky_errata_oses(e, advisory) + add_rocky_errata_packages(e, advisory) + + +def add_rocky_errata_references(e, advisory): + """ Add Rocky Linux errata references + """ + references = [] + cves = advisory.get('cves') + for cve in cves: + cve_id = cve.get('cve') + references.append({'er_type': 'cve', 'url': f'https://www.cve.org/CVERecord?id={cve_id}'}) + fixes = advisory.get('fixes') + for fix in fixes: + fix_url = fix.get('source') + references.append({'er_type': 'bugzilla', 'url': fix_url}) + add_erratum_refs(e, references) + + +def add_rocky_errata_oses(e, advisory): + """ Update OS, OSGroup and MachineArch for Rocky Linux errata + """ + affected_oses = advisory.get('affected_products') + from operatingsystems.models import OS, OSGroup + osgroups = OSGroup.objects.all() + oses = OS.objects.all() + m_arches = MachineArchitecture.objects.all() + for affected_os in affected_oses: + m_arch = affected_os.get('arch') + variant = affected_os.get('variant') + major_version = affected_os.get('major_version') + osgroup_name = f'{variant} {major_version}' + os_name = affected_os.get('name').replace(f' {m_arch}', '').replace(' (Legacy)', '') + with transaction.atomic(): + osgroup, c = osgroups.get_or_create(name=osgroup_name) + with transaction.atomic(): + os, c = oses.get_or_create(name=os_name) + with transaction.atomic(): + m_arch, c = m_arches.get_or_create(name=m_arch) + e.releases.add(osgroup) + e.arches.add(m_arch) + e.save() + + +def add_rocky_errata_packages(e, advisory): + """ Parse and add packages for Rocky Linux errata + """ + packages = advisory.get('packages') + for package in packages: + package_name = 
package.get('nevra') + if package_name: + name, epoch, ver, rel, dist, arch = parse_package_string(package_name) + p_type = Package.RPM + pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) + e.packages.add(pkg) + e.save() + + +def update_alma_errata(force=False): + """ Update Alma Linux advisories from errata.almalinux.org: + https://errata.almalinux.org/8/errata.full.json + https://errata.almalinux.org/9/errata.full.json + and process advisories + """ + default_alma_releases = [8, 9] + if has_setting_of_type('ALMA_RELEASES', list): + alma_releases = settings.ALMA_RELEASES + else: + alma_releases = default_alma_releases + for release in alma_releases: + advisories = download_alma_advisories(release) + process_alma_errata(release, advisories, force) + + +def download_alma_advisories(release): + """ Download Alma Linux advisories + """ + alma_errata_url = f'https://errata.almalinux.org/{release}/errata.full.json' + headers = {'Accept': 'application/json', 'Cache-Control': 'no-cache, no-transform'} + res = get_url(alma_errata_url, headers=headers) + data = download_url(res, 'Downloading Alma Linux Errata:') + advisories = json.loads(data).get('data') + return advisories + + +def process_alma_errata(release, advisories, force): + """ Process Alma Linux Errata + """ + elen = len(advisories) + ptext = f'Processing {elen} Errata:' + progress_info_s.send(sender=None, ptext=ptext, plen=elen) + for i, advisory in enumerate(advisories): + progress_update_s.send(sender=None, index=i + 1) + erratum_name = advisory.get('id') + issue_date = advisory.get('issued_date') + synopsis = advisory.get('title') + etype = advisory.get('type') + e, created = get_or_create_erratum( + name=erratum_name, + etype=etype, + issue_date=issue_date, + synopsis=synopsis, + ) + if created or force: + add_alma_errata_osgroups(e, release) + add_alma_errata_references(e, advisory) + add_alma_errata_packages(e, advisory) + modules = advisory.get('modules') + for module in modules: + pass + + 
+def add_alma_errata_osgroups(e, release): + """ Update OSGroup for Alma Linux errata + """ + from operatingsystems.models import OSGroup + osgroups = OSGroup.objects.all() + with transaction.atomic(): + osgroup, c = osgroups.get_or_create(name=f'Alma Linux {release}') + e.releases.add(osgroup) + e.save() + + +def add_alma_errata_references(e, advisory): + """ Add references for Alma Linux errata + """ + references = [] + refs = advisory.get('references') + for ref in refs: + er_type = ref.get('type') + name = ref.get('id') + if er_type == 'self': + er_type = name.split('-')[0].lower() + if er_type == 'cve': + url = f'https://www.cve.org/CVERecord?id={name}' + else: + url = ref.get('href') + references.append({'er_type': er_type, 'url': url}) + add_erratum_refs(e, references) + + +def add_alma_errata_packages(e, advisory): + """ Parse and add packages for Alma Linux errata + """ + packages = advisory.get('packages') + for package in packages: + package_name = package.get('filename') + if package_name: + name, epoch, ver, rel, dist, arch = parse_package_string(package_name) + p_type = Package.RPM + pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) + e.packages.add(pkg) + e.save() + + +def update_debian_errata(force=False): + """ Update Debian errata using: + https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DSA/list + https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DLA/list + """ + dsas = download_debian_dsa_advisories() + dlas = download_debian_dla_advisories() + advisories = dsas + dlas + process_debian_errata(advisories, force) + + +def download_debian_dsa_advisories(): + """ Download the current Debian DSA file + """ + debian_dsa_url = 'https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DSA/list' + res = get_url(debian_dsa_url) + data = download_url(res, 'Downloading Debian DSAs') + return data.decode() + + +def download_debian_dla_advisories(): + """ 
Download the current Debian DSA file + """ + debian_dsa_url = 'https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DLA/list' + res = get_url(debian_dsa_url) + data = download_url(res, 'Downloading Debian DLAs') + return data.decode() + + +def process_debian_errata(advisories, force): + """ Parse a Debian DSA/DLA file for security advisories + """ + title_pattern = re.compile(r'^\[(.+?)\] (.+?) (.+?)[ ]+[-]+ (.*)') + for line in advisories.splitlines(): + if line.startswith('['): + match = re.match(title_pattern, line) + if match: + e, created = parse_debian_errata_advisory(match, force) + elif line.startswith('\t{'): + if created or force: + parse_debian_errata_cves(e, line) + elif line.startswith('\t['): + if created or force: + parse_debian_errata_packages(e, line) + + +def parse_debian_errata_advisory(match, force): + """ Parse the initial details for an erratum in a DSA/DLA file + """ + date = match.group(1) + issue_date = int(datetime.strptime(date, '%d %b %Y').strftime('%s')) + erratum_name = match.group(2) + synopsis = match.group(4) + e, created = get_or_create_erratum( + name=erratum_name, + etype='security', + issue_date=issue_date, + synopsis=synopsis, + ) + if created or force: + er_type = erratum_name.split('-')[0].lower() + er_url = f'https://security-tracker.debian.org/tracker/{erratum_name}' + st_ref = {'er_type': er_type, 'url': er_url} + add_erratum_refs(e, [st_ref]) + return e, created + + +def parse_debian_errata_cves(e, line): + """ Parse the CVEs related to a given erratum and add them as + erratum references + """ + references = [] + cve_refs = line.strip('\t{}').split() + er_type = 'cve' + for cve in cve_refs: + er_url = f'https://www.cve.org/CVERecord?id={cve}' + references.append({'er_type': er_type, 'url': er_url}) + add_erratum_refs(e, references) + + +def parse_debian_errata_packages(e, line): + """ Parse the codename and source packages from a DSA/DLA file + """ + distro_package_pattern = 
re.compile(r'^\t\[(.+?)\] - (.+?) (.*)') + accepted_codenames = get_accepted_debian_codenames() + match = re.match(distro_package_pattern, line) + if match: + codename = match.group(1) + if codename in accepted_codenames: + source_package = match.group(2) + source_version = match.group(3) + dsc = get_debian_package_dsc(codename, source_package, source_version) + if dsc: + process_debian_errata_affected_packages(e, dsc, source_version) + + +def get_debian_package_dsc(codename, package, version): + """ Download a DSC file for the given source package + From this we can determine which packages are built from + a given source package + """ + dsc_pattern = re.compile(r'.*"(http.*dsc)"') + source_url = f'https://packages.debian.org/source/{codename}/{package}' + res = get_url(source_url) + data = download_url(res, f'debian src {package}-{version}', 60) + dscs = re.findall(dsc_pattern, data.decode()) + if dscs: + dsc_url = dscs[0] + res = get_url(dsc_url) + data = download_url(res, f'debian dsc {package}-{version}', 60) + return Dsc(data.decode()) + + +def get_accepted_debian_codenames(): + """ Get acceptable Debian OS codenames + Can be overridden by specifying DEBIAN_CODENAMES in settings + """ + default_codenames = ['bookworm', 'bullseye'] + if has_setting_of_type('DEBIAN_CODENAMES', list): + accepted_codenames = settings.DEBIAN_CODENAMES + else: + accepted_codenames = default_codenames + return accepted_codenames + + +def process_debian_errata_affected_packages(e, dsc, version): + """ Process packages affected by Debian errata + """ + epoch, ver, rel = find_evr(version) + package_list = dsc.get('package-list') + for line in package_list.splitlines(): + if not line: + continue + line_parts = line.split() + if line_parts[1] != 'deb': + continue + name = line_parts[0] + arches = process_debian_dsc_arches(line_parts[4]) + p_type = Package.DEB + for arch in arches: + pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) + e.packages.add(pkg) + e.save() + + 
+def process_debian_dsc_arches(arches): + """ Process arches for dsc files + Return a list of arches for a given package in a dsc file + """ + arches = arches.replace('arch=', '') + accepted_arches = [] + # https://www.debian.org/ports/ + official_ports = [ + 'amd64', + 'arm64', + 'armel', + 'armhf', + 'i386', + 'mips64el', + 'ppc64el', + 'riscv64', + 's390x', + 'all', # architecture-independent packages + ] + for arch in arches.split(','): + if arch == 'any': + return official_ports + elif arch in official_ports: + accepted_arches.append(arch) + continue + elif arch.startswith('any-'): + real_arch = arch.split('-')[1] + if real_arch in official_ports: + accepted_arches.append(real_arch) + continue + elif arch.endswith('-any'): + if arch.startswith('linux'): + return official_ports + return accepted_arches + + +def update_ubuntu_errata(force=False): + pass + + +def update_arch_errata(force=False): + pass + + +def update_centos_errata(force=False): """ Update CentOS errata from https://cefs.steve-meier.de/ and mark packages that are security updates """ - data = download_errata_checksum() - expected_checksum = parse_errata_checksum(data) - data = download_errata() + data = download_centos_errata_checksum() + expected_checksum = parse_centos_errata_checksum(data) + data = download_centos_errata() actual_checksum = get_sha1(data) if actual_checksum != expected_checksum: - e = 'CEFS checksum did not match, skipping errata parsing' + e = 'CEFS checksum did not match, skipping CentOS errata parsing' error_message.send(sender=None, text=e) else: if data: - parse_errata(bunzip2(data), force) + parse_centos_errata(bunzip2(data), force) -def download_errata_checksum(): +def download_centos_errata_checksum(): """ Download CentOS errata checksum from https://cefs.steve-meier.de/ """ res = get_url('https://cefs.steve-meier.de/errata.latest.sha1') - return download_url(res, 'Downloading Errata Checksum:') + return download_url(res, 'Downloading CentOS Errata Checksum:') -def 
download_errata(): +def download_centos_errata(): """ Download CentOS errata from https://cefs.steve-meier.de/ """ res = get_url('https://cefs.steve-meier.de/errata.latest.xml.bz2') return download_url(res, 'Downloading CentOS Errata:') -def parse_errata_checksum(data): +def parse_centos_errata_checksum(data): """ Parse the errata checksum and return the bz2 checksum """ for line in data.decode('utf-8').splitlines(): @@ -132,7 +573,7 @@ def parse_errata_checksum(data): return line.split()[0] -def parse_errata(data, force): +def parse_centos_errata(data, force): """ Parse CentOS errata from https://cefs.steve-meier.de/ """ result = etree.XML(data) @@ -142,21 +583,22 @@ def parse_errata(data, force): progress_info_s.send(sender=None, ptext=ptext, plen=elen) for i, child in enumerate(errata_xml): progress_update_s.send(sender=None, index=i + 1) - if not check_centos_release(child.findall('os_release')): + releases = get_centos_erratum_releases(child.findall('os_release')) + if not accepted_centos_release(releases): continue - e = parse_errata_tag(child.tag, child.attrib, force) + e = parse_centos_errata_tag(child.tag, child.attrib, force) if e is not None: - parse_errata_children(e, child.getchildren()) + parse_centos_errata_children(e, child.getchildren()) -def parse_errata_tag(name, attribs, force): +def parse_centos_errata_tag(name, attribs, force): """ Parse all tags that contain errata. If the erratum already exists, we assume that it already has all refs, packages, releases and arches. 
""" e = None if name.startswith('CE'): issue_date = attribs['issue_date'] - references = attribs['references'] + refs = attribs['references'] synopsis = attribs['synopsis'] if name.startswith('CEBA'): etype = 'bugfix' @@ -164,17 +606,47 @@ def parse_errata_tag(name, attribs, force): etype = 'security' elif name.startswith('CEEA'): etype = 'enhancement' - e = create_erratum(name=name, - etype=etype, - issue_date=issue_date, - synopsis=synopsis, - force=force) - if e is not None: + e, created = get_or_create_erratum( + name=name, + etype=etype, + issue_date=issue_date, + synopsis=synopsis, + ) + if created or force: + references = create_centos_errata_references(refs) add_erratum_refs(e, references) - return e + return e -def parse_errata_children(e, children): +def create_centos_errata_references(refs): + """ Create references for CentOS errata. Return references + Skip lists.centos.org references + """ + references = [] + for ref_url in refs.split(' '): + url = urlparse(ref_url) + if url.hostname == 'lists.centos.org': + continue + if url.hostname == 'rhn.redhat.com': + netloc = url.netloc.replace('rhn', 'access') + path = url.path.replace('.html', '') + url = url._replace(netloc=netloc, path=path) + if url.hostname == 'access.redhat.com': + old_ref = url.path.split('/')[-1] + refs = old_ref.split('-') + if ':' not in url.path: + try: + new_ref = f'{refs[0]}-{refs[1]}:{refs[2]}' + path = url.path.replace(old_ref, new_ref) + url = url._replace(path=path) + except IndexError: + pass + er_type = refs[0].lower() + references.append({'er_type': er_type, 'url': url.geturl()}) + return references + + +def parse_centos_errata_children(e, children): """ Parse errata children to obtain architecture, release and packages """ for c in children: @@ -184,71 +656,70 @@ def parse_errata_children(e, children): m_arch, c = m_arches.get_or_create(name=c.text) e.arches.add(m_arch) elif c.tag == 'os_release': - from operatingsystems.models import OSGroup - osgroups = 
OSGroup.objects.all() - osgroup_name = f'CentOS {c.text!s}' - with transaction.atomic(): - osgroup, c = osgroups.get_or_create(name=osgroup_name) - e.releases.add(osgroup) + if accepted_centos_release([c.text]): + from operatingsystems.models import OSGroup + osgroups = OSGroup.objects.all() + osgroup_name = f'CentOS {c.text}' + with transaction.atomic(): + osgroup, c = osgroups.get_or_create(name=osgroup_name) + e.releases.add(osgroup) elif c.tag == 'packages': - pkg_str = c.text.replace('.rpm', '') - pkg_re = re.compile('(\S+)-(?:(\d*):)?(.*)-(~?\w+)[.+]?(~?\S+)?\.(\S+)$') # noqa - m = pkg_re.match(pkg_str) - if m: - name, epoch, ver, rel, dist, arch = m.groups() - else: - e = 'Error parsing errata: ' - e += f'could not parse package "{pkg_str!s}"' - error_message.send(sender=None, text=e) - continue - if dist: - rel = f'{rel!s}.{dist!s}' - p_type = Package.RPM - pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) - e.packages.add(pkg) - - -def check_centos_release(releases_xml): - """ Check if we care about the release that the erratum affects + name, epoch, ver, rel, dist, arch = parse_package_string(c.text) + match = re.match(r'.*el([0-9]+).*', rel) + if match: + release = match.group(1) + if accepted_centos_release([release]): + p_type = Package.RPM + pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) + e.packages.add(pkg) + + +def get_centos_erratum_releases(releases_xml): + """ Collect the releases a given erratum pertains to """ releases = set() for release in releases_xml: releases.add(int(release.text)) - if hasattr(settings, 'MIN_CENTOS_RELEASE') and \ - isinstance(settings.MIN_CENTOS_RELEASE, int): + return releases + + +def accepted_centos_release(releases): + """ Check if we accept the releases that the erratum pertains to + If any release is accepted we return True, else False + """ + if has_setting_of_type('MIN_CENTOS_RELEASE', int): min_release = settings.MIN_CENTOS_RELEASE else: - # defaults to CentOS 6 - min_release = 
6 - wanted_release = False + min_release = 7 + acceptable_release = False for release in releases: - if release >= min_release: - wanted_release = True - return wanted_release + if int(release) >= min_release: + acceptable_release = True + return acceptable_release -def create_erratum(name, etype, issue_date, synopsis, force=False): - """ Create an Erratum object. Returns the object or None if it already - exists. To force update the erratum, set force=True +def get_or_create_erratum(name, etype, issue_date, synopsis): + """ Get or create an Erratum object. Returns the object and created """ from packages.models import Erratum errata = Erratum.objects.all() with transaction.atomic(): - e, c = errata.get_or_create(name=name, - etype=etype, - issue_date=issue_date, - synopsis=synopsis) - if c or force: - return e + e, created = errata.get_or_create( + name=name, + etype=etype, + issue_date=tz_aware_datetime(issue_date), + synopsis=synopsis, + ) + return e, created def add_erratum_refs(e, references): """ Add references to an Erratum object """ - for reference in references.split(' '): + for reference in references: erratarefs = ErratumReference.objects.all() with transaction.atomic(): - er, c = erratarefs.get_or_create(url=reference) + er, c = erratarefs.get_or_create(er_type=reference.get('er_type'), url=reference.get('url')) e.references.add(er) diff --git a/patchman/settings.py b/patchman/settings.py index 4a943a2f..b65e2a49 100644 --- a/patchman/settings.py +++ b/patchman/settings.py @@ -59,7 +59,7 @@ TIME_ZONE = 'America/NewYork' USE_I18N = True USE_L10N = True -USE_TZ = False +USE_TZ = True DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' diff --git a/reports/models.py b/reports/models.py index 442e8008..43c919ae 100644 --- a/reports/models.py +++ b/reports/models.py @@ -15,12 +15,14 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see +import re + from django.db import models, IntegrityError, DatabaseError, transaction from django.urls import reverse from hosts.models import Host from arch.models import MachineArchitecture -from operatingsystems.models import OS +from operatingsystems.models import OS, OSGroup from domains.models import Domain from patchman.signals import error_message, info_message @@ -105,10 +107,20 @@ def process(self, find_updates=True, verbose=False): """ if self.os and self.kernel and self.arch and not self.processed: - + osgroup_codename = None + match = re.match(r'(.*) \((.*)\)', self.os) + if match: + os_name = match.group(1) + osgroup_codename = match.group(2) + else: + os_name = self.os oses = OS.objects.all() with transaction.atomic(): - os, c = oses.get_or_create(name=self.os) + os, c = oses.get_or_create(name=os_name) + if osgroup_codename: + osgroups = OSGroup.objects.filter(codename=osgroup_codename) + if osgroups.count() == 1: + os.osgroup = osgroups[0] machine_arches = MachineArchitecture.objects.all() with transaction.atomic(): diff --git a/repos/models.py b/repos/models.py index 105f5a95..b572b12f 100644 --- a/repos/models.py +++ b/repos/models.py @@ -46,6 +46,9 @@ class Repository(models.Model): repo_id = models.CharField(max_length=255, null=True, blank=True) auth_required = models.BooleanField(default=False) + from repos.managers import RepositoryManager + objects = RepositoryManager() + class Meta: verbose_name_plural = 'Repository' verbose_name_plural = 'Repositories' diff --git a/repos/utils.py b/repos/utils.py index e18f1acc..d331ed48 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -32,7 +32,7 @@ from packages.utils import parse_package_string, get_or_create_package from arch.models import PackageArchitecture from util import get_url, download_url, response_is_valid, extract, \ - get_checksum, Checksum + get_checksum, Checksum, has_setting_of_type from patchman.signals import progress_info_s, progress_update_s, \ info_message, 
warning_message, error_message, debug_message @@ -235,20 +235,17 @@ def get_mirrorlist_urls(url): def add_mirrors_from_urls(repo, mirror_urls): """ Creates mirrors from a list of mirror urls """ + max_mirrors = get_max_mirrors() for mirror_url in mirror_urls: mirror_url = mirror_url.replace('$ARCH', repo.arch.name) mirror_url = mirror_url.replace('$basearch', repo.arch.name) - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = settings.MAX_MIRRORS - # only add X mirrors, where X = max_mirrors - q = Q(mirrorlist=False, refresh=True) - existing = repo.mirror_set.filter(q).count() - if existing >= max_mirrors: - text = f'{max_mirrors!s} mirrors already ' - text += f'exist, not adding {mirror_url!s}' - warning_message.send(sender=None, text=text) - continue + q = Q(mirrorlist=False, refresh=True) + existing = repo.mirror_set.filter(q).count() + if existing >= max_mirrors: + text = f'{max_mirrors!s} mirrors already ' + text += f'exist, not adding {mirror_url!s}' + warning_message.send(sender=None, text=text) + continue from repos.models import Mirror m, c = Mirror.objects.get_or_create(repo=repo, url=mirror_url) if c: @@ -567,10 +564,8 @@ def refresh_yum_repo(mirror, data, mirror_url, ts): if not mirror_checksum_is_valid(computed_checksum, modules_checksum, mirror, 'module'): return - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = settings.MAX_MIRRORS # only refresh X mirrors, where X = max_mirrors + max_mirrors = get_max_mirrors() checksum_q = Q(mirrorlist=False, refresh=True, timestamp=ts, file_checksum=primary_checksum) have_checksum = mirror.repo.mirror_set.filter(checksum_q).count() @@ -606,11 +601,9 @@ def mirror_checksum_is_valid(computed, provided, mirror, metadata_type): def refresh_arch_repo(repo): """ Refresh all mirrors of an arch linux repo """ - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = 
settings.MAX_MIRRORS + max_mirrors = get_max_mirrors() fname = f'{repo.arch!s}/{repo.repo_id!s}.db' - ts = datetime.now().replace(microsecond=0) + ts = datetime.now().astimezone().replace(microsecond=0) for i, mirror in enumerate(repo.mirror_set.filter(refresh=True)): res = find_mirror_url(mirror.url, [fname]) mirror.last_access_ok = response_is_valid(res) @@ -687,10 +680,8 @@ def refresh_rpm_repo(repo): check_for_mirrorlists(repo) check_for_metalinks(repo) - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = settings.MAX_MIRRORS - ts = datetime.now().replace(microsecond=0) + max_mirrors = get_max_mirrors() + ts = datetime.now().astimezone().replace(microsecond=0) enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True) for i, mirror in enumerate(enabled_mirrors): res = find_mirror_url(mirror.url, formats) @@ -728,7 +719,7 @@ def refresh_deb_repo(repo): formats = ['Packages.xz', 'Packages.bz2', 'Packages.gz', 'Packages'] - ts = datetime.now().replace(microsecond=0) + ts = datetime.now().astimezone().replace(microsecond=0) for mirror in repo.mirror_set.filter(refresh=True): res = find_mirror_url(mirror.url, formats) mirror.last_access_ok = response_is_valid(res) @@ -778,3 +769,13 @@ def find_best_repo(package, hostrepos): if hostrepo.priority > best_repo.priority: best_repo = hostrepo return best_repo + + +def get_max_mirrors(): + """ Find the max number of mirrors for refresh + """ + if has_setting_of_type('MAX_MIRRORS', int): + max_mirrors = settings.MAX_MIRRORS + else: + max_mirrors = 5 + return max_mirrors diff --git a/util/__init__.py b/util/__init__.py index 5e228771..3cdfe7d3 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -22,10 +22,15 @@ import zlib import lzma from colorama import Fore, Style +from datetime import datetime from enum import Enum from hashlib import md5, sha1, sha256, sha512 from progressbar import Bar, ETA, Percentage, ProgressBar -from patchman.signals import 
error_message +from patchman.signals import error_message, info_message + +from django.utils.timezone import make_aware +from django.utils.dateparse import parse_datetime +from django.conf import settings if ProgressBar.__dict__.get('maxval'): @@ -52,12 +57,12 @@ def set_verbosity(value): verbose = value -def create_pbar(ptext, plength, **kwargs): +def create_pbar(ptext, plength, ljust=35, **kwargs): """ Create a global progress bar if global verbose is True """ global pbar, verbose if verbose and plength > 0: - jtext = str(ptext).ljust(35) + jtext = str(ptext).ljust(ljust) if pbar2: pbar = ProgressBar(widgets=[Style.RESET_ALL + Fore.YELLOW + jtext, Percentage(), Bar(), ETA()], @@ -79,34 +84,37 @@ def update_pbar(index, **kwargs): pmax = pbar.max_value else: pmax = pbar.maxval - if index == pmax: + if index >= pmax: pbar.finish() print_nocr(Fore.RESET) pbar = None -def download_url(res, text=''): +def download_url(res, text='', ljust=35): """ Display a progress bar to download the request content if verbose is True. 
Otherwise, just return the request content """ global verbose - if verbose and 'content-length' in res.headers: - clen = int(res.headers['content-length']) - create_pbar(text, clen) - chunk_size = 16384 - i = 0 - data = b'' - for chunk in res.iter_content(chunk_size=chunk_size, - decode_unicode=False): - i += len(chunk) - if i > clen: - update_pbar(clen) - else: - update_pbar(i) - data += chunk - return data - else: - return res.content + if verbose: + content_length = res.headers.get('content-length') + if content_length: + clen = int(content_length) + create_pbar(text, clen, ljust) + chunk_size = 16384 + i = 0 + data = b'' + for chunk in res.iter_content(chunk_size=chunk_size, + decode_unicode=False): + i += len(chunk) + if i > clen: + update_pbar(clen) + else: + update_pbar(i) + data += chunk + return data + else: + info_message.send(sender=None, text=text) + return res.content def print_nocr(text): @@ -116,12 +124,12 @@ def print_nocr(text): sys.stdout.softspace = False -def get_url(url): +def get_url(url, headers={}, params={}): """ Perform a http GET on a URL. Return None on error. """ res = None try: - res = requests.get(url, stream=True) + res = requests.get(url, headers=headers, params=params, stream=True) except requests.exceptions.Timeout: error_message.send(sender=None, text=f'Timeout - {url!s}') except requests.exceptions.TooManyRedirects: @@ -142,6 +150,17 @@ def response_is_valid(res): return False +def has_setting_of_type(setting_name, expected_type): + """ Checks if the Django settings module has the specified attribute + and if it is of the expected type + Returns True if the setting exists and is of the expected type, False otherwise. 
+ """ + if not hasattr(settings, setting_name): + return False + setting_value = getattr(settings, setting_name) + return isinstance(setting_value, expected_type) + + def gunzip(contents): """ gunzip contents in memory and return the data """ @@ -236,3 +255,16 @@ def get_md5(data): """ Return the md5 checksum for data """ return md5(data).hexdigest() + + +def tz_aware_datetime(date): + """ Ensure a datetime is timezone-aware + Returns the tz-aware datetime object + """ + if isinstance(date, int): + parsed_date = datetime.fromtimestamp(date) + else: + parsed_date = parse_datetime(date) + if not parsed_date.tzinfo: + parsed_date = make_aware(parsed_date) + return parsed_date diff --git a/util/templatetags/common.py b/util/templatetags/common.py index e59ceb67..88d70f43 100644 --- a/util/templatetags/common.py +++ b/util/templatetags/common.py @@ -26,6 +26,8 @@ from django.templatetags.static import static from django.core.paginator import Paginator +from util import has_setting_of_type + try: from urllib.parse import urlencode except ImportError: @@ -105,8 +107,7 @@ def searchform(): @register.simple_tag def reports_timedelta(): - if hasattr(settings, 'DAYS_WITHOUT_REPORT') and \ - isinstance(settings.DAYS_WITHOUT_REPORT, int): + if has_setting_of_type('DAYS_WITHOUT_REPORT', int): days = settings.DAYS_WITHOUT_REPORT else: days = 14 diff --git a/util/views.py b/util/views.py index e55575f2..8de578e5 100644 --- a/util/views.py +++ b/util/views.py @@ -28,6 +28,7 @@ from repos.models import Repository, Mirror from packages.models import Package from reports.models import Report +from util import has_setting_of_type @login_required @@ -45,8 +46,7 @@ def dashboard(request): packages = Package.objects.all() # host issues - if hasattr(settings, 'DAYS_WITHOUT_REPORT') and \ - isinstance(settings.DAYS_WITHOUT_REPORT, int): + if has_setting_of_type('DAYS_WITHOUT_REPORT', int): days = settings.DAYS_WITHOUT_REPORT else: days = 14