Skip to content

Commit

Permalink
Add dates and changelog for packages and vulnerabilities (#1310)
Browse files Browse the repository at this point in the history
* Add changelog

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Add importer name to advisory importers

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Fix views

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* clean-up models.py

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Add importer name and importing authority for istio

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Fix models

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Make code more readable

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Cleanup models

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Add tests for changelogs

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Adjust package changelog according to new requirements

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Remove importing_authority

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Add vulnerability changelog

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Address review comments

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Make changes according to recent PR merging

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Format package_details.html

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Add tests for checking unicity of importer names

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Add tooltip for package and vulnerability views

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Typo fixes

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Fix failing tests

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Address review comments

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Fix typos

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Address review comments

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Fix pypa importer changelog url

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

* Refactor ubuntu usn

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>

---------

Signed-off-by: Tushar Goel <tushar.goel.dav@gmail.com>
  • Loading branch information
TG1999 authored Dec 26, 2023
1 parent d21d2c1 commit e2b60c9
Show file tree
Hide file tree
Showing 118 changed files with 4,942 additions and 2,290 deletions.
21 changes: 16 additions & 5 deletions vulnerabilities/import_runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,17 +18,20 @@

from vulnerabilities.importer import AdvisoryData
from vulnerabilities.importer import Importer
from vulnerabilities.importers import IMPORTERS_REGISTRY
from vulnerabilities.improver import Inference
from vulnerabilities.improvers.default import DefaultImporter
from vulnerabilities.models import Advisory
from vulnerabilities.models import Alias
from vulnerabilities.models import Package
from vulnerabilities.models import PackageRelatedVulnerability
from vulnerabilities.models import Vulnerability
from vulnerabilities.models import VulnerabilityChangeLog
from vulnerabilities.models import VulnerabilityReference
from vulnerabilities.models import VulnerabilityRelatedReference
from vulnerabilities.models import VulnerabilitySeverity
from vulnerabilities.models import Weakness
from vulnerabilities.utils import get_importer_name

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -102,6 +105,7 @@ def process_advisories(
"created_by": importer_name,
"date_collected": datetime.datetime.now(tz=datetime.timezone.utc),
},
url=data.url,
)
if not obj.date_imported:
advisories.append(obj)
Expand Down Expand Up @@ -151,6 +155,7 @@ def process_inferences(inferences: List[Inference], advisory: Advisory, improver
vulnerability_id=inference.vulnerability_id,
aliases=inference.aliases,
summary=inference.summary,
advisory=advisory,
)

if not vulnerability:
Expand Down Expand Up @@ -193,24 +198,24 @@ def process_inferences(inferences: List[Inference], advisory: Advisory, improver
)

for affected_purl in inference.affected_purls or []:
vulnerable_package = Package.objects.get_or_create_from_purl(purl=affected_purl)
vulnerable_package, _ = Package.objects.get_or_create_from_purl(purl=affected_purl)
PackageRelatedVulnerability(
vulnerability=vulnerability,
package=vulnerable_package,
created_by=improver_name,
confidence=inference.confidence,
fix=False,
).update_or_create()
).update_or_create(advisory=advisory)

if inference.fixed_purl:
fixed_package = Package.objects.get_or_create_from_purl(purl=inference.fixed_purl)
fixed_package, _ = Package.objects.get_or_create_from_purl(purl=inference.fixed_purl)
PackageRelatedVulnerability(
vulnerability=vulnerability,
package=fixed_package,
created_by=improver_name,
confidence=inference.confidence,
fix=True,
).update_or_create()
).update_or_create(advisory=advisory)

if inference.weaknesses and vulnerability:
for cwe_id in inference.weaknesses:
Expand Down Expand Up @@ -246,7 +251,7 @@ def create_valid_vulnerability_reference(url, reference_id=None):


def get_or_create_vulnerability_and_aliases(
aliases: List[str], vulnerability_id=None, summary=None
aliases: List[str], vulnerability_id=None, summary=None, advisory=None
):
"""
    Get or create vulnerability and aliases such that all existing and new
Expand Down Expand Up @@ -297,6 +302,12 @@ def get_or_create_vulnerability_and_aliases(
vulnerability = create_vulnerability_and_add_aliases(
aliases=new_alias_names, summary=summary
)
importer_name = get_importer_name(advisory)
VulnerabilityChangeLog.log_import(
importer=importer_name,
source_url=advisory.url,
vulnerability=vulnerability,
)
except Exception as e:
logger.error(
f"Cannot create vulnerability with summary {summary!r} and {new_alias_names!r} {e!r}.\n{traceback_format_exc()}."
Expand Down
9 changes: 9 additions & 0 deletions vulnerabilities/importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -248,6 +248,7 @@ class AdvisoryData:
references: List[Reference] = dataclasses.field(default_factory=list)
date_published: Optional[datetime.datetime] = None
weaknesses: List[int] = dataclasses.field(default_factory=list)
url: Optional[str] = None

def __post_init__(self):
if self.date_published and not self.date_published.tzinfo:
Expand All @@ -271,6 +272,7 @@ def to_dict(self):
"references": [ref.to_dict() for ref in self.references],
"date_published": self.date_published.isoformat() if self.date_published else None,
"weaknesses": self.weaknesses,
"url": self.url if self.url else "",
}

@classmethod
Expand All @@ -287,6 +289,7 @@ def from_dict(cls, advisory_data):
if date_published
else None,
"weaknesses": advisory_data["weaknesses"],
"url": advisory_data.get("url") or None,
}
return cls(**transformed)

Expand All @@ -313,6 +316,8 @@ class Importer:
license_url = ""
notice = ""
vcs_response: VCSResponse = None
# It needs to be unique and immutable
importer_name = ""

def __init__(self):
if not self.spdx_license_expression:
Expand Down Expand Up @@ -358,6 +363,9 @@ class OvalImporter(Importer):
`OvalDataSource` class. Subclasses must implement the methods `_fetch` and `set_api`.
"""

data_url: str = ""
importer_name = "Oval Importer"

@staticmethod
def create_purl(pkg_name: str, pkg_data: Mapping) -> PackageURL:
"""
Expand Down Expand Up @@ -472,4 +480,5 @@ def get_data_from_xml_doc(
affected_packages=affected_packages,
references=sorted(references),
date_published=date_published,
url=self.data_url,
)
18 changes: 11 additions & 7 deletions vulnerabilities/importers/alpine_linux.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
class AlpineImporter(Importer):
spdx_license_expression = "CC-BY-SA-4.0"
license_url = "https://secdb.alpinelinux.org/license.txt"
importer_name = "Alpine Linux Importer"

def advisory_data(self) -> Iterable[AdvisoryData]:
page_response_content = fetch_response(BASE_URL).content
Expand All @@ -50,7 +51,7 @@ def advisory_data(self) -> Iterable[AdvisoryData]:
if not record["packages"]:
LOGGER.error(f'"packages" not found in {link!r}')
continue
yield from process_record(record)
yield from process_record(record=record, url=link)


def fetch_advisory_directory_links(page_response_content: str) -> List[str]:
Expand Down Expand Up @@ -98,12 +99,12 @@ def check_for_attributes(record) -> bool:
return True


def process_record(record: dict) -> Iterable[AdvisoryData]:
def process_record(record: dict, url: str) -> Iterable[AdvisoryData]:
"""
Return a list of AdvisoryData objects by processing data
present in that `record`
"""
if not record["packages"]:
if not record.get("packages"):
LOGGER.error(f'"packages" not found in this record {record!r}')
return []

Expand All @@ -114,10 +115,11 @@ def process_record(record: dict) -> Iterable[AdvisoryData]:
if not check_for_attributes(record):
continue
yield from load_advisories(
package["pkg"],
record["distroversion"],
record["reponame"],
record["archs"],
pkg_infos=package["pkg"],
distroversion=record["distroversion"],
reponame=record["reponame"],
archs=record["archs"],
url=url,
)


Expand All @@ -126,6 +128,7 @@ def load_advisories(
distroversion: str,
reponame: str,
archs: List[str],
url: str,
) -> Iterable[AdvisoryData]:
"""
Yield AdvisoryData by mapping data from `pkg_infos`
Expand Down Expand Up @@ -211,4 +214,5 @@ def load_advisories(
references=references,
affected_packages=affected_packages,
aliases=aliases,
url=url,
)
2 changes: 2 additions & 0 deletions vulnerabilities/importers/apache_httpd.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ class ApacheHTTPDImporter(Importer):
base_url = "https://httpd.apache.org/security/json/"
spdx_license_expression = "Apache-2.0"
license_url = "https://www.apache.org/licenses/LICENSE-2.0"
importer_name = "Apache HTTPD Importer"

def advisory_data(self):
links = fetch_links(self.base_url)
Expand Down Expand Up @@ -106,6 +107,7 @@ def to_advisory(self, data):
summary=description or "",
affected_packages=affected_packages,
references=[reference],
url=reference.url,
)

def to_version_ranges(self, versions_data, fixed_versions):
Expand Down
2 changes: 2 additions & 0 deletions vulnerabilities/importers/apache_kafka.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,7 @@ class ApacheKafkaImporter(Importer):
ASF_PAGE_URL = "https://kafka.apache.org/cve-list"
spdx_license_expression = "Apache-2.0"
license_url = "https://www.apache.org/licenses/"
importer_name = "Apache Kafka Importer"

@staticmethod
def fetch_advisory_page(self):
Expand Down Expand Up @@ -191,6 +192,7 @@ def to_advisory(self, advisory_page):
affected_packages=affected_packages,
references=references,
date_published=date_published,
url=f"{self.ASF_PAGE_URL}#{cve_id}",
)
)

Expand Down
14 changes: 8 additions & 6 deletions vulnerabilities/importers/apache_tomcat.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,14 +118,15 @@ class ApacheTomcatImporter(Importer):

spdx_license_expression = "Apache-2.0"
license_url = "https://www.apache.org/licenses/LICENSE-2.0"
importer_name = "Apache Tomcat Importer"

def fetch_advisory_pages(self):
"""
Yield the content of each HTML page containing version-related security data.
"""
links = self.fetch_advisory_links("https://tomcat.apache.org/security")
for page_url in links:
yield requests.get(page_url).content
yield page_url, requests.get(page_url).content

def fetch_advisory_links(self, url):
"""
Expand All @@ -147,12 +148,12 @@ def advisory_data(self):
"""
advisories = []

for advisory_page in self.fetch_advisory_pages():
advisories.extend(self.extract_advisories_from_page(advisory_page))
for url, advisory_page in self.fetch_advisory_pages():
advisories.extend(self.extract_advisories_from_page(url, advisory_page))

return advisories

def extract_advisories_from_page(self, apache_tomcat_advisory_html):
def extract_advisories_from_page(self, url, apache_tomcat_advisory_html):
"""
Yield AdvisoryData objects extracted from the HTML text ``apache_tomcat_advisory_html``.
"""
Expand All @@ -162,7 +163,7 @@ def extract_advisories_from_page(self, apache_tomcat_advisory_html):
)

for advisory_group in fixed_version_advisory_groups:
yield from generate_advisory_data_objects(advisory_group)
yield from generate_advisory_data_objects(url, advisory_group)


@dataclasses.dataclass(order=True)
Expand Down Expand Up @@ -265,7 +266,7 @@ def extract_tomcat_advisory_data_from_page(apache_tomcat_advisory_html):
)


def generate_advisory_data_objects(tomcat_advisory_data_object):
def generate_advisory_data_objects(url, tomcat_advisory_data_object):
fixed_versions = tomcat_advisory_data_object.fixed_versions
severity_scores = ("Low:", "Moderate:", "Important:", "High:", "Critical:")

Expand Down Expand Up @@ -361,6 +362,7 @@ def generate_advisory_data_objects(tomcat_advisory_data_object):
summary="",
affected_packages=affected_packages,
references=references,
url=url,
)


Expand Down
2 changes: 2 additions & 0 deletions vulnerabilities/importers/archlinux.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ class ArchlinuxImporter(Importer):
url = "https://security.archlinux.org/json"
spdx_license_expression = "MIT"
license_url = "https://github.com/archlinux/arch-security-tracker/blob/master/LICENSE"
importer_name = "Arch Linux Importer"

def fetch(self) -> Iterable[Mapping]:
response = fetch_response(self.url)
Expand Down Expand Up @@ -91,6 +92,7 @@ def parse_advisory(self, record) -> List[AdvisoryData]:
summary=summary,
affected_packages=affected_packages,
references=references,
url=f"https://security.archlinux.org/{record['name']}",
)
)

Expand Down
2 changes: 2 additions & 0 deletions vulnerabilities/importers/debian.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ class DebianImporter(Importer):
"""

api_url = "https://security-tracker.debian.org/tracker/data/json"
importer_name = "Debian Importer"

def get_response(self):
response = requests.get(self.api_url)
Expand Down Expand Up @@ -154,4 +155,5 @@ def parse(self, pkg_name: str, records: Mapping[str, Any]) -> Iterable[AdvisoryD
summary=record.get("description", ""),
affected_packages=affected_packages,
references=references,
url=self.api_url,
)
2 changes: 2 additions & 0 deletions vulnerabilities/importers/debian_oval.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ class DebianOvalImporter(OvalImporter):
Cheers,
Moritz
"""
importer_name = "Debian Oval Importer"

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
Expand All @@ -66,6 +67,7 @@ def _fetch(self):
releases = ["wheezy", "stretch", "jessie", "buster", "bullseye"]
for release in releases:
file_url = f"https://www.debian.org/security/oval/oval-definitions-{release}.xml.bz2"
self.data_url = file_url
resp = requests.get(file_url).content
extracted = bz2.decompress(resp)
yield (
Expand Down
27 changes: 20 additions & 7 deletions vulnerabilities/importers/elixir_security.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,11 @@
# See https://github.com/nexB/vulnerablecode for support or download.
# See https://aboutcode.org for more information about nexB OSS projects.
#
import urllib.parse as urlparse
from pathlib import Path
from typing import Set

from dateutil import parser as dateparser
from packageurl import PackageURL
from univers.version_constraint import VersionConstraint
from univers.version_range import HexVersionRange
Expand All @@ -26,21 +28,26 @@ class ElixirSecurityImporter(Importer):
repo_url = "git+https://github.com/dependabot/elixir-security-advisories"
license_url = "https://github.com/dependabot/elixir-security-advisories/blob/master/LICENSE.txt"
spdx_license_expression = "CC0-1.0"
importer_name = "Elixir Security Importer"

def advisory_data(self) -> Set[AdvisoryData]:
try:
self.clone(repo_url=self.repo_url)
path = Path(self.vcs_response.dest_dir)
vuln = path / "packages"
self.clone(self.repo_url)
base_path = Path(self.vcs_response.dest_dir)
vuln = base_path / "packages"
for file in vuln.glob("**/*.yml"):
yield from self.process_file(file)
yield from self.process_file(file, base_path)
finally:
if self.vcs_response:
self.vcs_response.delete()

def process_file(self, path):
path = str(path)
yaml_file = load_yaml(path)
def process_file(self, file, base_path):
relative_path = str(file.relative_to(base_path)).strip("/")
advisory_url = (
f"https://github.com/dependabot/elixir-security-advisories/blob/master/{relative_path}"
)
file = str(file)
yaml_file = load_yaml(file)
cve_id = ""
summary = yaml_file.get("description") or ""
pkg_name = yaml_file.get("package") or ""
Expand Down Expand Up @@ -94,9 +101,15 @@ def process_file(self, path):
)
)

date_published = None
if yaml_file.get("disclosure_date"):
date_published = dateparser.parse(yaml_file.get("disclosure_date"))

yield AdvisoryData(
aliases=[cve_id],
summary=summary,
references=references,
affected_packages=affected_packages,
url=advisory_url,
date_published=date_published,
)
Loading

0 comments on commit e2b60c9

Please sign in to comment.