From 46aacf1dfab9db260ba825a86b56402af59ea79d Mon Sep 17 00:00:00 2001
From: Pushpit
Date: Thu, 27 May 2021 19:14:07 +0530
Subject: [PATCH 01/27] Added code to import Xen

Signed-off-by: Pushpit
---
 vulnerabilities/fixtures/openssl.json | 14 +++++
 vulnerabilities/importer_yielder.py   | 10 +++
 vulnerabilities/importers/__init__.py |  1 +
 vulnerabilities/importers/xen.py      | 90 +++++++++++++++++++++++++++
 4 files changed, 115 insertions(+)
 create mode 100644 vulnerabilities/importers/xen.py

diff --git a/vulnerabilities/fixtures/openssl.json b/vulnerabilities/fixtures/openssl.json
index 5520effd0..b53d2ad8a 100644
--- a/vulnerabilities/fixtures/openssl.json
+++ b/vulnerabilities/fixtures/openssl.json
@@ -40764,5 +40764,19 @@
         ]
       }
     }
+  },
+  {
+    "model": "vulnerabilities.importer",
+    "pk": 17,
+    "fields": {
+      "name": "xen",
+      "license": "",
+      "last_run": null,
+      "data_source": "XenDataSource",
+      "data_source_cfg": {
+        "etags": {},
+        "db_url": "https://xenbits.xen.org/xsa/xsa.json"
+      }
+    }
 }
 ]
\ No newline at end of file
diff --git a/vulnerabilities/importer_yielder.py b/vulnerabilities/importer_yielder.py
index 567755c3c..7ca191607 100644
--- a/vulnerabilities/importer_yielder.py
+++ b/vulnerabilities/importer_yielder.py
@@ -234,6 +234,16 @@
         "data_source": "IstioDataSource",
         "data_source_cfg": {"repository_url": "https://github.com/istio/istio.io"},
     },
+    {
+        "name": "xen",
+        "license": "",
+        "last_run": None,
+        "data_source": "XenDataSource",
+        "data_source_cfg": {
+            "etags": {},
+            "db_url": "https://xenbits.xen.org/xsa/xsa.json",
+        },
+    }
 ]

diff --git a/vulnerabilities/importers/__init__.py b/vulnerabilities/importers/__init__.py
index f7387df61..c4fdd5d4a 100644
--- a/vulnerabilities/importers/__init__.py
+++ b/vulnerabilities/importers/__init__.py
@@ -47,5 +47,6 @@
 from vulnerabilities.importers.ubuntu import UbuntuDataSource
 from vulnerabilities.importers.ubuntu_usn import UbuntuUSNDataSource
 from vulnerabilities.importers.istio import IstioDataSource
+from vulnerabilities.importers.xen import XenDataSource

 # from vulnerabilities.importers.suse_backports import SUSEBackportsDataSource
diff --git a/vulnerabilities/importers/xen.py b/vulnerabilities/importers/xen.py
new file mode 100644
index 000000000..2d32b6df3
--- /dev/null
+++ b/vulnerabilities/importers/xen.py
@@ -0,0 +1,90 @@
+# Copyright (c) nexB Inc. and others. All rights reserved.
+# http://nexb.com and https://github.com/nexB/vulnerablecode/
+# The VulnerableCode software is licensed under the Apache License version 2.0.
+# Data generated with VulnerableCode require an acknowledgment.
+#
+# You may not use this software except in compliance with the License.
+# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+#
+# When you publish or redistribute any data created with VulnerableCode or any VulnerableCode
+# derivative work, you must accompany this data with the following acknowledgment:
+#
+# Generated with VulnerableCode and provided on an 'AS IS' BASIS, WITHOUT WARRANTIES
+# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
+# VulnerableCode should be considered or used as legal advice. Consult an Attorney
+# for any legal advice.
+# VulnerableCode is a free software code scanning tool from nexB Inc. and others.
+# Visit https://github.com/nexB/vulnerablecode/ for support and download.
+
+import bz2
+import dataclasses
+import json
+
+import requests
+from packageurl import PackageURL
+
+from vulnerabilities.data_source import DataSource
+from vulnerabilities.data_source import Advisory
+from vulnerabilities.data_source import Reference
+from vulnerabilities.helpers import create_etag
+from vulnerabilities.helpers import is_cve
+
+
+@dataclasses.dataclass
+class XenDBConfiguration:
+    etags: dict
+    db_url: str
+
+
+class XenDataSource(DataSource):
+    CONFIG_CLASS = XenDBConfiguration
+
+    def updated_advisories(self):
+        advisories = []
+        if create_etag(data_src=self, url=self.config.db_url, etag_key="etag"):
+            advisories.extend(self.to_advisories(fetch(self.config.db_url)))
+
+        return self.batch_advisories(advisories)
+
+    def create_etag(self, url):
+        etag = requests.head(url).headers.get("etag")
+        if not etag:
+            return True
+
+        elif url in self.config.etags:
+            if self.config.etags[url] == etag:
+                return False
+
+        self.config.etags[url] = etag
+        return True
+
+    @staticmethod
+    def to_advisories(xen_db):
+        advisories = []
+        for xsa in xen_db[0]["xsas"]:
+            reference = get_xen_references(xsa["xsa"])
+            title = xsa.get("title", "")
+            for cve in xsa.get("cve", [""]):
+                if not is_cve(cve):
+                    cve = ""
+
+                advisories.append(
+                    Advisory(
+                        vulnerability_id=cve,
+                        summary=title,
+                        references=[reference],
+                    )
+                )
+        return advisories
+
+
+def get_xen_references(xsa_id):
+    return Reference(reference_id="XSA-" + xsa_id, url="https://xenbits.xen.org/xsa/advisory-{}.html".format(xsa_id))
+
+def fetch(url):
+    response = requests.get(url).content
+    return json.loads(response)
\ No newline at end of file
From 307ead3ae716ba91fe72e42562955008165479f7 Mon Sep 17 00:00:00 2001
From: Pushpit
Date: Sun, 25 Jul 2021 21:20:26 +0530
Subject: [PATCH 02/27] Update importer_yielder.py

---
 vulnerabilities/importer_yielder.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vulnerabilities/importer_yielder.py b/vulnerabilities/importer_yielder.py
index 7ca191607..cc64714a0 100644
--- a/vulnerabilities/importer_yielder.py
+++ b/vulnerabilities/importer_yielder.py
@@ -236,7 +236,7 @@
     },
     {
         "name": "xen",
-        "license": "",
+        "license": "gplv2",
         "last_run": None,
         "data_source": "XenDataSource",
         "data_source_cfg": {

From 7b828eaba8d2d20fad3c7952da1ec31463c5e2bb Mon Sep 17 00:00:00 2001
From: Pushpit
Date: Sun, 25 Jul 2021 21:22:47 +0530
Subject: [PATCH 03/27] Added xen license info

Signed-off-by: Pushpit
---
 vulnerabilities/fixtures/openssl.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vulnerabilities/fixtures/openssl.json b/vulnerabilities/fixtures/openssl.json
index b53d2ad8a..1b790b791 100644
--- a/vulnerabilities/fixtures/openssl.json
+++ b/vulnerabilities/fixtures/openssl.json
@@ -40770,7 +40770,7 @@
         "pk": 17,
         "fields": {
             "name": "xen",
-            "license": "",
+            "license": "gplv2",
             "last_run": null,
             "data_source": "XenDataSource",
             "data_source_cfg": {

From 73a884ff573916a01873996be0096a90090c97af Mon Sep 17 00:00:00 2001
From: Hritik Vijay
Date: Sun, 18 Apr 2021 16:29:09 +0530
Subject: [PATCH 04/27] helper: split_markdown_front_matter helper for istio and mozilla importers

Signed-off-by: Hritik Vijay
---
 vulnerabilities/helpers.py | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)

diff --git a/vulnerabilities/helpers.py b/vulnerabilities/helpers.py
index 37fa7d1cb..c0139091e 100644
---
a/vulnerabilities/helpers.py +++ b/vulnerabilities/helpers.py @@ -26,6 +26,7 @@ import re from typing import Optional from typing import List +from typing import Tuple import requests import saneyaml @@ -164,3 +165,30 @@ def __lt__(self, other): ) return affected_package_with_patched_package_objects + + +def split_markdown_front_matter(text: str) -> Tuple[str, str]: + r""" + Split text into markdown front matter and the markdown body + Returns ("", text) for text with non existing front matter + + >>> text='''--- + ... title: DUMMY-SECURITY-2019-001 + ... description: Incorrect access control. + ... cves: [CVE-2042-1337] + ... --- + ... # Markdown starts here + ... ''' + >>> split_markdown_front_matter(text) + ('title: DUMMY-SECURITY-2019-001\ndescription: Incorrect access control.\ncves: [CVE-2042-1337]\n', '\n# Markdown starts here\n') + """ + + front_matter = "" + body = text + text = text.replace("\r\n", "\n") + linezero,_, text = text.partition("---\n") + + if not linezero: # nothing before first --- + front_matter,_, body = text.partition("---") + + return front_matter, body From a1752bda7f71d2b9df78818637b5b988e6750539 Mon Sep 17 00:00:00 2001 From: Hritik Vijay Date: Sun, 18 Apr 2021 17:13:43 +0530 Subject: [PATCH 05/27] black -l 100 Signed-off-by: Hritik Vijay --- vulnerabilities/helpers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/vulnerabilities/helpers.py b/vulnerabilities/helpers.py index c0139091e..958507abb 100644 --- a/vulnerabilities/helpers.py +++ b/vulnerabilities/helpers.py @@ -186,9 +186,9 @@ def split_markdown_front_matter(text: str) -> Tuple[str, str]: front_matter = "" body = text text = text.replace("\r\n", "\n") - linezero,_, text = text.partition("---\n") + linezero, _, text = text.partition("---\n") - if not linezero: # nothing before first --- - front_matter,_, body = text.partition("---") + if not linezero: # nothing before first --- + front_matter, _, body = text.partition("---") return front_matter, body From 614e0f3c86f736c8454f2d77298f0828fee85b69 Mon Sep 17 00:00:00 2001 From: Hritik Vijay Date: Sun, 25 Apr 2021 21:22:16 +0530 Subject: [PATCH 06/27] Use split_markdown_front_matter helper in istio also, sort imports Signed-off-by: Hritik Vijay --- vulnerabilities/helpers.py | 2 +- vulnerabilities/importers/istio.py | 48 ++++-------------------------- 2 files changed, 6 insertions(+), 44 deletions(-) diff --git a/vulnerabilities/helpers.py b/vulnerabilities/helpers.py index 958507abb..11f6f238c 100644 --- a/vulnerabilities/helpers.py +++ b/vulnerabilities/helpers.py @@ -170,7 +170,7 @@ def __lt__(self, other): def split_markdown_front_matter(text: str) -> Tuple[str, str]: r""" Split text into markdown front matter and the markdown body - Returns ("", text) for text with non existing front matter + Return ("", text) for text with non existing front matter >>> text='''--- ... 
title: DUMMY-SECURITY-2019-001 diff --git a/vulnerabilities/importers/istio.py b/vulnerabilities/importers/istio.py index 509bde08d..00be91c76 100644 --- a/vulnerabilities/importers/istio.py +++ b/vulnerabilities/importers/istio.py @@ -32,6 +32,7 @@ from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import GitDataSource from vulnerabilities.data_source import Reference +from vulnerabilities.helpers import split_markdown_front_matter from vulnerabilities.helpers import nearest_patched_package from vulnerabilities.package_managers import GitHubTagsAPI @@ -80,45 +81,6 @@ def get_pkg_versions_from_ranges(self, version_range_list): safe_pkg_versions = set(all_version) - set(vuln_pkg_versions) return safe_pkg_versions, vuln_pkg_versions - def get_data_from_yaml_lines(self, yaml_lines): - """Return a mapping of data from a iterable of yaml_lines - for example : - ['title: ISTIO-SECURITY-2019-001', - 'description: Incorrect access control.','cves: [CVE-2019-12243]'] - - would give {'title':'ISTIO-SECURITY-2019-001', - 'description': 'Incorrect access control.', - 'cves': '[CVE-2019-12243]'} - """ - - return saneyaml.load("\n".join(yaml_lines)) - - def get_yaml_lines(self, lines): - """The istio advisory file contains lines similar to yaml format . - This function extracts those lines and return an iterable of lines - - for example : - lines = - --- - title: ISTIO-SECURITY-2019-001 - description: Incorrect access control. - cves: [CVE-2019-12243] - --- - - get_yaml_lines(lines) would return - ['title: ISTIO-SECURITY-2019-001','description: Incorrect access control.' - ,'cves: [CVE-2019-12243]'] - """ - - for index, line in enumerate(lines): - line = line.strip() - if line.startswith("---") and index == 0: - continue - elif line.endswith("---"): - break - else: - yield line - def process_file(self, path): advisories = [] @@ -212,10 +174,10 @@ def process_file(self, path): return advisories def get_data_from_md(self, path): - """Return a mapping of vulnerability data from istio . The data is - in the form of yaml_lines inside a .md file. + """Return a mapping of vulnerability data from istio. The data is + in the form of yaml objects found inside front matter of the .md file. """ with open(path) as f: - yaml_lines = self.get_yaml_lines(f) - return self.get_data_from_yaml_lines(yaml_lines) + yaml_lines, _ = split_markdown_front_matter(f.read()) + return saneyaml.load(yaml_lines) From d872763ab21adbef1f11dcbfbd8d9adc258a2767 Mon Sep 17 00:00:00 2001 From: Hritik Vijay Date: Mon, 26 Apr 2021 20:29:40 +0530 Subject: [PATCH 07/27] Use a more obvious version and reorder imports Signed-off-by: Hritik Vijay --- vulnerabilities/helpers.py | 28 ++++++++++++++++------------ vulnerabilities/importers/istio.py | 2 +- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/vulnerabilities/helpers.py b/vulnerabilities/helpers.py index 11f6f238c..d46aa02ea 100644 --- a/vulnerabilities/helpers.py +++ b/vulnerabilities/helpers.py @@ -167,7 +167,7 @@ def __lt__(self, other): return affected_package_with_patched_package_objects -def split_markdown_front_matter(text: str) -> Tuple[str, str]: +def split_markdown_front_matter(lines: str) -> Tuple[str, str]: r""" Split text into markdown front matter and the markdown body Return ("", text) for text with non existing front matter @@ -180,15 +180,19 @@ def split_markdown_front_matter(text: str) -> Tuple[str, str]: ... # Markdown starts here ... 
'''
     >>> split_markdown_front_matter(text)
-    ('title: DUMMY-SECURITY-2019-001\ndescription: Incorrect access control.\ncves: [CVE-2042-1337]\n', '\n# Markdown starts here\n')
+    ('title: DUMMY-SECURITY-2019-001\ndescription: Incorrect access control.\ncves: [CVE-2042-1337]', '# Markdown starts here\n')
     """
-
-    front_matter = ""
-    body = text
-    text = text.replace("\r\n", "\n")
-    linezero, _, text = text.partition("---\n")
-
-    if not linezero:  # nothing before first ---
-        front_matter, _, body = text.partition("---")
-
-    return front_matter, body
+    fmlines = []
+    mdlines = []
+    splitter = mdlines
+
+    lines = lines.replace("\r\n", "\n")
+    for index, line in enumerate(lines.split("\n")):
+        if index == 0 and line.strip().startswith("---"):
+            splitter = fmlines
+        elif line.strip().startswith("---"):
+            splitter = mdlines
+        else:
+            splitter.append(line)
+
+    return "\n".join(fmlines), "\n".join(mdlines)
diff --git a/vulnerabilities/importers/istio.py b/vulnerabilities/importers/istio.py
index 00be91c76..ec131896b 100644
--- a/vulnerabilities/importers/istio.py
+++ b/vulnerabilities/importers/istio.py
@@ -32,8 +32,8 @@
 from vulnerabilities.data_source import Advisory
 from vulnerabilities.data_source import GitDataSource
 from vulnerabilities.data_source import Reference
-from vulnerabilities.helpers import split_markdown_front_matter
 from vulnerabilities.helpers import nearest_patched_package
+from vulnerabilities.helpers import split_markdown_front_matter
 from vulnerabilities.package_managers import GitHubTagsAPI

 is_release = re.compile(r"^[\d.]+$", re.IGNORECASE).match

From f4ca8610edbe17982f8468c4c433ee2e3293d14f Mon Sep 17 00:00:00 2001
From: Hritik Vijay
Date: Sun, 9 May 2021 14:03:08 +0530
Subject: [PATCH 08/27] Update according to first review

Better documentation and more readable function structure
review: https://github.com/nexB/vulnerablecode/pull/443#pullrequestreview-650928076

Signed-off-by: Hritik Vijay
---
 vulnerabilities/helpers.py         | 30 ++++++++++++------------------
 vulnerabilities/importers/istio.py |  8 +++-----
 2 files changed, 15 insertions(+), 23 deletions(-)

diff --git a/vulnerabilities/helpers.py b/vulnerabilities/helpers.py
index d46aa02ea..f296aef5e 100644
--- a/vulnerabilities/helpers.py
+++ b/vulnerabilities/helpers.py
@@ -167,10 +167,10 @@ def __lt__(self, other):

     return affected_package_with_patched_package_objects


-def split_markdown_front_matter(lines: str) -> Tuple[str, str]:
+def split_markdown_front_matter(text: str) -> Tuple[str, str]:
     r"""
-    Split text into markdown front matter and the markdown body
-    Return ("", text) for text with non existing front matter
+    Return a tuple of (front matter, markdown body) strings split from ``text``.
+    Each can be an empty string.

     >>> text='''---
     ... title: DUMMY-SECURITY-2019-001
     ... description: Incorrect access control.
     ... cves: [CVE-2042-1337]
     ... ---
     ... # Markdown starts here
     ...
''' >>> split_markdown_front_matter(text) - ('title: DUMMY-SECURITY-2019-001\ndescription: Incorrect access control.\ncves: [CVE-2042-1337]', '# Markdown starts here\n') + ('title: DUMMY-SECURITY-2019-001\ndescription: Incorrect access control.\ncves: [CVE-2042-1337]', '# Markdown starts here') """ - fmlines = [] - mdlines = [] - splitter = mdlines - - lines = lines.replace("\r\n", "\n") - for index, line in enumerate(lines.split("\n")): - if index == 0 and line.strip().startswith("---"): - splitter = fmlines - elif line.strip().startswith("---"): - splitter = mdlines - else: - splitter.append(line) - - return "\n".join(fmlines), "\n".join(mdlines) + lines = text.splitlines() + if lines[0] == "---": + lines = lines[1:] + text = "\n".join(lines) + frontmatter, _, markdown = text.partition("\n---\n") + return frontmatter, markdown + + return "", text diff --git a/vulnerabilities/importers/istio.py b/vulnerabilities/importers/istio.py index ec131896b..103044bc2 100644 --- a/vulnerabilities/importers/istio.py +++ b/vulnerabilities/importers/istio.py @@ -174,10 +174,8 @@ def process_file(self, path): return advisories def get_data_from_md(self, path): - """Return a mapping of vulnerability data from istio. The data is - in the form of yaml objects found inside front matter of the .md file. - """ + """Return a mapping of vulnerability data extracted from an advisory.""" with open(path) as f: - yaml_lines, _ = split_markdown_front_matter(f.read()) - return saneyaml.load(yaml_lines) + front_matter, _ = split_markdown_front_matter(f.read()) + return saneyaml.load(front_matter) From 617700add4038bdf8b724bdc83cb5f42234b3a83 Mon Sep 17 00:00:00 2001 From: Hritik Vijay Date: Sun, 20 Jun 2021 14:54:46 +0530 Subject: [PATCH 09/27] Comment regarding raw docstring and sort imports Signed-off-by: Hritik Vijay --- vulnerabilities/helpers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/helpers.py b/vulnerabilities/helpers.py index f296aef5e..95dc2d801 100644 --- a/vulnerabilities/helpers.py +++ b/vulnerabilities/helpers.py @@ -24,8 +24,8 @@ import dataclasses import json import re -from typing import Optional from typing import List +from typing import Optional from typing import Tuple import requests @@ -182,6 +182,7 @@ def split_markdown_front_matter(text: str) -> Tuple[str, str]: >>> split_markdown_front_matter(text) ('title: DUMMY-SECURITY-2019-001\ndescription: Incorrect access control.\ncves: [CVE-2042-1337]', '# Markdown starts here') """ + # The doctest contains \n and for the sake of clarity I chose raw strings than escaping those. lines = text.splitlines() if lines[0] == "---": lines = lines[1:] From 0f6eece65f7f1b8efdc558950bd9c40653205fa7 Mon Sep 17 00:00:00 2001 From: Hritik Vijay Date: Fri, 25 Jun 2021 05:18:10 +0530 Subject: [PATCH 10/27] Remove added_advisories for updated_advisories Internally, the difference between both has faded and updated_advisories is preferred. 
Signed-off-by: Hritik Vijay --- vulnerabilities/importers/elixir_security.py | 11 +---------- vulnerabilities/importers/retiredotnet.py | 11 +---------- vulnerabilities/importers/ruby.py | 11 +---------- vulnerabilities/importers/rust.py | 5 +---- 4 files changed, 4 insertions(+), 34 deletions(-) diff --git a/vulnerabilities/importers/elixir_security.py b/vulnerabilities/importers/elixir_security.py index fcb3e6c42..23550d69c 100644 --- a/vulnerabilities/importers/elixir_security.py +++ b/vulnerabilities/importers/elixir_security.py @@ -49,16 +49,7 @@ def set_api(self, packages): asyncio.run(self.pkg_manager_api.load_api(packages)) def updated_advisories(self) -> Set[Advisory]: - files = self._updated_files - advisories = [] - for f in files: - processed_data = self.process_file(f) - if processed_data: - advisories.append(processed_data) - return self.batch_advisories(advisories) - - def added_advisories(self) -> Set[Advisory]: - files = self._added_files + files = self._updated_files.union(self._added_files) advisories = [] for f in files: processed_data = self.process_file(f) diff --git a/vulnerabilities/importers/retiredotnet.py b/vulnerabilities/importers/retiredotnet.py index 25e08b04d..7096c9ba6 100644 --- a/vulnerabilities/importers/retiredotnet.py +++ b/vulnerabilities/importers/retiredotnet.py @@ -43,16 +43,7 @@ def __enter__(self): ) def updated_advisories(self) -> Set[Advisory]: - files = self._updated_files - advisories = [] - for f in files: - processed_data = self.process_file(f) - if processed_data: - advisories.append(processed_data) - return self.batch_advisories(advisories) - - def added_advisories(self) -> Set[Advisory]: - files = self._added_files + files = self._updated_files.union(self._added_files) advisories = [] for f in files: processed_data = self.process_file(f) diff --git a/vulnerabilities/importers/ruby.py b/vulnerabilities/importers/ruby.py index a801a5f81..f0d8885c2 100644 --- a/vulnerabilities/importers/ruby.py +++ b/vulnerabilities/importers/ruby.py @@ -52,16 +52,7 @@ def set_api(self, packages): asyncio.run(self.pkg_manager_api.load_api(packages)) def updated_advisories(self) -> Set[Advisory]: - files = self._updated_files - advisories = [] - for f in files: - processed_data = self.process_file(f) - if processed_data: - advisories.append(processed_data) - return self.batch_advisories(advisories) - - def added_advisories(self) -> Set[Advisory]: - files = self._added_files + files = self._updated_files.union(self._added_files) advisories = [] for f in files: processed_data = self.process_file(f) diff --git a/vulnerabilities/importers/rust.py b/vulnerabilities/importers/rust.py index 557ff7d06..a8b549e82 100644 --- a/vulnerabilities/importers/rust.py +++ b/vulnerabilities/importers/rust.py @@ -60,11 +60,8 @@ def crates_api(self): def set_api(self, packages): asyncio.run(self.crates_api.load_api(packages)) - def added_advisories(self) -> Set[Advisory]: - return self._load_advisories(self._added_files) - def updated_advisories(self) -> Set[Advisory]: - return self._load_advisories(self._updated_files) + return self._load_advisories(self._updated_files.union(self._added_files)) def _load_advisories(self, files) -> Set[Advisory]: # per @tarcieri It will always be named RUSTSEC-0000-0000.md From b891459f223edcbc9604942e7b9580c0944416f7 Mon Sep 17 00:00:00 2001 From: Hritik Vijay Date: Fri, 25 Jun 2021 05:40:40 +0530 Subject: [PATCH 11/27] Speed up test_upstream Earlier, one batch of advisories was requested from updated_advisories method of the respective 
importers. This was inefficient as not all importers respect batching internally. Eventually, we wish to eliminate batches as well ( # 338 ). Now, the updated_advisories method of each importer is expected to create at least one Advisory object. If it does so, the importer is marked working. This brings major performance improvement. It is a necessity to improve this test as GitHub only allows 6 hrs of workflow time. Before: ~6hrs, now ~9 minutes Signed-off-by: Hritik Vijay --- vulnerabilities/tests/test_upstream.py | 44 +++++++++++++++++++++++--- 1 file changed, 40 insertions(+), 4 deletions(-) diff --git a/vulnerabilities/tests/test_upstream.py b/vulnerabilities/tests/test_upstream.py index a875f570e..c3e7cf392 100644 --- a/vulnerabilities/tests/test_upstream.py +++ b/vulnerabilities/tests/test_upstream.py @@ -1,7 +1,19 @@ +import inspect +from unittest.mock import patch + import pytest + from vulnerabilities import importers +from vulnerabilities.data_source import Advisory from vulnerabilities.importer_yielder import IMPORTER_REGISTRY +MAX_ADVISORIES = 1 + + +class MaxAdvisoriesCreatedInterrupt(BaseException): + # Inheriting BaseException is intentional because the function being tested might catch Exception + pass + @pytest.mark.webtest @pytest.mark.parametrize( @@ -9,10 +21,34 @@ ((data["data_source"], data["data_source_cfg"]) for data in IMPORTER_REGISTRY), ) def test_updated_advisories(data_source, config): - if not data_source == "GitHubAPIDataSource": data_src = getattr(importers, data_source) - data_src = data_src(batch_size=1, config=config) - with data_src: - for i in data_src.updated_advisories(): + data_src = data_src(batch_size=MAX_ADVISORIES, config=config) + advisory_counter = 0 + + def patched_advisory(*args, **kwargs): + nonlocal advisory_counter + + if advisory_counter >= MAX_ADVISORIES: + raise MaxAdvisoriesCreatedInterrupt + + advisory_counter += 1 + return Advisory(*args, **kwargs) + + module = inspect.getmodule(data_src) + module_members = [m[0] for m in inspect.getmembers(module)] + advisory_class = f"{module.__name__}.Advisory" + if "Advisory" not in module_members: + advisory_class = "vulnerabilities.data_source.Advisory" + + # Either + # 1) Advisory class is successfully patched and MaxAdvisoriesCreatedInterrupt is thrown when + # an importer tries to create an Advisory or + # 2) Importer somehow bypasses the patch / handles BaseException internally, then + # updated_advisories is required to return non zero advisories + with patch(advisory_class, side_effect=patched_advisory): + try: + with data_src: + assert len(list(data_src.updated_advisories())) > 0 + except MaxAdvisoriesCreatedInterrupt: pass From 1f2cb59e6dc9611c7fadc2c75590c25a930f0d5b Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sat, 26 Jun 2021 14:02:53 +0000 Subject: [PATCH 12/27] :see_no_evil: Ignore legacy pypi package versions in github importer Signed-off-by: GitHub --- vulnerabilities/importers/github.py | 67 ++++++++++++++++++++++++++++- 1 file changed, 66 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/importers/github.py b/vulnerabilities/importers/github.py index 9e45c93dc..6da7a5769 100644 --- a/vulnerabilities/importers/github.py +++ b/vulnerabilities/importers/github.py @@ -82,6 +82,69 @@ } """ +# See https://github.com/nexB/vulnerablecode/issues/486 +IGNORE_VERSIONS = { + "0.1-bulbasaur", + "0.1-charmander", + "0.3m1", + "0.3m2", + "0.3m3", + "0.3m4", + "0.3m5", + "0.4m1", + "0.4m2", + "0.4m3", + "0.4m4", + "0.4m5", + "0.5m1", + "0.5m2", + "0.5m3", + "0.5m4", + "0.5m5", 
+ "0.6m1", + "0.6m2", + "0.6m3", + "0.6m4", + "0.6m5", + "0.6m6", + "0.7.10p1", + "0.7.11p1", + "0.7.11p2", + "0.7.11p3", + "0.8.1p1", + "0.8.3p1", + "0.8.4p1", + "0.8.4p2", + "0.8.6p1", + "0.8.7p1", + "0.9-doduo", + "0.9-eevee", + "0.9-fearow", + "0.9-gyarados", + "0.9-horsea", + "0.9-ivysaur", + "2013-01-21T20:33:09+0100", + "2013-01-23T17:11:52+0100", + "2013-02-01T20:50:46+0100", + "2013-02-02T19:59:03+0100", + "2013-02-02T20:23:17+0100", + "2013-02-08T17:40:57+0000", + "2013-03-27T16:32:26+0100", + "2013-05-09T12:47:53+0200", + "2013-05-10T17:55:56+0200", + "2013-05-14T20:16:05+0200", + "2013-06-01T10:32:51+0200", + "2013-07-19T09:11:08+0000", + "2013-08-12T21:48:56+0200", + "2013-09-11T19-27-10", + "2013-12-23T17-51-15", + "2014-01-12T15-52-10", + "2.0.1rc2-git", + "3.0.0b3-", + "3.0b6dev-r41684", + "-class.-jw.util.version.Version-", +} + class GitHubTokenError(Exception): pass @@ -265,9 +328,11 @@ def categorize_versions( affected_versions = [] unaffected_versions = [] for version in all_versions: + if version in IGNORE_VERSIONS: + continue + if version_class(version) in version_range: affected_versions.append(version) else: unaffected_versions.append(version) - return (affected_versions, unaffected_versions) From 1f8ef6b8d36a206d33f9bdc316d491906950b45d Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sun, 30 May 2021 17:11:27 +0530 Subject: [PATCH 13/27] Add basic implementation for time travel in imports Signed-off-by: Shivam Sandbhor --- vulnerabilities/importers/debian_oval.py | 6 -- vulnerabilities/importers/github.py | 17 +++--- vulnerabilities/importers/istio.py | 1 - vulnerabilities/importers/npm.py | 11 ++-- vulnerabilities/importers/rust.py | 5 +- vulnerabilities/package_managers.py | 70 ++++++++++++++++++++---- 6 files changed, 78 insertions(+), 32 deletions(-) diff --git a/vulnerabilities/importers/debian_oval.py b/vulnerabilities/importers/debian_oval.py index e73a1da26..50f9eb9ce 100644 --- a/vulnerabilities/importers/debian_oval.py +++ b/vulnerabilities/importers/debian_oval.py @@ -23,14 +23,8 @@ import asyncio import dataclasses -from typing import Iterable -from typing import List -from typing import Mapping -from typing import Set import xml.etree.ElementTree as ET -from aiohttp import ClientSession -from aiohttp.client_exceptions import ClientResponseError, ServerDisconnectedError import requests from vulnerabilities.data_source import OvalDataSource, DataSourceConfiguration diff --git a/vulnerabilities/importers/github.py b/vulnerabilities/importers/github.py index 6da7a5769..a10ceedb3 100644 --- a/vulnerabilities/importers/github.py +++ b/vulnerabilities/importers/github.py @@ -24,6 +24,8 @@ import os import dataclasses import json +from xml.etree.ElementTree import parse +from dateutil import parser from typing import Set from typing import Tuple from typing import List @@ -34,6 +36,7 @@ from packageurl import PackageURL from univers.version_specifier import VersionSpecifier from univers.versions import version_class_by_package_type +from univers.versions import InvalidVersion from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import DataSource @@ -67,6 +70,7 @@ url } severity + publishedAt } package { name @@ -185,7 +189,6 @@ def fetch(self) -> Mapping[str, List[Mapping]]: end_cursor_exp = "" while True: - query_json = {"query": query % (ecosystem, end_cursor_exp)} resp = requests.post(self.config.endpoint, headers=headers, json=query_json).json() if resp.get("message") == "Bad credentials": @@ -260,12 +263,16 @@ def 
process_response(self) -> List[Advisory]: for resp_page in self.advisories[ecosystem]: for adv in resp_page["data"]["securityVulnerabilities"]["edges"]: name = adv["node"]["package"]["name"] - + cutoff_time = parser.parse(adv["node"]["advisory"]["publishedAt"]) + affected_purls = [] + unaffected_purls = [] if self.process_name(ecosystem, name): ns, pkg_name = self.process_name(ecosystem, name) aff_range = adv["node"]["vulnerableVersionRange"] aff_vers, unaff_vers = self.categorize_versions( - self.version_api.package_type, aff_range, self.version_api.get(name) + self.version_api.package_type, + aff_range, + self.version_api.get(name, until=cutoff_time)["valid"], ) affected_purls = [ PackageURL(name=pkg_name, namespace=ns, version=version, type=pkg_type) @@ -276,10 +283,6 @@ def process_response(self) -> List[Advisory]: PackageURL(name=pkg_name, namespace=ns, version=version, type=pkg_type) for version in unaff_vers ] - else: - affected_purls = [] - unaffected_purls = [] - cve_ids = set() references = self.extract_references(adv["node"]["advisory"]["references"]) vuln_desc = adv["node"]["advisory"]["summary"] diff --git a/vulnerabilities/importers/istio.py b/vulnerabilities/importers/istio.py index 103044bc2..4d6729b99 100644 --- a/vulnerabilities/importers/istio.py +++ b/vulnerabilities/importers/istio.py @@ -21,7 +21,6 @@ # Visit https://github.com/nexB/vulnerablecode/ for support and download. import asyncio import re -from typing import List from typing import Set import saneyaml diff --git a/vulnerabilities/importers/npm.py b/vulnerabilities/importers/npm.py index 76c6fbc28..eb28869d8 100644 --- a/vulnerabilities/importers/npm.py +++ b/vulnerabilities/importers/npm.py @@ -22,14 +22,11 @@ # Visit https://github.com/nexB/vulnerablecode/ for support and download. import asyncio -from typing import Any +import pytz from typing import List -from typing import Mapping from typing import Set from typing import Tuple -from urllib.error import HTTPError from urllib.parse import quote -from urllib.request import urlopen from dateutil.parser import parse from univers.version_specifier import VersionSpecifier @@ -87,7 +84,11 @@ def process_file(self, file) -> List[Advisory]: record = load_json(file) advisories = [] package_name = record["module_name"].strip() - all_versions = self.versions.get(package_name) + + publish_date = parse(record["updated_at"]) + publish_date.replace(tzinfo=pytz.UTC) + + all_versions = self.versions.get(package_name, until=publish_date) aff_range = record.get("vulnerable_versions") if not aff_range: aff_range = "" diff --git a/vulnerabilities/importers/rust.py b/vulnerabilities/importers/rust.py index a8b549e82..059c57ba7 100644 --- a/vulnerabilities/importers/rust.py +++ b/vulnerabilities/importers/rust.py @@ -26,8 +26,10 @@ from typing import List from typing import Set from typing import Tuple +from dateutil.parser import parse import toml +import pytz from univers.version_specifier import VersionSpecifier from univers.versions import SemverVersion from packageurl import PackageURL @@ -95,7 +97,8 @@ def _load_advisory(self, path: str) -> Optional[Advisory]: if advisory.get("url"): references.append(Reference(url=advisory["url"])) - all_versions = self.crates_api.get(crate_name) + publish_date = parse(advisory["date"]).replace(tzinfo=pytz.UTC) + all_versions = self.crates_api.get(crate_name, publish_date)["valid"] # FIXME: Avoid wildcard version ranges for now. 
# See https://github.com/RustSec/advisory-db/discussions/831 diff --git a/vulnerabilities/package_managers.py b/vulnerabilities/package_managers.py index 501949201..892e33f9a 100644 --- a/vulnerabilities/package_managers.py +++ b/vulnerabilities/package_managers.py @@ -21,6 +21,8 @@ # Visit https://github.com/nexB/vulnerablecode/ for support and download. import asyncio +from collections import namedtuple +from dateutil import parser from json import JSONDecodeError from typing import Mapping from typing import Set @@ -32,12 +34,22 @@ from aiohttp.client_exceptions import ServerDisconnectedError +Version = namedtuple("Version", field_names=["value", "release_date"]) + + class VersionAPI: def __init__(self, cache: Mapping[str, Set[str]] = None): self.cache = cache or {} - def get(self, package_name: str) -> Set[str]: - return self.cache.get(package_name, set()) + def get(self, package_name, until=None) -> Set[str]: + versions = {"new": set(), "valid": set()} + for version in self.cache.get(package_name, set()): + if until and version.release_date and version.release_date > until: + versions["new"].add(version.value) + continue + versions["valid"].add(version.value) + + return versions def client_session(): @@ -97,7 +109,16 @@ async def fetch(self, pkg, session): try: response = await session.request(method="GET", url=url) response = await response.json() - versions = set(response["releases"]) + for version in response["releases"]: + if response["releases"][version]: + versions.add( + Version( + value=version, + release_date=parser.parse( + response["releases"][version][-1]["upload_time_iso_8601"] + ), + ) + ) except ClientResponseError: # PYPI removed this package. # https://www.zdnet.com/article/twelve-malicious-python-libraries-found-and-removed-from-pypi/ # nopep8 @@ -121,7 +142,11 @@ async def fetch(self, pkg, session): response = await response.json() versions = set() for version_info in response["versions"]: - versions.add(version_info["num"]) + versions.add( + Version( + value=version_info["num"], release_date=parser.parse(version_info["updated_at"]) + ) + ) self.cache[pkg] = versions @@ -143,7 +168,11 @@ async def fetch(self, pkg, session): response = await session.request(method="GET", url=url) response = await response.json() for release in response: - versions.add(release["number"]) + versions.add( + Version( + value=release["number"], release_date=parser.parse(release["created_at"]) + ) + ) except (ClientResponseError, JSONDecodeError): pass @@ -166,7 +195,13 @@ async def fetch(self, pkg, session): try: response = await session.request(method="GET", url=url) response = await response.json() - versions = {v for v in response.get("versions", [])} + for version in response.get("versions", []): + release_date = response.get("time", {}).get(version) + if release_date: + release_date = parser.parse(release_date) + versions.add(Version(value=version, release_date=release_date)) + else: + versions.add(Version(value=version, release_date=None)) except ClientResponseError: pass @@ -294,7 +329,12 @@ def extract_versions(resp: dict) -> Set[str]: try: for entry_group in resp["items"]: for entry in entry_group["items"]: - all_versions.add(entry["catalogEntry"]["version"]) + all_versions.add( + Version( + value=entry["catalogEntry"]["version"], + release_date=parser.parse(entry["catalogEntry"]["published"]), + ) + ) # FIXME: json response for YamlDotNet.Signed triggers this exception. # Some packages with many versions give a response of a list of endpoints. 
# In such cases rather, we should collect data from those endpoints. @@ -327,15 +367,21 @@ def composer_url(pkg_name: str) -> str: vendor, name = pkg_name.split("/") except ValueError: # TODO Log this - return None + return return f"https://repo.packagist.org/p/{vendor}/{name}.json" @staticmethod def extract_versions(resp: dict, pkg_name: str) -> Set[str]: - all_versions = resp["packages"][pkg_name].keys() - all_versions = { - version.replace("v", "") for version in all_versions if "dev" not in version - } + all_versions = set() + for version in resp["packages"][pkg_name]: + if "dev" in version: + continue + all_versions.add( + Version( + value=version.replace("v", ""), + release_date=parser.parse(resp["packages"][pkg_name][version]["time"]), + ) + ) # This if statement ensures, that all_versions contains only released versions # See https://github.com/composer/composer/blob/44a4429978d1b3c6223277b875762b2930e83e8c/doc/articles/versions.md#tags # nopep8 # for explanation of removing 'v' From e84730384a9ffeff423f1253a7d5f05e41943213 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Tue, 1 Jun 2021 20:59:21 +0530 Subject: [PATCH 14/27] Use web scraping to obtain github tags Signed-off-by: Shivam Sandbhor --- vulnerabilities/package_managers.py | 52 ++++++++++++++++++++++++----- 1 file changed, 44 insertions(+), 8 deletions(-) diff --git a/vulnerabilities/package_managers.py b/vulnerabilities/package_managers.py index 892e33f9a..ed1573516 100644 --- a/vulnerabilities/package_managers.py +++ b/vulnerabilities/package_managers.py @@ -22,6 +22,8 @@ import asyncio from collections import namedtuple +import aiohttp +from bs4 import BeautifulSoup from dateutil import parser from json import JSONDecodeError from typing import Mapping @@ -32,6 +34,7 @@ from aiohttp import ClientSession from aiohttp.client_exceptions import ClientResponseError from aiohttp.client_exceptions import ServerDisconnectedError +from requests.sessions import session Version = namedtuple("Version", field_names=["value", "release_date"]) @@ -393,22 +396,51 @@ class GitHubTagsAPI(VersionAPI): package_type = "github" async def load_api(self, repo_set): - async with client_session() as session: + session = client_session() + async with session as session: await asyncio.gather( *[ - self.fetch(owner_repo.lower(), session) + self.fetch(owner_repo.lower()) for owner_repo in repo_set if owner_repo.lower() not in self.cache ] ) - async def fetch(self, owner_repo: str, session) -> None: + async def fetch(self, owner_repo: str, endpoint=None) -> None: # owner_repo is a string of format "{repo_owner}/{repo_name}" # Example value of owner_repo = "nexB/scancode-toolkit" - endpoint = f"https://api.github.com/repos/{owner_repo}/git/refs/tags" - resp = await session.request(method="GET", url=endpoint) - resp = await resp.json() - self.cache[owner_repo] = [release["ref"].split("/")[-1] for release in resp] + if owner_repo not in self.cache: + self.cache[owner_repo] = set() + + if not endpoint: + endpoint = f"https://github.com/{owner_repo}/tags" + async with client_session() as session: + resp = await session.get(endpoint) + resp = await resp.read() + + soup = BeautifulSoup(resp, features="lxml") + for release_entry in soup.find_all("div", {"class": "commit"}): + version = None + for links in release_entry.find_all("a"): + if f"/{owner_repo}/releases/tag/" in links["href"].lower(): + version = links["href"].split("/")[-1] + break + + release_date = release_entry.find("relative-time")["datetime"] + self.cache[owner_repo].add( + 
Version(value=version, release_date=parser.parse(release_date)) + ) + + url = None + pagination_links = soup.find("div", {"class": "paginate-container"}).find_all("a") + for link in pagination_links: + if link.text == "Next": + url = link["href"] + break + + if url: + # FIXME: this could be asynced to improve performance + await self.fetch(owner_repo, url) class HexVersionAPI(VersionAPI): @@ -425,7 +457,11 @@ async def fetch(self, pkg, session): response = await session.request(method="GET", url=url) response = await response.json() for release in response["releases"]: - versions.add(release["version"]) + versions.add( + Version( + value=release["version"], release_date=parser.parse(release["inserted_at"]) + ) + ) except (ClientResponseError, JSONDecodeError): pass From ba9051d3de10d7222415225e6559ed0bca1b9a2c Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sun, 6 Jun 2021 13:32:24 +0530 Subject: [PATCH 15/27] Time travel for githubtags, maven version api importers Signed-off-by: Shivam Sandbhor --- vulnerabilities/importers/elixir_security.py | 2 +- vulnerabilities/importers/istio.py | 9 +- vulnerabilities/importers/nginx.py | 8 +- vulnerabilities/importers/suse_backports.py | 162 +++++++++---------- vulnerabilities/package_managers.py | 54 +++++-- 5 files changed, 130 insertions(+), 105 deletions(-) diff --git a/vulnerabilities/importers/elixir_security.py b/vulnerabilities/importers/elixir_security.py index 23550d69c..a6074fba1 100644 --- a/vulnerabilities/importers/elixir_security.py +++ b/vulnerabilities/importers/elixir_security.py @@ -74,7 +74,7 @@ def get_versions_for_pkg_from_range_list(self, version_range_list, pkg_name): safe_pkg_versions = [] vuln_pkg_versions = [] - all_version_list = self.pkg_manager_api.get(pkg_name) + all_version_list = self.pkg_manager_api.get(pkg_name)["valid"] if not version_range_list: return [], all_version_list version_ranges = [ diff --git a/vulnerabilities/importers/istio.py b/vulnerabilities/importers/istio.py index 4d6729b99..c4883011d 100644 --- a/vulnerabilities/importers/istio.py +++ b/vulnerabilities/importers/istio.py @@ -20,7 +20,9 @@ # VulnerableCode is a free software tool from nexB Inc. and others. # Visit https://github.com/nexB/vulnerablecode/ for support and download. 
import asyncio +import pytz import re +from dateutil import parser from typing import Set import saneyaml @@ -61,11 +63,11 @@ def updated_advisories(self) -> Set[Advisory]: advisories.extend(processed_data) return self.batch_advisories(advisories) - def get_pkg_versions_from_ranges(self, version_range_list): + def get_pkg_versions_from_ranges(self, version_range_list, release_date): """Takes a list of version ranges(affected) of a package as parameter and returns a tuple of safe package versions and vulnerable package versions""" - all_version = self.version_api.get("istio/istio") + all_version = self.version_api.get("istio/istio", release_date)["valid"] safe_pkg_versions = [] vuln_pkg_versions = [] version_ranges = [ @@ -85,6 +87,7 @@ def process_file(self, path): advisories = [] data = self.get_data_from_md(path) + release_date = parser.parse(data["publishdate"]).replace(tzinfo=pytz.UTC) releases = [] if data.get("releases"): @@ -127,7 +130,7 @@ def process_file(self, path): data["release_ranges"] = [] safe_pkg_versions, vuln_pkg_versions = self.get_pkg_versions_from_ranges( - data["release_ranges"] + data["release_ranges"], release_date ) affected_packages = [] diff --git a/vulnerabilities/importers/nginx.py b/vulnerabilities/importers/nginx.py index e5be6247c..fb8d82404 100644 --- a/vulnerabilities/importers/nginx.py +++ b/vulnerabilities/importers/nginx.py @@ -53,8 +53,8 @@ def set_api(self): # For some reason nginx tags it's releases are in the form of `release-1.2.3` # Chop off the `release-` part here. - for index, version in enumerate(self.version_api.cache["nginx/nginx"]): - self.version_api.cache["nginx/nginx"][index] = version.replace("release-", "") + for index, version in enumerate(self.version_api.cache["nginx/nginx"]["valid"]): + self.version_api.cache["nginx/nginx"]["valid"][index] = version.replace("release-", "") def updated_advisories(self): advisories = [] @@ -134,7 +134,9 @@ def extract_fixed_pkgs(self, vuln_info): VersionSpecifier.from_scheme_version_spec_string("semver", "^" + rng[:-1]) ) - valid_versions = find_valid_versions(self.version_api.get("nginx/nginx"), version_ranges) + valid_versions = find_valid_versions( + self.version_api.get("nginx/nginx")["valid"], version_ranges + ) return [ PackageURL(type="generic", name="nginx", version=version) for version in valid_versions diff --git a/vulnerabilities/importers/suse_backports.py b/vulnerabilities/importers/suse_backports.py index c80d25969..6ad43ee08 100644 --- a/vulnerabilities/importers/suse_backports.py +++ b/vulnerabilities/importers/suse_backports.py @@ -1,95 +1,95 @@ -# Copyright (c) 2017 nexB Inc. and others. All rights reserved. -# http://nexb.com and https://github.com/nexB/vulnerablecode/ -# The VulnerableCode software is licensed under the Apache License version 2.0. -# Data generated with VulnerableCode require an acknowledgment. -# -# You may not use this software except in compliance with the License. -# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software distributed -# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -# CONDITIONS OF ANY KIND, either express or implied. See the License for the -# specific language governing permissions and limitations under the License. 
-# -# When you publish or redistribute any data created with VulnerableCode or any VulnerableCode -# derivative work, you must accompany this data with the following acknowledgment: -# -# Generated with VulnerableCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES -# OR CONDITIONS OF ANY KIND, either express or implied. No content created from -# VulnerableCode should be considered or used as legal advice. Consult an Attorney -# for any legal advice. -# VulnerableCode is a free software code scanning tool from nexB Inc. and others. -# Visit https://github.com/nexB/vulnerablecode/ for support and download. -import dataclasses +# # Copyright (c) 2017 nexB Inc. and others. All rights reserved. +# # http://nexb.com and https://github.com/nexB/vulnerablecode/ +# # The VulnerableCode software is licensed under the Apache License version 2.0. +# # Data generated with VulnerableCode require an acknowledgment. +# # +# # You may not use this software except in compliance with the License. +# # You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0 +# # Unless required by applicable law or agreed to in writing, software distributed +# # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# # CONDITIONS OF ANY KIND, either express or implied. See the License for the +# # specific language governing permissions and limitations under the License. +# # +# # When you publish or redistribute any data created with VulnerableCode or any VulnerableCode +# # derivative work, you must accompany this data with the following acknowledgment: +# # +# # Generated with VulnerableCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES +# # OR CONDITIONS OF ANY KIND, either express or implied. No content created from +# # VulnerableCode should be considered or used as legal advice. Consult an Attorney +# # for any legal advice. +# # VulnerableCode is a free software code scanning tool from nexB Inc. and others. +# # Visit https://github.com/nexB/vulnerablecode/ for support and download. 
+# import dataclasses -import requests -import saneyaml -from bs4 import BeautifulSoup -from packageurl import PackageURL +# import requests +# import saneyaml +# from bs4 import BeautifulSoup +# from packageurl import PackageURL -from vulnerabilities.data_source import Advisory -from vulnerabilities.data_source import DataSource -from vulnerabilities.data_source import DataSourceConfiguration -from vulnerabilities.helpers import create_etag +# from vulnerabilities.data_source import Advisory +# from vulnerabilities.data_source import DataSource +# from vulnerabilities.data_source import DataSourceConfiguration +# from vulnerabilities.helpers import create_etag -@dataclasses.dataclass -class SUSEBackportsConfiguration(DataSourceConfiguration): - url: str - etags: dict +# @dataclasses.dataclass +# class SUSEBackportsConfiguration(DataSourceConfiguration): +# url: str +# etags: dict -class SUSEBackportsDataSource(DataSource): +# class SUSEBackportsDataSource(DataSource): - CONFIG_CLASS = SUSEBackportsConfiguration +# CONFIG_CLASS = SUSEBackportsConfiguration - @staticmethod - def get_all_urls_of_backports(url): - r = requests.get(url) - soup = BeautifulSoup(r.content, "lxml") - for a_tag in soup.find_all("a", href=True): - if a_tag["href"].endswith(".yaml") and a_tag["href"].startswith("backports"): - yield url + a_tag["href"] +# @staticmethod +# def get_all_urls_of_backports(url): +# r = requests.get(url) +# soup = BeautifulSoup(r.content, "lxml") +# for a_tag in soup.find_all("a", href=True): +# if a_tag["href"].endswith(".yaml") and a_tag["href"].startswith("backports"): +# yield url + a_tag["href"] - def updated_advisories(self): - advisories = [] - all_urls = self.get_all_urls_of_backports(self.config.url) - for url in all_urls: - if not create_etag(data_src=self, url=url, etag_key="ETag"): - continue - advisories.extend(self.process_file(self._fetch_yaml(url))) - return self.batch_advisories(advisories) +# def updated_advisories(self): +# advisories = [] +# all_urls = self.get_all_urls_of_backports(self.config.url) +# for url in all_urls: +# if not create_etag(data_src=self, url=url, etag_key="ETag"): +# continue +# advisories.extend(self.process_file(self._fetch_yaml(url))) +# return self.batch_advisories(advisories) - def _fetch_yaml(self, url): +# def _fetch_yaml(self, url): - try: - resp = requests.get(url) - resp.raise_for_status() - return saneyaml.load(resp.content) +# try: +# resp = requests.get(url) +# resp.raise_for_status() +# return saneyaml.load(resp.content) - except requests.HTTPError: - return {} +# except requests.HTTPError: +# return {} - @staticmethod - def process_file(yaml_file): - advisories = [] - try: - for pkg in yaml_file[0]["packages"]: - for version in yaml_file[0]["packages"][pkg]["fixed"]: - for vuln in yaml_file[0]["packages"][pkg]["fixed"][version]: - # yaml_file specific data can be added - purl = [ - PackageURL(name=pkg, type="rpm", version=version, namespace="opensuse") - ] - advisories.append( - Advisory( - vulnerability_id=vuln, - resolved_package_urls=purl, - summary="", - impacted_package_urls=[], - ) - ) - except TypeError: - # could've used pass - return advisories +# @staticmethod +# def process_file(yaml_file): +# advisories = [] +# try: +# for pkg in yaml_file[0]["packages"]: +# for version in yaml_file[0]["packages"][pkg]["fixed"]: +# for vuln in yaml_file[0]["packages"][pkg]["fixed"][version]: +# # yaml_file specific data can be added +# purl = [ +# PackageURL(name=pkg, type="rpm", version=version, namespace="opensuse") +# ] +# 
advisories.append( +# Advisory( +# vulnerability_id=vuln, +# resolved_package_urls=purl, +# summary="", +# impacted_package_urls=[], +# ) +# ) +# except TypeError: +# # could've used pass +# return advisories - return advisories +# return advisories diff --git a/vulnerabilities/package_managers.py b/vulnerabilities/package_managers.py index ed1573516..7ce7d7c34 100644 --- a/vulnerabilities/package_managers.py +++ b/vulnerabilities/package_managers.py @@ -22,19 +22,16 @@ import asyncio from collections import namedtuple -import aiohttp +import pytz from bs4 import BeautifulSoup from dateutil import parser from json import JSONDecodeError from typing import Mapping from typing import Set -from typing import List -import xml.etree.ElementTree as ET from aiohttp import ClientSession from aiohttp.client_exceptions import ClientResponseError from aiohttp.client_exceptions import ServerDisconnectedError -from requests.sessions import session Version = namedtuple("Version", field_names=["value", "release_date"]) @@ -255,12 +252,15 @@ class MavenVersionAPI(VersionAPI): async def load_api(self, pkg_set): async with client_session() as session: await asyncio.gather( - *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] + *[ + self.fetch(pkg, session) + for pkg in pkg_set + if pkg not in self.cache and "camel" not in pkg + ] ) async def fetch(self, pkg, session) -> None: - artifact_comps = pkg.split(":") - endpoint = self.artifact_url(artifact_comps) + endpoint = self.artifact_url(pkg) try: resp = await session.request(method="GET", url=endpoint) resp = await resp.read() @@ -269,11 +269,15 @@ async def fetch(self, pkg, session) -> None: self.cache[pkg] = set() return - xml_resp = ET.ElementTree(ET.fromstring(resp.decode("utf-8"))) - self.cache[pkg] = self.extract_versions(xml_resp) + soup = BeautifulSoup(resp, features="lxml") + try: + self.cache[pkg] = self.extract_versions(soup) + except: + raise @staticmethod - def artifact_url(artifact_comps: List[str]) -> str: + def artifact_url(pkg: str) -> str: + artifact_comps = pkg.split(":") base_url = "https://repo1.maven.org/maven2/{}" try: group_id, artifact_id = artifact_comps @@ -289,19 +293,35 @@ def artifact_url(artifact_comps: List[str]) -> str: raise group_url = group_id.replace(".", "/") - suffix = group_url + "/" + artifact_id + "/" + "maven-metadata.xml" + suffix = group_url + "/" + artifact_id + "/" endpoint = base_url.format(suffix) return endpoint @staticmethod - def extract_versions(xml_response: ET.ElementTree) -> Set[str]: - all_versions = set() - for child in xml_response.getroot().iter(): - if child.tag == "version": - all_versions.add(child.text) + def extract_versions(soup: BeautifulSoup) -> Set[Version]: + pre_tag = soup.find("pre") + prev_tag = None + versions = set() + for atag in pre_tag: + if atag.name == "a" and atag["href"] != "../": + prev_tag = atag + elif prev_tag: + text_groups = atag.split() + if text_groups[-1] != "-": + break + date = " ".join(text_groups[:-1]) + if date != "-": + versions.add( + Version( + value=prev_tag.text[:-1], + release_date=parser.parse(date).replace(tzinfo=pytz.UTC), + ) + ) + else: + versions.add(Version(value=prev_tag.text[:-1], release_date=None)) - return all_versions + return versions class NugetVersionAPI(VersionAPI): From 9842367de05d456cd94fc0c81e75d8c8dc359f44 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Tue, 8 Jun 2021 21:08:55 +0530 Subject: [PATCH 16/27] Fix tests Signed-off-by: Shivam Sandbhor --- vulnerabilities/importers/apache_httpd.py | 4 +- 
vulnerabilities/importers/apache_kafka.py | 4 +- vulnerabilities/importers/apache_tomcat.py | 6 +- vulnerabilities/importers/nginx.py | 4 +- vulnerabilities/importers/npm.py | 2 +- .../importers/project_kb_msr2019.py | 2 - vulnerabilities/importers/ruby.py | 8 +- vulnerabilities/importers/suse_backports.py | 162 +++--- vulnerabilities/importers/ubuntu.py | 8 - vulnerabilities/package_managers.py | 12 +- vulnerabilities/tests/test_apache_httpd.py | 3 +- vulnerabilities/tests/test_apache_kafka.py | 5 +- vulnerabilities/tests/test_apache_tomcat.py | 15 +- .../tests/test_data/github_api/response.json | 15 +- .../tests/test_data/maven_api/easygcm.html | 31 ++ vulnerabilities/tests/test_data/npm.zip | Bin 27147 -> 36323 bytes vulnerabilities/tests/test_elixir_security.py | 23 +- vulnerabilities/tests/test_github.py | 15 +- vulnerabilities/tests/test_istio.py | 21 +- vulnerabilities/tests/test_nginx.py | 12 +- vulnerabilities/tests/test_npm.py | 16 +- .../tests/test_package_managers.py | 477 ++++++++++++++---- vulnerabilities/tests/test_ruby.py | 2 +- vulnerabilities/tests/test_rust.py | 22 +- vulnerabilities/tests/test_safety_db.py | 17 +- 25 files changed, 614 insertions(+), 272 deletions(-) create mode 100644 vulnerabilities/tests/test_data/maven_api/easygcm.html diff --git a/vulnerabilities/importers/apache_httpd.py b/vulnerabilities/importers/apache_httpd.py index 0b6e87bea..09c64f34c 100644 --- a/vulnerabilities/importers/apache_httpd.py +++ b/vulnerabilities/importers/apache_httpd.py @@ -106,7 +106,7 @@ def to_advisory(self, data): fixed_packages.extend( [ PackageURL(type="apache", name="httpd", version=version) - for version in self.version_api.get("apache/httpd") + for version in self.version_api.get("apache/httpd")["valid"] if MavenVersion(version) in version_range ] ) @@ -115,7 +115,7 @@ def to_advisory(self, data): affected_packages.extend( [ PackageURL(type="apache", name="httpd", version=version) - for version in self.version_api.get("apache/httpd") + for version in self.version_api.get("apache/httpd")["valid"] if MavenVersion(version) in version_range ] ) diff --git a/vulnerabilities/importers/apache_kafka.py b/vulnerabilities/importers/apache_kafka.py index 105f327f0..d0e8f7d7e 100644 --- a/vulnerabilities/importers/apache_kafka.py +++ b/vulnerabilities/importers/apache_kafka.py @@ -72,7 +72,7 @@ def to_advisory(self, advisory_page): fixed_packages = [ PackageURL(type="apache", name="kafka", version=version) - for version in self.version_api.get("apache/kafka") + for version in self.version_api.get("apache/kafka")["valid"] if any( [ MavenVersion(version) in version_range @@ -83,7 +83,7 @@ def to_advisory(self, advisory_page): affected_packages = [ PackageURL(type="apache", name="kafka", version=version) - for version in self.version_api.get("apache/kafka") + for version in self.version_api.get("apache/kafka")["valid"] if any( [ MavenVersion(version) in version_range diff --git a/vulnerabilities/importers/apache_tomcat.py b/vulnerabilities/importers/apache_tomcat.py index 42a728840..3326a3bf9 100644 --- a/vulnerabilities/importers/apache_tomcat.py +++ b/vulnerabilities/importers/apache_tomcat.py @@ -62,7 +62,9 @@ def updated_advisories(self): return self.batch_advisories(advisories) def fetch_pages(self): - tomcat_major_versions = {i[0] for i in self.version_api.get("org.apache.tomcat:tomcat")} + tomcat_major_versions = { + i[0] for i in self.version_api.get("org.apache.tomcat:tomcat")["valid"] + } for version in tomcat_major_versions: page_url = self.base_url.format(version) if 
create_etag(self, page_url, "ETag"): @@ -102,7 +104,7 @@ def to_advisories(self, apache_tomcat_advisory_html): PackageURL( type="maven", namespace="apache", name="tomcat", version=version ) - for version in self.version_api.get("org.apache.tomcat:tomcat") + for version in self.version_api.get("org.apache.tomcat:tomcat")["valid"] if MavenVersion(version) in version_range ] ) diff --git a/vulnerabilities/importers/nginx.py b/vulnerabilities/importers/nginx.py index fb8d82404..13a572f3e 100644 --- a/vulnerabilities/importers/nginx.py +++ b/vulnerabilities/importers/nginx.py @@ -171,7 +171,9 @@ def extract_vuln_pkgs(self, vuln_info): ) ) - valid_versions = find_valid_versions(self.version_api.get("nginx/nginx"), version_ranges) + valid_versions = find_valid_versions( + self.version_api.get("nginx/nginx")["valid"], version_ranges + ) qualifiers = {} if windows_only: qualifiers["os"] = "windows" diff --git a/vulnerabilities/importers/npm.py b/vulnerabilities/importers/npm.py index eb28869d8..e66f9e5d4 100644 --- a/vulnerabilities/importers/npm.py +++ b/vulnerabilities/importers/npm.py @@ -88,7 +88,7 @@ def process_file(self, file) -> List[Advisory]: publish_date = parse(record["updated_at"]) publish_date.replace(tzinfo=pytz.UTC) - all_versions = self.versions.get(package_name, until=publish_date) + all_versions = self.versions.get(package_name, until=publish_date)["valid"] aff_range = record.get("vulnerable_versions") if not aff_range: aff_range = "" diff --git a/vulnerabilities/importers/project_kb_msr2019.py b/vulnerabilities/importers/project_kb_msr2019.py index 8b97eccf1..897a0c4ff 100644 --- a/vulnerabilities/importers/project_kb_msr2019.py +++ b/vulnerabilities/importers/project_kb_msr2019.py @@ -22,12 +22,10 @@ import csv import dataclasses -import re import urllib.request # Reading CSV file from a url using `requests` is bit too complicated. # Use `urllib.request` for that purpose. -from packageurl import PackageURL from vulnerabilities.data_source import Advisory diff --git a/vulnerabilities/importers/ruby.py b/vulnerabilities/importers/ruby.py index f0d8885c2..00f76765f 100644 --- a/vulnerabilities/importers/ruby.py +++ b/vulnerabilities/importers/ruby.py @@ -23,6 +23,8 @@ import asyncio from typing import Set from typing import List +from dateutil.parser import parse +from pytz import UTC from packageurl import PackageURL from univers.version_specifier import VersionSpecifier @@ -81,6 +83,7 @@ def process_file(self, path) -> List[Advisory]: else: return + publish_time = parse(record["date"]).replace(tzinfo=UTC) safe_version_ranges = record.get("patched_versions", []) # this case happens when the advisory contain only 'patched_versions' field # and it has value None(i.e it is empty :( ). @@ -91,7 +94,10 @@ def process_file(self, path) -> List[Advisory]: if not getattr(self, "pkg_manager_api", None): self.pkg_manager_api = RubyVersionAPI() - all_vers = self.pkg_manager_api.get(package_name) + all_vers = self.pkg_manager_api.get(package_name, until=publish_time)["valid"] + print( + f"Ignored {len(self.pkg_manager_api.get(package_name,until=publish_time)['new'])} versions" + ) safe_versions, affected_versions = self.categorize_versions(all_vers, safe_version_ranges) impacted_purls = [ diff --git a/vulnerabilities/importers/suse_backports.py b/vulnerabilities/importers/suse_backports.py index 6ad43ee08..c80d25969 100644 --- a/vulnerabilities/importers/suse_backports.py +++ b/vulnerabilities/importers/suse_backports.py @@ -1,95 +1,95 @@ -# # Copyright (c) 2017 nexB Inc. and others. 
All rights reserved. -# # http://nexb.com and https://github.com/nexB/vulnerablecode/ -# # The VulnerableCode software is licensed under the Apache License version 2.0. -# # Data generated with VulnerableCode require an acknowledgment. -# # -# # You may not use this software except in compliance with the License. -# # You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0 -# # Unless required by applicable law or agreed to in writing, software distributed -# # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -# # CONDITIONS OF ANY KIND, either express or implied. See the License for the -# # specific language governing permissions and limitations under the License. -# # -# # When you publish or redistribute any data created with VulnerableCode or any VulnerableCode -# # derivative work, you must accompany this data with the following acknowledgment: -# # -# # Generated with VulnerableCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES -# # OR CONDITIONS OF ANY KIND, either express or implied. No content created from -# # VulnerableCode should be considered or used as legal advice. Consult an Attorney -# # for any legal advice. -# # VulnerableCode is a free software code scanning tool from nexB Inc. and others. -# # Visit https://github.com/nexB/vulnerablecode/ for support and download. -# import dataclasses +# Copyright (c) 2017 nexB Inc. and others. All rights reserved. +# http://nexb.com and https://github.com/nexB/vulnerablecode/ +# The VulnerableCode software is licensed under the Apache License version 2.0. +# Data generated with VulnerableCode require an acknowledgment. +# +# You may not use this software except in compliance with the License. +# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. +# +# When you publish or redistribute any data created with VulnerableCode or any VulnerableCode +# derivative work, you must accompany this data with the following acknowledgment: +# +# Generated with VulnerableCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES +# OR CONDITIONS OF ANY KIND, either express or implied. No content created from +# VulnerableCode should be considered or used as legal advice. Consult an Attorney +# for any legal advice. +# VulnerableCode is a free software code scanning tool from nexB Inc. and others. +# Visit https://github.com/nexB/vulnerablecode/ for support and download. 
+import dataclasses -# import requests -# import saneyaml -# from bs4 import BeautifulSoup -# from packageurl import PackageURL +import requests +import saneyaml +from bs4 import BeautifulSoup +from packageurl import PackageURL -# from vulnerabilities.data_source import Advisory -# from vulnerabilities.data_source import DataSource -# from vulnerabilities.data_source import DataSourceConfiguration -# from vulnerabilities.helpers import create_etag +from vulnerabilities.data_source import Advisory +from vulnerabilities.data_source import DataSource +from vulnerabilities.data_source import DataSourceConfiguration +from vulnerabilities.helpers import create_etag -# @dataclasses.dataclass -# class SUSEBackportsConfiguration(DataSourceConfiguration): -# url: str -# etags: dict +@dataclasses.dataclass +class SUSEBackportsConfiguration(DataSourceConfiguration): + url: str + etags: dict -# class SUSEBackportsDataSource(DataSource): +class SUSEBackportsDataSource(DataSource): -# CONFIG_CLASS = SUSEBackportsConfiguration + CONFIG_CLASS = SUSEBackportsConfiguration -# @staticmethod -# def get_all_urls_of_backports(url): -# r = requests.get(url) -# soup = BeautifulSoup(r.content, "lxml") -# for a_tag in soup.find_all("a", href=True): -# if a_tag["href"].endswith(".yaml") and a_tag["href"].startswith("backports"): -# yield url + a_tag["href"] + @staticmethod + def get_all_urls_of_backports(url): + r = requests.get(url) + soup = BeautifulSoup(r.content, "lxml") + for a_tag in soup.find_all("a", href=True): + if a_tag["href"].endswith(".yaml") and a_tag["href"].startswith("backports"): + yield url + a_tag["href"] -# def updated_advisories(self): -# advisories = [] -# all_urls = self.get_all_urls_of_backports(self.config.url) -# for url in all_urls: -# if not create_etag(data_src=self, url=url, etag_key="ETag"): -# continue -# advisories.extend(self.process_file(self._fetch_yaml(url))) -# return self.batch_advisories(advisories) + def updated_advisories(self): + advisories = [] + all_urls = self.get_all_urls_of_backports(self.config.url) + for url in all_urls: + if not create_etag(data_src=self, url=url, etag_key="ETag"): + continue + advisories.extend(self.process_file(self._fetch_yaml(url))) + return self.batch_advisories(advisories) -# def _fetch_yaml(self, url): + def _fetch_yaml(self, url): -# try: -# resp = requests.get(url) -# resp.raise_for_status() -# return saneyaml.load(resp.content) + try: + resp = requests.get(url) + resp.raise_for_status() + return saneyaml.load(resp.content) -# except requests.HTTPError: -# return {} + except requests.HTTPError: + return {} -# @staticmethod -# def process_file(yaml_file): -# advisories = [] -# try: -# for pkg in yaml_file[0]["packages"]: -# for version in yaml_file[0]["packages"][pkg]["fixed"]: -# for vuln in yaml_file[0]["packages"][pkg]["fixed"][version]: -# # yaml_file specific data can be added -# purl = [ -# PackageURL(name=pkg, type="rpm", version=version, namespace="opensuse") -# ] -# advisories.append( -# Advisory( -# vulnerability_id=vuln, -# resolved_package_urls=purl, -# summary="", -# impacted_package_urls=[], -# ) -# ) -# except TypeError: -# # could've used pass -# return advisories + @staticmethod + def process_file(yaml_file): + advisories = [] + try: + for pkg in yaml_file[0]["packages"]: + for version in yaml_file[0]["packages"][pkg]["fixed"]: + for vuln in yaml_file[0]["packages"][pkg]["fixed"][version]: + # yaml_file specific data can be added + purl = [ + PackageURL(name=pkg, type="rpm", version=version, namespace="opensuse") + ] + 
advisories.append( + Advisory( + vulnerability_id=vuln, + resolved_package_urls=purl, + summary="", + impacted_package_urls=[], + ) + ) + except TypeError: + # could've used pass + return advisories -# return advisories + return advisories diff --git a/vulnerabilities/importers/ubuntu.py b/vulnerabilities/importers/ubuntu.py index dadc4e15d..4124317c0 100644 --- a/vulnerabilities/importers/ubuntu.py +++ b/vulnerabilities/importers/ubuntu.py @@ -25,19 +25,11 @@ import bz2 import dataclasses import logging -from typing import Iterable -from typing import List -from typing import Mapping -from typing import Set import xml.etree.ElementTree as ET - -from aiohttp import ClientSession -from aiohttp.client_exceptions import ClientResponseError import requests from vulnerabilities.data_source import OvalDataSource, DataSourceConfiguration from vulnerabilities.package_managers import LaunchpadVersionAPI -from vulnerabilities.helpers import create_etag logger = logging.getLogger(__name__) diff --git a/vulnerabilities/package_managers.py b/vulnerabilities/package_managers.py index 7ce7d7c34..5b9735435 100644 --- a/vulnerabilities/package_managers.py +++ b/vulnerabilities/package_managers.py @@ -21,20 +21,24 @@ # Visit https://github.com/nexB/vulnerablecode/ for support and download. import asyncio -from collections import namedtuple +import dataclasses import pytz from bs4 import BeautifulSoup from dateutil import parser from json import JSONDecodeError from typing import Mapping from typing import Set +from datetime import datetime from aiohttp import ClientSession from aiohttp.client_exceptions import ClientResponseError from aiohttp.client_exceptions import ServerDisconnectedError -Version = namedtuple("Version", field_names=["value", "release_date"]) +@dataclasses.dataclass(frozen=True) +class Version: + value: str + release_date: datetime = None class VersionAPI: @@ -303,8 +307,8 @@ def extract_versions(soup: BeautifulSoup) -> Set[Version]: pre_tag = soup.find("pre") prev_tag = None versions = set() - for atag in pre_tag: - if atag.name == "a" and atag["href"] != "../": + for i, atag in enumerate(pre_tag): + if atag.name == "a" and i != 0: prev_tag = atag elif prev_tag: text_groups = atag.split() diff --git a/vulnerabilities/tests/test_apache_httpd.py b/vulnerabilities/tests/test_apache_httpd.py index 384696646..47dcbef7e 100644 --- a/vulnerabilities/tests/test_apache_httpd.py +++ b/vulnerabilities/tests/test_apache_httpd.py @@ -31,6 +31,7 @@ from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import VulnerabilitySeverity from vulnerabilities.package_managers import GitHubTagsAPI +from vulnerabilities.package_managers import Version from vulnerabilities.severity_systems import scoring_systems from vulnerabilities.importers.apache_httpd import ApacheHTTPDDataSource from vulnerabilities.helpers import AffectedPackage @@ -44,7 +45,7 @@ class TestApacheHTTPDDataSource(TestCase): def setUpClass(cls): data_source_cfg = {"etags": {}} cls.data_src = ApacheHTTPDDataSource(1, config=data_source_cfg) - known_versions = ["1.3.2", "1.3.1", "1.3.0"] + known_versions = [Version("1.3.2"), Version("1.3.1"), Version("1.3.0")] cls.data_src.version_api = GitHubTagsAPI(cache={"apache/httpd": known_versions}) with open(TEST_DATA) as f: cls.data = json.load(f) diff --git a/vulnerabilities/tests/test_apache_kafka.py b/vulnerabilities/tests/test_apache_kafka.py index a2435b425..46869ed6c 100644 --- a/vulnerabilities/tests/test_apache_kafka.py +++ b/vulnerabilities/tests/test_apache_kafka.py 
@@ -29,6 +29,7 @@ from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import Reference from vulnerabilities.package_managers import GitHubTagsAPI +from vulnerabilities.package_managers import Version from vulnerabilities.importers.apache_kafka import ApacheKafkaDataSource from vulnerabilities.importers.apache_kafka import to_version_ranges from vulnerabilities.helpers import AffectedPackage @@ -63,7 +64,9 @@ def test_to_version_ranges(self): def test_to_advisory(self): data_source = ApacheKafkaDataSource(batch_size=1) - data_source.version_api = GitHubTagsAPI(cache={"apache/kafka": ["2.1.2", "0.10.2.2"]}) + data_source.version_api = GitHubTagsAPI( + cache={"apache/kafka": [Version("2.1.2"), Version("0.10.2.2")]} + ) expected_advisories = [ Advisory( summary="In Apache Kafka versions between 0.11.0.0 and 2.1.0, it is possible to manually\n craft a Produce request which bypasses transaction/idempotent ACL validation.\n Only authenticated clients with Write permission on the respective topics are\n able to exploit this vulnerability. Users should upgrade to 2.1.1 or later\n where this vulnerability has been fixed.", diff --git a/vulnerabilities/tests/test_apache_tomcat.py b/vulnerabilities/tests/test_apache_tomcat.py index 290ca3652..d7419dedc 100644 --- a/vulnerabilities/tests/test_apache_tomcat.py +++ b/vulnerabilities/tests/test_apache_tomcat.py @@ -21,7 +21,6 @@ # Visit https://github.com/nexB/vulnerablecode/ for support and download. import os -from unittest.mock import MagicMock from unittest.mock import patch from unittest import TestCase @@ -31,6 +30,8 @@ from vulnerabilities.data_source import Reference from vulnerabilities.importers.apache_tomcat import ApacheTomcatDataSource from vulnerabilities.helpers import AffectedPackage +from vulnerabilities.package_managers import Version +from vulnerabilities.package_managers import MavenVersionAPI BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data", "apache_tomcat", "security-9.html") @@ -40,11 +41,19 @@ class TestApacheTomcatDataSource(TestCase): @classmethod def setUpClass(cls): data_source_cfg = {"etags": {}} - mock_api = {"org.apache.tomcat:tomcat": ["9.0.0.M1", "9.0.0.M2", "8.0.0.M1", "6.0.0M2"]} + mock_api = MavenVersionAPI( + cache={ + "org.apache.tomcat:tomcat": [ + Version("9.0.0.M1"), + Version("9.0.0.M2"), + Version("8.0.0.M1"), + Version("6.0.0M2"), + ] + } + ) with patch("vulnerabilities.importers.apache_tomcat.MavenVersionAPI"): with patch("vulnerabilities.importers.apache_tomcat.asyncio"): cls.data_src = ApacheTomcatDataSource(1, config=data_source_cfg) - cls.data_src.version_api = mock_api def test_to_advisories(self): diff --git a/vulnerabilities/tests/test_data/github_api/response.json b/vulnerabilities/tests/test_data/github_api/response.json index a1fb7e9ea..d890394fe 100644 --- a/vulnerabilities/tests/test_data/github_api/response.json +++ b/vulnerabilities/tests/test_data/github_api/response.json @@ -21,7 +21,8 @@ "url":"https://github.com/advisories/GHSA-qcxh-w3j9-58qr" } ], - "severity": "MODERATE" + "severity": "MODERATE", + "publishedAt": "2021-05-24T18:12:20Z" }, "package": { "name": "org.apache.tomcat.embed:tomcat-embed-core" @@ -48,7 +49,8 @@ "url":"https://github.com/advisories/GHSA-qcxh-w3j9-58qr" } ], - "severity": "HIGH" + "severity": "HIGH", + "publishedAt": "2021-05-24T18:12:20Z" }, "package": { "name": "org.apache.tomcat.embed:tomcat-embed-core" @@ -75,7 +77,8 @@ "url":"https://github.com/advisories/GHSA-c9hw-wf7x-jp9j" } ], - 
"severity": "LOW" + "severity": "LOW", + "publishedAt": "2021-05-24T18:12:20Z" }, "package": { "name": "org.apache.tomcat.embed:tomcat-embed-core" @@ -102,7 +105,8 @@ "url":"https://github.com/advisories/GHSA-c9hw-wf7x-jp9j" } ], - "severity": "MODERATE" + "severity": "MODERATE", + "publishedAt": "2021-05-24T18:12:20Z" }, "package": { "name": "org.apache.tomcat.embed:tomcat-embed-core" @@ -129,7 +133,8 @@ "url":"https://github.com/advisories/GHSA-c9hw-wf7x-jp9j" } ], - "severity": "LOW" + "severity": "LOW", + "publishedAt": "2021-05-24T18:12:20Z" }, "package": { "name": "org.apache.tomcat.embed:tomcat-embed-core" diff --git a/vulnerabilities/tests/test_data/maven_api/easygcm.html b/vulnerabilities/tests/test_data/maven_api/easygcm.html new file mode 100644 index 000000000..280faf6a2 --- /dev/null +++ b/vulnerabilities/tests/test_data/maven_api/easygcm.html @@ -0,0 +1,31 @@ + + + + Central Repository: eu/inloop/easygcm + + + + + +
+

eu/inloop/easygcm

+
+
+
+
../
+1.2.2/                                            2014-12-22 10:29         -      
+1.2.3/                                            2014-12-22 10:53         -      
+1.3.0/                                            2015-03-12 15:20         -      
+maven-metadata.xml                                2015-03-12 15:22       385      
+maven-metadata.xml.md5                            2015-03-12 15:22        32      
+maven-metadata.xml.sha1                           2015-03-12 15:22        40      
+		
+
+
+ + + \ No newline at end of file diff --git a/vulnerabilities/tests/test_data/npm.zip b/vulnerabilities/tests/test_data/npm.zip index 37d86e7f1ee2dca6ca5803ba4f9e43024e1b1621..fdb957a32e9bb63b4bc8b6315e5722cd2435d88c 100644 GIT binary patch literal 36323 zcmeFZbyU>b{sv5gG)Q-M4KOstAvO2lMbREwEqxw_pDC0|AB##=*&+Q%e&A z23fVV)%4#lcP}&;ScE-z7?|IFsQw4R$l+Feos8#`A}Tfg$`Up0$HH#Ot^9K7jrE z^%qdrujnoorlzKFW;=m*=lO4+6Q6RW`1u7SXFp|%_RDsDqoNlM*^;P37Np2AMHl9enGn}B#{m#H*e^*)KU zu*%zOnx#Y>#!cU8hrnjBM_yKB8fc>5-W^sR-%X1thz*Z5({V0(1U zd;W#O)-95rZ1DRX{j4NX+^qxDJb$9M_~=dMjt$R)?ceF0Sq#l_S{`40YX#5NAX+0T z8jH*t&JEX_+zycRx06(3sBM#}I2^uRS&)Yx(s>^%GmIXNd}j%bIr)c02!pYz#6S~hF#42%ah9s7-! z%YdR~Lq$RQo!)RF^SW^UBhQg%bxeo^Bu^^WV1)xk2lj|1D5z=Pk>kuCb9L=7As8qK z5R9}1WbdG8hcM_NdqJC4VHIjT^*B(3;@tG`5##qA_OXso6O8s>9 z6uy47wr1_tP(zOO`_2vFPhl#-1w>KPbCY3I&*mEPmT9=+tR6L^-{;$fz?KtW)>ZD3 zFp5@|sgTY!JEE?}M8O+c$N!+pB?hHCwFNPe6U*6p7qOrqPPxl?%Am?J(wRR}!5-Fg zoDf=h4kqBVjv)TrXIl9}!$24p_LaQ9YFvAnCH% zoR_WGf}g(K3W+wqnIQ5>uDAuV#^}wTS@NQml$*47@7#O8?Y=RR?#Nkth_?Oj!`PS5 zNPCE>pGO-qIt&cpZ=%h@)!xy;+Rf30-SeRc*n=GYVGW4aUUDD>V*4btn^p5uu}0T= zT&tIw0sUO9!#eq0tWkR-#E;Z*H}RVoni3FYuP!pEJ+~FrYcC{o=q=w^j*pVL9IELH zrz*!Ovs#ZhG0@ea51Vj#o-g;YN5ftd!*hB9EoN(nQNgC= z;^A0eUTq=|YipiL_wHHHB-dK9w@DM5QXx&^V}Z98!xM)?a>;HBN%=ndX74I{oDJzA z;_lKx-?%TI`U~iD$nW_R6sT$h-4(Iqle#xlUV>Il)goCo$UG2@6b|F2YGXfsP&gXF zKRTC05jSS1TaO`F6i6mD5(rEtX~ABtX$ZjK$+7I}nQbp_LtB68$B7Fe#)-~EQ*m`> zwx*%CY!%;&8>hdZ$8CudN)yU?ys;ZePYoVKJ2PcAu^wrOV}Ve8vyf_rc+lMx>F!QxJ3~4nc9#v`;pB&0hP7CckiAv z{?j4Nu`ikNxkUyT?hJ0{MXk3H&%FtqQET!Pej;{$?97+1r{W~~=WYx*S8Hf!pa zr*W7`Fyd>bKWYcl?6_uI;)=tW!60#Dp-?g(9o4_`A(>unY+=wdV}L19jfsg`oYR7^ z>CmaW^k@%Oj}?5}nIld2KUQmy{H|yW3rFZhlSGmihc=cAB&i~fJmjueTEBbiz44Y5 zME3mJ<=M?zQG*lf$6Uv?_eO&{HTlzRPgjg&=`Q8>O)|_EZ1=Ge`;=eZ<_>xC8oYRs zl2NdKWLMVu9^Wf)NhQroaO9Mzx(hw>2-Se9%pgpE_pGKK#cob|Xm3>r^T1=iK^;*9 z!*+apf3s(8tzAI)$9K@vu@j&y{DFPi8xw1RSqt3_sg8ql+B&AE?^V&K3yYo}5)#{s zy+FBNHgI&aT2wWbql|RU&U>tvLeCDtszo8(!9$f0vBRW&sYoG#6)b8iO?~kZL83a& z=_ApLZxtR2E|;*L4iF+&Kwn(teMB(7CP{BKOm5l$ObMy z>zsd`FjA<^(1^GLiG*7d3Ey^v=JCu-q5SUl6xx^N{g;t3ap+7VZ$%kjDx3nfKYm9Rch-bFPe_flR_;@K{s_l9aR3L$MxXeoF&<6wWB!n|{L+3> zM*Wy)<&Fai``oGnaiDdTDN9a9mf?G)(I2oH%0#=QFsJ^w3|k$_>euRV#ce6osVGlA)uDBSXs{QPh zCHb4F+uob4Et6Ii<$3RwPh9g0x)@kr2#6?VeXkF{WWD}C=wcZQ$EwJ<2cFG@U&I(| zwts{CaatezQS2q{p|n2mvH-SkJT7reOXLMQahIm4rc*}*#pP8**Cxr9EP?gpLc!eX zE3dDoE5ZsHgorh(ElYz>Fx#5|Mz^6u&Z0ILAt*_~l0ST$<5!pUx=w!>=GhS}ZfQ-{ zj|F*7oQS@y=ezfG{2>!{VVxM-=k%)C^<9_?v-a_Y4~f=<)L0rzxTP|gr=|ZZFo_2u z|KrebXT&-VY7=e@bxbEIr&QO zXAJZ>;on|uEq6XT>DcMgJ5`FfEWpb`n=a(gQVUNSvC>$fd$NK73Cnf5KkX*3!EDh% zX&za*$u(X+Xby(fTSh`SM^6zRdQZbfqS&_sep7tYeCv8)UbNSiAE~97CSHs+d=q=< z8pQ~hkqwA2Fb}=q=Ut->s#010rfWDky1KEuJDG#rApfvku#c(X`g5TpOx|ah1)2Eq zcp-CQQ^jLWyTz=pAC>Lx9*?)$bM_>inkrgJh8rMJ*2Xh}ar8j31)rS6)j0CdC9MjmQZwtwWU_BHQLbxW^_R&G3nQJB+2!!9?y6_=8Zph;N%em z`k=i05-96(TRg6NsFdgu_XYNWffV%1&M@d2AJ+ME2Hr!v>fh;RCl?6&AKLjJlAslB z@SvNe?`y&og^Hg>Yu{>1_&=%i8=akzdd8skc^|%a_3Jm-PnV0L0~enhR{EXCy4XhH z9BKFVG>B2Ccd?vwz&LXK88)9)1Q0kw(AReM(;5Of3h|_^N?u_s*^lD>#u8ofF_voJ^4-9=ljxhL$k5?++&Nt?H)r6YmuAwd<#co>hq z&?mwb4-Ac?y96_0yF{kvi_VpEeiwDh=Cu2DCujG38v9Eh3VJOf(OFk`?W}yV_WKLI zIw*Lj+PGx6&3|KW!P}LZIImJTy6ImBbgT~%9n1nTGt8!oWZPJrpKi<>G*BPOc^Z8& zsuTj$k>##(Euae01<1g{62_h|&B=zGjecEtEZm0Frx_4Rm88~L;c=^>*L22v1XuE| zvz5tCx|`{Xvb&HJL%xif?x-@4;yEmP>_27ZiZ!|Kfn2}ay)83+?_N6RB+Y{HuJBYx8iMxaW$E7f z4cnVaag$ro!{*z@7cWQIf?V?kt*P2)CS9LIt7Pv!e@D3y5t&~H{yN<!x9pPu@G{l&a(Sd8O;CO7s5~jS5W@H$G zEc(5-aC){!xjl!E+fk1NX^pl;hE#^h3X36sr<3P}?F=N2uAB0<{UUmC6a`g%7oKRo 
zKP*7R=Nm=}$^OnY_jFDawTd^Y4o}#J_=U*irH{tQqHvwx8HjTGqqAYAHP3U_&BF%p8ilftJ=IWj;;y~(AYe** z#DJg>JHa^97mKu!vL1mQcNRvRHLtJbX$TpDJA+=f91Tv7;2gsJ_g=YvkA)rWF$m#} zZB=vhwL<`PHZ8W-TN%{zhQD^^pBIxn*Fv;JON(^_+_X*C6El9Fw}+%}5bi%mdVX+M z)TDra|62{lCAVY=h3;D(vi#@yp9QT9oG`ynk^eIT{%vsk`|RQ&|Lq|zmjA~p;1i{y z63B(!cXEfP!sU(I=*^Ro)JAe#(D{@!+g>PO=gb#wYsM z{4IIO>t?rbmkW%3QZF?wHt`895i>w7jjPAx*-;~(q^;i1ZOo1HVCvZKsc5c$ z>Z6vL7b!zS+#s=9F$oDj6k$?K#SP}w6pN$PEiEMhqC&>D=Yk*ccNWBfhgVUpk!sTA zmfj#3ILMHLOfEhI6W@Ih z>7OI!zcbO$MnEJi5;?wHUDcu9(}AuE*G2? zcR*Lr5zIsbIE21c6An*%(O(426#F5ZkR#8SX@0F%wc)eQ86!V;Xk@evr?=DK6V9*q zuI^KXaSGe{a7PFRIRf!pAP2=;fyiH*8m^KTgnPMKeAGD`zclF66RdKA+QX> zR&i7Y7T#YmBIsG^Mwe1E-^nlb<3A!5j^`hsKMGq6O?Fr(#S1sGf+f{ewg1jxkvAB& zV24RLXyacoHAyx--jtO6sFh^qGAsD%F$}fgCQN=}z52lZ8x2lB2H)!FfIYVnj?YDt z$GUH@9h&ubs0+~uX(om;Unhj!B08x!)V+vYwR-o>=rp&p27Wdl#~<_5%vCY!(Sk<@ zuO?vv!FM?IOCf=4@vpL_nTq%gvDA1PY%U&OM^Q@S0>Tq5Ra-sV%CLsNs0z?@e81{y`G$s_ zb>9qeaA&jqq$z4D*3lxoG%Z={8{G?u-QvYPyQXU1x3auqBDcPyukMxa-;X?-61-zW zRb!F$Zi<<$s7dB2cPlbiY+6L`WmQFe#a#2UX`F4eKs`QIEKi$l>4QacT{(b> z;5o-*+2gGOmWjC(dQFLN3H_omo&i3AoT1*S+4w5`uTn$n1CNwc|OeoRV@4(V_2-_$ z(P-=@R7Y-zV`!bzOpKzwFs;;j!4r}5<#S-24BINJ=ss5DwOT?8Tdw_cYiIJ%FAH zk*=~%uCSbSi6mW))vP{VFqbzxhv-8WUST#?!h3M%{{H8ErUPP8(-CD+zZOYf2=1ow)uPp%T%`QJeJ0!kpe+`LR6mq=ubD6IDV$O@r9o_3@Vj z0TIs87~0#~Y+aDu7p6_Uk6vs3Lq&GY-8GjGu@GY4C4ditU(ox;m)>m)!5b|-Ls(`p z#&`BoJOw8hfF|XPnW>PI;~~xG7Qr=_+jeB5!p$xfTgN`9|@yY|0aRMj*=IMczp!S$g#k3s#-OcHz?WG2e zu+`VbMhsTwM*%l`IPRGwh_X7=fsyEI0rmIiEez+Bm#GC`ozL&TZ{CChcRzBPm`JI4 zGPR_4hiG!xWJThf(6|M@=0;HXN{6H9rzaXmOo&aNoKPiAibz*o-PA*I zO5e8BPD+b}JyN2l)ANBy8^b+IB^sYKw#YVNnOSjPt>ZmUoV)ND_o^M0ZVmn@XV5s$ zadT%(V>}WmP146GwlFo7$xd79fxaUq<=3f^$&beqBb>SV@@L$!M&(O*2m{sor-`zG z<$VZPs2@fNaWTmIJ9`b7Sudo3;aJIU{Wf|oNUt+U35-mP5~ZTyi9|eGakpb&b6XWl ztHh_~gX51lB&?p8JrReA40YFbmUYYOK423Q42kp;OYdcQ#43@?-&@+-P3nwf9(~|l zJVBWA74W!;vZz5s{9P>fSVA2ES{5Z|ZkI|0M+K*GTly8SGTvth{bFMe*kLBA4u4pM z7PQ9rv75cp&7hcX{fP+4l$#JeuRnR7^MD&@i{{{)f+LsucS^j;7|D8rS1$vdwk_?9 zAVUPA|U4c@OHW? 
zKX-a3r$Lp@v#ZBiL?kW68I9gIx64y~M3w$j2-Z&Fv8o%MX5|tvXXDmZ%^h}SC8NG> z1Ht4}KFGc5arJqoDcWYULJr+oRdw7}DUuH%0)a-JK~l-rh=+(Bax( znQyQ5!z=@rH_Y)cHNCc-_Y9Oshxan4DTs!j8>1YC(`YhPt{wWxDlx=%+#*d1N9xUg zOSz9KPY$C*aXR%BxAYFR5T3Z0bjvsezNtM*G!`jHb8kP|y`$~sSm{po`th23a%}IA z#**XUUJ+*LDZi^|*m%E_AM5RM`V0j>&f4&VVVfy1kM|~!fl?vn6+BA(CUAZ-mTUrT zAl4$kL^tEb1)3J2S8DcHL7>B^?^pKw=uZm0mj@OxLQi&;Sv+5_tPZO7RF9VF9=jQJ zNM<5GU!sI@lG{4FPd@Rg?(Ht?Q-nMa$D}JTxSROJfu*hLr-AmHCk57UCJsSrH9zhiJ*bMn z&$?=B$S^Pu+TrJ_f(oiR{&vy^-Q&1Gzz}PXe<~~7RY#P2xUfYAOaP5IK2HsX-?7Mr zz<-<8rfH+ra%g_1^jJ7-1zF-&*kd>s#d%@wg=Ig_k`QTm9EJXnerb>}hEPZ~3(9m7 zhMjiFa2Ii$ndH2_|B#w}Szu8b_adeUYy3B*n&Ac=RR#>bkeu&9Z(_T=R(O!iI+8fyxXG&lWgik_~94T zQ;*OoSR;Kme(Xxy_T+?kv)iJViX_FbL1LlIBc_-nlT>$xp3;l+D(x$&B*B zfib<`6Xc24iC7qykRx*Dqvh2n2(UHUQieiEUL&2V#u#v#K z*BxG3i6}wp4W{bsHfY-7WmP*QT?8ma#Vnv~P1g65MD5*4m4&TrfPigyDb{9Eim-L# z`R-i03T(~e3Rg8D7KiNh{&ehih&{1vbV;@5rQZ~Y8{H-_#x@0p+79i2l&g4(xgVy6 z=JQia_gq^DcA#^rsf>n4b{mU?GF%goYBz@$gp8>!e-WbZSGCM@;q9t#r=q**cZ%(!2auo5P77_OhuS|*GY_NCSZ&<0N|B(wDc9k9 z^OPueC{Jzw%hIh~BQw{M+t96EF#5-Zf+dj=Cbp!o_};fgB9^mN>T;9=JR9w~%!yLq zQYJ<9Ei@@=$-&_Elx|ka5T|@Ry#(sQ@5-E<91Kul_3h(gz!Wnq0vCC(0$^y#kD z@o#5CQ;nDPGx8+4clVVtMsSb6#;n27KDpD6O`VieSR&ILa=-0(7AsIN(A;J|8T|fI zLVC+4Ug-OdwkpqoAUlqA=Z)Asc@{dl+Auws_ zM(hlf6Yb;cp$E_4=)NhrJP`}QTAM)r3gUdO{o0dDOyQv2VBY!BF6SK&drgWl+BmX@Jb&&-?wER8O<9B$i-#J0qQMxQj;Amo?Wh(Gi*r}iUaqi zW#w;K4a&Kdi`1j?HVsSBiKT0v^+nYtJX3%qZMbsVWit=W#WeDY5W3)q_ppy}lZ~UI z5_YSOgE=z3d-Z)9i01Fk>pksz@d(E!Xhy1r+%(MvoG68Uo$7E68Z9MYJ#)sVVRRRLu=9EEFJV^3%&!i+Rg= zB5R)RwAF7d($XmGP#?`S9(>z+nP1Dr!by{piJEd2SjGdMKwD&50Afd^=N436qOV|NCu%k17A$-wM-j)z zjp0oQdQCEP_4q>~=r#A(L&|Q2FIWjq@NUdel<@B+GkJf=fKC!sF0ni_eX;gO>ZIuc zFrzf$#?r%O+Y=}h^*)#9v43eqqD9@``CN^gcd2Upp3gpsm$o5nZt=s(i#iD}g@Ey- zxi!3kRY9NE2EcD!HvvJOI~J-dx2Ok(2%#u84sq{LtV?g-B(C~gDqJiZ+Sae@$d(?H zmYuE($+tK^^FFb+4PRmA&@X;jIDqc|q&_pIe2pBz-k;)~gNgmT%vaF~E{+8MrEw<( zCvRs{_}xL-&|!HNr2lI15OIk7Q_e=cj2Acj_)lU;JrK#F){?e3|TN=7q&Eh zU}pYySF5((M)Z$-Y#PXLa1Zs}PTv91ZLeJNh0p7jQwNIz_6=eeZYW!0@jtO=f6MLp+)lF5Ix^BDYm(9Ymjq zI?%Jf>^;by|FAanbXn{16xKGO*Gs(o5>vFQ^p8Tr_EkXD=>_7e-POk#F9f^woWmSY zwC;G{*^Um3r1IF_J+;}8>A>CZ!T(nE+^^yNq>L;w_eDv-SAU|kXIiM?_K}HE{`LsP zel;aBs`Q`hgezK{=w3C<9s0!+7-x2)-oX~srO&5wb8ajdY@Blzd9*4;WLLR$IHDS}o&ayxi=^SZ4j!AzQ>hjX|gW z=*mR2o|#&qC8sB}Wy)@sVuiracKVJjZ<2Yn{J8u1m)}laNvNgh-4S459;(^zbq+d& zh5764&Hr|}*uwFDyt(-W`Xv-UEuPj6P*8{$*v{Sjm$&}dBg9d=x$vYeGt9EWi|0K! 
z8*vSv!#m?FW%9>tC2j@JM_2;mkKYWmCZp!!%+FhanRBdl8BlGrR2*lXMqqyCu(x*a zw>{=>EpY<26LB+(lH zu?E_-`(rr$;G092CoZ9A&OXpw@E!@mUkay$|sP9HTRpx@DLU^SAHlAsAG7YWC01 z7N)&Fyf9*x7vG#_5cj9TgVnIe`|KBUh6s>#e5QVrc+sD`c&0f$@->fQr^1xy6>`h& zyCqO;)MhRmrIA!P(QMm=_8Qy%d~>tC#$g1Or@6V?#Y33Tl3_dO*DowrYT5P~blzeT3?KzLU$!Gtn5vsd- z;SPdh?w3$^A)|0!KSzOuYVQX=|GWFhej3-mG3^4e_@_R62!y`|2F)*!tso%te~SOt zyX+5ZV){q;_8?a`h|6y`a{vE;`CkI&&#C#~W|E)M?r3HM0lWRnoqt30AA$H++@Iev z|2Hm(|M%Mc?}tJj-v9Do$j_@uCDdfezcm>O4Tb!Z~4CV%#@qvKmTwnnZ zH$VW)%_j&lGqbSZ`cDx3+lapy09y&8a_T3% zw0Z>LkIn>*Zx>1~{#oIAqwAnA<`e_j*K+&0$WG4rE806SJSlu8Vs|V%>?$@CKU~QI zV<(0V<|IjHXvy7wV5i`vweptS-EyYTH^Z+LS*8P6v>JGpv7d5K&(W2FC3DZ*GQ zbLyZxJ#h7hgFt_I2I%h{#LxY^gEFCx`S51ce51fz1Q}{9NWd z5CES51OVU`0P+c%37A3nfIy%*2nYmN0C;|L(7(U@;r%ZLz{bMa-Fd;*X(~ZGD)#oW z31Ik9UjMxOY*dJLi25T3IRpW!IOPwUFXqmVZ9K^tbM8(?%a8c|cm#WgzPAA#MLmGS zc3DY;eI;v*otdF*jEYPIyOxD_uQpKN`_|19CQhwIQwerU%-YjTIL>HNO_Y&LZAVZH z!2;dJrD5Ta3{K0$g+4P;e?|4#k^}7`aq7V=rWmo9g!i4AU*eawQAw=ht=*eeKNHU` zDJ8_uV!h+fQhx@H&2u2P`5vewdHjWsk4fuO9!n+?a%8DuJ?H%-12-t z*XHYrnDTQ=TD1Ww6rF6$*qkHUbjOhCx@A!Ez38+QEHa;$)KOHEWQ>RND`P}q)2`D; za8Mm3w6PQ_xN5_ig%KRsF^_i&XkA}sszY`pfh%fwZ~o|ZC1zqB{=C~+W}Rst@a^e% zkFAGADPP8Ll18ug0F9NU{1+Bj{Svz1+#>U?4nZXXB(e6(BG{&AU5Q|CEBotx_^@Df zQJpM|+GZIemV~@?s#Js<6rH}Kg;RwA^8GoinF5L_7F z{C!@hj?Lt=g~rIpzzz7ktCy}g&Xny7-UFMVt)RjmDL>HaXk=67XzbtStUnE#_vjfT z-ptUAkR`3EYpob)+?&>Gzj78f+PWgLXQDEYkfAh0>bXMoP8EO`Sa~FZ3o-9hTBIz% z@HBi8sujOex1(P+)f(Nl{liz_`87O}eo*2=4mOnCzE6Ut!b3XzuQ_PJ^FQVw%`bBh z3e5}T<_3Vw0NmVM+=2oC5EmCeh>x2O2oT`q7JwE@3oZyZFW)b7kOpoAnuBecN>MXh z*xRigW^rfr3dK}a_)Z`}DlNEBL^|#F!c~kBMPP&a8Hd1&4jvEOsdxdWoQAh+sh2w? z5%n0FTJC3`KH`6=P_3SqIIk|QD0JL-M4N9~#SGjoF4fWUX(+09Z!2^BP{F@_Y4!$F znPHD&vm=Y%It4dX{7_I@Q%%Ec?bzc>*DW1RJYV=zTV&eHvBB3v_8z72f(*lNhXDb1 zaYz_1k5z4#OnL@X(x&Ghrv@6uC3+pG?WfV{-0?1b-6~a~zmpwX9TuPQEn1grr*`a_ zhIh{ClsS1dN}v+Jzf}bq4t=@qkiYxMy^iDKwuB3j^Z>+=X0`n}Qq`@8x|{M=g$7L} zy9nDg)_Q9-MNZHt{5OPGy&AU*qaU0;;t1f$ieOuXbJIP2T@#j0PJ zA`w}Olm_ezGHRtGbRybaQ%QM{et)1&iVH(XDX!OIGzrpho@@zy)eI(WDuZ}T(wG2* zhFIPuhld|A_An3KZxW5)$u_pobi)pr{oW#gLLR+UYL68E$u&-3yq*aCXEnFs$rsk5 z!j(P~-ASIwH#dFcBHVtgDU95gaW^H;h3^m>=v8F4k~v9@v(}$d)R2;t_2%G(%oHUZ z=kCpB8+`hd7PYa`a9L7ZoM@$jv+$Vv`culzd3f$f8gW`w7)5$Li&^eBI_`VfEm=DQ zQSDU##|J%028)0`Hx(U~4%h-eDe8xmh>w{KVT4BJLv;SHDPhk0Kcmp z2SZ6tti}Q8$(hUGi;*MQ4xkF$%mZx zA6yKPzqqR)6u-GqUVdWL6Av54)BTz_P^_lsYVDXKXkt{qSxTUz`8!IT8iv>Y+Xg&t z-EE~%0^Cjl`pU&@*bzFV+<>^bMC=(wv=dRIr;_%IXgh-d-o1q4>dFy~ zXVgZ`GM|r+B!_Ed^nz3vSXCY5`!7(M5tmK(tw)lW8+k_UHlEn(HMn-lqt&*$wPsrf zso91zkazRy78g*j3J7nVYjL8NaLFA#K92gGg0Bf!tk&&Oj9Gzb1tjN8M+ zK*cyKvx1Tq7cZ|l7cUnV4?vBJ7XX|YVHuwQ%jr8c12#^3LV_iZ-U(j>C9bg~624*P z=HvBay8c{FSEm+g{RU4`TeUGzqbC$A$`B`vjVi4CcA?wV*hKmwC{4O=8-w1VD73km06(u8AHNwd01N^{z|bPj zXTb{wn?rcP0$>P)3&0KG`87M7Fq40A{< zCve@+hL^8ypfan9eNp)Anue}+iQUc=Xd@B*R5J&uS4z@!QQ321F~e0P@whzj{`eW; z6f)(rm$sJeBcq-Lc1n+CdOj`q>g2DL(zbbd@13%cu<1WZ7=c3j3Pb?2Wz37>I;tUh|uPCcHrA475)5< zN4g{=Eq-2p6ajt>3Il@Ws`|6pHpt8H_kz{^Rr{|(-3lVvSAK9C`9^K=f6_YNS~#$E zfi^bGM-c?i8+uJuUuUX>EC^(SCCrkGUM;=1n7;C4 za8iG|?qyDSS0Ey1{m3lFWJ>YiNmRLJSN2jT>1Stej4UsLJuQ6A0f&4ZRuzs|Y;vuF zLtWP|@ui{*O+6{*qXXuY@SiHZni-8HBY8Pl)9~6eikv7E1GpDiQ*j(p!*2H;dRltLig>dz2jhNkdALzBGHU`EuRWWAmi(A&k}_h`r$Cj^D4OWm zlDUiTVwnZO8q7{MgW**0Cy30lg!~>){Eik3rjg#4h$0Gr;Hc z%a_O0gZxns)#RSZRZIdJv=4#%zh(>HzvSWH1nbYU1^$=W0);l?140BrykIVKK@b=S z-~xdGKyHXRABdk90OI8W3YrUYgMOb{{EY#y@Nl=#Y$?-FeAqGV6uxy?)s~NspGR%1 zB#UP8>xIKbIa2;C4?dVrII1K`x^kjb6*boiU>X8r5QvlOB<sb5i(vhz(a{0=JvT<>p|4?s0_W4Zx0;JE>!*I&?(jd40kJi?P=eX_w;94QaG2 zzRau`8kP~b=TdB->SYJMq$5>_Ppah?=exl00_%Kkb(srUne^fjzHKlQ*ZvH+a@G9v 
z+w{7vSH11KuL386&X7+Aw?7%BPl&+;%5fb&#C#vZ8Y2oc^d7?RkC>nNKXnLx$nPEE z!$0W|e}?{b2ZcgI%y|SsKmk4w^eVR*50IOW$BfUy+??NB00Px+(2+mT!or;UH+}Ip z29%i2e)x8Z!N;Wt;4(M0S#h(DR7$-*sYU;MxO=bxAG>;4J+m-_k}rsU!wE%7~o6GD5G4MCX_lda=quOysav-eUIw zN4Ky8u3axzF2I6=NdAv7;RKqrv*LC4a1E|xBLub!pIzylJf4tnyAt8D?~mjET6a^R z+6pVWYN7k!D3$cx#D`}`A2|D8!x;Flf%E^_QNP}VK%x1-KnpO4hsVO)3<9x$Zt(%U zU;qdJwtxsgCtU&-Kz?pPKJYIc^{Y*Yl{waJ_1d?K*icmI8*oQiob~y|Jd%dFjTM|4 zUUi*A0@g|CCY){Wc{zuxVYjH~v{@hxS_sVu>5vAGq<{ee=?@|K_B3YR6RB@}Fv)LO z1s~YW$ywa`|87FM|8j2o_v3sZ`1c|q^-oT3t^f3g#{{8QkbaQ{U`~DjzXi7duOL)m zgUx|_yyigY9K{U6Yc9ye!^g!1;THh&a|3>3*Z05wxiVNm#QtmOC^69p`cx|38_yLJ z=d9Gar)9eJGjuWMp zpgL*Xl~~@eyE^2wr+9UwgTb`Dq-RLM)O!r4+d>Sp$5Gr+4>PT(LTVZbmUepUu_Yaa=&r&2%0^^}%!Z&cb2=PSCK?PsGxc z^IxEpqs6!q>m-SQWQK`B1mD7w!d_s?XA`6p&liy`$jD5T@`9FBX~F5cA)jkp2Yfjh zW$0qUjXa(&Fw~;+Ie10ozeD67&mezssooT8BQ+6QRK3WyD)ua$^~lN6GRIIqLV|7y z=UkHhXnZ90Y&U&q8C~dXWMpOBLGa{>jqPIq+|3|_Z&-6#w92YlEz(i*wOa4Gz=CLm z<$3!SMGK7XXJSLG5Dl%t7XPy2 zg9uaG-3<>pg*iHVpASxW(g7ZKKSXS}T*N3aE;3%G%C;%jdg7c1`&hka`dsn#8!IB> zj8B5W%5f#K8^-mKv18ynwzxsB?-N%mHFEJX}Usq?S zepE2&(Im$q97bO3FBuCcPd=C6VKCfn>?|wi`_M<6k1Nsp8V@NaNFAg%!WNYCigAWb z%Ndo)&%EkAn2W)aeEH6@DQB{M%o2YjN@4Ml|GAuOS6$O52 ziQ&)7gpemqgrWBmE55hg$eL44=1{cv6J%fr*IY$F2>ghG@D=G!9Zkfj z0NORoEkNAA3#Pv@0JaWB;WxpgWCOk0FY|CR&-sWJA|sX^SMCCvX==g7=s(C2slRu$ znP5m2=Xl=Mx4A#9e{=!!W%{FfX%^D%6a)tK;4xzVqaa?JG!!D-$S~?;72}a*`*$$T zsf3IiadAd?+evYTxOo7Xv8^_u9|wt$x}JPr*BbbV+}j@wOO*3#6ziMIjmDSFwVsDe z;7Ana1)VJTqzH$qwp$IbvinX6K=R21WSM=YHa2sU$9e}-j0@!Q(9aK1ehf83$$B8{ z4=??BcJpg53H(de{>?t~4=?>0+WnUsT_`jckPl$NYi7a22bD{_&`A&%klW0H&)k9^ zBnW`cgm}$(1X zY^(j$TY7y_Qp)x2^g^OXl9NbKr?2YqFaMq^aqc4D<0vb9SkifplOXg%RsNt@cl50` zzKZzuw$<;0%x_lp^DeVURjiyr5_w>A>_-cMbLDfVq-w!Z<5WRC7k-&}3(3|Q2X4vk zAzX^iZParOy%&=_c`Eg3E2*jfYe@EndcLYY?$@rBExZGs7d98^pFj4|Xn|?7O^a|Y z-`{oTdK+|it=VYsBxA~JABxuOgAH?^+7ZtxJXD)pSJYq<=Vz(YaBR2LLka$s_e+no zO4~Oki5OjT)4YD2xFfe$QY<($C1Rb)lr8hc_gi%QZTR{-e`IG|)*j9Gn&CW2 z|NP>vCp`)*>Cey9+7THU9Q>~2v&U&L%lL!W&i6r_9XLF5mccJ9_Os0D_nrjfRm7ZS zaM$cZ_ej@B+?7aDv$eA#B3ni)6*XjEtwBKINCZ0qGSNsX$|Rx+Nn>c$jPU@>tu@_xA#6f7EdeKUEfw#-qp6mL;|tJWWFA~Y~as;bEAgGd%6Ao39l?w7cLN;;8HcpjbMNn6{RES7OLc@OeL=EEs zn8x|zI601`e>d9nz5Dh$u%P8jpM1mpf&lwD)48gBe;N(h=9EM@ZKUI{Dn+rkqIOop zTTxs>V%R4&**Xr=wK7rJnnbWyA`xw^G>~wNI?38vS(!jmMLa;+M%`LNg`lJ~{MCiw zxsxNKy0ko-eQ13OZ%ka3CU@D*+sMrk)jV?> zm}AFf54C{`x8SLnZZu9)S&ze1aMuy+O=Yb}BB&uhO4dYcBFRo&gQ#Y0gGBmlREfws zOx0FR%|=62Ny&QX{ev-9lOi}=z*?+=ELgNagxmC0+@XA@-uGgbCx{IfmmKVIGPo?w zvpz4s#%ofOM}xKf)i}zHZL)ik=gwFX${BxjL3Y9GTj7M%^>x2>#3GZ@xkipWBod z;pV{?=9>`UUAmq0$xI#$!3El!@@T{oz#7aUU`;QO#8i_GWdsb_fNg8%P8IU))lGrN}>CG^(Ycu35z03_Svi?31Uk za0Ul~oBKgv3R%xoF!-k-&2{C8^(Gs;By>#AzvMQt3U;)uSr$KZ2FtVO+qOcv)f@IkIa$GauzV6j`~o4e=Zg{g69xm z9i#-7M7D|}ob*qhL=`pCvR!+BmjvJcd(gor%a~+uT<=y-;LrbNA+*v(r%6MyMZZ$C zIG6LS%Y%&11b*>F)!C+7W=d-qSN7izkBC=kttgkuNgS9jsoT@aJrG@{6kF_dAmG63 zz-jukWe(L{juA<+R+^KjSX%x4tH<^ey{&d6?wp(xR6~CKbg|Ixl02E_$nqc7q_;fL zmy<1O(kNWBZB0eW5)BN^cFGRuxhn8{b_CknZ7v!uOQoimQ?&J73!No|t&hmvoBV!$ z?9n&xi9mZR-C|d1-d`fFqu;eli}y$M%Xy-zg=vWNcJ^B*M0zXP7?3_NRn^Ks zP+s#{_?#7u+l^(#b1G{nFDebL=-?cHs>X{qZe zvADU5#hTZzUeGt^tUUGC)*cPNOWV~&`A z^`zq>fAz2}Mq&azmDb1pJ(K&Xounh~KTx2t@>%~@DSngN%OWk~f;;;QyXm?OgpF>w zzt*MEPK!Dvc-BW=+_^w1=YQ$t5?_wj+{x~~95uZ!Kr%N=?zVQPUgo9Rivm2hhF*Es zivxnvuH2qhuCPJxs8ztg*N#S^RM|e~vTVyF?*;V2o`k#~Jm3}2^}JtfAF{#+Zah4_ zfeu0sa_}8Uei?>?s}fb14!--hgM&{N2hXPzoyXBuZrP4i1jVYE7slsEXkXt>uwoYo5yR@0Fnf)|A1^t6%>4q+{I^p_j#k4p|Y09;xl-b2G10jp6 zr4e20z@}4zh_3ZgO+>9z@agrD3k;-s&m6avE=!NUJH_Z`q!RZk#o7mH>k|$I9hS)Y z5i8&$O_Y`_*ierK`(7h%B@kOb8-6oa-Dcaar-$FSAwmYHW|cs$$DC 
zpRGN%YXW*TTzIDPDt$PrCr50edc}n-P1U`Y^LD9n@VqOY?-~?SQWB~X7G^BTGwK() z@3rr%!-Li;r__a{b>R#9w#=xHl1xx8E!T9WJ#>%yocd`8aJh9Y@`}8O%K^t?y4)(r z<*4jjPMzs;cE4Q?e6qM4@=i6W*AdF^mD9T`BHf`*O%}>NE2Mit^u;Dq*;!Jv>ziyh z`g-1zG^RWZ?7X49z--0W=W%grZVgZJ;=OA*e+sNw8B^q_q*?3o%i(C{29C*1wsC99 z0x_?w_Z7hmU zZTgG9H?Z7H_S@pOeVn0*2~Tnpl~c=}|8^ zle@#rxup^XGzHqJC+EfHuW{ci`{tag)~Q>sc@b5@MV)5aN0G3 zHGXRmrvqoV!E$i-anGKYnALj{t38I4Z% zLu#6iWHgSucbMY?fZ!``!1XYUmPM@@6$sy5Jgg>zucrag3`WbMh9DDYB%N_wp2L6w zU9wsZCPXKzAZwbDP{P+vWv*N;K z#L)v?B!q#BRAgcfHqIiptk7@;WAs4l&BTTtSJiS>Sh#pED38o&bgG^xHtc^Wmdpwc z*GUESfff zq(pxuz~w+dH55k6qK1zRg0nUXObCuk0}M2wWaOA-Bq5yTMwo=a(hx3?0k1)_sPwTx z{w#JdBbviN;OSY}fXE@E(a8?T;m8ZCyqmv8SAVINO9C(a{DQrSe zkAu;&s8Yz07#6c(JeB}1`0t}eh?(|eUDJbd6pThE$0&`4%E*lw+B@JJH&z^YRYxax zVdCH_R)O|KxC8?@>|``)s-ZF#r3ouu0ecc$o&h`vn)_)aMQP;7GHi^i4g`}sfPz!s z(cC-OK}}3u2>jec(u6$S~cK`(^ilVvKvV)qK{8;?l>ycCGvFAIkY*jROI1LodoutDC zYGSfi@pJ!<`9QdGQPJGt6i+mFGcp$U3Cm4|xx+fYk?(nNB=$D;xZ}z*WpW2laKav% z`#yG16O(s}pS!*;7I$1(gJ|w>;uf0wC3a8~lSK$~A5EJC#ve2~dHGr_?zpnYnA`yr zoR)>=9>or7VzSBbbKhcs#T{2}7@9kr>4fH<#SUs>a>ej-=QPCRjwjs;%^l7|LUaEg zJE)0Czk<1smi{#C@xbSI8ewwBlMKb=4$`4uWj&hvJ$6tNlN<$ehedIKGkQHALc!#Y zN8Zom4xnJYI-2`Oc2E-|_lLP-f5#Ebu(;zA%`>?JC|E^|<{rupYGOq6Fn8>6kFvnx zj!VkS zm=AT8=IkSP-7gkBa z6Df-tJ0=Eh=`Ul}0$%{aOIRO=2J^ESix3g73=;+ek~>(}ga+%h!vMo4z%w=EK`!Iv R;MjotEM3gOaoG|4?f>_U4KM%z literal 27147 zcmdpeby!qe)HfjADP2nUz%TAu-hqOAbZie4Zoa>LkRX59+S}+kn>abM zD*S!fxa0Siq2IoY#mwTLYZ0MtuH~z39}|LHBME6FKV2&;Dj z35Ub!IOfP{rw;1oIg$TRr^)6(fziUL03U*R)3ATSmJod?;%H*ZLjn2EiPhZ1z}Sh^ z#=yzh#1Z))b#HD1k^K8}bF05u`9obpM*~|Ua}%e(IYNZG5dw2UwaP;-)`m3PpM;F; zY)vi9IQv@Bd?`@jMmS`a5Lp(fx`g8mCBmp%Iy-hc1G*ENP89a<-l;3ZQ|HxG?6nKV zYtA=GG3pVmvs|VWUng*F3z0Dt&ZZ6Zb+QXh+p+uc0m2c;1t7pT#1sDX0LCUxMvfNt z&K7pID1-gd1G2J;bW+SctV2w_tVLC760*Haigdj_OllJS1Cpu=@{-JQEPYILy#vF_ z>U6#8P#aKv1@|n*R$%_1{7(z|k*lB8Hn+30`jMz0`Ew*3Xeg)!NTdJhIscR}u(!8< zY;WLfWX^2kWX9rTU}JA>q8c?T*UpA2_~8hTrkDmZ7w46P#6HHA;ADM7^&D=a^__#g z+)`Ihm&joUHH=i|PP7NrN{r{ve8q_?#qH^L{2GNc1+ksc8~L6Je^NrES{#RhPTujQ zaE-#MDX=W(zErLm22sW|^j{BYuZOA2*4TLwG#= za+mMULkHGA_}0L~WnvN9sZM@n6273zDB+``T)3#{8s+!oMKm|-2D`%@*d*D4v6nB4x zp$qrPjY&ohlM()jxVvd`0t&n8^ohHAvBHt6AC@B%<0Pz>IW0W9u!Fgd**4Vq7)UT3 zjQZ{I_Irsx(&obZ#c90T3*`-YTg`;`v4SNm$*L^x5Vghqg1jb07=s}uRsc?r!LcVT zFf%c?=cz|%6>+YXa48$E0HNWed(CL@TbqU~sEy#HDRsM{0*Eyw|9iH)=2sq-Pl z^0kSxZscxUZHGQ3nD1B1GfVHLUVBw1(LCvJkGSTs$l+d*s_{KvxW_9)NO~`=Oqe9< zMagne@O#tos5>7jhq)FeW$H@qg*RDQ^^zZF)-b7W_wWQ#4iR!};pMdJ1!s#d>BAI9 ztdOL~oIQOFz@n5w&jfs~)VMT@5kFd3AJ!jxG~eVEv~$EybA7FY(Bl8$d!s3WMUPB` zgMvapg@R)KjnSAo+1S}yINLcgyWPB#HU_qTy^`^2v$o@cnBKN;4B6txUQn^@`(tYf z&SAlH=wvI{Y)e+Yd(YXpm9L2OVJNvey#^OaOEt9F>4DQ)WxWq`X_GYVOfGd}_IM$0 zEQXw!0ZC?JAEAxu8z0XvYfI+G4aQDZa~s+P2*Kwf%zgeAwmVp#2R7Ih%5!BGla>88 zVv*l_Wud-vTceEgiOW}xKY)dAPewnoWPbcY$9g@EFyg3uBQB|Py7^5)T$W`}|@B__{B&iA|pG+}WMD8B@Lnl6C zpL8;n-CZ)ZoEHkBn9PV{^B);)Q_Cvxk6ILp1S^Ru-haWS?zgs?WYPBQ*)XPu4SR4} z^fVxqz(vANCCO7uh5xND2|j}$@|5DZCUKA4lGsARBq>txV$yxg;XFqv3OO|xETMNx z6CsIcSP9cFB`FoFqtcFhm&`}gpd3--uU<}A~%lyeh!!@x^J%sgI)DlKLBIfhS6y z&r40EZ5pB*!h@z-UuR+Y7c-Z?X$_aMUI@G4nNki*B_X7kl8NcV}Px~h!XWo_> znJt^jZbra!xd6Y40Bo%GqS@j?QE0;;OIp4ZhAJQa>>VrhB|VR=`sn=9{_u$qHvRYkCZ)|Vwy*oOJ3nxvm}~s z{W1X4r=oH02$kdg35~L=1+}G*J$$g_nm5w3<)M0S##2cufR_*lXgki?q8#%zVrR(8 zGufJZHjey-W+nR1@XUK%%KQZf-*>x2>-XZ-%ZrqbppX*L4%T!y+ty3J`Uveiu^W&(pozXJP@TD)#OZQYel}fMb zFRR{rfOch?xX4omH%g~#ixi6}WX=i3*zi$jP~IEFk5nmeJiN17p zn+CzZGN|^VJGa&Gl!C--aS~0XD1N%TC;4Oqw1Y7uWV=~JrS?MFXqhOp$t)5kS)!B{Mm_@0A&TD+7Z7@o`@Mi}z^nXu|xD2l==6 zr~~mHF#|t%hqXpvBF=@WQ&H93!M7iiwTYfia2qGbbTf;sq0lumdo!#%Z#|O)A{po_ 
z*-sD@7fA5Kbo>J4#(CGzc)PIcOQ7(+f6u;6MnV&{^J9`6L>s)3j74cexlU>PktyCb z#_W(DbxtlAHmZ{kdhjFN@*|l3c^aD|jqfI6bl$ z@hZAy_9Y3}qn!K#w~koGP^I3GO0Y)bf4yHMwnDwJ;?-ZC7jtSePMe!fjdY0&__?JK#j%7@Y z&5rJT&n7mh8G{$ZTQr6Ly~lXN=AT~UUrgGl7+Ad%#Jrxn3R_pzLK&R6P%O1q5zV4A zZ^dA!Oib6Fly+U-T`QwqwsSL$-BR2arsy3SYC2mRBoS!P;CX_Tx?IQA)fTWTGgTWH z{5V1bZi<$wE7-r-Z5l+=Dw;%%@%d|vg2PmOT+a}7d$q-*N{wB%NAEKeMEA zBA!F~A-`Euy}YHYYRBYV?U|5^Lol3}3=NvnD>>X;dy+K0R`;Yg7;^|_*jYIQgJFx7 zy;+3Ma0SQu=k9ZQK0O*ZNO-uw^e6@gpTpk?_Va}76Uzd#XPV?M(+~&3+a?sm{6Hpl zLR4`dsrnkh;ZyD|1{O|BAvK+^)oBxh;!ZlFH3`>^8pfXc&q#ER0v0?I(=2FJh7ELd zjKWlG-5;68oZpQ(~3oY5_hD{N$_94SsWPViXoOUX+eMTvN#2V|85ePS1Fvn5;0 zM<xyh8IrG}|*Th(HpO0R+X@z*hyhpH}UD_Q9H z4NGQjvirll~*hYJBDKO1Sl>l0erNSzG z@Xqf+h0$q9b#ATG-klQ?o#pb>6_n8Kiv*oM@#@0k&Q;dl`#y*CjF`{#qElEA6Anl! zz<61fGS97W^zeB(P^@&DqWlyK-5RxhsC6JH-b2@ za_w-r9g$dHxes7s7X%D=9|;?}**d1Iw&^?AZyC27)&=WOb;eDkez{8H?{7mp*0SC_ zic=|cL`ryd8cwH_?>LqC`y=3?t{4VW?y;c1*XFS*@9Qw4{&L#5hU2UtjE zBc0OcxlKVbk=0(0UD_rLob=hYN_AdVv|k);eU10wtK^0Sur|w6mb^*rF%N0cQkX@< zTnbG%*IXFRvGM$+LDCqZ_SL#-zg`4Fk0&x){7_|;k&-Dk=5d8aNM6=ZHFi#_SpGg# z2yG1RL@MSIF%_m?dWIrjaM6+kR`V}1jSR=`Em zkl6SJ@v+3(<{J~7D<8El6VlB+3h~vO#n@S%dY#x~>CPVph1SnF4VV~&~;8~YE zWNatvfDT!vE2qyBPTBA+kU8uEY0_lZ&IqZNm$10j*sWrR$nc7thLBvm#wWKkIG$<6 z@yRw!U-6VlI;ot1e&U)oWA#Y6f1#fT%enOq>rU0V$4TJ}^#B)*TIje^Qfxiez#dq& zPHWAl3l;=sL6}i?a_R+NKV!jQZ*o0nqL;nEc^z>Vo*0amo zve8g#9c?~GZmN(KtJ{!DX5}VJ!>FSjhC;&_hQ{b+zAHbi$5c7G7-*MUNk=I=+11qO8%z@<&3vJsZ2e0M`Sh zfz#p#G)C>aCwPU2(A@n4+&<%sc)gwsK0k~Nf!i92We8AE_i+AwYIig-G;sRn^rdUO zFB@n3Bc7+jH_t*jhOW2|stABZ4Ahu^XMrY3VjZHC@MQfNJ~mx=2j0j0UM921;b-Zd z=ObQ)oAvX?A+Ot=Wx)?}CW*y!ykx>-Y31e5zj8Pil*Z>694%Ls zG`MpkCif&Y*==O^w6@IYQEc?9)FNY|UD~*$8A~@g{pqw@#`}>%Jw)qYB0&rE%|r_y zYpdgZV0~7`Wr;?vwJYJH6v*S0MVJs@|^g5aTW(ft(G$B6wv`S4b19nl{Om^E^n2IEN4s#x8WZ{ zO&5gH+swt?yG3~{s{BEe^9>FkFCJZ7a1xheexgc8&_|Mdi1h4kzr{?K%M10fP!f%1 zGqsmY{eI{QUcr>wm1MsCiR<0n?-#E=qqcmB7Ro_3GF#BniZ)x^^;@VcWh+n2mJy;J zEuGI$Sj@Nbk!yG%rO}azHD43Bim|u+5ZH0npSIF)YQ<_K5x|?bhZ4R{F<{jl%Xz(@ zBc~vyr=a*|)Oq8Re=r8{{auB7+2(hx_~X5WY4o3Rhux*;aS)A5o3Lw}D%X0x zSp|z*1#i4Hx0_u?{q*@UPtvvTF=!JeKhxo;K%<>5IuOhqUHJsci7b}~-Oe~qfL-%7 z`9ufafadhWZFkhqwLb3%dKImzlDn3NH11!|jC$lJM!@+H_A5W1s2XRUJ~e+amD;z!UkMDfmfg=jN3n{l~}u{%Hf#`D#d2Up$K3-I004T%=< zBk3A+V~I-;@s;hz7=NLr4=g$<6gKTDWr49-d}G-dU#wc;4U9Wn2jPRlgsEUwguc=b zMZV1tL^zrxY7qyG#R21;Q6CrSSmWD2w*NRfSimaxhKGDl-E|O^T>dVx!L?excF`MF zolJJwASc!>15^&4u>gLtw^G2lHvz3zC!acdH#7mvy6FVtX*&q_x3%$EM+*T@F2!o- z;ofSWqIoDNrCad>)1_8IzT zfbqtQkgwtLT+OfvZyt}E_iN|v1$61tZydyKfywv|nc$%UYIJ+E-BupenDON%LAi(_ z8>|U*H+%+f3lr@Qr@rL)bwS4CY2bY%uKVEzMCTmFG#z#tz_HoY$f9DzNVCB z0W*M%z-Qp9R7{ow9ra*EP_eursM-dupKjMlQ>=T#AiZDDEfR(d1WO|JDVALQ4YRwh ziHM{Lg%l10j#ZW=kz^;D2&0MdE}K}jpXdcB?he{%lSB3>) znS0hmXI%M|8c6xC(giQ3+le$Mlw>Mko5?Vwz-ke?;T4{QMZha^fxtx$v2+ZIMwe&U z=_Bxc11Ah5SV}V;n#>Hz+-S7~6Zj-kDAryo*|1c44?YaK=)82X>cHOYZr@&9+o829 z```n_koS$ZQmla~Gh2EZAt5=5!vfBrt<84LXxFl%+=F!epect^=PRj|&|csLjTDhI za37mr{7z|drQRhgzKglQ){I!GPftNe8)8C>+mJNUWlg9ZLny;Op{@jp-f1dP3{r$_ zi*w>yl0d6(dZKK-bO>+D@CF47wkUMoeJ^L5TvI$O9UIEK&UzZryD#SN$J}9y6QPsG zb!Ou=)pt1ns}WwkDkvxed+D1OuW4(GSnEg1IA_xPPggtiJldscN%Nk*bc^6i|Dtj~ zu3Gw1zsOg|NSbOaf++~)ydj_rin0yRX^^D3-WWRX`Fg}sX}<1ZeeifeLMp83iY9=1 zeQ8u0Xqo%|X_qT i-|RI_K*9AjPh47`S4ftEZBuRX^|U#lGYP^Q_1y~`T;q7OyR zmoE49L^At~%;`{IK;KD&sujfKoRdBx|wSUcSzWxJ0mM}4Lfz1#U3 z^FrL2HBWcez9Mp;m7IlMfTJxkhbtLz;(XD?Z4Fjgg{ht$GQQDsW-e67>?xX#d1Rox zUL^~T^&eI}_4AI1L~G2L-D()B74A?N1B&Tf#p4labqsG-#Rl_EaWT}+Em#{DhE}dB zGCj#?I6(Dg?vj22^;WcX@2h^`>pmWFfzaxKvR!JQ?9T+VRdU#3;PCTWR;pKbQc>42 z?k>@41fC(5vlK25WdwGfR|~3o9+frmCv}n~9i#IdwHn5h?}W5;cTnypDkkNvd~o|d 
zmXGN;Pmq8F0u&Hm_P5JCkYK>k#K^?L_1lc^@?2gX27uW)JS%^|BqG}RM7%eJ0HY61 zA}|$+zTcXsvMSZ)v(d8;^Y1$;y4;iV$IpDcIZAVi2^)*`Ri&%<7X=@^WN$*_S>{&r-zdo)A#ABRHp1k~uBe zIywWfr;eM!BZujWMo?wNYKP=VqquhuYzWVkw*Nat-kxO5huP~Rs1ANoTK#)+Lno;r zER36KaagY;^%y$2?*%UrO-0lDCoLAu-Vx@$=$tMci)D4IsXe~k_@`sY(c>@r#WXH7h`b-vlF=Q`(g(N z#P$Kw5APr`#qTy%AnPjj27j-u+}e=~QW;g6WyAEceY0%O3oBuqcv>pMBTS!{3~`gk zYEd6n3|E9P)yF`V=2ko_4TUW7PtIHUJ0CwKR9rL&YConAEalQ6#3Ybv4^Dm+1( z9(X$AH?BZjQvcANS}4C*itD)t9G+IcdJ^VopmZoV(;lT3QmqfY|AVEzyLs+GFTzh7 zFvpY~5U6Y42%STHA`n+Fr$5orKTaYEAj(n|$Ds(+34p&_Vk;Aj^tJ?*L!s1Grj*&g z+y{FgXf_zV2$B0>)#?s1OD-CRy@FN+0+6U=rL5wpvv*PlCJ$bg zGtVQU#uq7QroN!m^6ZDfD&A(0^2o?)_ftr=c05)i?64IF`Dt3^SD;r38NNDZb`2Ku zKBtL(L`8}|=u|pC6GP>XZ8DGN?>~9#Y_Gc!mKQie4b7{}5*5YZ8BeU3OKKTsY*Rhz4xnY3Jp^8WYqXIw8#Mpp5kz-@czDejw!Au zGZvx%{WCJs7{$3I^A8{HSmSHN3VM)5z5u2aBVyP2+!*v0K*N5E(I3m5n({bne* zJE^*ZXE6b1N7t(qa$gbOK1d?ZKY5F>T_1Q>3;LAAp)CQ-O-AbMNkH{;QH_zj&aqKD zyS7I!y>q%hwnumMpl9bvO5?#hrybakyZnXiOZuL}LdQRbDMXYa)JCvSP(+Zt63X91 z-`}lEm#W*?K|+)-kk!Zg>1tY>8}J`|(GKoml0V0MCXYx7t_S89l3JTlH(j|6xeCSH zoxG>I>L%RsyytXdJR2FpmrbUCy|3O-{^8C!XVzp))3* z8+H}7O`j3fD9NA}b_p}l-!*@3O)j{+O4s}(;6RT8#Qss!n*jFpfHX1i2 z;8`cLIeFe&>N~YlO^D0r%5pN@!H8d4MI}?zNZjw~7DgZ+_*fNxCH0OQgPrWS&72)e zi0V3MYWK^j*lAk{@X=BLanmmM$%Zv;@9tt9SG|?^qh(#m5v7l;9HvCu71zF}g04i) zuzW9@?@RTy(>tXZVN-S5vXiK7!xqGG$@BN*!cc~8WvFO}y-+#ipy|?$7M1lTrj!|K zk>Lqu@i<%pN+gMSY|{t76~%mmpMYTT>UA68mG5(vl$c>t4)>Lb3xcbzp5#*}C>>9X zVgZkHHo?J|4hV3Dq1lNzVtPdMEb*PIBpG(<^OM7yB;VOkxmjpapRhR0YL zq@R)ab{(2qG+#_O=Xn0qzvK9*#D{BE49Mru&i0w!jarLAQe1My$h(M<98`h`I%ucu;Yzbd| zrKwJ}%h9OZeWSL+ctzczw&!YOQ@S8F!Vz^A`cg7PW=0atO)?de4tBETiA?47((0Fb z2b42t6+`A#wYnf!_JBSCB8ys?DQEU0>}!YJQI|zm@H`4j-4FmR?XBiM%0F}YO+s78 z>af!th+pLoX^^#)AF{zLY>iFaoeoePP4)HlVGP&YTdRn2iYGyADSmzd@qXdsp?+Bo zND3Elu&-5sl?XoX?L01fMS`-gD?X_8Re5+?8(}gq4)%@@Gxkh?fdKG$Kf}gMDVRoOcT(!jnCz(J2T_--v8>Ox zs6IR9k2K+oPALpl3j)AYJrj5mZ~y{E0>_Wd)!GXUwo_}_i=|Iq&ATU@ZeDzqrYP0# zrJYr)dE9gcZlQ@?^0eRC)r<6`Zn+Zy+ec`R*ZD4!{Y&HCb#fHH68l$=i_3nX(PX7e zQk~_J!E2}aghz10gvRzVNj`-Ck`?wV&P;zyUUTCcq%4JyyANqZzaSYRVB&6M?P6@g z9PtpjjSW}iEW_|cc+s>gYaO=s<2w#mvzefn)x_11nFup*^ltD{b24%c*7UTw5q-9W z1`RUxNI|W|3o!)OM38d&K}uukf@&RcCR%bFMHsSwXHQ^5Q5H;=*?Nz(HOJ!>s37kB zR1d!uI@h3sx{OM>5o>1Ft|$$z_t30;$346hF}^Ok)4BjgEM$Je%&Suc8j@#SeMsR9 zaQ8(o&O)3LqILo%UI*^&B7CzXD5#zv@(O=u#oEs7cB8)xTrw>5bScjP2&xweC z2;3w!rbo(JLbf)8rcUUqtmy*@ALkMVDK%`uIr@+4-7hD7IuWxn6=y32rr~JZ<8vq9 z=2gx{SXdBy_9+~8H7SmoJ5GXyqhPTlV$IoBk0&*jg*$^){iyPbZ~Fzx0>7!vIHk^- zdH{>UV)Xk|GyD6z*3P!`yft|z`QziG$CalBXIy5l(xB5(`0WlP)WX^{;t+-B`<@qT z)wjn8u10+vZK@S5FgT;h5e*}Ri@m2b_(mu+2UZap`(82V_1yh+|CJ|>VCC7;C%XMS zUVkrQ{>&J`PiFB40e>WO{+?&Df06n#1@xQjpXsZ&DX01$W&cbc{r{8y9}|D?{=|lz zp{0qD^FKxl1vUIbvhdG&sNZBbxc^9B{WEv>M(it{lK>wiWV&fLX{_HT-QE&2<^pkW zbAq@zfo$AdKmdS?&D0pk4&XN7=H>>lbDJ1(aT*$f+@bT79?8E`z&tIGxwuDFHOp+R z54IF~E~t_X-ytHbKO^ThJd@$Xo48vR_;ptXe-wdS#>>j=P5*#fzO^jUc>D5u@9a%u z_?6ysbBvfiEch59rB%_4HG~ZxA5Q+1h=SK%DYEEctVr$IrC;IHl%*eWlFlYPei)P> za@9e&H!h8b$ii4)t>D4Z77^O;h8g^-a>if|gZJR^M-0IXZf$pd&?eKUSmy&utkM_ZsweWTdH! 
zp#UFOWXe`*_uLh(UhMs{{l??-JCODsUTL87H62vGUt{StsYb!GI}1K9pm_X0WGH=o zR7B_{xW}!_)zaXO5WztzLaKdCg6)o@BTA<9A_J(8)8i*z>VH8=f>+nsCl>px@`~;u z@7eyvHJmf*>x#~|sY2og29_8P7;;c6ML*4?&-3++XpL2SWtX)E!m-x$ze5{IDeBpS zq^Y%L&AuhxcJM@d;ePj%DxONWZ5{`sJWDJlscQa~hSPq03IF^0_7vlRZe#L0%jND3 zw7SBPKvu>y+&3A8_GHprWu6}+X6LqzUGc=}xj<%qh%&vsWluQ8ojo{u zaeR=gpk(G`4JU(s;Y{fqKYoS9%NUe(BxZqU=mFk``};VQJ)o#>L&E>XpgDm*7<4D> z2iiLjW4>v>H)x2MAt%^`+W=_9X3TA11O~7f7y-cSCdQlwARxd1$Oh&%=4LnGaEC5c zmX3l%K5GTf92e9i&i)XSkO5ZpB@ zn{0w9Cs?z;$-0snt7YDLvGh`ofp|}(;w*OjY(6@*ppU2)Ck{vw$#I9&i(o`cK)1E= zY{1oLm{LEsdeL0O2xViyF4{lJ`uZ!b_$ylJGkFJB+y`O&q17|WhU>3BoIe)O6vuiH zyh~5>g~vjbHN~X41dfEQ*e~FE!TaRUY}U!!?h{3>{A;LlGO~;-G76EE`^FeScLp9ne`aO4}Lb`{hi5ao z&Bjt>=suMfc7RXO?>g{(D>%3fdHoL?lEN3CTCznZusge+t@Pw;>_(Vf0<7<|C$+3q zhW7J5u1OeX&~@h%bu8MhO}o$FL(hn&Y!-oiBEn=)IcLk6w4-3VtWb7(vJMWQlwWyUu|k#=Wnr6QZ@H-h~m%@v3a}WZ$2F3j8=404g<8Nf}L|p(2mS4lKoYf0gJE zGfyp0-h+-1_QX@*YrxJGbhlHh*ZipwcMrTj5;_c5z1oX-jZfH*K8r_ieJqgqtht9X zEad!|m(8Xt4s=j`nXk|U&A(u(@t?S4|AEWw?3q;?2#+`IU$|svL_J zn3x(FnF4@D00V%LsR@@6mnoMi7{t!aY4pt;fIwq5AR8M8K#}e5pvK%7}t_vazK(j3RDjwQT~%UdalKRmVvgi<5R6zuTB$;60S5_xg!@l{tjD z9>EfS;gNikMi1N*Ip%~)e#XfC4*_jIc-OywAHYVp2(W-0fpQDycHo9A)84dyA;5?g z1OS<`a{;-HOpJ_-!JI&2upyAm&;)4A&BnpW#%2QIG6Jy!+@YHjb?jzAxWR7odJZpA zs|8CnYR0L=luNAq^B;woN=!mCh9xAJhZj;=Qhki7{=DajVIm#5#ey`(j2|kT&Pqnc z@4?r1{Pe=}vYDJ;fCBmWlTz0Zys$^elTi2QCou8)%rgSU5`%RO#Jf@p5>v zpa~6i(L;kO-;YHe?=lOQaV|lw8jK>lE6sP+xMW+3vQwTWYk;Hc6_U>$*BwIczUl=x<@LxGS6dkT-rO^z*pK!D-zJ26b3Nvu=6GwYTzRRUfiN2jatm zF^xPFbWGlzkVb=AG;L)3g6XA(!Po6*_ah3Fsm(|hci0Iw0Ny42R}y)OGnho%q>==Z zHa@x^Tr()sk*plbogEJ@;=07vRq%#X?lRLBGvY`~lg21(J|}o!(Gw=beFDCu3lV=> zg3_C({iO@-a&&Aobt-mad=Xg2fAd|-ISu~LX!7lV#6gsgIR_9M(OKMw_iJT`>@iA zQN@JoxITVQQ-#WD>mHRC4F?*^AoVy?s*kl48ICQioMBoJkV$9Uu{Sfz*4By;qMF4z z5vJZ5>W@8IPw$;W@F}PpSHGJ#==UQEaqpqnIH$X`Vg%NyRKABQ3F^UEW&%`78%uheDPq)&lF zw4w@clvtTsnL+Ip#(_OV2@lnd`I0tnN0r8#3dZYgjW+gCi`amMSzWOEJ(7I}qhzRH z>jGqi2H)okq+H9QWhor7@ogP+ixN(8yorHLszogrKb#TlG2RH7W z@MwOw9KjjwVn^35Fq$c@$NhZx^E~OR(W_lS9uUP?=v%8nAl?6o@@Ix;U9Ha0YMFg2GZdzz z^x9QOvGDs4X1a<81Im(5gg9ul*X!PvmT^vY66RqGb@<}KX9p=6X!bJ&Wj~=EXHs=Q zrt>qdc*&%KqR&=_{8h=wqaHT~!PXTzIUyDZ-&t5u`zSan`q9fa7=a8^<>u&{wsh_DCn*Gee7dj3I?D@utcYz$cV zT4=;eCxp&AsuxiSnj~Yz06Zdpj-adl=i);~p?ic1&rXnCgHB}*KKhC}lX~-&?nS3D zZoQyLKKw%M`%HS$X$jQ%;NB9?h~1l@(=x$kpD#Ny#Puw=vnQU1&LcXPw)RglKm<*k z@>z#XK*Tgx|CWTup#&;4M)Mb+H(MtHoD zHDo5cY5y{q8?%B8zz~Oplam|b&KPlW0NJ@r4A?kK41hoYw<#B7U^g~3VE@B2_+x2B ztZ|aQiWTX;iI_lET$v+uroJf?t$z}{yIJ=YP&XtWZt({Y-{jn*fk{9sn zqb5+un>cV?NI^i$G(-aI$S}%edENdwn`kJ9RD4>NxHui$k4bUb*tr0)!PRDhFPn)b z)$MscPOt9_=UjcMoh6_CK(@FtSEqZ{Q02BoXB3G@|6cvYoumlcismb2BRQ9~Vt_E; zu*n?rcvWoX3WvoSx&WJj<3F?F|2`V$y6s}!h@pl^khwqtmz(x`A|YaIU`~K3(9o2F z6XF*DAptlWnBCBn)7TVbzzu+e;y^nR9|nFr;tvH7kj)(%Va8UNXx`UCGt9gac(M#N>r^*OONXguYW<4pQ(BsRRCaJ} zpxdhSKifM8gj{-ZJqXIFjEUiOWFU*(oL)cbD!iXlRCrL)3UJJ^zzy2-Sms~Nrm<=i&$-Re$m9FIs4s6+p4U1y7;f+Zz5s>``H$W?=$6?iqvxuW7urszJ zpWx5kUTCz5r#$^AL!q!{^a-Z{@zwj}qXkh*5oO7NN~9SNC0+vG1>!*yKs<8?7~y@} zf_$2nmf*ruYl99_RQ{H(z|ES#VP*}4fSnOgFug@_k6Bw`TjR1jh&Pd)80S*?eHxaH ziwr{cvKXL8I70HATNgyuNcnI@3?;30E<&U498M{c_QPT43R(}7*Adt+O3dT*_3?18 zybUv4<-k2=rH7s2O^59%ccQhZOc!%-WeV(g3JU>x4M*hoA>lnaHUoEitj76*B+;NYKco8m7X=J&FGJiYz(L>vW`R8XP5Zq9 z5HTYVfE_aWv9WQ2jM%w2K?Xp`dkyjWA$~t6n*kda(AWqFf((g2$9tj%IOBWFRY@Po zp2YV1MF}u=%244zofV+g{G-ADK4LYwt-+1hbJD1u|G5hwVup}0hYJMcGBE|3a)6CN z03*=NQv(c)4Gm2VfB;A=$ZZP#TZ4bPAUo+ox$DVUv%t})xYU}S{F5iirl?Rbdl(k~ zDDdL~{_kfc6N4XSrBDX{O&$pSH|_Ub01@K=a2SHvOxZyIV*_qTu*wB8H00o5191Y_ zj5&N}6~qRH{9`u+7y>{hT-*S#p%G*~!H5$8 
znO`}L!C)h9PIh)fNc27Q5WS587gk{^&A-{=XbE5I8DJrc*7wjtGM5TYwyAl-9eTxa z!O%26O{qki!Qmw_7Hv38dN4)x{v_;+hYvy!wUG9?(1x5T@=Q;46PpekLNQX$&DHl7 zK5O%jB(Q3)8?Th6J2u#7$(VU;dk#juz??3-bk7s8(#TUHBwlHXv2#R>@XYXe+@%Xz zS7G{xr5_6%5JOb{^}4i$?X4{>s6PrqK(77!^Ls&i10$

x|Rg`c;He%SB$pWmxn z)h=!{`*X$pMr>%%-1epb!%ZWHwC^>$CHCho2N>|bTTc0xVj4eh;rxN(Y;bGm3kpi} z2mkN8iX*Nr*0#6!dI{0vZ%NiuNiars#eCkCu?ayhylk`#|Nz%8sDPwBfgFl_L)GT~r*;sYBsIxav z(tE^CqI>K8K~b6dQS7FBJ!S%p^!@&mN%rTmHB{NGtEgK0XkG<%oMC|r=L;LfgONyQ zI979|Y#&3~%U-hFV4lIUgs zM?f_#ojJX@c(RI~TrUdMD+=gnM$>}=M~}iC4)fmVu6D_Z+&R0Cjge_Aa*p^ufvHbZ zM2?&jbo~-fZmmrTgO>%Y>}LD?$OQeZXM(h=5K6~ z69D|f7E6^{!)}gU#v2Yb3bTuhJZ!Eb3xtMKhZA*WoR61$kiI*jll;DXpyg4#OdK2r z*(tkBv&XB2l{|9wp|TovtWoMK_HuM6e6EvX7_B=tgU=_wC?SSU;4-&`!N$zRrlf?%?7Pan+4_ICYlbJRSZ z*A>+!W>gRCgPOh^^z=*-vt;ZO0Dy0$lVzrLeEuH4LJ@suiHK7!({xnk;a9G6K_sgz zGI|qyVYQ1bN1YNB>a(7F>PzFx)4@y^+^bKYa`&f_VJiGCJNL*gRt;ey*({)NTJ@AW zGo2fQXA@qCJH6%ei!y(W&7MY5>!9DmM>zG2Wg9w+unC{NmcfaThvXp&4>Wr}8d5I$ z5u@UpBcKk<-9)B&!4YzNXTs(@Dq>?K*E_olgprC%yKlAx3PgpcJ_Xm>_J z|MAG>CjzVW^+)kDoM?pJ-7zguelWw5L+iJybKKa+G|Tt^9b~K;_&59D`oli*lnmr= z?1M4Z2Mc>JOz^qGyWwsAR2SJX1tMmcHYIU#*KEV<-x2Y0ggb@d8zl&!oY0e z*>i>X0exOl^4*6@JSxwyBxpWU$sYwc3$zfC4|R#ZzM@MdlX*iegi>>XuN8!b4kS$x zgQBrzE0Rk);BTf9t79V7q~VJSn}OAt$0J_ir9#`-izY$Ps&3vFdfW-W5R2|qt>h3` znw`JS*Ow`5MY8gh*+LeNsA={vQ2?evHaDKsbw|Flb@+5jA}7w4HRSSA0F*R z!LSq16=Vbw6LlKmYOMwp@@^{x6B?t~rrvCG##O&8J}-2J3&uYz*NL`;n$mz5do1>F zJ8t&vgNedc@N;*+NU40aGQ6P@-Ug=p)pDQ>b(=lJwe|CwY>xzpBi1^?6^tECbAu_f$kWsH$O?|CA4p8~e*~t_ zoEU?5Rr5-=-DVVkDq;gU1e6Idr>a7c6*x*}qwu$g`lsz3%p=4Gedu;qAd?|9jKH1W)d0K6dARvt`d&Z0`TpA}K!&2Y z`3(gPMF8d7z|4-LJqZovbH;J={q6VdTmR^ophMlhH7(>$f8Y9#vRdEWzpf|)|K|Rk zN3U+aegFFX&;5T@YwX+oDSWH^lz+Pa_m#(fh4u~Q+xMSnzpE~G8*TU}w41b?+Za$? z9X{AMnoLzC{h=9!O63i6|7i5H(}@2aV|O95#P^K-U5M;gK;Ni?`U&V)<;rd=BmEQ5 zpT*05h4OoK{zCb#CfRM2fuB%rRVe!v)HeoyR_JHuaKB?t2r_|C?~4iChHb(6J?t+^3;Y-G?+@YM28V@&z`xV^){*@G0{ZD!>Q zc)thz?!feaf&KoV)@|4U(tm@!eQfK$;C_E7={BwgB>evk$G<+J^k3k=Kk0EB-v8dO z;lDfg@qdN9@jt##-Tnu%%8v@-fPAmGKYst)l#TP<#G^s?GYDjv=r+jT zv)cbK?|h#heOr^zUxWOWE&VI3-%r{``0e-Y?)h=emf*YYdBN~YZT?u2`oF34GjC+yAx-`Z z(w|Al|2Mo}r7GXH32aC)m|yhMPfL2YQNQu`Z+!mf)8GA0OA4kRQNLyF{u1Z6zWuMV zF28Aa6PBp_76+14d>aZX)hFe481e1*ZO8uS)Z=eZl)hAc;r|Bpze_~^exkwf_z3f0wiR%Ln=w z-G8(%!td;>3Gh4JA=!$zq5d}IUz*te>*S188VG_gd`5f%E49;7P>2x`MKlIP&^Hi7 zqXC=P+o*_zm6b&b1q-pU5Cz{r8!dbWt*xS!2=4A>a`WHLPBtOMan1en-|o!q&VD3o zfFlaYy*D+fI51KQhytT4A9vmRVKL<0V2+X$)nAm#ydy)&y+3JEkw*Z$SJ2wKQy_GW z_x?H+^j_@AJ2H>ld!eLA#SxIwV323Ja_>I6NCDY=e+vgx1@Fnh*-7}jn~uAWJCc;m z_x|ai_gMImdrt-*7F=(LdO2Pr!pP-LdNAJ`=Dbv{bu&BY{rk4!+EG;NKq~carH~Hd z-rqH;I93R}w*t~uT6&%TmH`< zWEL2AgIR!7I^s}7FBvbg zh`*<%jHI-o`!qwdWic`_G<9rS;!l%G2^szZNg#jaB_aO;u{}|(_D#6Sy?*)_n&du)!=L}`J>R4|X7+Hk?hg9)$Iso4y>*|xEeYKr z4LdwPqDC9!0YkN`Jtsmdl-7M-4A~4WcD%xdCZmRk)aXSF5Bj;-^=d#YJ@%Kxj_!Zm X>rC3t2FzJal9bJ_X?o0sHGB6DA-+`U diff --git a/vulnerabilities/tests/test_elixir_security.py b/vulnerabilities/tests/test_elixir_security.py index 774a2209f..68022b17c 100644 --- a/vulnerabilities/tests/test_elixir_security.py +++ b/vulnerabilities/tests/test_elixir_security.py @@ -30,6 +30,7 @@ from vulnerabilities.data_source import Reference from vulnerabilities.importers.elixir_security import ElixirSecurityDataSource from vulnerabilities.package_managers import HexVersionAPI +from vulnerabilities.package_managers import Version from vulnerabilities.helpers import AffectedPackage BASE_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -45,17 +46,17 @@ def setUpClass(cls): cls.data_src.pkg_manager_api = HexVersionAPI( { "coherence": [ - "0.5.2", - "0.5.1", - "0.5.0", - "0.4.0", - "0.3.1", - "0.3.0", - "0.2.0", - "0.1.3", - "0.1.2", - "0.1.1", - "0.1.0", + Version("0.5.2"), + Version("0.5.1"), + Version("0.5.0"), + Version("0.4.0"), + Version("0.3.1"), + Version("0.3.0"), + Version("0.2.0"), + 
Version("0.1.3"), + Version("0.1.2"), + Version("0.1.1"), + Version("0.1.0"), ] } ) diff --git a/vulnerabilities/tests/test_github.py b/vulnerabilities/tests/test_github.py index 3306b75a5..c186711aa 100644 --- a/vulnerabilities/tests/test_github.py +++ b/vulnerabilities/tests/test_github.py @@ -26,17 +26,14 @@ from unittest.mock import patch from unittest.mock import MagicMock from unittest.mock import call -import xml.etree.ElementTree as ET -from collections import OrderedDict -from requests.models import Response from packageurl import PackageURL from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import Reference from vulnerabilities.data_source import VulnerabilitySeverity from vulnerabilities.importers.github import GitHubAPIDataSource -from vulnerabilities.package_managers import MavenVersionAPI +from vulnerabilities.package_managers import MavenVersionAPI, Version from vulnerabilities.package_managers import NugetVersionAPI from vulnerabilities.package_managers import ComposerVersionAPI from vulnerabilities.severity_systems import ScoringSystem @@ -295,12 +292,14 @@ def test_process_response(self): ), ] - mock_version_api = MagicMock() - mock_version_api.package_type = "maven" - mock_version_api.get = lambda x: {"1.2.0", "9.0.2"} + mock_version_api = MavenVersionAPI( + cache={ + "org.apache.tomcat.embed:tomcat-embed-core": {Version("1.2.0"), Version("9.0.2")} + } + ) with patch( "vulnerabilities.importers.github.MavenVersionAPI", return_value=mock_version_api - ): # nopep8 + ): with patch("vulnerabilities.importers.github.GitHubAPIDataSource.set_api"): found_advisories = self.data_src.process_response() diff --git a/vulnerabilities/tests/test_istio.py b/vulnerabilities/tests/test_istio.py index f17dc64ed..986830edc 100644 --- a/vulnerabilities/tests/test_istio.py +++ b/vulnerabilities/tests/test_istio.py @@ -29,6 +29,7 @@ from vulnerabilities.data_source import Advisory, Reference from vulnerabilities.importers.istio import IstioDataSource from vulnerabilities.package_managers import GitHubTagsAPI +from vulnerabilities.package_managers import Version from vulnerabilities.helpers import AffectedPackage BASE_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -44,16 +45,16 @@ def setUpClass(cls): cls.data_src.version_api = GitHubTagsAPI( { "istio/istio": [ - "1.0.0", - "1.1.0", - "1.1.1", - "1.1.17", - "1.2.1", - "1.2.7", - "1.3.0", - "1.3.1", - "1.3.2", - "1.9.1", + Version(value="1.0.0"), + Version(value="1.1.0"), + Version(value="1.1.1"), + Version(value="1.1.17"), + Version(value="1.2.1"), + Version(value="1.2.7"), + Version(value="1.3.0"), + Version(value="1.3.1"), + Version(value="1.3.2"), + Version(value="1.9.1"), ] } ) diff --git a/vulnerabilities/tests/test_nginx.py b/vulnerabilities/tests/test_nginx.py index 38551a46b..a62e009f7 100644 --- a/vulnerabilities/tests/test_nginx.py +++ b/vulnerabilities/tests/test_nginx.py @@ -27,9 +27,9 @@ from packageurl import PackageURL from vulnerabilities.data_source import Advisory -from vulnerabilities.data_source import Reference from vulnerabilities.importers.nginx import NginxDataSource from vulnerabilities.package_managers import GitHubTagsAPI +from vulnerabilities.package_managers import Version from vulnerabilities.helpers import AffectedPackage @@ -45,11 +45,17 @@ def setUpClass(cls): data_source_cfg = {"etags": {}} cls.data_src = NginxDataSource(1, config=data_source_cfg) cls.data_src.version_api = GitHubTagsAPI( - cache={"nginx/nginx": {"1.2.3", "1.7.0", "1.3.9", "0.7.52"}} + cache={ + 
"nginx/nginx": { + Version("1.2.3"), + Version("1.7.0"), + Version("1.3.9"), + Version("0.7.52"), + } + } ) def test_to_advisories(self): - # expected_advisories = [Advisory(summary='An error log data are not sanitized', vulnerability_id='CVE-2009-4487', affected_packages=[], references=[]), Advisory(summary='Directory traversal vulnerability', vulnerability_id='CVE-2009-3898', affected_packages=[AffectedPackage(vulnerable_package=PackageURL(type='generic', namespace=None, name='nginx', version='0.7.52', qualifiers={}, subpath=None), patched_package=None)], references=[]), Advisory(summary='Stack-based buffer overflow with specially crafted request', vulnerability_id='CVE-2013-2028', affected_packages=[AffectedPackage(vulnerable_package=PackageURL(type='generic', namespace=None, name='nginx', version='1.3.9', qualifiers={}, subpath=None), patched_package=PackageURL(type='generic', namespace=None, name='nginx', version='1.7.0', qualifiers={}, subpath=None))], references=[]), Advisory(summary='The renegotiation vulnerability in SSL protocol', vulnerability_id='CVE-2009-3555', affected_packages=[AffectedPackage(vulnerable_package=PackageURL(type='generic', namespace=None, name='nginx', version='0.7.52', qualifiers={}, subpath=None), patched_package=None)], references=[]), Advisory(summary='Vulnerabilities with Windows directory aliases', vulnerability_id='CVE-2011-4963', affected_packages=[AffectedPackage(vulnerable_package=PackageURL(type='generic', namespace=None, name='nginx', version='0.7.52', qualifiers={'os': 'windows'}, subpath=None), patched_package=PackageURL(type='generic', namespace=None, name='nginx', version='1.2.3', qualifiers={}, subpath=None)), AffectedPackage(vulnerable_package=PackageURL(type='generic', namespace=None, name='nginx', version='1.2.3', qualifiers={'os': 'windows'}, subpath=None), patched_package=PackageURL(type='generic', namespace=None, name='nginx', version='1.3.9', qualifiers={}, subpath=None))], references=[]), Advisory(summary='Vulnerabilities with invalid UTF-8 sequence on Windows', vulnerability_id='CVE-2010-2266', affected_packages=[AffectedPackage(vulnerable_package=PackageURL(type='generic', namespace=None, name='nginx', version='0.7.52', qualifiers={'os': 'windows'}, subpath=None), patched_package=None)], references=[])] expected_advisories = [ Advisory( summary="An error log data are not sanitized", diff --git a/vulnerabilities/tests/test_npm.py b/vulnerabilities/tests/test_npm.py index 953cb1d96..6deab261d 100644 --- a/vulnerabilities/tests/test_npm.py +++ b/vulnerabilities/tests/test_npm.py @@ -20,7 +20,6 @@ # for any legal advice. # VulnerableCode is a free software code scanning tool from nexB Inc. and others. # Visit https://github.com/nexB/vulnerablecode/ for support and download. 
-import json import os import shutil import tempfile @@ -32,6 +31,7 @@ from vulnerabilities import models from vulnerabilities.import_runner import ImportRunner from vulnerabilities.package_managers import NpmVersionAPI +from vulnerabilities.package_managers import Version from vulnerabilities.importers.npm import categorize_versions BASE_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -40,9 +40,15 @@ MOCK_VERSION_API = NpmVersionAPI( cache={ - "jquery": {"3.4.0", "3.8.0"}, - "kerberos": {"0.5.8", "1.2.0"}, - "@hapi/subtext": {"3.7.0", "4.1.1", "6.1.3", "7.0.0", "7.0.5"}, + "jquery": {Version("3.4.0"), Version("3.8.0")}, + "kerberos": {Version("0.5.8"), Version("1.2.0")}, + "@hapi/subtext": { + Version("3.7.0"), + Version("4.1.1"), + Version("6.1.3"), + Version("7.0.0"), + Version("7.0.5"), + }, } ) @@ -67,7 +73,7 @@ def setUpClass(cls) -> None: data_source="NpmDataSource", data_source_cfg={ "repository_url": "https://example.git", - "working_directory": os.path.join(cls.tempdir, "npm_test"), + "working_directory": os.path.join(cls.tempdir, "npm/npm_test"), "create_working_directory": False, "remove_working_directory": False, }, diff --git a/vulnerabilities/tests/test_package_managers.py b/vulnerabilities/tests/test_package_managers.py index a74a46907..5a33f3526 100644 --- a/vulnerabilities/tests/test_package_managers.py +++ b/vulnerabilities/tests/test_package_managers.py @@ -21,17 +21,20 @@ # Visit https://github.com/nexB/vulnerablecode/ for support and download. import asyncio +from datetime import datetime +from bs4 import BeautifulSoup +from dateutil.tz import tzlocal +from pytz import UTC import os import json from unittest import TestCase -from unittest.mock import patch -from unittest.mock import MagicMock, AsyncMock +from unittest.mock import AsyncMock import xml.etree.ElementTree as ET -from aiohttp import test_utils from vulnerabilities.package_managers import ComposerVersionAPI from vulnerabilities.package_managers import MavenVersionAPI from vulnerabilities.package_managers import NugetVersionAPI +from vulnerabilities.package_managers import Version BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data") @@ -62,72 +65,270 @@ def setUpClass(cls): cls.response = json.load(f) cls.expected_versions = { - "9.5.3", - "8.7.30", - "9.3.1", - "9.5.1", - "9.5.11", - "9.5.6", - "8.7.18", - "8.7.15", - "9.4.0", - "9.5.7", - "8.7.21", - "9.5.12", - "9.5.14", - "8.7.27", - "8.7.17", - "8.7.9", - "10.4.3", - "10.0.0", - "10.1.0", - "9.5.13", - "9.5.5", - "8.7.22", - "8.7.10", - "8.7.24", - "8.7.13", - "8.7.14", - "8.7.19", - "9.5.17", - "9.3.2", - "9.5.15", - "8.7.8", - "9.3.3", - "8.7.32", - "10.4.0", - "10.4.1", - "9.5.18", - "9.1.0", - "9.5.19", - "9.5.2", - "8.7.26", - "8.7.20", - "10.2.0", - "8.7.31", - "8.7.11", - "9.2.1", - "8.7.25", - "9.5.10", - "10.2.2", - "10.4.2", - "9.5.9", - "9.2.0", - "9.3.0", - "9.5.16", - "10.3.0", - "8.7.7", - "10.4.4", - "8.7.12", - "8.7.29", - "10.2.1", - "9.5.8", - "9.5.4", - "9.5.0", - "8.7.28", - "8.7.23", - "9.0.0", - "8.7.16", + Version( + value="8.7.10", + release_date=datetime(2018, 2, 6, 10, 46, 2, tzinfo=tzlocal()), + ), + Version( + value="8.7.11", + release_date=datetime(2018, 3, 13, 12, 44, 45, tzinfo=tzlocal()), + ), + Version( + value="8.7.12", + release_date=datetime(2018, 3, 22, 11, 35, 42, tzinfo=tzlocal()), + ), + Version( + value="8.7.13", + release_date=datetime(2018, 4, 17, 8, 15, 46, tzinfo=tzlocal()), + ), + Version( + value="8.7.14", + release_date=datetime(2018, 5, 22, 13, 51, 9, 
tzinfo=tzlocal()), + ), + Version( + value="8.7.15", + release_date=datetime(2018, 5, 23, 11, 31, 21, tzinfo=tzlocal()), + ), + Version( + value="8.7.16", + release_date=datetime(2018, 6, 11, 17, 18, 14, tzinfo=tzlocal()), + ), + Version( + value="8.7.17", + release_date=datetime(2018, 7, 12, 11, 29, 19, tzinfo=tzlocal()), + ), + Version( + value="8.7.18", + release_date=datetime(2018, 7, 31, 8, 15, 29, tzinfo=tzlocal()), + ), + Version( + value="8.7.19", + release_date=datetime(2018, 8, 21, 7, 23, 21, tzinfo=tzlocal()), + ), + Version( + value="8.7.21", + release_date=datetime(2018, 12, 11, 12, 40, 12, tzinfo=tzlocal()), + ), + Version( + value="8.7.20", + release_date=datetime(2018, 10, 30, 10, 39, 51, tzinfo=tzlocal()), + ), + Version( + value="8.7.22", + release_date=datetime(2018, 12, 14, 7, 43, 50, tzinfo=tzlocal()), + ), + Version( + value="8.7.23", + release_date=datetime(2019, 1, 22, 10, 10, 2, tzinfo=tzlocal()), + ), + Version( + value="8.7.24", + release_date=datetime(2019, 1, 22, 15, 25, 55, tzinfo=tzlocal()), + ), + Version( + value="8.7.25", + release_date=datetime(2019, 5, 7, 10, 5, 55, tzinfo=tzlocal()), + ), + Version( + value="8.7.26", + release_date=datetime(2019, 5, 15, 11, 24, 12, tzinfo=tzlocal()), + ), + Version( + value="8.7.27", + release_date=datetime(2019, 6, 25, 8, 24, 21, tzinfo=tzlocal()), + ), + Version( + value="8.7.28", + release_date=datetime(2019, 10, 15, 7, 21, 52, tzinfo=tzlocal()), + ), + Version( + value="8.7.29", + release_date=datetime(2019, 10, 30, 21, 0, 45, tzinfo=tzlocal()), + ), + Version( + value="8.7.30", + release_date=datetime(2019, 12, 17, 10, 49, 17, tzinfo=tzlocal()), + ), + Version( + value="8.7.31", + release_date=datetime(2020, 2, 17, 23, 29, 16, tzinfo=tzlocal()), + ), + Version( + value="8.7.7", + release_date=datetime(2017, 9, 19, 14, 22, 53, tzinfo=tzlocal()), + ), + Version( + value="8.7.32", + release_date=datetime(2020, 3, 31, 8, 33, 3, tzinfo=tzlocal()), + ), + Version( + value="8.7.8", + release_date=datetime(2017, 10, 10, 16, 8, 44, tzinfo=tzlocal()), + ), + Version( + value="8.7.9", + release_date=datetime(2017, 12, 12, 16, 9, 50, tzinfo=tzlocal()), + ), + Version( + value="9.0.0", + release_date=datetime(2017, 12, 12, 16, 48, 22, tzinfo=tzlocal()), + ), + Version( + value="9.1.0", + release_date=datetime(2018, 1, 30, 15, 31, 12, tzinfo=tzlocal()), + ), + Version( + value="9.2.0", + release_date=datetime(2018, 4, 9, 20, 51, 35, tzinfo=tzlocal()), + ), + Version( + value="9.2.1", + release_date=datetime(2018, 5, 22, 13, 47, 11, tzinfo=tzlocal()), + ), + Version( + value="9.3.0", + release_date=datetime(2018, 6, 11, 17, 14, 33, tzinfo=tzlocal()), + ), + Version( + value="9.3.1", + release_date=datetime(2018, 7, 12, 11, 33, 12, tzinfo=tzlocal()), + ), + Version( + value="9.3.2", + release_date=datetime(2018, 7, 12, 15, 51, 49, tzinfo=tzlocal()), + ), + Version( + value="9.3.3", + release_date=datetime(2018, 7, 31, 8, 20, 17, tzinfo=tzlocal()), + ), + Version( + value="9.5.0", + release_date=datetime(2018, 10, 2, 8, 10, 33, tzinfo=tzlocal()), + ), + Version( + value="9.4.0", + release_date=datetime(2018, 9, 4, 12, 8, 20, tzinfo=tzlocal()), + ), + Version( + value="9.5.1", + release_date=datetime(2018, 10, 30, 10, 45, 30, tzinfo=tzlocal()), + ), + Version( + value="9.5.10", + release_date=datetime(2019, 10, 15, 7, 29, 55, tzinfo=tzlocal()), + ), + Version( + value="9.5.11", + release_date=datetime(2019, 10, 30, 20, 46, 49, tzinfo=tzlocal()), + ), + Version( + value="9.5.12", + release_date=datetime(2019, 12, 17, 10, 53, 45, 
tzinfo=tzlocal()), + ), + Version( + value="9.5.13", + release_date=datetime(2019, 12, 17, 14, 17, 37, tzinfo=tzlocal()), + ), + Version( + value="9.5.14", + release_date=datetime(2020, 2, 17, 23, 37, 2, tzinfo=tzlocal()), + ), + Version( + value="9.5.15", + release_date=datetime(2020, 3, 31, 8, 40, 25, tzinfo=tzlocal()), + ), + Version( + value="9.5.16", + release_date=datetime(2020, 4, 28, 9, 22, 14, tzinfo=tzlocal()), + ), + Version( + value="9.5.17", + release_date=datetime(2020, 5, 12, 10, 36, tzinfo=tzlocal()), + ), + Version( + value="9.5.18", + release_date=datetime(2020, 5, 19, 13, 10, 50, tzinfo=tzlocal()), + ), + Version( + value="9.5.2", + release_date=datetime(2018, 12, 11, 12, 42, 55, tzinfo=tzlocal()), + ), + Version( + value="9.5.19", + release_date=datetime(2020, 6, 9, 8, 44, 34, tzinfo=tzlocal()), + ), + Version( + value="9.5.3", + release_date=datetime(2018, 12, 14, 7, 28, 48, tzinfo=tzlocal()), + ), + Version( + value="9.5.4", + release_date=datetime(2019, 1, 22, 10, 12, 4, tzinfo=tzlocal()), + ), + Version( + value="9.5.5", + release_date=datetime(2019, 3, 4, 20, 25, 8, tzinfo=tzlocal()), + ), + Version( + value="9.5.6", + release_date=datetime(2019, 5, 7, 10, 16, 30, tzinfo=tzlocal()), + ), + Version( + value="9.5.7", + release_date=datetime(2019, 5, 15, 11, 41, 51, tzinfo=tzlocal()), + ), + Version( + value="9.5.8", + release_date=datetime(2019, 6, 25, 8, 28, 51, tzinfo=tzlocal()), + ), + Version( + value="9.5.9", + release_date=datetime(2019, 8, 20, 9, 33, 35, tzinfo=tzlocal()), + ), + Version( + value="10.0.0", + release_date=datetime(2019, 7, 23, 7, 6, 3, tzinfo=tzlocal()), + ), + Version( + value="10.1.0", + release_date=datetime(2019, 10, 1, 8, 18, 18, tzinfo=tzlocal()), + ), + Version( + value="10.2.0", + release_date=datetime(2019, 12, 3, 11, 16, 26, tzinfo=tzlocal()), + ), + Version( + value="10.2.1", + release_date=datetime(2019, 12, 17, 11, 0, tzinfo=tzlocal()), + ), + Version( + value="10.2.2", + release_date=datetime(2019, 12, 17, 11, 36, 14, tzinfo=tzlocal()), + ), + Version( + value="10.3.0", + release_date=datetime(2020, 2, 25, 12, 50, 9, tzinfo=tzlocal()), + ), + Version( + value="10.4.0", + release_date=datetime(2020, 4, 21, 8, 0, 15, tzinfo=tzlocal()), + ), + Version( + value="10.4.1", + release_date=datetime(2020, 4, 28, 9, 7, 54, tzinfo=tzlocal()), + ), + Version( + value="10.4.2", + release_date=datetime(2020, 5, 12, 10, 41, 40, tzinfo=tzlocal()), + ), + Version( + value="10.4.4", + release_date=datetime(2020, 6, 9, 8, 56, 30, tzinfo=tzlocal()), + ), + Version( + value="10.4.3", + release_date=datetime(2020, 5, 19, 13, 16, 31, tzinfo=tzlocal()), + ), } def test_composer_url(self): @@ -142,45 +343,50 @@ def test_extract_versions(self): def test_fetch(self): - assert self.version_api.get("typo3/cms-core") == set() + assert self.version_api.get("typo3/cms-core") == {"valid": set(), "new": set()} client_session = MockClientSession(self.response) asyncio.run(self.version_api.fetch("typo3/cms-core", client_session)) - assert self.version_api.get("typo3/cms-core") == self.expected_versions + assert self.version_api.cache["typo3/cms-core"] == self.expected_versions class TestMavenVersionAPI(TestCase): @classmethod def setUpClass(cls): cls.version_api = MavenVersionAPI() - with open(os.path.join(TEST_DATA, "maven_api", "maven-metadata.xml")) as f: - cls.response = ET.parse(f) - - with open(os.path.join(TEST_DATA, "maven_api", "maven-metadata.xml"), "rb") as f: - cls.content = f.read() + with open(os.path.join(TEST_DATA, "maven_api", "easygcm.html"), "rb") 
as f: + data = f.read() + cls.response = BeautifulSoup(data) + cls.content = data def test_artifact_url(self): - eg_comps1 = ["org.apache", "kafka"] - eg_comps2 = ["apple.msft.windows.mac.oss", "exfat-ntfs"] + eg_pkg1 = "org.apache:kafka" + eg_pkg2 = "apple.msft.windows.mac.oss:exfat-ntfs" - url1 = self.version_api.artifact_url(eg_comps1) - url2 = self.version_api.artifact_url(eg_comps2) + url1 = self.version_api.artifact_url(eg_pkg1) + url2 = self.version_api.artifact_url(eg_pkg2) - assert "https://repo1.maven.org/maven2/org/apache/kafka/maven-metadata.xml" == url1 - assert ( - "https://repo1.maven.org/maven2" - "/apple/msft/windows/mac/oss/exfat-ntfs/maven-metadata.xml" == url2 - ) + assert "https://repo1.maven.org/maven2/org/apache/kafka/" == url1 + assert "https://repo1.maven.org/maven2/apple/msft/windows/mac/oss/exfat-ntfs/" == url2 def test_extract_versions(self): - expected_versions = {"1.2.2", "1.2.3", "1.3.0"} + expected_versions = { + Version(value="1.3.0", release_date=datetime(2015, 3, 12, 15, 20, tzinfo=UTC)), + Version(value="1.2.3", release_date=datetime(2014, 12, 22, 10, 53, tzinfo=UTC)), + Version(value="1.2.2", release_date=datetime(2014, 12, 22, 10, 29, tzinfo=UTC)), + } assert expected_versions == self.version_api.extract_versions(self.response) def test_fetch(self): - assert self.version_api.get("org.apache:kafka") == set() - expected = {"1.2.3", "1.3.0", "1.2.2"} + assert self.version_api.get("org.apache:kafka") == {"new": set(), "valid": set()} + expected = { + Version(value="1.2.2", release_date=datetime(2014, 12, 22, 10, 29, tzinfo=UTC)), + Version(value="1.3.0", release_date=datetime(2015, 3, 12, 15, 20, tzinfo=UTC)), + Version(value="1.2.3", release_date=datetime(2014, 12, 22, 10, 53, tzinfo=UTC)), + } + client_session = MockClientSession(self.content) asyncio.run(self.version_api.fetch("org.apache:kafka", client_session)) - assert self.version_api.get("org.apache:kafka") == expected + assert self.version_api.cache["org.apache:kafka"] == expected class TestNugetVersionAPI(TestCase): @@ -191,20 +397,62 @@ def setUpClass(cls): cls.response = json.load(f) cls.expected_versions = { - "0.23.0", - "0.24.0", - "1.0.0", - "1.0.1", - "1.0.2", - "2.0.0", - "2.0.0-preview01", - "2.6.0", - "2.1.0", - "2.2.0", - "2.3.0", - "2.4.0", - "2.5.0", - "2.7.0", + Version( + value="1.0.0", + release_date=datetime(2018, 9, 13, 8, 16, 0, 420000, tzinfo=tzlocal()), + ), + Version( + value="1.0.1", + release_date=datetime(2020, 1, 17, 15, 31, 41, 857000, tzinfo=tzlocal()), + ), + Version( + value="1.0.2", + release_date=datetime(2020, 4, 21, 12, 24, 53, 877000, tzinfo=tzlocal()), + ), + Version( + value="2.0.0-preview01", + release_date=datetime(2018, 1, 9, 17, 12, 20, 440000, tzinfo=tzlocal()), + ), + Version( + value="2.0.0", + release_date=datetime(2018, 9, 27, 13, 33, 15, 370000, tzinfo=tzlocal()), + ), + Version( + value="2.1.0", + release_date=datetime(2018, 10, 16, 6, 59, 44, 680000, tzinfo=tzlocal()), + ), + Version( + value="2.2.0", + release_date=datetime(2018, 11, 23, 8, 13, 8, 3000, tzinfo=tzlocal()), + ), + Version( + value="2.3.0", + release_date=datetime(2019, 6, 27, 14, 27, 31, 613000, tzinfo=tzlocal()), + ), + Version( + value="2.4.0", + release_date=datetime(2020, 1, 17, 15, 11, 5, 810000, tzinfo=tzlocal()), + ), + Version( + value="2.5.0", + release_date=datetime(2020, 3, 24, 14, 22, 39, 960000, tzinfo=tzlocal()), + ), + Version( + value="2.7.0", + release_date=datetime(2020, 4, 21, 12, 27, 36, 427000, tzinfo=tzlocal()), + ), + Version( + value="2.6.0", + 
release_date=datetime(2020, 3, 27, 11, 6, 27, 500000, tzinfo=tzlocal()), + ), + Version( + value="0.24.0", + release_date=datetime(2018, 3, 30, 7, 25, 18, 393000, tzinfo=tzlocal()), + ), + Version( + value="0.23.0", + release_date=datetime(2018, 1, 17, 9, 32, 59, 283000, tzinfo=tzlocal()), + ), } def test_nuget_url(self): @@ -219,13 +467,30 @@ def test_extract_versions(self): def test_fetch(self): - assert self.version_api.get("Exfat.Ntfs") == set() + assert self.version_api.get("Exfat.Ntfs") == {"new": set(), "valid": set()} client_session = MockClientSession(self.response) asyncio.run(self.version_api.fetch("Exfat.Ntfs", client_session)) - assert self.version_api.get("Exfat.Ntfs") == self.expected_versions + assert self.version_api.get("Exfat.Ntfs") == { + "new": set(), + "valid": { + "2.0.0", + "2.1.0", + "2.0.0-preview01", + "0.24.0", + "0.23.0", + "1.0.1", + "2.2.0", + "2.4.0", + "1.0.0", + "1.0.2", + "2.3.0", + "2.7.0", + "2.5.0", + "2.6.0", + }, + } # def test_load_to_api(self): - # assert self.version_api.get("Exfat.Ntfs") == set() # mock_response = MagicMock() diff --git a/vulnerabilities/tests/test_ruby.py b/vulnerabilities/tests/test_ruby.py index 664f001ed..a666bc5ae 100644 --- a/vulnerabilities/tests/test_ruby.py +++ b/vulnerabilities/tests/test_ruby.py @@ -55,7 +55,7 @@ def setUpClass(cls): @patch( "vulnerabilities.package_managers.RubyVersionAPI.get", - return_value={"1.0.0", "1.8.0", "2.0.3"}, + return_value={"valid": {"1.0.0", "1.8.0", "2.0.3"}, "new": {}}, ) def test_process_file(self, mock_write): expected_advisories = [ diff --git a/vulnerabilities/tests/test_rust.py b/vulnerabilities/tests/test_rust.py index f563ccc5e..7a28150b8 100644 --- a/vulnerabilities/tests/test_rust.py +++ b/vulnerabilities/tests/test_rust.py @@ -30,23 +30,27 @@ from vulnerabilities.importers.rust import categorize_versions from vulnerabilities.importers.rust import get_advisory_data from vulnerabilities.importers.rust import RustDataSource +from vulnerabilities.package_managers import Version +from vulnerabilities.package_managers import CratesVersionAPI from vulnerabilities.helpers import AffectedPackage BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data/rust") -MOCKED_CRATES_API_VERSIONS = { - "bitvec": {"0.10.0", "0.12.0", "0.18.0"}, - "bumpalo": {"2.8.0", "3.0.1", "3.2.5"}, - "cbox": {"0.10.0", "0.12.0", "0.18.0"}, - "flatbuffers": {"0.3.0", "0.5.0", "0.6.5"}, - "hyper": {"0.10.0", "0.12.0", "0.13.0"}, - "byte_struct": {"0.6.1", "0.6.0", "1.0.0"}, -} +MOCKED_CRATES_API_VERSIONS = CratesVersionAPI( + cache={ + "bitvec": {Version("0.10.0"), Version("0.12.0"), Version("0.18.0")}, + "bumpalo": {Version("2.8.0"), Version("3.0.1"), Version("3.2.5")}, + "cbox": {Version("0.10.0"), Version("0.12.0"), Version("0.18.0")}, + "flatbuffers": {Version("0.3.0"), Version("0.5.0"), Version("0.6.5")}, + "hyper": {Version("0.10.0"), Version("0.12.0"), Version("0.13.0")}, + "byte_struct": {Version("0.6.1"), Version("0.6.0"), Version("1.0.0")}, + } +) def test_categorize_versions(): - flatbuffers_versions = MOCKED_CRATES_API_VERSIONS["flatbuffers"] + flatbuffers_versions = MOCKED_CRATES_API_VERSIONS.get("flatbuffers")["valid"] unaffected_ranges = [VersionSpecifier.from_scheme_version_spec_string("semver", "< 0.4.0")] affected_ranges = [ diff --git a/vulnerabilities/tests/test_safety_db.py b/vulnerabilities/tests/test_safety_db.py index 20bbc87b2..ddc78f9a7 100644 --- a/vulnerabilities/tests/test_safety_db.py +++ b/vulnerabilities/tests/test_safety_db.py @@ -22,16 
+22,16 @@ # Visit https://github.com/nexB/vulnerablecode/ for support and download. import json import os -from unittest.mock import patch from unittest import TestCase from packageurl import PackageURL -from vulnerabilities.importers.safety_db import PypiVersionAPI from vulnerabilities.importers.safety_db import categorize_versions from vulnerabilities.importers.safety_db import SafetyDbDataSource from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import Reference +from vulnerabilities.package_managers import PypiVersionAPI +from vulnerabilities.package_managers import Version from vulnerabilities.helpers import AffectedPackage BASE_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -39,9 +39,16 @@ MOCK_VERSION_API = PypiVersionAPI( cache={ - "ampache": {"2.0", "5.2.1"}, - "django": {"1.8", "1.4.19", "1.4.22", "1.5.1", "1.6.9", "1.8.14"}, - "zulip": {"2.0", "2.1.1", "2.1.2", "2.1.3"}, + "ampache": {Version("2.0"), Version("5.2.1")}, + "django": { + Version("1.8"), + Version("1.4.19"), + Version("1.4.22"), + Version("1.5.1"), + Version("1.6.9"), + Version("1.8.14"), + }, + "zulip": {Version("2.0"), Version("2.1.1"), Version("2.1.2"), Version("2.1.3")}, } ) From 0dccf223b6fd92bf2db2a2387c27f4f8a0068ff9 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sun, 13 Jun 2021 16:30:14 +0530 Subject: [PATCH 17/27] Use class for returning versions partitioned by cutoff time Signed-off-by: Shivam Sandbhor --- pytest.ini | 4 +++- vulnerabilities/importers/apache_httpd.py | 5 ++--- vulnerabilities/importers/apache_kafka.py | 4 ++-- vulnerabilities/importers/apache_tomcat.py | 6 +++-- vulnerabilities/importers/elixir_security.py | 2 +- vulnerabilities/importers/github.py | 2 +- vulnerabilities/importers/istio.py | 2 +- vulnerabilities/importers/nginx.py | 10 +++++---- vulnerabilities/importers/npm.py | 2 +- vulnerabilities/importers/ruby.py | 5 +---- vulnerabilities/importers/rust.py | 2 +- vulnerabilities/package_managers.py | 17 ++++++++++---- .../tests/test_package_managers.py | 22 +++++++++---------- vulnerabilities/tests/test_ruby.py | 7 +++--- vulnerabilities/tests/test_rust.py | 2 +- 15 files changed, 52 insertions(+), 40 deletions(-) diff --git a/pytest.ini b/pytest.ini index 8a0f512df..d64e30334 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,2 +1,4 @@ [pytest] -DJANGO_SETTINGS_MODULE = vulnerablecode.settings \ No newline at end of file +DJANGO_SETTINGS_MODULE = vulnerablecode.settings +markers = + webtest \ No newline at end of file diff --git a/vulnerabilities/importers/apache_httpd.py b/vulnerabilities/importers/apache_httpd.py index 09c64f34c..ffb5288bf 100644 --- a/vulnerabilities/importers/apache_httpd.py +++ b/vulnerabilities/importers/apache_httpd.py @@ -37,7 +37,6 @@ from vulnerabilities.data_source import VulnerabilitySeverity from vulnerabilities.package_managers import GitHubTagsAPI from vulnerabilities.severity_systems import scoring_systems -from vulnerabilities.helpers import create_etag from vulnerabilities.helpers import nearest_patched_package @@ -106,7 +105,7 @@ def to_advisory(self, data): fixed_packages.extend( [ PackageURL(type="apache", name="httpd", version=version) - for version in self.version_api.get("apache/httpd")["valid"] + for version in self.version_api.get("apache/httpd").valid_versions if MavenVersion(version) in version_range ] ) @@ -115,7 +114,7 @@ def to_advisory(self, data): affected_packages.extend( [ PackageURL(type="apache", name="httpd", version=version) - for version in self.version_api.get("apache/httpd")["valid"] + 
for version in self.version_api.get("apache/httpd").valid_versions if MavenVersion(version) in version_range ] ) diff --git a/vulnerabilities/importers/apache_kafka.py b/vulnerabilities/importers/apache_kafka.py index d0e8f7d7e..3f30d340a 100644 --- a/vulnerabilities/importers/apache_kafka.py +++ b/vulnerabilities/importers/apache_kafka.py @@ -72,7 +72,7 @@ def to_advisory(self, advisory_page): fixed_packages = [ PackageURL(type="apache", name="kafka", version=version) - for version in self.version_api.get("apache/kafka")["valid"] + for version in self.version_api.get("apache/kafka").valid_versions if any( [ MavenVersion(version) in version_range @@ -83,7 +83,7 @@ def to_advisory(self, advisory_page): affected_packages = [ PackageURL(type="apache", name="kafka", version=version) - for version in self.version_api.get("apache/kafka")["valid"] + for version in self.version_api.get("apache/kafka").valid_versions if any( [ MavenVersion(version) in version_range diff --git a/vulnerabilities/importers/apache_tomcat.py b/vulnerabilities/importers/apache_tomcat.py index 3326a3bf9..6db5a7712 100644 --- a/vulnerabilities/importers/apache_tomcat.py +++ b/vulnerabilities/importers/apache_tomcat.py @@ -63,7 +63,7 @@ def updated_advisories(self): def fetch_pages(self): tomcat_major_versions = { - i[0] for i in self.version_api.get("org.apache.tomcat:tomcat")["valid"] + i[0] for i in self.version_api.get("org.apache.tomcat:tomcat").valid_versions } for version in tomcat_major_versions: page_url = self.base_url.format(version) @@ -104,7 +104,9 @@ def to_advisories(self, apache_tomcat_advisory_html): PackageURL( type="maven", namespace="apache", name="tomcat", version=version ) - for version in self.version_api.get("org.apache.tomcat:tomcat")["valid"] + for version in self.version_api.get( + "org.apache.tomcat:tomcat" + ).valid_versions if MavenVersion(version) in version_range ] ) diff --git a/vulnerabilities/importers/elixir_security.py b/vulnerabilities/importers/elixir_security.py index a6074fba1..5f352beca 100644 --- a/vulnerabilities/importers/elixir_security.py +++ b/vulnerabilities/importers/elixir_security.py @@ -74,7 +74,7 @@ def get_versions_for_pkg_from_range_list(self, version_range_list, pkg_name): safe_pkg_versions = [] vuln_pkg_versions = [] - all_version_list = self.pkg_manager_api.get(pkg_name)["valid"] + all_version_list = self.pkg_manager_api.get(pkg_name).valid_versions if not version_range_list: return [], all_version_list version_ranges = [ diff --git a/vulnerabilities/importers/github.py b/vulnerabilities/importers/github.py index a10ceedb3..f1461922b 100644 --- a/vulnerabilities/importers/github.py +++ b/vulnerabilities/importers/github.py @@ -272,7 +272,7 @@ def process_response(self) -> List[Advisory]: aff_vers, unaff_vers = self.categorize_versions( self.version_api.package_type, aff_range, - self.version_api.get(name, until=cutoff_time)["valid"], + self.version_api.get(name, until=cutoff_time).valid_versions, ) affected_purls = [ PackageURL(name=pkg_name, namespace=ns, version=version, type=pkg_type) diff --git a/vulnerabilities/importers/istio.py b/vulnerabilities/importers/istio.py index c4883011d..ca2717040 100644 --- a/vulnerabilities/importers/istio.py +++ b/vulnerabilities/importers/istio.py @@ -67,7 +67,7 @@ def get_pkg_versions_from_ranges(self, version_range_list, release_date): """Takes a list of version ranges(affected) of a package as parameter and returns a tuple of safe package versions and vulnerable package versions""" - all_version = 
self.version_api.get("istio/istio", release_date)["valid"] + all_version = self.version_api.get("istio/istio", release_date).valid_versions safe_pkg_versions = [] vuln_pkg_versions = [] version_ranges = [ diff --git a/vulnerabilities/importers/nginx.py b/vulnerabilities/importers/nginx.py index 13a572f3e..77913398a 100644 --- a/vulnerabilities/importers/nginx.py +++ b/vulnerabilities/importers/nginx.py @@ -53,8 +53,10 @@ def set_api(self): # For some reason nginx tags it's releases are in the form of `release-1.2.3` # Chop off the `release-` part here. - for index, version in enumerate(self.version_api.cache["nginx/nginx"]["valid"]): - self.version_api.cache["nginx/nginx"]["valid"][index] = version.replace("release-", "") + for index, version in enumerate(self.version_api.cache["nginx/nginx"].valid_versions): + self.version_api.cache["nginx/nginx"].valid_versions[index] = version.replace( + "release-", "" + ) def updated_advisories(self): advisories = [] @@ -135,7 +137,7 @@ def extract_fixed_pkgs(self, vuln_info): ) valid_versions = find_valid_versions( - self.version_api.get("nginx/nginx")["valid"], version_ranges + self.version_api.get("nginx/nginx").valid_versions, version_ranges ) return [ @@ -172,7 +174,7 @@ def extract_vuln_pkgs(self, vuln_info): ) valid_versions = find_valid_versions( - self.version_api.get("nginx/nginx")["valid"], version_ranges + self.version_api.get("nginx/nginx").valid_versions, version_ranges ) qualifiers = {} if windows_only: diff --git a/vulnerabilities/importers/npm.py b/vulnerabilities/importers/npm.py index e66f9e5d4..e8bdf84ba 100644 --- a/vulnerabilities/importers/npm.py +++ b/vulnerabilities/importers/npm.py @@ -88,7 +88,7 @@ def process_file(self, file) -> List[Advisory]: publish_date = parse(record["updated_at"]) publish_date.replace(tzinfo=pytz.UTC) - all_versions = self.versions.get(package_name, until=publish_date)["valid"] + all_versions = self.versions.get(package_name, until=publish_date).valid_versions aff_range = record.get("vulnerable_versions") if not aff_range: aff_range = "" diff --git a/vulnerabilities/importers/ruby.py b/vulnerabilities/importers/ruby.py index 00f76765f..883c7c6b3 100644 --- a/vulnerabilities/importers/ruby.py +++ b/vulnerabilities/importers/ruby.py @@ -94,10 +94,7 @@ def process_file(self, path) -> List[Advisory]: if not getattr(self, "pkg_manager_api", None): self.pkg_manager_api = RubyVersionAPI() - all_vers = self.pkg_manager_api.get(package_name, until=publish_time)["valid"] - print( - f"Ignored {len(self.pkg_manager_api.get(package_name,until=publish_time)['new'])} versions" - ) + all_vers = self.pkg_manager_api.get(package_name, until=publish_time).valid_versions safe_versions, affected_versions = self.categorize_versions(all_vers, safe_version_ranges) impacted_purls = [ diff --git a/vulnerabilities/importers/rust.py b/vulnerabilities/importers/rust.py index 059c57ba7..2bcc10f22 100644 --- a/vulnerabilities/importers/rust.py +++ b/vulnerabilities/importers/rust.py @@ -98,7 +98,7 @@ def _load_advisory(self, path: str) -> Optional[Advisory]: references.append(Reference(url=advisory["url"])) publish_date = parse(advisory["date"]).replace(tzinfo=pytz.UTC) - all_versions = self.crates_api.get(crate_name, publish_date)["valid"] + all_versions = self.crates_api.get(crate_name, publish_date).valid_versions # FIXME: Avoid wildcard version ranges for now. 
# See https://github.com/RustSec/advisory-db/discussions/831 diff --git a/vulnerabilities/package_managers.py b/vulnerabilities/package_managers.py index 5b9735435..5e929cbc8 100644 --- a/vulnerabilities/package_managers.py +++ b/vulnerabilities/package_managers.py @@ -27,6 +27,7 @@ from dateutil import parser from json import JSONDecodeError from typing import Mapping +from typing import Tuple from typing import Set from datetime import datetime @@ -41,19 +42,27 @@ class Version: release_date: datetime = None +@dataclasses.dataclass(frozen=True) +class VersionResponse: + valid_versions: Set[str] = dataclasses.field(default_factory=set) + newer_versions: Set[str] = dataclasses.field(default_factory=set) + + +@dataclasses.dataclass(frozen=True) class VersionAPI: def __init__(self, cache: Mapping[str, Set[str]] = None): self.cache = cache or {} def get(self, package_name, until=None) -> Set[str]: - versions = {"new": set(), "valid": set()} + new_versions = set() + valid_versions = set() for version in self.cache.get(package_name, set()): if until and version.release_date and version.release_date > until: - versions["new"].add(version.value) + new_versions.add(version.value) continue - versions["valid"].add(version.value) + valid_versions.add(version.value) - return versions + return VersionResponse(valid_versions=valid_versions, newer_versions=new_versions) def client_session(): diff --git a/vulnerabilities/tests/test_package_managers.py b/vulnerabilities/tests/test_package_managers.py index 5a33f3526..994e4fbca 100644 --- a/vulnerabilities/tests/test_package_managers.py +++ b/vulnerabilities/tests/test_package_managers.py @@ -21,20 +21,20 @@ # Visit https://github.com/nexB/vulnerablecode/ for support and download. import asyncio +import json +import os from datetime import datetime from bs4 import BeautifulSoup from dateutil.tz import tzlocal from pytz import UTC -import os -import json from unittest import TestCase from unittest.mock import AsyncMock -import xml.etree.ElementTree as ET from vulnerabilities.package_managers import ComposerVersionAPI from vulnerabilities.package_managers import MavenVersionAPI from vulnerabilities.package_managers import NugetVersionAPI from vulnerabilities.package_managers import Version +from vulnerabilities.package_managers import VersionResponse BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data") @@ -343,7 +343,7 @@ def test_extract_versions(self): def test_fetch(self): - assert self.version_api.get("typo3/cms-core") == {"valid": set(), "new": set()} + assert self.version_api.get("typo3/cms-core") == VersionResponse() client_session = MockClientSession(self.response) asyncio.run(self.version_api.fetch("typo3/cms-core", client_session)) assert self.version_api.cache["typo3/cms-core"] == self.expected_versions @@ -355,7 +355,7 @@ def setUpClass(cls): cls.version_api = MavenVersionAPI() with open(os.path.join(TEST_DATA, "maven_api", "easygcm.html"), "rb") as f: data = f.read() - cls.response = BeautifulSoup(data) + cls.response = BeautifulSoup(data, features="lxml") cls.content = data def test_artifact_url(self): @@ -377,7 +377,7 @@ def test_extract_versions(self): assert expected_versions == self.version_api.extract_versions(self.response) def test_fetch(self): - assert self.version_api.get("org.apache:kafka") == {"new": set(), "valid": set()} + assert self.version_api.get("org.apache:kafka") == VersionResponse() expected = { Version(value="1.2.2", release_date=datetime(2014, 12, 22, 10, 29, tzinfo=UTC)), 
Version(value="1.3.0", release_date=datetime(2015, 3, 12, 15, 20, tzinfo=UTC)), @@ -467,12 +467,12 @@ def test_extract_versions(self): def test_fetch(self): - assert self.version_api.get("Exfat.Ntfs") == {"new": set(), "valid": set()} + assert self.version_api.get("Exfat.Ntfs") == VersionResponse() client_session = MockClientSession(self.response) asyncio.run(self.version_api.fetch("Exfat.Ntfs", client_session)) - assert self.version_api.get("Exfat.Ntfs") == { - "new": set(), - "valid": { + assert self.version_api.get("Exfat.Ntfs") == VersionResponse( + newer_versions=set(), + valid_versions={ "2.0.0", "2.1.0", "2.0.0-preview01", @@ -488,7 +488,7 @@ def test_fetch(self): "2.5.0", "2.6.0", }, - } + ) # def test_load_to_api(self): # assert self.version_api.get("Exfat.Ntfs") == set() diff --git a/vulnerabilities/tests/test_ruby.py b/vulnerabilities/tests/test_ruby.py index a666bc5ae..affcf8fa1 100644 --- a/vulnerabilities/tests/test_ruby.py +++ b/vulnerabilities/tests/test_ruby.py @@ -24,15 +24,14 @@ import pathlib from unittest.mock import patch from unittest import TestCase -from collections import OrderedDict from packageurl import PackageURL from vulnerabilities.importers.ruby import RubyDataSource -from vulnerabilities.data_source import GitDataSourceConfiguration from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import Reference from vulnerabilities.package_managers import RubyVersionAPI +from vulnerabilities.package_managers import VersionResponse from vulnerabilities.helpers import AffectedPackage BASE_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -55,7 +54,9 @@ def setUpClass(cls): @patch( "vulnerabilities.package_managers.RubyVersionAPI.get", - return_value={"valid": {"1.0.0", "1.8.0", "2.0.3"}, "new": {}}, + return_value=VersionResponse( + valid_versions={"1.0.0", "1.8.0", "2.0.3"}, newer_versions=set() + ), ) def test_process_file(self, mock_write): expected_advisories = [ diff --git a/vulnerabilities/tests/test_rust.py b/vulnerabilities/tests/test_rust.py index 7a28150b8..9ec34c7b1 100644 --- a/vulnerabilities/tests/test_rust.py +++ b/vulnerabilities/tests/test_rust.py @@ -50,7 +50,7 @@ def test_categorize_versions(): - flatbuffers_versions = MOCKED_CRATES_API_VERSIONS.get("flatbuffers")["valid"] + flatbuffers_versions = MOCKED_CRATES_API_VERSIONS.get("flatbuffers").valid_versions unaffected_ranges = [VersionSpecifier.from_scheme_version_spec_string("semver", "< 0.4.0")] affected_ranges = [ From f7dc3e64eb5615f278a453e4610dc61576d5ec84 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sat, 3 Jul 2021 11:12:42 +0530 Subject: [PATCH 18/27] Make review changes Signed-off-by: Shivam Sandbhor --- vulnerabilities/importers/apache_tomcat.py | 5 +- vulnerabilities/importers/github.py | 7 +- vulnerabilities/importers/safety_db.py | 2 +- vulnerabilities/package_managers.py | 103 ++++++++---------- .../tests/test_package_managers.py | 40 +++---- 5 files changed, 72 insertions(+), 85 deletions(-) diff --git a/vulnerabilities/importers/apache_tomcat.py b/vulnerabilities/importers/apache_tomcat.py index 6db5a7712..afcc85fb8 100644 --- a/vulnerabilities/importers/apache_tomcat.py +++ b/vulnerabilities/importers/apache_tomcat.py @@ -29,6 +29,7 @@ from bs4 import BeautifulSoup from univers.version_specifier import VersionSpecifier from univers.versions import MavenVersion +from univers.versions import SemverVersion from packageurl import PackageURL from vulnerabilities.data_source import Advisory @@ -62,8 +63,10 @@ def updated_advisories(self): return 
self.batch_advisories(advisories) def fetch_pages(self): + # Here Semver is used because it has notion of major, minor versions. tomcat_major_versions = { - i[0] for i in self.version_api.get("org.apache.tomcat:tomcat").valid_versions + SemverVersion(i).value.major + for i in self.version_api.get("org.apache.tomcat:tomcat").valid_versions } for version in tomcat_major_versions: page_url = self.base_url.format(version) diff --git a/vulnerabilities/importers/github.py b/vulnerabilities/importers/github.py index f1461922b..b1cee1fda 100644 --- a/vulnerabilities/importers/github.py +++ b/vulnerabilities/importers/github.py @@ -23,9 +23,7 @@ import asyncio import os import dataclasses -import json -from xml.etree.ElementTree import parse -from dateutil import parser +from dateutil import parser as dateparser from typing import Set from typing import Tuple from typing import List @@ -36,7 +34,6 @@ from packageurl import PackageURL from univers.version_specifier import VersionSpecifier from univers.versions import version_class_by_package_type -from univers.versions import InvalidVersion from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import DataSource @@ -263,7 +260,7 @@ def process_response(self) -> List[Advisory]: for resp_page in self.advisories[ecosystem]: for adv in resp_page["data"]["securityVulnerabilities"]["edges"]: name = adv["node"]["package"]["name"] - cutoff_time = parser.parse(adv["node"]["advisory"]["publishedAt"]) + cutoff_time = dateparser.parse(adv["node"]["advisory"]["publishedAt"]) affected_purls = [] unaffected_purls = [] if self.process_name(ecosystem, name): diff --git a/vulnerabilities/importers/safety_db.py b/vulnerabilities/importers/safety_db.py index 9e0305439..e4f5d3cd2 100755 --- a/vulnerabilities/importers/safety_db.py +++ b/vulnerabilities/importers/safety_db.py @@ -86,7 +86,7 @@ def updated_advisories(self) -> Set[Advisory]: # Skip it. 
The 'cumin' entry is wrong continue - all_package_versions = self.versions.get(package_name) + all_package_versions = self.versions.get(package_name).valid_versions if not len(all_package_versions): # PyPi does not have data about this package, we skip these continue diff --git a/vulnerabilities/package_managers.py b/vulnerabilities/package_managers.py index 5e929cbc8..399ac3c4a 100644 --- a/vulnerabilities/package_managers.py +++ b/vulnerabilities/package_managers.py @@ -23,12 +23,13 @@ import asyncio import dataclasses import pytz +import xml.etree.ElementTree as ET from bs4 import BeautifulSoup -from dateutil import parser +from dateutil import parser as dateparser from json import JSONDecodeError from typing import Mapping -from typing import Tuple from typing import Set +from typing import List from datetime import datetime from aiohttp import ClientSession @@ -122,13 +123,19 @@ async def fetch(self, pkg, session): try: response = await session.request(method="GET", url=url) response = await response.json() - for version in response["releases"]: - if response["releases"][version]: + for version, download_items in response["releases"].items(): + if download_items: + latest_download_item = max( + download_items, + key=lambda download_item: dateparser.parse( + download_item["upload_time_iso_8601"] + ), + ) versions.add( Version( value=version, - release_date=parser.parse( - response["releases"][version][-1]["upload_time_iso_8601"] + release_date=dateparser.parse( + latest_download_item["upload_time_iso_8601"] ), ) ) @@ -157,7 +164,8 @@ async def fetch(self, pkg, session): for version_info in response["versions"]: versions.add( Version( - value=version_info["num"], release_date=parser.parse(version_info["updated_at"]) + value=version_info["num"], + release_date=dateparser.parse(version_info["updated_at"]), ) ) @@ -183,7 +191,8 @@ async def fetch(self, pkg, session): for release in response: versions.add( Version( - value=release["number"], release_date=parser.parse(release["created_at"]) + value=release["number"], + release_date=dateparser.parse(release["created_at"]), ) ) except (ClientResponseError, JSONDecodeError): @@ -211,7 +220,7 @@ async def fetch(self, pkg, session): for version in response.get("versions", []): release_date = response.get("time", {}).get(version) if release_date: - release_date = parser.parse(release_date) + release_date = dateparser.parse(release_date) versions.add(Version(value=version, release_date=release_date)) else: versions.add(Version(value=version, release_date=None)) @@ -265,15 +274,12 @@ class MavenVersionAPI(VersionAPI): async def load_api(self, pkg_set): async with client_session() as session: await asyncio.gather( - *[ - self.fetch(pkg, session) - for pkg in pkg_set - if pkg not in self.cache and "camel" not in pkg - ] + *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] ) async def fetch(self, pkg, session) -> None: - endpoint = self.artifact_url(pkg) + artifact_comps = pkg.split(":") + endpoint = self.artifact_url(artifact_comps) try: resp = await session.request(method="GET", url=endpoint) resp = await resp.read() @@ -282,15 +288,11 @@ async def fetch(self, pkg, session) -> None: self.cache[pkg] = set() return - soup = BeautifulSoup(resp, features="lxml") - try: - self.cache[pkg] = self.extract_versions(soup) - except: - raise + xml_resp = ET.ElementTree(ET.fromstring(resp.decode("utf-8"))) + self.cache[pkg] = self.extract_versions(xml_resp) @staticmethod - def artifact_url(pkg: str) -> str: - artifact_comps = pkg.split(":") + 
def artifact_url(artifact_comps: List[str]) -> str: base_url = "https://repo1.maven.org/maven2/{}" try: group_id, artifact_id = artifact_comps @@ -306,35 +308,19 @@ def artifact_url(pkg: str) -> str: raise group_url = group_id.replace(".", "/") - suffix = group_url + "/" + artifact_id + "/" + suffix = group_url + "/" + artifact_id + "/" + "maven-metadata.xml" endpoint = base_url.format(suffix) return endpoint @staticmethod - def extract_versions(soup: BeautifulSoup) -> Set[Version]: - pre_tag = soup.find("pre") - prev_tag = None - versions = set() - for i, atag in enumerate(pre_tag): - if atag.name == "a" and i != 0: - prev_tag = atag - elif prev_tag: - text_groups = atag.split() - if text_groups[-1] != "-": - break - date = " ".join(text_groups[:-1]) - if date != "-": - versions.add( - Version( - value=prev_tag.text[:-1], - release_date=parser.parse(date).replace(tzinfo=pytz.UTC), - ) - ) - else: - versions.add(Version(value=prev_tag.text[:-1], release_date=None)) + def extract_versions(xml_response: ET.ElementTree) -> Set[str]: + all_versions = set() + for child in xml_response.getroot().iter(): + if child.tag == "version": + all_versions.add(Version(child.text)) - return versions + return all_versions class NugetVersionAPI(VersionAPI): @@ -368,7 +354,7 @@ def extract_versions(resp: dict) -> Set[str]: all_versions.add( Version( value=entry["catalogEntry"]["version"], - release_date=parser.parse(entry["catalogEntry"]["published"]), + release_date=dateparser.parse(entry["catalogEntry"]["published"]), ) ) # FIXME: json response for YamlDotNet.Signed triggers this exception. @@ -412,15 +398,16 @@ def extract_versions(resp: dict, pkg_name: str) -> Set[str]: for version in resp["packages"][pkg_name]: if "dev" in version: continue + + # This if statement ensures, that all_versions contains only released versions + # See https://github.com/composer/composer/blob/44a4429978d1b3c6223277b875762b2930e83e8c/doc/articles/versions.md#tags # nopep8 + # for explanation of removing 'v' all_versions.add( Version( - value=version.replace("v", ""), - release_date=parser.parse(resp["packages"][pkg_name][version]["time"]), + value=version.lstrip("v"), + release_date=dateparser.parse(resp["packages"][pkg_name][version]["time"]), ) ) - # This if statement ensures, that all_versions contains only released versions - # See https://github.com/composer/composer/blob/44a4429978d1b3c6223277b875762b2930e83e8c/doc/articles/versions.md#tags # nopep8 - # for explanation of removing 'v' return all_versions @@ -440,8 +427,10 @@ async def load_api(self, repo_set): ) async def fetch(self, owner_repo: str, endpoint=None) -> None: - # owner_repo is a string of format "{repo_owner}/{repo_name}" - # Example value of owner_repo = "nexB/scancode-toolkit" + """ + owner_repo is a string of format "{repo_owner}/{repo_name}" + Example value of owner_repo = "nexB/scancode-toolkit" + """ if owner_repo not in self.cache: self.cache[owner_repo] = set() @@ -456,12 +445,13 @@ async def fetch(self, owner_repo: str, endpoint=None) -> None: version = None for links in release_entry.find_all("a"): if f"/{owner_repo}/releases/tag/" in links["href"].lower(): - version = links["href"].split("/")[-1] + prefix, _slash, version = links["href"].rpartition("/") + version = version.lstrip("v") break release_date = release_entry.find("relative-time")["datetime"] self.cache[owner_repo].add( - Version(value=version, release_date=parser.parse(release_date)) + Version(value=version, release_date=dateparser.parse(release_date)) ) url = None @@ -492,7 +482,8 @@ 
async def fetch(self, pkg, session): for release in response["releases"]: versions.add( Version( - value=release["version"], release_date=parser.parse(release["inserted_at"]) + value=release["version"], + release_date=dateparser.parse(release["inserted_at"]), ) ) except (ClientResponseError, JSONDecodeError): diff --git a/vulnerabilities/tests/test_package_managers.py b/vulnerabilities/tests/test_package_managers.py index 994e4fbca..28c7cb3ab 100644 --- a/vulnerabilities/tests/test_package_managers.py +++ b/vulnerabilities/tests/test_package_managers.py @@ -23,6 +23,7 @@ import asyncio import json import os +import xml.etree.ElementTree as ET from datetime import datetime from bs4 import BeautifulSoup from dateutil.tz import tzlocal @@ -353,40 +354,35 @@ class TestMavenVersionAPI(TestCase): @classmethod def setUpClass(cls): cls.version_api = MavenVersionAPI() - with open(os.path.join(TEST_DATA, "maven_api", "easygcm.html"), "rb") as f: - data = f.read() - cls.response = BeautifulSoup(data, features="lxml") - cls.content = data + with open(os.path.join(TEST_DATA, "maven_api", "maven-metadata.xml")) as f: + cls.response = ET.parse(f) + + with open(os.path.join(TEST_DATA, "maven_api", "maven-metadata.xml"), "rb") as f: + cls.content = f.read() def test_artifact_url(self): - eg_pkg1 = "org.apache:kafka" - eg_pkg2 = "apple.msft.windows.mac.oss:exfat-ntfs" + eg_comps1 = ["org.apache", "kafka"] + eg_comps2 = ["apple.msft.windows.mac.oss", "exfat-ntfs"] - url1 = self.version_api.artifact_url(eg_pkg1) - url2 = self.version_api.artifact_url(eg_pkg2) + url1 = self.version_api.artifact_url(eg_comps1) + url2 = self.version_api.artifact_url(eg_comps2) - assert "https://repo1.maven.org/maven2/org/apache/kafka/" == url1 - assert "https://repo1.maven.org/maven2/apple/msft/windows/mac/oss/exfat-ntfs/" == url2 + assert "https://repo1.maven.org/maven2/org/apache/kafka/maven-metadata.xml" == url1 + assert ( + "https://repo1.maven.org/maven2" + "/apple/msft/windows/mac/oss/exfat-ntfs/maven-metadata.xml" == url2 + ) def test_extract_versions(self): - expected_versions = { - Version(value="1.3.0", release_date=datetime(2015, 3, 12, 15, 20, tzinfo=UTC)), - Version(value="1.2.3", release_date=datetime(2014, 12, 22, 10, 53, tzinfo=UTC)), - Version(value="1.2.2", release_date=datetime(2014, 12, 22, 10, 29, tzinfo=UTC)), - } + expected_versions = {Version("1.2.2"), Version("1.2.3"), Version("1.3.0")} assert expected_versions == self.version_api.extract_versions(self.response) def test_fetch(self): assert self.version_api.get("org.apache:kafka") == VersionResponse() - expected = { - Version(value="1.2.2", release_date=datetime(2014, 12, 22, 10, 29, tzinfo=UTC)), - Version(value="1.3.0", release_date=datetime(2015, 3, 12, 15, 20, tzinfo=UTC)), - Version(value="1.2.3", release_date=datetime(2014, 12, 22, 10, 53, tzinfo=UTC)), - } - + expected = {"1.2.2", "1.2.3", "1.3.0"} client_session = MockClientSession(self.content) asyncio.run(self.version_api.fetch("org.apache:kafka", client_session)) - assert self.version_api.cache["org.apache:kafka"] == expected + assert self.version_api.get("org.apache:kafka") == VersionResponse(valid_versions=expected) class TestNugetVersionAPI(TestCase): From 72a84bc1cbe33651e11e746580caf1b9d78f68ec Mon Sep 17 00:00:00 2001 From: Hritik Vijay Date: Sun, 4 Jul 2021 02:07:53 +0530 Subject: [PATCH 19/27] Refactor package_manangers Use uniform names and prototypes for `fetch` function. Remove unnecessary checks. 
Re-use sessions Signed-off-by: Hritik Vijay --- vulnerabilities/package_managers.py | 36 ++++++++++------------------- 1 file changed, 12 insertions(+), 24 deletions(-) diff --git a/vulnerabilities/package_managers.py b/vulnerabilities/package_managers.py index 399ac3c4a..fa8af0c14 100644 --- a/vulnerabilities/package_managers.py +++ b/vulnerabilities/package_managers.py @@ -77,12 +77,10 @@ class LaunchpadVersionAPI(VersionAPI): async def load_api(self, pkg_set): async with client_session() as session: await asyncio.gather( - *[self.set_api(pkg, session) for pkg in pkg_set if pkg not in self.cache] + *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] ) - async def set_api(self, pkg, session): - if pkg in self.cache: - return + async def fetch(self, pkg, session): url = ( "https://api.launchpad.net/1.0/ubuntu/+archive/" "primary?ws.op=getPublishedSources&" @@ -242,12 +240,10 @@ async def load_api(self, pkg_set): raise_for_status=True, headers={"Connection": "keep-alive"} ) as session: await asyncio.gather( - *[self.set_api(pkg, session) for pkg in pkg_set if pkg not in self.cache] + *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] ) - async def set_api(self, pkg, session, retry_count=5): - if pkg in self.cache: - return + async def fetch(self, pkg, session, retry_count=5): url = "https://sources.debian.org/api/src/{}".format(pkg) try: all_versions = set() @@ -415,30 +411,22 @@ class GitHubTagsAPI(VersionAPI): package_type = "github" - async def load_api(self, repo_set): - session = client_session() - async with session as session: + async def load_api(self, pkg_set): + async with client_session() as session: await asyncio.gather( - *[ - self.fetch(owner_repo.lower()) - for owner_repo in repo_set - if owner_repo.lower() not in self.cache - ] + *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] ) - async def fetch(self, owner_repo: str, endpoint=None) -> None: + async def fetch(self, owner_repo: str, session) -> None: """ owner_repo is a string of format "{repo_owner}/{repo_name}" Example value of owner_repo = "nexB/scancode-toolkit" """ - if owner_repo not in self.cache: - self.cache[owner_repo] = set() + self.cache[owner_repo] = set() + endpoint = f"https://github.com/{owner_repo}/tags" - if not endpoint: - endpoint = f"https://github.com/{owner_repo}/tags" - async with client_session() as session: - resp = await session.get(endpoint) - resp = await resp.read() + resp = await session.get(endpoint) + resp = await resp.read() soup = BeautifulSoup(resp, features="lxml") for release_entry in soup.find_all("div", {"class": "commit"}): From bece29b8559536a29c8ff0b247a6ca6e56b91599 Mon Sep 17 00:00:00 2001 From: Hritik Vijay Date: Sun, 4 Jul 2021 02:33:40 +0530 Subject: [PATCH 20/27] Move load_api to parent class and refactor imports The code for `load_api` was repetitive and used in all subclasses of VersionAPI, it is better suited in the parent class. `fetch` method is also now consistent and defined as an abstract method in the base class. 
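In outline, the shared behaviour now sits in the base class roughly as below
(a condensed sketch of the code in this patch, not a verbatim copy; the real
module wraps the session in a client_session() helper):

    import asyncio
    from aiohttp import ClientSession

    class VersionAPI:
        def __init__(self, cache=None):
            self.cache = cache or {}

        async def load_api(self, pkg_set):
            # One shared HTTP session; fetch only packages not cached yet.
            async with ClientSession(raise_for_status=True) as session:
                await asyncio.gather(
                    *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache]
                )

        async def fetch(self, pkg, session):
            # Subclasses override this and fill self.cache[pkg] with Version entries.
            raise NotImplementedError
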
Python ABC is not used as different implementations of `load_api` and `fetch` are allowed as done in DebianVersionAPI Signed-off-by: Hritik Vijay --- vulnerabilities/package_managers.py | 82 ++++++----------------------- 1 file changed, 16 insertions(+), 66 deletions(-) diff --git a/vulnerabilities/package_managers.py b/vulnerabilities/package_managers.py index fa8af0c14..ea52106d2 100644 --- a/vulnerabilities/package_managers.py +++ b/vulnerabilities/package_managers.py @@ -19,22 +19,20 @@ # for any legal advice. # VulnerableCode is a free software code scanning tool from nexB Inc. and others. # Visit https://github.com/nexB/vulnerablecode/ for support and download. - import asyncio import dataclasses -import pytz import xml.etree.ElementTree as ET -from bs4 import BeautifulSoup -from dateutil import parser as dateparser +from datetime import datetime from json import JSONDecodeError +from typing import List from typing import Mapping from typing import Set -from typing import List -from datetime import datetime from aiohttp import ClientSession from aiohttp.client_exceptions import ClientResponseError from aiohttp.client_exceptions import ServerDisconnectedError +from bs4 import BeautifulSoup +from dateutil import parser as dateparser @dataclasses.dataclass(frozen=True) @@ -65,6 +63,18 @@ def get(self, package_name, until=None) -> Set[str]: return VersionResponse(valid_versions=valid_versions, newer_versions=new_versions) + async def load_api(self, pkg_set): + async with client_session() as session: + await asyncio.gather( + *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] + ) + + async def fetch(self, pkg, session): + """ + Override this method to fetch the pkg's version in the cache + """ + raise NotImplementedError + def client_session(): return ClientSession(raise_for_status=True, trust_env=True) @@ -74,12 +84,6 @@ class LaunchpadVersionAPI(VersionAPI): package_type = "deb" - async def load_api(self, pkg_set): - async with client_session() as session: - await asyncio.gather( - *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] - ) - async def fetch(self, pkg, session): url = ( "https://api.launchpad.net/1.0/ubuntu/+archive/" @@ -109,12 +113,6 @@ class PypiVersionAPI(VersionAPI): package_type = "pypi" - async def load_api(self, pkg_set): - async with client_session() as session: - await asyncio.gather( - *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] - ) - async def fetch(self, pkg, session): url = f"https://pypi.org/pypi/{pkg}/json" versions = set() @@ -148,12 +146,6 @@ class CratesVersionAPI(VersionAPI): package_type = "cargo" - async def load_api(self, pkg_set): - async with client_session() as session: - await asyncio.gather( - *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] - ) - async def fetch(self, pkg, session): url = f"https://crates.io/api/v1/crates/{pkg}" response = await session.request(method="GET", url=url) @@ -174,12 +166,6 @@ class RubyVersionAPI(VersionAPI): package_type = "gem" - async def load_api(self, pkg_set): - async with client_session() as session: - await asyncio.gather( - *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] - ) - async def fetch(self, pkg, session): url = f"https://rubygems.org/api/v1/versions/{pkg}.json" versions = set() @@ -203,12 +189,6 @@ class NpmVersionAPI(VersionAPI): package_type = "npm" - async def load_api(self, pkg_set): - async with client_session() as session: - await asyncio.gather( - *[self.fetch(pkg, session) for 
pkg in pkg_set if pkg not in self.cache] - ) - async def fetch(self, pkg, session): url = f"https://registry.npmjs.org/{pkg}" versions = set() @@ -267,12 +247,6 @@ class MavenVersionAPI(VersionAPI): package_type = "maven" - async def load_api(self, pkg_set): - async with client_session() as session: - await asyncio.gather( - *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] - ) - async def fetch(self, pkg, session) -> None: artifact_comps = pkg.split(":") endpoint = self.artifact_url(artifact_comps) @@ -323,12 +297,6 @@ class NugetVersionAPI(VersionAPI): package_type = "nuget" - async def load_api(self, pkg_set): - async with client_session() as session: - await asyncio.gather( - *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] - ) - async def fetch(self, pkg, session) -> None: endpoint = self.nuget_url(pkg) resp = await session.request(method="GET", url=endpoint) @@ -366,12 +334,6 @@ class ComposerVersionAPI(VersionAPI): package_type = "composer" - async def load_api(self, pkg_set): - async with client_session() as session: - await asyncio.gather( - *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] - ) - async def fetch(self, pkg, session) -> None: endpoint = self.composer_url(pkg) if endpoint: @@ -411,12 +373,6 @@ class GitHubTagsAPI(VersionAPI): package_type = "github" - async def load_api(self, pkg_set): - async with client_session() as session: - await asyncio.gather( - *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] - ) - async def fetch(self, owner_repo: str, session) -> None: """ owner_repo is a string of format "{repo_owner}/{repo_name}" @@ -455,12 +411,6 @@ async def fetch(self, owner_repo: str, session) -> None: class HexVersionAPI(VersionAPI): - async def load_api(self, pkg_set): - async with client_session() as session: - await asyncio.gather( - *[self.fetch(pkg, session) for pkg in pkg_set if pkg not in self.cache] - ) - async def fetch(self, pkg, session): url = f"https://hex.pm/api/packages/{pkg}" versions = set() From 80e3cf026c06574d66c49bcdfc33f874165cd134 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sat, 17 Jul 2021 10:06:39 +0530 Subject: [PATCH 21/27] Remove misleading frozen dataclass Signed-off-by: Shivam Sandbhor --- vulnerabilities/package_managers.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/vulnerabilities/package_managers.py b/vulnerabilities/package_managers.py index ea52106d2..e827f26b7 100644 --- a/vulnerabilities/package_managers.py +++ b/vulnerabilities/package_managers.py @@ -41,13 +41,12 @@ class Version: release_date: datetime = None -@dataclasses.dataclass(frozen=True) +@dataclasses.dataclass class VersionResponse: valid_versions: Set[str] = dataclasses.field(default_factory=set) newer_versions: Set[str] = dataclasses.field(default_factory=set) -@dataclasses.dataclass(frozen=True) class VersionAPI: def __init__(self, cache: Mapping[str, Set[str]] = None): self.cache = cache or {} From 9d359f28eaaaaa02a214c6bf27116e15c37eb559 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sat, 17 Jul 2021 10:32:55 +0530 Subject: [PATCH 22/27] Fix npm importer date manipulation Signed-off-by: Shivam Sandbhor --- vulnerabilities/importers/npm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/importers/npm.py b/vulnerabilities/importers/npm.py index e8bdf84ba..0d3112b3a 100644 --- a/vulnerabilities/importers/npm.py +++ b/vulnerabilities/importers/npm.py @@ -86,7 +86,7 @@ def process_file(self, file) -> 
List[Advisory]: package_name = record["module_name"].strip() publish_date = parse(record["updated_at"]) - publish_date.replace(tzinfo=pytz.UTC) + publish_date = publish_date.replace(tzinfo=pytz.UTC) all_versions = self.versions.get(package_name, until=publish_date).valid_versions aff_range = record.get("vulnerable_versions") From 047398e74c7bad90413712354e86acc33b22ddec Mon Sep 17 00:00:00 2001 From: Hritik Vijay Date: Mon, 19 Jul 2021 02:06:16 +0530 Subject: [PATCH 23/27] Fix recursion in GitHubTagsAPI A minor bug was introduced by 775aa1d7 which was crashing the nginx importer. Signed-off-by: Hritik Vijay --- vulnerabilities/package_managers.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/vulnerabilities/package_managers.py b/vulnerabilities/package_managers.py index e827f26b7..aeca55615 100644 --- a/vulnerabilities/package_managers.py +++ b/vulnerabilities/package_managers.py @@ -372,14 +372,14 @@ class GitHubTagsAPI(VersionAPI): package_type = "github" - async def fetch(self, owner_repo: str, session) -> None: + async def fetch(self, owner_repo: str, session, endpoint=None) -> None: """ owner_repo is a string of format "{repo_owner}/{repo_name}" Example value of owner_repo = "nexB/scancode-toolkit" """ self.cache[owner_repo] = set() - endpoint = f"https://github.com/{owner_repo}/tags" - + if not endpoint: + endpoint = f"https://github.com/{owner_repo}/tags" resp = await session.get(endpoint) resp = await resp.read() @@ -406,7 +406,7 @@ async def fetch(self, owner_repo: str, session) -> None: if url: # FIXME: this could be asynced to improve performance - await self.fetch(owner_repo, url) + await self.fetch(owner_repo, session, url) class HexVersionAPI(VersionAPI): From 9053ba9586d6db92837d2df4cf81023c06aff5fa Mon Sep 17 00:00:00 2001 From: Hritik Vijay Date: Mon, 19 Jul 2021 02:10:23 +0530 Subject: [PATCH 24/27] Update importers to handle VersionResponse Recent time travel heuristics need published date of versions, thus Version dataclass was created. Some of the importers got bugged by this new system and were crashing. This fix is a part of # 467 Importers bugged: - nginx - debian_oval - ubuntu Signed-off-by: Hritik Vijay --- vulnerabilities/data_source.py | 4 ++-- vulnerabilities/importers/nginx.py | 11 ++++++++--- vulnerabilities/package_managers.py | 9 +++++++-- vulnerabilities/tests/test_debian_oval.py | 5 ++++- vulnerabilities/tests/test_ubuntu.py | 3 ++- 5 files changed, 23 insertions(+), 9 deletions(-) diff --git a/vulnerabilities/data_source.py b/vulnerabilities/data_source.py index 349563520..ff48a374a 100644 --- a/vulnerabilities/data_source.py +++ b/vulnerabilities/data_source.py @@ -483,7 +483,7 @@ def updated_advisories(self) -> List[Advisory]: except Exception: logger.error( f"Failed to get updated_advisories: {oval_file!r} " - "with {metadata!r}:\n" + traceback.format_exc() + f"with {metadata!r}:\n" + traceback.format_exc() ) continue @@ -538,7 +538,7 @@ def get_data_from_xml_doc(self, xml_doc: ET.ElementTree, pkg_metadata={}) -> Lis affected_version_range = VersionSpecifier.from_scheme_version_spec_string( version_scheme, affected_version_range ) - all_versions = self.pkg_manager_api.get(package_name) + all_versions = self.pkg_manager_api.get(package_name).valid_versions # FIXME: what is this 50 DB limit? 
that's too small for versions # FIXME: we should not drop data this way diff --git a/vulnerabilities/importers/nginx.py b/vulnerabilities/importers/nginx.py index 77913398a..c5372647d 100644 --- a/vulnerabilities/importers/nginx.py +++ b/vulnerabilities/importers/nginx.py @@ -34,6 +34,7 @@ from vulnerabilities.data_source import DataSourceConfiguration from vulnerabilities.data_source import Reference from vulnerabilities.package_managers import GitHubTagsAPI +from vulnerabilities.package_managers import Version from vulnerabilities.helpers import nearest_patched_package @@ -53,10 +54,14 @@ def set_api(self): # For some reason nginx tags it's releases are in the form of `release-1.2.3` # Chop off the `release-` part here. - for index, version in enumerate(self.version_api.cache["nginx/nginx"].valid_versions): - self.version_api.cache["nginx/nginx"].valid_versions[index] = version.replace( - "release-", "" + normalized_versions = set() + while self.version_api.cache["nginx/nginx"]: + version = self.version_api.cache["nginx/nginx"].pop() + normalized_version = Version( + version.value.replace("release-", ""), version.release_date ) + normalized_versions.add(normalized_version) + self.version_api.cache["nginx/nginx"] = normalized_versions def updated_advisories(self): advisories = [] diff --git a/vulnerabilities/package_managers.py b/vulnerabilities/package_managers.py index aeca55615..ca875a832 100644 --- a/vulnerabilities/package_managers.py +++ b/vulnerabilities/package_managers.py @@ -98,7 +98,12 @@ async def fetch(self, pkg, session): self.cache[pkg] = {} break for release in resp_json["entries"]: - all_versions.add(release["source_package_version"].replace("0:", "")) + all_versions.add( + Version( + value=release["source_package_version"].replace("0:", ""), + release_date=release["date_published"], + ) + ) if resp_json.get("next_collection_link"): url = resp_json["next_collection_link"] else: @@ -233,7 +238,7 @@ async def fetch(self, pkg, session, retry_count=5): self.cache[pkg] = {} return for release in resp_json["versions"]: - all_versions.add(release["version"].replace("0:", "")) + all_versions.add(Version(value=release["version"].replace("0:", ""))) self.cache[pkg] = all_versions # TODO : Handle ServerDisconnectedError by using some sort of diff --git a/vulnerabilities/tests/test_debian_oval.py b/vulnerabilities/tests/test_debian_oval.py index e7b298834..ffce735f5 100644 --- a/vulnerabilities/tests/test_debian_oval.py +++ b/vulnerabilities/tests/test_debian_oval.py @@ -6,6 +6,7 @@ from packageurl import PackageURL from vulnerabilities.importers.debian_oval import DebianOvalDataSource +from vulnerabilities.package_managers import VersionResponse from vulnerabilities.data_source import Advisory from vulnerabilities.helpers import AffectedPackage @@ -30,7 +31,9 @@ def setUpClass(cls): @patch( "vulnerabilities.importers.debian_oval.DebianVersionAPI.get", - return_value={"1.11.1+dfsg-5+deb7u1", "0.11.1+dfsg-5+deb7u1", "2.3.9"}, + return_value=VersionResponse( + valid_versions={"1.11.1+dfsg-5+deb7u1", "0.11.1+dfsg-5+deb7u1", "2.3.9"} + ), ) @patch("vulnerabilities.importers.debian_oval.DebianVersionAPI.load_api", new=mock) def test_get_data_from_xml_doc(self, mock_write): diff --git a/vulnerabilities/tests/test_ubuntu.py b/vulnerabilities/tests/test_ubuntu.py index 16d1f217b..23b10750f 100644 --- a/vulnerabilities/tests/test_ubuntu.py +++ b/vulnerabilities/tests/test_ubuntu.py @@ -10,6 +10,7 @@ from vulnerabilities.oval_parser import OvalParser from vulnerabilities.importers.ubuntu 
import UbuntuDataSource +from vulnerabilities.package_managers import VersionResponse from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import Reference from vulnerabilities.helpers import AffectedPackage @@ -182,7 +183,7 @@ def setUpClass(cls): @patch( "vulnerabilities.importers.ubuntu.LaunchpadVersionAPI.get", - return_value={"0.3.0", "0.2.0", "2.14-2"}, + return_value=VersionResponse(valid_versions={"0.3.0", "0.2.0", "2.14-2"}), ) @patch("vulnerabilities.importers.ubuntu.LaunchpadVersionAPI.load_api", new=mock) def test_get_data_from_xml_doc(self, mock_write): From 1f3fdef92459f64c1e688ce2f05946fcd76afbf5 Mon Sep 17 00:00:00 2001 From: Pushpit Date: Sun, 25 Jul 2021 21:20:26 +0530 Subject: [PATCH 25/27] Update importer_yielder.py Signed-off-by: Pushpit --- vulnerabilities/importer_yielder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/importer_yielder.py b/vulnerabilities/importer_yielder.py index 7ca191607..cc64714a0 100644 --- a/vulnerabilities/importer_yielder.py +++ b/vulnerabilities/importer_yielder.py @@ -236,7 +236,7 @@ }, { "name": "xen", - "license": "", + "license": "gplv2", "last_run": None, "data_source": "XenDataSource", "data_source_cfg": { From 5774649b8bc6a5de4f04a78701dec6b6472b90ed Mon Sep 17 00:00:00 2001 From: Pushpit Date: Sun, 25 Jul 2021 21:22:47 +0530 Subject: [PATCH 26/27] Added xen license info Signed-off-by: Pushpit --- vulnerabilities/fixtures/openssl.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/fixtures/openssl.json b/vulnerabilities/fixtures/openssl.json index b53d2ad8a..1b790b791 100644 --- a/vulnerabilities/fixtures/openssl.json +++ b/vulnerabilities/fixtures/openssl.json @@ -40770,7 +40770,7 @@ "pk": 17, "fields": { "name": "xen", - "license": "", + "license": "gplv2", "last_run": null, "data_source": "XenDataSource", "data_source_cfg": { From 6ef62fabb720b65903eab8bda27eefc5cb2a7537 Mon Sep 17 00:00:00 2001 From: Hritik Vijay Date: Wed, 9 Feb 2022 01:15:25 +0530 Subject: [PATCH 27/27] Rename DataSource to Improver, blackify, ignore tests This is required to merge the PR to main for a clean slate for new contributions for importer-improver migration Signed-off-by: Hritik Vijay --- pytest.ini | 1 + vulnerabilities/importers/xen.py | 22 ++++++++++------------ 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/pytest.ini b/pytest.ini index 5336fa383..51d2e2aec 100644 --- a/pytest.ini +++ b/pytest.ini @@ -32,5 +32,6 @@ addopts = --ignore=vulnerabilities/importers/ubuntu_usn.py --ignore=vulnerabilities/importers/mozilla.py --ignore=vulnerabilities/importers/mattermost.py + --ignore=vulnerabilities/importers/xen.py --ignore=vulnerabilities/management/commands/create_cpe_to_purl_map.py --ignore=vulnerabilities/lib_oval.py diff --git a/vulnerabilities/importers/xen.py b/vulnerabilities/importers/xen.py index 2d32b6df3..5f44c1880 100644 --- a/vulnerabilities/importers/xen.py +++ b/vulnerabilities/importers/xen.py @@ -27,20 +27,14 @@ import requests from packageurl import PackageURL -from vulnerabilities.data_source import DataSource -from vulnerabilities.data_source import Advisory -from vulnerabilities.data_source import Reference +from vulnerabilities.importer import Importer +from vulnerabilities.importer import Advisory +from vulnerabilities.importer import Reference from vulnerabilities.helpers import create_etag from vulnerabilities.helpers import is_cve -@dataclasses.dataclass -class XenDBConfiguration: - etags: list - db_url: str - 
- -class XenDataSource(DataSource): +class XenImporter(Importer): CONFIG_CLASS = XenDBConfiguration def updated_advisories(self): @@ -83,8 +77,12 @@ def to_advisories(xen_db): def get_xen_references(xsa_id): - return Reference(reference_id="XSA-" + xsa_id, url="https://xenbits.xen.org/xsa/advisory-{}.html".format(xsa_id)) + return Reference( + reference_id="XSA-" + xsa_id, + url="https://xenbits.xen.org/xsa/advisory-{}.html".format(xsa_id), + ) + def fetch(url): response = requests.get(url).content - return json.loads(response) \ No newline at end of file + return json.loads(response)
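
For readers following the XSA importer changes in this series, below is a minimal standalone sketch of the data flow that XenImporter.to_advisories() implements: fetch the XSA JSON feed and map each entry to a (CVE id, title, XSA reference URL) triple. The field names ("xsas", "xsa", "title", "cve") and the feed URL are taken from the importer code in the patches above; everything else about the feed layout is an assumption, and CVE_REGEX is a rough stand-in for vulnerabilities.helpers.is_cve, so treat this as an illustration rather than a drop-in replacement for the importer.

    # Illustrative sketch only: mirrors the shape of XenImporter.to_advisories().
    # Field names ("xsas", "xsa", "title", "cve") come from the importer above;
    # the exact structure of xsa.json is otherwise assumed, not verified here.
    import json
    import re

    import requests

    XSA_DB_URL = "https://xenbits.xen.org/xsa/xsa.json"
    # Rough stand-in for vulnerabilities.helpers.is_cve
    CVE_REGEX = re.compile(r"CVE-\d{4,}-\d{4,}")


    def fetch_xsa_db(url=XSA_DB_URL):
        """Fetch and decode the Xen Security Advisory JSON feed."""
        return json.loads(requests.get(url).content)


    def iter_advisory_tuples(xen_db):
        """Yield (cve_id, title, reference_url) for every XSA entry."""
        for xsa in xen_db[0]["xsas"]:
            xsa_id = xsa["xsa"]
            # The importer defaults the title to [""]; a plain string default
            # is used here for simplicity.
            title = xsa.get("title", "")
            url = "https://xenbits.xen.org/xsa/advisory-{}.html".format(xsa_id)
            for cve in xsa.get("cve", [""]):
                if not CVE_REGEX.fullmatch(cve):
                    cve = ""
                yield cve, title, url


    if __name__ == "__main__":
        for cve, title, url in iter_advisory_tuples(fetch_xsa_db()):
            print(cve or "<no CVE>", "-", title, "-", url)

Yielding plain tuples keeps the sketch independent of the Advisory and Reference classes, which are renamed across this series (data_source vs. importer modules); the real importer wraps the same values in those dataclasses before batching.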