From 4fc018f12d41becdd5a750cd0ce420334a983e03 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Fri, 27 Nov 2020 21:10:43 +0530 Subject: [PATCH 01/21] Add db models to store severity scores of vulnerabilities A VulnerabilitySeverity model is added in models.py to store severity of vulnerability. Signed-off-by: Shivam Sandbhor --- vulnerabilities/data_source.py | 8 +++++- .../0002_vulnerabilityseverityscore.py | 24 +++++++++++++++++ .../migrations/0003_auto_20201127_1423.py | 27 +++++++++++++++++++ .../migrations/0004_auto_20201129_1024.py | 17 ++++++++++++ vulnerabilities/models.py | 18 ++++++++++++- 5 files changed, 92 insertions(+), 2 deletions(-) create mode 100644 vulnerabilities/migrations/0002_vulnerabilityseverityscore.py create mode 100644 vulnerabilities/migrations/0003_auto_20201127_1423.py create mode 100644 vulnerabilities/migrations/0004_auto_20201129_1024.py diff --git a/vulnerabilities/data_source.py b/vulnerabilities/data_source.py index d72b18782..705ddf5a2 100644 --- a/vulnerabilities/data_source.py +++ b/vulnerabilities/data_source.py @@ -1,4 +1,4 @@ -# Copyright (c) 2017 nexB Inc. and others. All rights reserved. +# Copyright (c) nexB Inc. and others. All rights reserved. # http://nexb.com and https://github.com/nexB/vulnerablecode/ # The VulnerableCode software is licensed under the Apache License version 2.0. # Data generated with VulnerableCode require an acknowledgment. 
@@ -46,9 +46,15 @@ logger = logging.getLogger(__name__) +@dataclasses.dataclass +class VulnerabilityScore: + severity_type: str + severity_value: str + @dataclasses.dataclass class Reference: + scores: List[VulnerabilityScore] url: str = '' reference_id: str = '' diff --git a/vulnerabilities/migrations/0002_vulnerabilityseverityscore.py b/vulnerabilities/migrations/0002_vulnerabilityseverityscore.py new file mode 100644 index 000000000..30f3e7911 --- /dev/null +++ b/vulnerabilities/migrations/0002_vulnerabilityseverityscore.py @@ -0,0 +1,24 @@ +# Generated by Django 3.0.7 on 2020-11-27 13:43 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('vulnerabilities', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='VulnerabilitySeverityScore', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('severity_type', models.CharField(help_text='Example: CVSS v2, Redhat Impact Score', max_length=50)), + ('severity_score', models.CharField(help_text='Example: 9.0, Important, High', max_length=50)), + ('reference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.VulnerabilityReference')), + ('vulnerability', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.Vulnerability')), + ], + ), + ] diff --git a/vulnerabilities/migrations/0003_auto_20201127_1423.py b/vulnerabilities/migrations/0003_auto_20201127_1423.py new file mode 100644 index 000000000..362e8c71c --- /dev/null +++ b/vulnerabilities/migrations/0003_auto_20201127_1423.py @@ -0,0 +1,27 @@ +# Generated by Django 3.0.7 on 2020-11-27 14:23 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('vulnerabilities', '0002_vulnerabilityseverityscore'), + ] + + operations = [ + 
migrations.CreateModel( + name='VulnerabilitySeverity', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('severity_type', models.CharField(help_text='Example: CVSS v2, Redhat Impact Score', max_length=50)), + ('severity_value', models.CharField(help_text='Example: 9.0, Important, High', max_length=50)), + ('reference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.VulnerabilityReference')), + ('vulnerability', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.Vulnerability')), + ], + ), + migrations.DeleteModel( + name='VulnerabilitySeverityScore', + ), + ] diff --git a/vulnerabilities/migrations/0004_auto_20201129_1024.py b/vulnerabilities/migrations/0004_auto_20201129_1024.py new file mode 100644 index 000000000..f239a4bcf --- /dev/null +++ b/vulnerabilities/migrations/0004_auto_20201129_1024.py @@ -0,0 +1,17 @@ +# Generated by Django 3.0.7 on 2020-11-29 10:24 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('vulnerabilities', '0003_auto_20201127_1423'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='vulnerabilityseverity', + unique_together={('vulnerability', 'reference', 'severity_type')}, + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index e438cb516..f361526d1 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -163,7 +163,9 @@ class Importer(models.Model): name = models.CharField(max_length=100, unique=True, help_text="Name of the importer") license = models.CharField( - max_length=100, blank=True, help_text="License of the vulnerability data", + max_length=100, + blank=True, + help_text="License of the vulnerability data", ) last_run = models.DateTimeField(null=True, help_text="UTC Timestamp of the last run") @@ -199,3 +201,17 @@ def make_data_source(self, batch_size: int, cutoff_date: datetime = None) 
-> Dat def __str__(self): return self.name + + +class VulnerabilitySeverity(models.Model): + + severity_type = models.CharField( + max_length=50, help_text="Example: CVSS v2, Redhat Impact Score" + ) + severity_value = models.CharField(max_length=50, help_text="Example: 9.0, Important, High") + reference = models.ForeignKey(VulnerabilityReference, on_delete=models.CASCADE) + vulnerability = models.ForeignKey(Vulnerability, on_delete=models.CASCADE) + + class Meta: + # TBD + unique_together = ("vulnerability", "reference", "severity_type") From 38b456a91c21f49a43ebdba80a1d351b02124b5e Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sun, 29 Nov 2020 15:57:16 +0530 Subject: [PATCH 02/21] Add dataclasses and enable to import severity scores A dataclass `VulnerabilitySeverity` is added to enable to transport of severity scores. The logic in importer_runner.py is modified to store, update severity scores and link it to reference and vulnerability Signed-off-by: Shivam Sandbhor --- vulnerabilities/data_source.py | 165 ++++++++++++++++--------------- vulnerabilities/import_runner.py | 32 +++--- 2 files changed, 97 insertions(+), 100 deletions(-) diff --git a/vulnerabilities/data_source.py b/vulnerabilities/data_source.py index 705ddf5a2..4455d9289 100644 --- a/vulnerabilities/data_source.py +++ b/vulnerabilities/data_source.py @@ -47,16 +47,17 @@ @dataclasses.dataclass -class VulnerabilityScore: +class VulnerabilitySeverity: severity_type: str severity_value: str + @dataclasses.dataclass class Reference: - scores: List[VulnerabilityScore] - url: str = '' - reference_id: str = '' + scores: List[VulnerabilitySeverity] = dataclasses.field(default_factory=list) + url: str = "" + reference_id: str = "" def __post_init__(self): if not any([self.url, self.reference_id]): @@ -74,6 +75,7 @@ class Advisory: data class; PackageURL objects and strings. As a convention, the former is referred to in variable names, etc. as "package_urls" and the latter as "purls". 
""" + summary: str impacted_package_urls: Iterable[PackageURL] resolved_package_urls: Iterable[PackageURL] = dataclasses.field(default_factory=list) @@ -81,10 +83,10 @@ class Advisory: cve_id: Optional[str] = None def __hash__(self): - s = '{}{}{}{}'.format( + s = "{}{}{}{}".format( self.summary, - ''.join(sorted([str(p) for p in self.impacted_package_urls])), - ''.join(sorted([str(p) for p in self.resolved_package_urls])), + "".join(sorted([str(p) for p in self.impacted_package_urls])), + "".join(sorted([str(p) for p in self.resolved_package_urls])), self.cve_id, ) return hash(s) @@ -110,11 +112,11 @@ class DataSource(ContextManager): CONFIG_CLASS = DataSourceConfiguration def __init__( - self, - batch_size: int, - last_run_date: Optional[datetime] = None, - cutoff_date: Optional[datetime] = None, - config: Optional[Mapping[str, Any]] = None, + self, + batch_size: int, + last_run_date: Optional[datetime] = None, + cutoff_date: Optional[datetime] = None, + config: Optional[Mapping[str, Any]] = None, ): """ Create a DataSource instance. @@ -132,8 +134,8 @@ def __init__( # These really should be declared in DataSourceConfiguration above but that would # prevent DataSource subclasses from declaring mandatory parameters (i.e. positional # arguments) - setattr(self.config, 'last_run_date', last_run_date) - setattr(self.config, 'cutoff_date', cutoff_date) + setattr(self.config, "last_run_date", last_run_date) + setattr(self.config, "cutoff_date", cutoff_date) except Exception as e: raise InvalidConfigurationError(str(e)) @@ -151,7 +153,7 @@ def cutoff_timestamp(self) -> int: :return: An integer Unix timestamp of the last time this data source was queried or the cutoff date passed in the constructor, whichever is more recent. 
""" - if not hasattr(self, '_cutoff_timestamp'): + if not hasattr(self, "_cutoff_timestamp"): last_run = 0 if self.config.last_run_date is not None: last_run = int(self.config.last_run_date.timestamp()) @@ -160,7 +162,7 @@ def cutoff_timestamp(self) -> int: if self.config.cutoff_date is not None: cutoff = int(self.config.cutoff_date.timestamp()) - setattr(self, '_cutoff_timestamp', max(last_run, cutoff)) + setattr(self, "_cutoff_timestamp", max(last_run, cutoff)) return self._cutoff_timestamp @@ -196,7 +198,7 @@ def error(self, msg: str) -> None: """ Helper method for raising InvalidConfigurationError with the class name in the message. """ - raise InvalidConfigurationError(f'{type(self).__name__}: {msg}') + raise InvalidConfigurationError(f"{type(self).__name__}: {msg}") def batch_advisories(self, advisories: List[Advisory]) -> Set[Advisory]: """ @@ -205,7 +207,7 @@ def batch_advisories(self, advisories: List[Advisory]) -> Set[Advisory]: advisories = advisories[:] # copy the list as we are mutating it in the loop below while advisories: - b, advisories = advisories[:self.batch_size], advisories[self.batch_size:] + b, advisories = advisories[: self.batch_size], advisories[self.batch_size:] yield set(b) @@ -224,17 +226,24 @@ class GitDataSource(DataSource): def validate_configuration(self) -> None: if not self.config.create_working_directory and self.config.working_directory is None: - self.error('"create_working_directory" is not set but "working_directory" is set to ' - 'the default, which calls tempfile.mkdtemp()') - - if not self.config.create_working_directory and \ - not os.path.exists(self.config.working_directory): - self.error('"working_directory" does not contain an existing directory and' - '"create_working_directory" is not set') + self.error( + '"create_working_directory" is not set but "working_directory" is set to ' + "the default, which calls tempfile.mkdtemp()" + ) + + if not self.config.create_working_directory and not os.path.exists( + 
self.config.working_directory + ): + self.error( + '"working_directory" does not contain an existing directory and' + '"create_working_directory" is not set' + ) if not self.config.remove_working_directory and self.config.working_directory is None: - self.error('"remove_working_directory" is not set and "working_directory" is set to ' - 'the default, which calls tempfile.mkdtemp()') + self.error( + '"remove_working_directory" is not set and "working_directory" is set to ' + "the default, which calls tempfile.mkdtemp()" + ) def __enter__(self): self._ensure_working_directory() @@ -245,10 +254,10 @@ def __exit__(self, exc_type, exc_val, exc_tb): shutil.rmtree(self.config.working_directory) def file_changes( - self, - subdir: str = None, - recursive: bool = False, - file_ext: Optional[str] = None, + self, + subdir: str = None, + recursive: bool = False, + file_ext: Optional[str] = None, ) -> Tuple[Set[str], Set[str]]: """ Returns all added and modified files since last_run_date or cutoff_date (whichever is more @@ -269,30 +278,28 @@ def file_changes( if self.config.last_run_date is None and self.config.cutoff_date is None: if recursive: - glob = '**/*' + glob = "**/*" else: - glob = '*' + glob = "*" if file_ext: - glob = f'{glob}.{file_ext}' + glob = f"{glob}.{file_ext}" return {str(p) for p in path.glob(glob) if p.is_file()}, set() - return self._collect_file_changes( - subdir=subdir, recursive=recursive, file_ext=file_ext) + return self._collect_file_changes(subdir=subdir, recursive=recursive, file_ext=file_ext) def _collect_file_changes( - self, - subdir: Optional[str], - recursive: bool, - file_ext: Optional[str], + self, + subdir: Optional[str], + recursive: bool, + file_ext: Optional[str], ) -> Tuple[Set[str], Set[str]]: previous_commit = None added_files, updated_files = set(), set() - for commit in self._repo.walk( - self._repo.head.target, pygit2.GIT_SORT_TIME): + for commit in self._repo.walk(self._repo.head.target, pygit2.GIT_SORT_TIME): commit_time = 
commit.commit_time + commit.commit_time_offset # convert to UTC if commit_time < self.cutoff_timestamp: @@ -303,12 +310,10 @@ def _collect_file_changes( continue for d in commit.tree.diff_to_tree(previous_commit.tree).deltas: - if not _include_file( - d.new_file.path, subdir, recursive, file_ext) or d.is_binary: + if not _include_file(d.new_file.path, subdir, recursive, file_ext) or d.is_binary: continue - abspath = os.path.join( - self.config.working_directory, d.new_file.path) + abspath = os.path.join(self.config.working_directory, d.new_file.path) # TODO # Just filtering on the two status values for "added" and "modified" is too # simplistic. This does not cover file renames, copies & @@ -330,8 +335,9 @@ def _collect_file_changes( def _ensure_working_directory(self) -> None: if self.config.working_directory is None: self.config.working_directory = tempfile.mkdtemp() - elif self.config.create_working_directory and \ - not os.path.exists(self.config.working_directory): + elif self.config.create_working_directory and not os.path.exists( + self.config.working_directory + ): os.mkdir(self.config.working_directory) def _ensure_repository(self) -> None: @@ -355,12 +361,10 @@ def _ensure_repository(self) -> None: def _clone_repository(self) -> None: kwargs = {} if self.config.branch: - kwargs['checkout_branch'] = self.config.branch + kwargs["checkout_branch"] = self.config.branch self._repo = pygit2.clone_repository( - self.config.repository_url, - self.config.working_directory, - **kwargs + self.config.repository_url, self.config.working_directory, **kwargs ) def _find_or_add_remote(self): @@ -372,7 +376,8 @@ def _find_or_add_remote(self): if remote is None: remote = self._repo.remotes.create( - 'added_by_vulnerablecode', self.config.repository_url) + "added_by_vulnerablecode", self.config.repository_url + ) return remote @@ -381,30 +386,30 @@ def _update_from_remote(self, remote, branch) -> None: if progress.received_objects == 0: return - remote_branch = 
self._repo.branches[f'{remote.name}/{self.config.branch}'] + remote_branch = self._repo.branches[f"{remote.name}/{self.config.branch}"] branch.set_target(remote_branch.target) self._repo.checkout(branch, strategy=pygit2.GIT_CHECKOUT_FORCE) def _include_file( - path: str, - subdir: Optional[str] = None, - recursive: bool = False, - file_ext: Optional[str] = None, + path: str, + subdir: Optional[str] = None, + recursive: bool = False, + file_ext: Optional[str] = None, ) -> bool: match = True if subdir: if not subdir.endswith(os.path.sep): - subdir = f'{subdir}{os.path.sep}' + subdir = f"{subdir}{os.path.sep}" match = match and path.startswith(subdir) if not recursive: - match = match and (os.path.sep not in path[len(subdir or ''):]) + match = match and (os.path.sep not in path[len(subdir or ""):]) if file_ext: - match = match and path.endswith(f'.{file_ext}') + match = match and path.endswith(f".{file_ext}") return match @@ -414,6 +419,7 @@ class OvalDataSource(DataSource): All data sources which collect data from OVAL files must inherit from this `OvalDataSource` class. Subclasses must implement the methods `_fetch` and `set_api`. 
""" + @staticmethod def create_purl(pkg_name: str, pkg_version: str, pkg_data: Mapping) -> PackageURL: """ @@ -431,8 +437,8 @@ def _collect_pkgs(parsed_oval_data: Mapping) -> Set: """ all_pkgs = set() for definition_data in parsed_oval_data: - for test_data in definition_data['test_data']: - for package in test_data['package_list']: + for test_data in definition_data["test_data"]: + for package in test_data["package_list"]: all_pkgs.add(package) return all_pkgs @@ -494,42 +500,37 @@ def get_data_from_xml_doc(self, xml_doc: ET.ElementTree, pkg_metadata={}) -> Lis # These fields are definition level, i.e common for all # elements connected/linked to an OvalDefinition - vuln_id = definition_data['vuln_id'] - description = definition_data['description'] + vuln_id = definition_data["vuln_id"] + description = definition_data["description"] affected_purls = set() safe_purls = set() - references = [Reference(url=url) - for url in definition_data['reference_urls']] + references = [Reference(url=url) for url in definition_data["reference_urls"]] - for test_data in definition_data['test_data']: - for package in test_data['package_list']: + for test_data in definition_data["test_data"]: + for package in test_data["package_list"]: pkg_name = package if package and len(pkg_name) >= 50: continue - aff_ver_range = test_data['version_ranges'] + aff_ver_range = test_data["version_ranges"] all_versions = self.pkg_manager_api.get(package) # This filter is for filtering out long versions. # 50 is limit because that's what db permits atm. 
- all_versions = set( - filter( - lambda x: len(x) < 50, - all_versions)) + all_versions = set(filter(lambda x: len(x) < 50, all_versions)) if not all_versions: continue - affected_versions = set( - filter( - lambda x: x in aff_ver_range, - all_versions)) + affected_versions = set(filter(lambda x: x in aff_ver_range, all_versions)) safe_versions = all_versions - affected_versions for version in affected_versions: pkg_url = self.create_purl( - pkg_name=pkg_name, pkg_version=version, pkg_data=pkg_metadata) + pkg_name=pkg_name, pkg_version=version, pkg_data=pkg_metadata + ) affected_purls.add(pkg_url) for version in safe_versions: pkg_url = self.create_purl( - pkg_name=pkg_name, pkg_version=version, pkg_data=pkg_metadata) + pkg_name=pkg_name, pkg_version=version, pkg_data=pkg_metadata + ) safe_purls.add(pkg_url) all_adv.append( @@ -538,5 +539,7 @@ def get_data_from_xml_doc(self, xml_doc: ET.ElementTree, pkg_metadata={}) -> Lis impacted_package_urls=affected_purls, resolved_package_urls=safe_purls, cve_id=vuln_id, - vuln_references=references)) + vuln_references=references, + ) + ) return all_adv diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index e3355fd55..5950dbda6 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -134,7 +134,7 @@ def get_vuln_pkg_refs(vulnerability, package): def process_advisories(data_source: DataSource) -> None: - bulk_create_vuln_refs = set() + # bulk_create_vuln_refs = set() bulk_create_vuln_pkg_refs = set() # Treat updated_advisories and added_advisories as same. 
Eventually # we want to refactor all data sources to provide advisories via a @@ -145,18 +145,17 @@ def process_advisories(data_source: DataSource) -> None: try: vuln, vuln_created = _get_or_create_vulnerability(advisory) for vuln_ref in advisory.vuln_references: - ref = VulnerabilityReferenceInserter( - vulnerability=vuln, - url=vuln_ref.url, - reference_id=vuln_ref.reference_id, + ref, _ = models.VulnerabilityReference.objects.get_or_create( + vulnerability=vuln, reference_id=vuln_ref.reference_id, url=vuln_ref.url ) - if vuln_created or not vuln_ref_exists( - vuln, vuln_ref.url, vuln_ref.reference_id - ): - # A vulnerability reference can't exist if the - # vulnerability is just created so insert it - bulk_create_vuln_refs.add(ref) + for score in vuln_ref.scores: + models.VulnerabilitySeverity.objects.update_or_create( + vulnerability=vuln, + reference=ref, + severity_type=score.severity_type, + defaults={"severity_value": score.severity_value}, + ) for purl in chain(advisory.impacted_package_urls, advisory.resolved_package_urls): pkg, pkg_created = _get_or_create_package(purl) @@ -184,14 +183,9 @@ def process_advisories(data_source: DataSource) -> None: except Exception: # TODO: store error but continue logger.error( - f"Failed to process advisory: {advisory!r}:\n" - + traceback.format_exc() + f"Failed to process advisory: {advisory!r}:\n" + traceback.format_exc() ) - models.VulnerabilityReference.objects.bulk_create( - [i.to_model_object() for i in bulk_create_vuln_refs] - ) - # find_conflicting_relations handles in-memory conflicts conflicts = find_conflicting_relations(bulk_create_vuln_pkg_refs) @@ -267,8 +261,8 @@ def _get_or_create_vulnerability( except Exception: logger.error( - f"Failed to _get_or_create_vulnerability: {query_kwargs!r}:\n" - + traceback.format_exc()) + f"Failed to _get_or_create_vulnerability: {query_kwargs!r}:\n" + traceback.format_exc() + ) raise From 221bc1237ca74cd293172c6e9cc7d9d9656ff788 Mon Sep 17 00:00:00 2001 From: Shivam 
Sandbhor Date: Sun, 29 Nov 2020 16:00:12 +0530 Subject: [PATCH 03/21] Store severity scores given by NVD. Signed-off-by: Shivam Sandbhor --- vulnerabilities/importers/nvd.py | 31 +++++++++++++++++++++++++++++++ vulnerabilities/tests/test_nvd.py | 16 +++++++++++++++- 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/vulnerabilities/importers/nvd.py b/vulnerabilities/importers/nvd.py index a4c0370bf..ac3b12d0b 100644 --- a/vulnerabilities/importers/nvd.py +++ b/vulnerabilities/importers/nvd.py @@ -33,6 +33,7 @@ from vulnerabilities.data_source import DataSourceConfiguration from vulnerabilities.data_source import Reference from vulnerabilities.helpers import create_etag +from vulnerabilities.data_source import VulnerabilitySeverity @dataclasses.dataclass @@ -77,6 +78,14 @@ def to_advisories(self, nvd_data): cve_id = cve_item["cve"]["CVE_data_meta"]["ID"] ref_urls = self.extract_reference_urls(cve_item) references = [Reference(url=url) for url in ref_urls] + severity_scores = self.extract_severity_scores(cve_item) + references.append( + Reference( + url=f"https://nvd.nist.gov/vuln/detail/{cve_id}", + reference_id=cve_id, + scores=severity_scores, + ) + ) summary = self.extract_summary(cve_item) yield Advisory( cve_id=cve_id, summary=summary, vuln_references=references, impacted_package_urls=[] @@ -90,6 +99,28 @@ def extract_summary(cve_item): summaries = [desc["value"] for desc in cve_item["cve"]["description"]["description_data"]] return max(summaries, key=len) + @staticmethod + def extract_severity_scores(cve_item): + severity_scores = [] + + if cve_item["impact"].get("baseMetricV3"): + severity_scores.append( + VulnerabilitySeverity( + severity_type="cvssV3", + severity_value=str(cve_item["impact"]["baseMetricV3"]["cvssV3"]["baseScore"]), + ) + ) + + if cve_item["impact"].get("baseMetricV2"): + severity_scores.append( + VulnerabilitySeverity( + severity_type="cvssV2", + severity_value=str(cve_item["impact"]["baseMetricV2"]["cvssV2"]["baseScore"]), + ) + 
) + + return severity_scores + def extract_reference_urls(self, cve_item): urls = set() for reference in cve_item["cve"]["references"]["reference_data"]: diff --git a/vulnerabilities/tests/test_nvd.py b/vulnerabilities/tests/test_nvd.py index eacdc1b70..24cbe5873 100644 --- a/vulnerabilities/tests/test_nvd.py +++ b/vulnerabilities/tests/test_nvd.py @@ -28,6 +28,7 @@ from vulnerabilities.importers import NVDDataSource from vulnerabilities.data_source import Reference from vulnerabilities.data_source import Advisory +from vulnerabilities.data_source import VulnerabilitySeverity BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data/nvd/nvd_test.json") @@ -139,9 +140,18 @@ def test_to_advisories(self): [ Reference( url="http://code.google.com/p/gperftools/source/browse/tags/perftools-0.4/ChangeLog", # nopep8 + scores=[], ), Reference( url="http://kqueue.org/blog/2012/03/05/memory-allocator-security-revisited/", # nopep8 + scores=[], + ), + Reference( + url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", # nopep8 + scores=[ + VulnerabilitySeverity(severity_type="cvssV2", severity_value="5.0") + ], + reference_id="CVE-2005-4895", ), ], key=lambda x: x.url, @@ -154,6 +164,10 @@ def test_to_advisories(self): found_advisories = list(self.data_src.to_advisories(self.nvd_data)) # Only 1 advisory because other advisory is hardware related assert len(found_advisories) == 1 - found_advisories[0].vuln_references = sorted(found_advisories[0].vuln_references, key=lambda x: x.url) # nopep8 + found_advisories[0].vuln_references = sorted( + found_advisories[0].vuln_references, key=lambda x: x.url + ) # nopep8 + # for id,ref in found_advisories[0].vuln_references : + # ref[id].scores.sort(key = lambda x : x.severity_value) assert expected_advisories == found_advisories From 52724a55433e9fdc63fd81b4f4e481cb28cc0f4f Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sun, 29 Nov 2020 16:00:58 +0530 Subject: [PATCH 04/21] Store severity 
scores given by RedHat Signed-off-by: Shivam Sandbhor --- vulnerabilities/import_runner.py | 1 - vulnerabilities/importers/redhat.py | 52 ++++++++++--- vulnerabilities/tests/test_redhat_importer.py | 77 ++++++++++++++++++- 3 files changed, 116 insertions(+), 14 deletions(-) diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index 5950dbda6..c2b4ac13c 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -134,7 +134,6 @@ def get_vuln_pkg_refs(vulnerability, package): def process_advisories(data_source: DataSource) -> None: - # bulk_create_vuln_refs = set() bulk_create_vuln_pkg_refs = set() # Treat updated_advisories and added_advisories as same. Eventually # we want to refactor all data sources to provide advisories via a diff --git a/vulnerabilities/importers/redhat.py b/vulnerabilities/importers/redhat.py index d6e69b922..7c3a13609 100644 --- a/vulnerabilities/importers/redhat.py +++ b/vulnerabilities/importers/redhat.py @@ -20,14 +20,17 @@ # VulnerableCode is a free software code scanning tool from nexB Inc. and others. # Visit https://github.com/nexB/vulnerablecode/ for support and download. 
-import requests + +import json from packageurl import PackageURL +import requests from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import DataSource from vulnerabilities.data_source import DataSourceConfiguration from vulnerabilities.data_source import Reference +from vulnerabilities.data_source import VulnerabilitySeverity class RedhatDataSource(DataSource): @@ -40,9 +43,9 @@ def __enter__(self): def updated_advisories(self): processed_advisories = [] for advisory_data in self.redhat_response: - processed_advisories.append(to_advisory(advisory_data)) + yield [to_advisory(advisory_data)] - return self.batch_advisories(processed_advisories) + # return self.batch_advisories(processed_advisories) def fetch(): @@ -75,21 +78,50 @@ def to_advisory(advisory_data): references = [] if advisory_data.get("bugzilla"): bugzilla = advisory_data.get("bugzilla") + url = "https://bugzilla.redhat.com/show_bug.cgi?id={}".format(bugzilla) + bugzilla_severity = VulnerabilitySeverity( + severity_type="REDHAT_BUGZILLA_SEVERITY", + severity_value=requests.get(f"https://bugzilla.redhat.com/rest/bug/{bugzilla}").json()["bugs"][0]["severity"], # nopep8 + ) + references.append( Reference( - url="https://bugzilla.redhat.com/show_bug.cgi?id={}".format(bugzilla), + scores=[bugzilla_severity], + url=url, reference_id=bugzilla, ) ) - for rhsa in advisory_data["advisories"]: - references.append( - Reference( - url="https://access.redhat.com/errata/{}".format(rhsa), reference_id=rhsa, + for rh_adv in advisory_data["advisories"]: + + url = "https://access.redhat.com/errata/{}".format(rh_adv) + + # RH provides 3 types of advisories RHSA, RHBA, RHEA. Only RHSA's contain severity score. + # See https://access.redhat.com/articles/2130961 for more details. 
+ + if "RHSA" in rh_adv: + rhsa_aggregate_severity = VulnerabilitySeverity( + severity_type="RHSA_AGGREGATE_SEVERITY", + severity_value=requests.get(f"https://access.redhat.com/hydra/rest/securitydata/cvrf/{rh_adv}.json").json()["cvrfdoc"]["aggregate_severity"], # nopep8 ) - ) - references.append(Reference(url=advisory_data["resource_url"])) + references.append( + Reference( + scores=[rhsa_aggregate_severity], + url=url, + reference_id=rh_adv, + ) + ) + + else: + references.append(Reference(scores=[], url=url, reference_id=rh_adv)) + + redhat_cvss3 = VulnerabilitySeverity( + severity_type="REDHAT_CVSS3", + severity_value=requests.get(advisory_data["resource_url"]).json()["cvss3"]["cvss3_base_score"], # nopep8 + ) + + references.append(Reference(scores=[redhat_cvss3], url=advisory_data["resource_url"])) return Advisory( summary=advisory_data["bugzilla_description"], diff --git a/vulnerabilities/tests/test_redhat_importer.py b/vulnerabilities/tests/test_redhat_importer.py index 8447c508a..9690bbc5c 100644 --- a/vulnerabilities/tests/test_redhat_importer.py +++ b/vulnerabilities/tests/test_redhat_importer.py @@ -30,6 +30,7 @@ import vulnerabilities.importers.redhat as redhat from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import Reference +from vulnerabilities.data_source import VulnerabilitySeverity BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data/", "redhat.json") @@ -83,18 +84,38 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1396383", reference_id="1396383", + scores=[ + VulnerabilitySeverity( + severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 + ) + ], ), Reference( url="https://access.redhat.com/errata/RHSA-2017:1931", reference_id="RHSA-2017:1931", + scores=[ + VulnerabilitySeverity( + severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + ) + ], ), Reference( url="https://access.redhat.com/errata/RHSA-2017:0725", 
reference_id="RHSA-2017:0725", + scores=[ + VulnerabilitySeverity( + severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + ) + ], ), Reference( url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-9401.json", # nopep8 reference_id="", + scores=[ + VulnerabilitySeverity( + severity_type="REDHAT_CVSS3", severity_value=6.0 + ) + ], ), ], key=lambda x: x.url, @@ -138,26 +159,56 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1430347", reference_id="1430347", + scores=[ + VulnerabilitySeverity( + severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 + ) + ], ), Reference( url="https://access.redhat.com/errata/RHSA-2017:1842", reference_id="RHSA-2017:1842", + scores=[ + VulnerabilitySeverity( + severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + ) + ], ), Reference( url="https://access.redhat.com/errata/RHSA-2017:2437", reference_id="RHSA-2017:2437", + scores=[ + VulnerabilitySeverity( + severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + ) + ], ), Reference( url="https://access.redhat.com/errata/RHSA-2017:2077", reference_id="RHSA-2017:2077", + scores=[ + VulnerabilitySeverity( + severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + ) + ], ), Reference( url="https://access.redhat.com/errata/RHSA-2017:2444", reference_id="RHSA-2017:2444", + scores=[ + VulnerabilitySeverity( + severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + ) + ], ), Reference( url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-10200.json", # nopep8 reference_id="", + scores=[ + VulnerabilitySeverity( + severity_type="REDHAT_CVSS3", severity_value=6.0 + ) + ], ), ], key=lambda x: x.url, @@ -176,10 +227,20 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1492984", reference_id="1492984", + scores=[ + VulnerabilitySeverity( + severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 + ) + ], ), Reference( 
url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2017-12168.json", # nopep8 reference_id="", + scores=[ + VulnerabilitySeverity( + severity_type="REDHAT_CVSS3", severity_value=6.0 + ) + ], ), ], key=lambda x: x.url, @@ -189,8 +250,18 @@ def test_to_advisory(self): } found_data = set() + mock_resp = unittest.mock.MagicMock() + mock_resp.json = lambda: { + "bugs": [{"severity": 2.0}], + "cvrfdoc": {"aggregate_severity": 2.2}, + "cvss3": {"cvss3_base_score": 6.0}, + } for adv in data: - adv = redhat.to_advisory(adv) - adv.vuln_references = sorted(adv.vuln_references, key=lambda x: x.url) - found_data.add(adv) + with unittest.mock.patch( + "vulnerabilities.importers.redhat.requests.get", return_value=mock_resp + ): + adv = redhat.to_advisory(adv) + adv.vuln_references = sorted(adv.vuln_references, key=lambda x: x.url) + found_data.add(adv) + assert expected_data == found_data From 98e4082d75f537a2324fe72b0ba6a27314f851ae Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sun, 29 Nov 2020 16:10:40 +0530 Subject: [PATCH 05/21] Remove VulnerabilityReferenceInserter class Signed-off-by: Shivam Sandbhor --- vulnerabilities/import_runner.py | 22 +--------------------- 1 file changed, 1 insertion(+), 21 deletions(-) diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index c2b4ac13c..fd453a232 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -41,27 +41,7 @@ logger = logging.getLogger(__name__) -# These _inserter classes are used to instantiate model objects. -# Frozen dataclass store args required to store instantiate -# model objects, this way model objects can be hashed indirectly which -# is required in this implementation. 
- - -@dataclasses.dataclass(frozen=True) -class VulnerabilityReferenceInserter: - vulnerability: models.Vulnerability - reference_id: Optional[str] = "" - url: Optional[str] = "" - - def __post_init__(self): - if not any([self.reference_id, self.url]): - raise TypeError("VulnerabilityReferenceInserter expects either reference_id or url") - - def to_model_object(self): - return models.VulnerabilityReference(**dataclasses.asdict(self)) - - -# These _inserter classes are used to instantiate model objects. +# This *Inserter class is used to instantiate model objects. # Frozen dataclass store args required to store instantiate # model objects, this way model objects can be hashed indirectly which # is required in this implementation. From 9fc7ec8d5e12af80747cdd86847bc788363ab08e Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Wed, 2 Dec 2020 16:10:39 +0530 Subject: [PATCH 06/21] Make code review changes Signed-off-by: Shivam Sandbhor --- vulnerabilities/.vscode/settings.json | 3 +++ vulnerabilities/importers/redhat.py | 16 +++++++++++----- vulnerabilities/tests/test_nvd.py | 7 +------ 3 files changed, 15 insertions(+), 11 deletions(-) create mode 100644 vulnerabilities/.vscode/settings.json diff --git a/vulnerabilities/.vscode/settings.json b/vulnerabilities/.vscode/settings.json new file mode 100644 index 000000000..cfeb3fb8c --- /dev/null +++ b/vulnerabilities/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.pythonPath": "/home/shivam/coding/development/vulnerablecode/venv/bin/python" +} \ No newline at end of file diff --git a/vulnerabilities/importers/redhat.py b/vulnerabilities/importers/redhat.py index 7c3a13609..713b58065 100644 --- a/vulnerabilities/importers/redhat.py +++ b/vulnerabilities/importers/redhat.py @@ -43,9 +43,9 @@ def __enter__(self): def updated_advisories(self): processed_advisories = [] for advisory_data in self.redhat_response: - yield [to_advisory(advisory_data)] + processed_advisories.extend(to_advisory(advisory_data)) - # return 
self.batch_advisories(processed_advisories) + return self.batch_advisories(processed_advisories) def fetch(): @@ -79,9 +79,11 @@ def to_advisory(advisory_data): if advisory_data.get("bugzilla"): bugzilla = advisory_data.get("bugzilla") url = "https://bugzilla.redhat.com/show_bug.cgi?id={}".format(bugzilla) + bugzilla_data = requests.get(f"https://bugzilla.redhat.com/rest/bug/{bugzilla}").json() + bugzilla_severity_val = bugzilla_data["bugs"][0]["severity"] bugzilla_severity = VulnerabilitySeverity( severity_type="REDHAT_BUGZILLA_SEVERITY", - severity_value=requests.get(f"https://bugzilla.redhat.com/rest/bug/{bugzilla}").json()["bugs"][0]["severity"], # nopep8 + severity_value=bugzilla_severity_val, ) references.append( @@ -100,9 +102,11 @@ def to_advisory(advisory_data): # See https://access.redhat.com/articles/2130961 for more details. if "RHSA" in rh_adv: + rhsa_data = requests.get(f"https://access.redhat.com/hydra/rest/securitydata/cvrf/{rh_adv}.json").json() # nopep8 + severity_value = rhsa_data["cvrfdoc"]["aggregate_severity"] rhsa_aggregate_severity = VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", - severity_value=requests.get(f"https://access.redhat.com/hydra/rest/securitydata/cvrf/{rh_adv}.json").json()["cvrfdoc"]["aggregate_severity"], # nopep8 + severity_value=severity_value, ) references.append( @@ -116,9 +120,11 @@ def to_advisory(advisory_data): else: references.append(Reference(scores=[], url=url, reference_id=rh_adv)) + redhat_cve_entry = requests.get(advisory_data["resource_url"]).json() + redhat_cvss = redhat_cve_entry["cvss3"]["cvss3_base_score"] redhat_cvss3 = VulnerabilitySeverity( severity_type="REDHAT_CVSS3", - severity_value=requests.get(advisory_data["resource_url"]).json()["cvss3"]["cvss3_base_score"], # nopep8 + severity_value=redhat_cvss, ) references.append(Reference(scores=[redhat_cvss3], url=advisory_data["resource_url"])) diff --git a/vulnerabilities/tests/test_nvd.py b/vulnerabilities/tests/test_nvd.py index 
24cbe5873..1e21d4ad8 100644 --- a/vulnerabilities/tests/test_nvd.py +++ b/vulnerabilities/tests/test_nvd.py @@ -164,10 +164,5 @@ def test_to_advisories(self): found_advisories = list(self.data_src.to_advisories(self.nvd_data)) # Only 1 advisory because other advisory is hardware related assert len(found_advisories) == 1 - found_advisories[0].vuln_references = sorted( - found_advisories[0].vuln_references, key=lambda x: x.url - ) # nopep8 - # for id,ref in found_advisories[0].vuln_references : - # ref[id].scores.sort(key = lambda x : x.severity_value) - + found_advisories[0].vuln_references = sorted(found_advisories[0].vuln_references, key=lambda x: x.url) # nopep8 assert expected_advisories == found_advisories From b0ae6a88a22a11f2961b1c810a3ddcff9f7442b1 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Wed, 2 Dec 2020 16:13:15 +0530 Subject: [PATCH 07/21] Avoid unnnecessary init of default args in test_nvd.py Signed-off-by: Shivam Sandbhor --- vulnerabilities/tests/test_nvd.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/vulnerabilities/tests/test_nvd.py b/vulnerabilities/tests/test_nvd.py index 1e21d4ad8..1368b17ba 100644 --- a/vulnerabilities/tests/test_nvd.py +++ b/vulnerabilities/tests/test_nvd.py @@ -140,11 +140,9 @@ def test_to_advisories(self): [ Reference( url="http://code.google.com/p/gperftools/source/browse/tags/perftools-0.4/ChangeLog", # nopep8 - scores=[], ), Reference( url="http://kqueue.org/blog/2012/03/05/memory-allocator-security-revisited/", # nopep8 - scores=[], ), Reference( url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", # nopep8 From 5d22856ddfe97e28b0469d394fe92f3630faf68a Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Wed, 2 Dec 2020 19:45:41 +0530 Subject: [PATCH 08/21] Delete vscode config and add it to gitignore Signed-off-by: Shivam Sandbhor --- .gitignore | 3 +++ vulnerabilities/.vscode/settings.json | 3 --- 2 files changed, 3 insertions(+), 3 deletions(-) delete mode 100644 vulnerabilities/.vscode/settings.json 
diff --git a/.gitignore b/.gitignore index e12634454..545630d64 100644 --- a/.gitignore +++ b/.gitignore @@ -122,3 +122,6 @@ Pipfile # pytest .pytest_cache + +# VSCode +.vscode diff --git a/vulnerabilities/.vscode/settings.json b/vulnerabilities/.vscode/settings.json deleted file mode 100644 index cfeb3fb8c..000000000 --- a/vulnerabilities/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "python.pythonPath": "/home/shivam/coding/development/vulnerablecode/venv/bin/python" -} \ No newline at end of file From 190440bd0584557752f868724db037041162df02 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Wed, 2 Dec 2020 19:48:32 +0530 Subject: [PATCH 09/21] Rename `scores` to `severities` in the advisory dataclass Signed-off-by: Shivam Sandbhor --- vulnerabilities/data_source.py | 2 +- vulnerabilities/import_runner.py | 2 +- vulnerabilities/importers/nvd.py | 14 +++++------ vulnerabilities/importers/redhat.py | 8 +++---- vulnerabilities/tests/test_nvd.py | 2 +- vulnerabilities/tests/test_redhat_importer.py | 24 +++++++++---------- 6 files changed, 26 insertions(+), 26 deletions(-) diff --git a/vulnerabilities/data_source.py b/vulnerabilities/data_source.py index 4455d9289..c56bb6dab 100644 --- a/vulnerabilities/data_source.py +++ b/vulnerabilities/data_source.py @@ -55,7 +55,7 @@ class VulnerabilitySeverity: @dataclasses.dataclass class Reference: - scores: List[VulnerabilitySeverity] = dataclasses.field(default_factory=list) + severities: List[VulnerabilitySeverity] = dataclasses.field(default_factory=list) url: str = "" reference_id: str = "" diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index fd453a232..5eb4db856 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -128,7 +128,7 @@ def process_advisories(data_source: DataSource) -> None: vulnerability=vuln, reference_id=vuln_ref.reference_id, url=vuln_ref.url ) - for score in vuln_ref.scores: + for score in vuln_ref.severities: 
models.VulnerabilitySeverity.objects.update_or_create( vulnerability=vuln, reference=ref, diff --git a/vulnerabilities/importers/nvd.py b/vulnerabilities/importers/nvd.py index ac3b12d0b..0b698cfe0 100644 --- a/vulnerabilities/importers/nvd.py +++ b/vulnerabilities/importers/nvd.py @@ -78,12 +78,12 @@ def to_advisories(self, nvd_data): cve_id = cve_item["cve"]["CVE_data_meta"]["ID"] ref_urls = self.extract_reference_urls(cve_item) references = [Reference(url=url) for url in ref_urls] - severity_scores = self.extract_severity_scores(cve_item) + severity_severities = self.extract_severity_severities(cve_item) references.append( Reference( url=f"https://nvd.nist.gov/vuln/detail/{cve_id}", reference_id=cve_id, - scores=severity_scores, + severities=severity_severities, ) ) summary = self.extract_summary(cve_item) @@ -100,11 +100,11 @@ def extract_summary(cve_item): return max(summaries, key=len) @staticmethod - def extract_severity_scores(cve_item): - severity_scores = [] + def extract_severity_severities(cve_item): + severity_severities = [] if cve_item["impact"].get("baseMetricV3"): - severity_scores.append( + severity_severities.append( VulnerabilitySeverity( severity_type="cvssV3", severity_value=str(cve_item["impact"]["baseMetricV3"]["cvssV3"]["baseScore"]), @@ -112,14 +112,14 @@ def extract_severity_scores(cve_item): ) if cve_item["impact"].get("baseMetricV2"): - severity_scores.append( + severity_severities.append( VulnerabilitySeverity( severity_type="cvssV2", severity_value=str(cve_item["impact"]["baseMetricV2"]["cvssV2"]["baseScore"]), ) ) - return severity_scores + return severity_severities def extract_reference_urls(self, cve_item): urls = set() diff --git a/vulnerabilities/importers/redhat.py b/vulnerabilities/importers/redhat.py index 713b58065..c480e208f 100644 --- a/vulnerabilities/importers/redhat.py +++ b/vulnerabilities/importers/redhat.py @@ -88,7 +88,7 @@ def to_advisory(advisory_data): references.append( Reference( - scores=[bugzilla_severity], + 
severities=[bugzilla_severity], url=url, reference_id=bugzilla, ) @@ -111,14 +111,14 @@ def to_advisory(advisory_data): references.append( Reference( - scores=[rhsa_aggregate_severity], + severities=[rhsa_aggregate_severity], url=url, reference_id=rh_adv, ) ) else: - references.append(Reference(scores=[], url=url, reference_id=rh_adv)) + references.append(Reference(severities=[], url=url, reference_id=rh_adv)) redhat_cve_entry = requests.get(advisory_data["resource_url"]).json() redhat_cvss = redhat_cve_entry["cvss3"]["cvss3_base_score"] @@ -127,7 +127,7 @@ def to_advisory(advisory_data): severity_value=redhat_cvss, ) - references.append(Reference(scores=[redhat_cvss3], url=advisory_data["resource_url"])) + references.append(Reference(severities=[redhat_cvss3], url=advisory_data["resource_url"])) return Advisory( summary=advisory_data["bugzilla_description"], diff --git a/vulnerabilities/tests/test_nvd.py b/vulnerabilities/tests/test_nvd.py index 1368b17ba..957786084 100644 --- a/vulnerabilities/tests/test_nvd.py +++ b/vulnerabilities/tests/test_nvd.py @@ -146,7 +146,7 @@ def test_to_advisories(self): ), Reference( url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", # nopep8 - scores=[ + severities=[ VulnerabilitySeverity(severity_type="cvssV2", severity_value="5.0") ], reference_id="CVE-2005-4895", diff --git a/vulnerabilities/tests/test_redhat_importer.py b/vulnerabilities/tests/test_redhat_importer.py index 9690bbc5c..9f8812f1d 100644 --- a/vulnerabilities/tests/test_redhat_importer.py +++ b/vulnerabilities/tests/test_redhat_importer.py @@ -84,7 +84,7 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1396383", reference_id="1396383", - scores=[ + severities=[ VulnerabilitySeverity( severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 ) @@ -93,7 +93,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:1931", reference_id="RHSA-2017:1931", - scores=[ + severities=[ 
VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -102,7 +102,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:0725", reference_id="RHSA-2017:0725", - scores=[ + severities=[ VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -111,7 +111,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-9401.json", # nopep8 reference_id="", - scores=[ + severities=[ VulnerabilitySeverity( severity_type="REDHAT_CVSS3", severity_value=6.0 ) @@ -159,7 +159,7 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1430347", reference_id="1430347", - scores=[ + severities=[ VulnerabilitySeverity( severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 ) @@ -168,7 +168,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:1842", reference_id="RHSA-2017:1842", - scores=[ + severities=[ VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -177,7 +177,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:2437", reference_id="RHSA-2017:2437", - scores=[ + severities=[ VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -186,7 +186,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:2077", reference_id="RHSA-2017:2077", - scores=[ + severities=[ VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -195,7 +195,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:2444", reference_id="RHSA-2017:2444", - scores=[ + severities=[ VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -204,7 +204,7 @@ def test_to_advisory(self): Reference( 
url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-10200.json", # nopep8 reference_id="", - scores=[ + severities=[ VulnerabilitySeverity( severity_type="REDHAT_CVSS3", severity_value=6.0 ) @@ -227,7 +227,7 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1492984", reference_id="1492984", - scores=[ + severities=[ VulnerabilitySeverity( severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 ) @@ -236,7 +236,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2017-12168.json", # nopep8 reference_id="", - scores=[ + severities=[ VulnerabilitySeverity( severity_type="REDHAT_CVSS3", severity_value=6.0 ) From ee7e50982b8fc21abcf6cf7b9fb8964053209cfe Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Wed, 2 Dec 2020 19:54:42 +0530 Subject: [PATCH 10/21] Improve order of fields in Reference dataclass Signed-off-by: Shivam Sandbhor --- vulnerabilities/data_source.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vulnerabilities/data_source.py b/vulnerabilities/data_source.py index c56bb6dab..f446eeaed 100644 --- a/vulnerabilities/data_source.py +++ b/vulnerabilities/data_source.py @@ -55,9 +55,9 @@ class VulnerabilitySeverity: @dataclasses.dataclass class Reference: - severities: List[VulnerabilitySeverity] = dataclasses.field(default_factory=list) - url: str = "" reference_id: str = "" + url: str = "" + severities: List[VulnerabilitySeverity] = dataclasses.field(default_factory=list) def __post_init__(self): if not any([self.url, self.reference_id]): From 9ca55582cc8f3b223d8fe8a7cbe26fc57bbd2459 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sun, 29 Nov 2020 16:00:12 +0530 Subject: [PATCH 11/21] Store severity scores given by NVD. 
Signed-off-by: Shivam Sandbhor --- vulnerabilities/importers/nvd.py | 12 ++++++------ vulnerabilities/tests/test_nvd.py | 9 +++++++++ 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/vulnerabilities/importers/nvd.py b/vulnerabilities/importers/nvd.py index 0b698cfe0..f6a2fe0c4 100644 --- a/vulnerabilities/importers/nvd.py +++ b/vulnerabilities/importers/nvd.py @@ -78,7 +78,7 @@ def to_advisories(self, nvd_data): cve_id = cve_item["cve"]["CVE_data_meta"]["ID"] ref_urls = self.extract_reference_urls(cve_item) references = [Reference(url=url) for url in ref_urls] - severity_severities = self.extract_severity_severities(cve_item) + severity_scores = self.extract_severity_scores(cve_item) references.append( Reference( url=f"https://nvd.nist.gov/vuln/detail/{cve_id}", @@ -100,11 +100,11 @@ def extract_summary(cve_item): return max(summaries, key=len) @staticmethod - def extract_severity_severities(cve_item): - severity_severities = [] + def extract_severity_scores(cve_item): + severity_scores = [] if cve_item["impact"].get("baseMetricV3"): - severity_severities.append( + severity_scores.append( VulnerabilitySeverity( severity_type="cvssV3", severity_value=str(cve_item["impact"]["baseMetricV3"]["cvssV3"]["baseScore"]), @@ -112,14 +112,14 @@ def extract_severity_severities(cve_item): ) if cve_item["impact"].get("baseMetricV2"): - severity_severities.append( + severity_scores.append( VulnerabilitySeverity( severity_type="cvssV2", severity_value=str(cve_item["impact"]["baseMetricV2"]["cvssV2"]["baseScore"]), ) ) - return severity_severities + return severity_scores def extract_reference_urls(self, cve_item): urls = set() diff --git a/vulnerabilities/tests/test_nvd.py b/vulnerabilities/tests/test_nvd.py index 957786084..83aeab055 100644 --- a/vulnerabilities/tests/test_nvd.py +++ b/vulnerabilities/tests/test_nvd.py @@ -140,9 +140,18 @@ def test_to_advisories(self): [ Reference( url="http://code.google.com/p/gperftools/source/browse/tags/perftools-0.4/ChangeLog", 
# nopep8 + scores=[], ), Reference( url="http://kqueue.org/blog/2012/03/05/memory-allocator-security-revisited/", # nopep8 + scores=[], + ), + Reference( + url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", # nopep8 + scores=[ + VulnerabilitySeverity(severity_type="cvssV2", severity_value="5.0") + ], + reference_id="CVE-2005-4895", ), Reference( url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", # nopep8 From 0887e83e4e5944e6d1a4cd40c7948504ecf23b0d Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sun, 29 Nov 2020 16:00:58 +0530 Subject: [PATCH 12/21] Store severity scores given by RedHat Signed-off-by: Shivam Sandbhor --- vulnerabilities/importers/redhat.py | 4 +- vulnerabilities/tests/test_redhat_importer.py | 48 +++++++++++++++++++ 2 files changed, 50 insertions(+), 2 deletions(-) diff --git a/vulnerabilities/importers/redhat.py b/vulnerabilities/importers/redhat.py index c480e208f..94aaec2e6 100644 --- a/vulnerabilities/importers/redhat.py +++ b/vulnerabilities/importers/redhat.py @@ -43,9 +43,9 @@ def __enter__(self): def updated_advisories(self): processed_advisories = [] for advisory_data in self.redhat_response: - processed_advisories.extend(to_advisory(advisory_data)) + yield [to_advisory(advisory_data)] - return self.batch_advisories(processed_advisories) + # return self.batch_advisories(processed_advisories) def fetch(): diff --git a/vulnerabilities/tests/test_redhat_importer.py b/vulnerabilities/tests/test_redhat_importer.py index 9f8812f1d..b9663cc07 100644 --- a/vulnerabilities/tests/test_redhat_importer.py +++ b/vulnerabilities/tests/test_redhat_importer.py @@ -84,7 +84,11 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1396383", reference_id="1396383", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 ) @@ -93,7 +97,11 @@ def test_to_advisory(self): Reference( 
url="https://access.redhat.com/errata/RHSA-2017:1931", reference_id="RHSA-2017:1931", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -102,7 +110,11 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:0725", reference_id="RHSA-2017:0725", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -111,7 +123,11 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-9401.json", # nopep8 reference_id="", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="REDHAT_CVSS3", severity_value=6.0 ) @@ -159,7 +175,11 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1430347", reference_id="1430347", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 ) @@ -168,7 +188,11 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:1842", reference_id="RHSA-2017:1842", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -177,7 +201,11 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:2437", reference_id="RHSA-2017:2437", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -186,7 +214,11 @@ def test_to_advisory(self): Reference( 
url="https://access.redhat.com/errata/RHSA-2017:2077", reference_id="RHSA-2017:2077", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -195,7 +227,11 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:2444", reference_id="RHSA-2017:2444", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 ) @@ -204,7 +240,11 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-10200.json", # nopep8 reference_id="", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="REDHAT_CVSS3", severity_value=6.0 ) @@ -227,7 +267,11 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1492984", reference_id="1492984", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 ) @@ -236,7 +280,11 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2017-12168.json", # nopep8 reference_id="", +<<<<<<< HEAD severities=[ +======= + scores=[ +>>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( severity_type="REDHAT_CVSS3", severity_value=6.0 ) From 86f2e4f0251c7c4c47f8f39c67c29fff3ed1edbf Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Fri, 18 Dec 2020 14:07:47 +0530 Subject: [PATCH 13/21] Misc Data Structure changes Signed-off-by: Shivam Sandbhor --- vulnerabilities/data_source.py | 4 +-- vulnerabilities/import_runner.py | 4 +-- vulnerabilities/importers/nvd.py | 8 +++--- vulnerabilities/importers/redhat.py | 6 ++--- 
.../0002_vulnerabilityseverityscore.py | 2 +- .../migrations/0003_auto_20201127_1423.py | 4 +-- .../migrations/0004_auto_20201129_1024.py | 2 +- .../migrations/0005_auto_20201218_0824.py | 27 +++++++++++++++++++ vulnerabilities/models.py | 11 ++++---- vulnerabilities/tests/test_nvd.py | 2 +- vulnerabilities/tests/test_redhat_importer.py | 24 ++++++++--------- 11 files changed, 61 insertions(+), 33 deletions(-) create mode 100644 vulnerabilities/migrations/0005_auto_20201218_0824.py diff --git a/vulnerabilities/data_source.py b/vulnerabilities/data_source.py index f446eeaed..a639cb9e9 100644 --- a/vulnerabilities/data_source.py +++ b/vulnerabilities/data_source.py @@ -48,8 +48,8 @@ @dataclasses.dataclass class VulnerabilitySeverity: - severity_type: str - severity_value: str + system: str + value: str @dataclasses.dataclass diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index 5eb4db856..1399c2f79 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -131,9 +131,9 @@ def process_advisories(data_source: DataSource) -> None: for score in vuln_ref.severities: models.VulnerabilitySeverity.objects.update_or_create( vulnerability=vuln, + scoring_system_identifier=score.system, reference=ref, - severity_type=score.severity_type, - defaults={"severity_value": score.severity_value}, + defaults={"value": score.value}, ) for purl in chain(advisory.impacted_package_urls, advisory.resolved_package_urls): diff --git a/vulnerabilities/importers/nvd.py b/vulnerabilities/importers/nvd.py index f6a2fe0c4..157580764 100644 --- a/vulnerabilities/importers/nvd.py +++ b/vulnerabilities/importers/nvd.py @@ -106,16 +106,16 @@ def extract_severity_scores(cve_item): if cve_item["impact"].get("baseMetricV3"): severity_scores.append( VulnerabilitySeverity( - severity_type="cvssV3", - severity_value=str(cve_item["impact"]["baseMetricV3"]["cvssV3"]["baseScore"]), + system="cvssV3", + 
value=str(cve_item["impact"]["baseMetricV3"]["cvssV3"]["baseScore"]), ) ) if cve_item["impact"].get("baseMetricV2"): severity_scores.append( VulnerabilitySeverity( - severity_type="cvssV2", - severity_value=str(cve_item["impact"]["baseMetricV2"]["cvssV2"]["baseScore"]), + system="cvssV2", + value=str(cve_item["impact"]["baseMetricV2"]["cvssV2"]["baseScore"]), ) ) diff --git a/vulnerabilities/importers/redhat.py b/vulnerabilities/importers/redhat.py index 94aaec2e6..21a1f295d 100644 --- a/vulnerabilities/importers/redhat.py +++ b/vulnerabilities/importers/redhat.py @@ -43,9 +43,9 @@ def __enter__(self): def updated_advisories(self): processed_advisories = [] for advisory_data in self.redhat_response: - yield [to_advisory(advisory_data)] + processed_advisories.extend(to_advisory(advisory_data)) - # return self.batch_advisories(processed_advisories) + return self.batch_advisories(processed_advisories) def fetch(): @@ -148,4 +148,4 @@ def rpm_to_purl(rpm_string): name, version = components if version[0].isdigit(): - return PackageURL(name=name, type="rpm", version=version, namespace="redhat") + return PackageURL(name=name, type="rpm", version=version, namespace="redhat") \ No newline at end of file diff --git a/vulnerabilities/migrations/0002_vulnerabilityseverityscore.py b/vulnerabilities/migrations/0002_vulnerabilityseverityscore.py index 30f3e7911..2c75f9810 100644 --- a/vulnerabilities/migrations/0002_vulnerabilityseverityscore.py +++ b/vulnerabilities/migrations/0002_vulnerabilityseverityscore.py @@ -15,7 +15,7 @@ class Migration(migrations.Migration): name='VulnerabilitySeverityScore', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('severity_type', models.CharField(help_text='Example: CVSS v2, Redhat Impact Score', max_length=50)), + ('system', models.CharField(help_text='Example: CVSS v2, Redhat Impact Score', max_length=50)), ('severity_score', models.CharField(help_text='Example: 9.0, Important, 
High', max_length=50)), ('reference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.VulnerabilityReference')), ('vulnerability', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.Vulnerability')), diff --git a/vulnerabilities/migrations/0003_auto_20201127_1423.py b/vulnerabilities/migrations/0003_auto_20201127_1423.py index 362e8c71c..d1b200852 100644 --- a/vulnerabilities/migrations/0003_auto_20201127_1423.py +++ b/vulnerabilities/migrations/0003_auto_20201127_1423.py @@ -15,8 +15,8 @@ class Migration(migrations.Migration): name='VulnerabilitySeverity', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('severity_type', models.CharField(help_text='Example: CVSS v2, Redhat Impact Score', max_length=50)), - ('severity_value', models.CharField(help_text='Example: 9.0, Important, High', max_length=50)), + ('system', models.CharField(help_text='Example: CVSS v2, Redhat Impact Score', max_length=50)), + ('value', models.CharField(help_text='Example: 9.0, Important, High', max_length=50)), ('reference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.VulnerabilityReference')), ('vulnerability', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.Vulnerability')), ], diff --git a/vulnerabilities/migrations/0004_auto_20201129_1024.py b/vulnerabilities/migrations/0004_auto_20201129_1024.py index f239a4bcf..2b8f2e912 100644 --- a/vulnerabilities/migrations/0004_auto_20201129_1024.py +++ b/vulnerabilities/migrations/0004_auto_20201129_1024.py @@ -12,6 +12,6 @@ class Migration(migrations.Migration): operations = [ migrations.AlterUniqueTogether( name='vulnerabilityseverity', - unique_together={('vulnerability', 'reference', 'severity_type')}, + unique_together={('vulnerability', 'reference', 'system')}, ), ] diff --git a/vulnerabilities/migrations/0005_auto_20201218_0824.py 
b/vulnerabilities/migrations/0005_auto_20201218_0824.py new file mode 100644 index 000000000..690a2e3b2 --- /dev/null +++ b/vulnerabilities/migrations/0005_auto_20201218_0824.py @@ -0,0 +1,27 @@ +# Generated by Django 3.0.7 on 2020-12-18 08:24 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('vulnerabilities', '0004_auto_20201129_1024'), + ] + + operations = [ + migrations.AddField( + model_name='vulnerabilityseverity', + name='scoring_system_identifier', + field=models.CharField(default='', help_text='Example: cvssv2', max_length=50), + preserve_default=False, + ), + migrations.AlterUniqueTogether( + name='vulnerabilityseverity', + unique_together={('vulnerability', 'reference', 'scoring_system_identifier')}, + ), + migrations.RemoveField( + model_name='vulnerabilityseverity', + name='system', + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index f361526d1..e7fb53b09 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -205,13 +205,14 @@ def __str__(self): class VulnerabilitySeverity(models.Model): - severity_type = models.CharField( - max_length=50, help_text="Example: CVSS v2, Redhat Impact Score" + vulnerability = models.ForeignKey(Vulnerability, on_delete=models.CASCADE) + value = models.CharField(max_length=50, help_text="Example: 9.0, Important, High") + scoring_system_identifier = models.CharField( + max_length=50, help_text="Example: cvssv2" ) - severity_value = models.CharField(max_length=50, help_text="Example: 9.0, Important, High") reference = models.ForeignKey(VulnerabilityReference, on_delete=models.CASCADE) - vulnerability = models.ForeignKey(Vulnerability, on_delete=models.CASCADE) + class Meta: # TBD - unique_together = ("vulnerability", "reference", "severity_type") + unique_together = ("vulnerability", "reference", "scoring_system_identifier") diff --git a/vulnerabilities/tests/test_nvd.py b/vulnerabilities/tests/test_nvd.py index 
83aeab055..ee5488fde 100644 --- a/vulnerabilities/tests/test_nvd.py +++ b/vulnerabilities/tests/test_nvd.py @@ -149,7 +149,7 @@ def test_to_advisories(self): Reference( url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", # nopep8 scores=[ - VulnerabilitySeverity(severity_type="cvssV2", severity_value="5.0") + VulnerabilitySeverity(system="cvssV2", value="5.0") ], reference_id="CVE-2005-4895", ), diff --git a/vulnerabilities/tests/test_redhat_importer.py b/vulnerabilities/tests/test_redhat_importer.py index b9663cc07..1723735b1 100644 --- a/vulnerabilities/tests/test_redhat_importer.py +++ b/vulnerabilities/tests/test_redhat_importer.py @@ -90,7 +90,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 + system="REDHAT_BUGZILLA_SEVERITY", value=2.0 ) ], ), @@ -103,7 +103,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) ], ), @@ -116,7 +116,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) ], ), @@ -129,7 +129,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - severity_type="REDHAT_CVSS3", severity_value=6.0 + system="REDHAT_CVSS3", value=6.0 ) ], ), @@ -181,7 +181,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 + system="REDHAT_BUGZILLA_SEVERITY", value=2.0 ) ], ), @@ -194,7 +194,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - 
severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) ], ), @@ -207,7 +207,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) ], ), @@ -220,7 +220,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) ], ), @@ -233,7 +233,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - severity_type="RHSA_AGGREGATE_SEVERITY", severity_value=2.2 + system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) ], ), @@ -246,7 +246,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - severity_type="REDHAT_CVSS3", severity_value=6.0 + system="REDHAT_CVSS3", value=6.0 ) ], ), @@ -273,7 +273,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - severity_type="REDHAT_BUGZILLA_SEVERITY", severity_value=2.0 + system="REDHAT_BUGZILLA_SEVERITY", value=2.0 ) ], ), @@ -286,7 +286,7 @@ def test_to_advisory(self): scores=[ >>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( - severity_type="REDHAT_CVSS3", severity_value=6.0 + system="REDHAT_CVSS3", value=6.0 ) ], ), From e8e6166ff747dc6069317546ab22a33409a59772 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Fri, 18 Dec 2020 14:31:52 +0530 Subject: [PATCH 14/21] Refactor tests and importers to use new data structure field names Signed-off-by: Shivam Sandbhor --- vulnerabilities/importers/redhat.py | 24 +++++----- vulnerabilities/models.py | 5 +- vulnerabilities/tests/test_nvd.py | 11 +---- vulnerabilities/tests/test_redhat_importer.py | 48 ------------------- 4 files changed, 
15 insertions(+), 73 deletions(-) diff --git a/vulnerabilities/importers/redhat.py b/vulnerabilities/importers/redhat.py index 21a1f295d..3f44d46f1 100644 --- a/vulnerabilities/importers/redhat.py +++ b/vulnerabilities/importers/redhat.py @@ -1,4 +1,4 @@ -# Copyright (c) 2017 nexB Inc. and others. All rights reserved. +# Copyright (c) nexB Inc. and others. All rights reserved. # http://nexb.com and https://github.com/nexB/vulnerablecode/ # The VulnerableCode software is licensed under the Apache License version 2.0. # Data generated with VulnerableCode require an acknowledgment. @@ -17,7 +17,7 @@ # OR CONDITIONS OF ANY KIND, either express or implied. No content created from # VulnerableCode should be considered or used as legal advice. Consult an Attorney # for any legal advice. -# VulnerableCode is a free software code scanning tool from nexB Inc. and others. +# VulnerableCode is a free software code from nexB Inc. and others. # Visit https://github.com/nexB/vulnerablecode/ for support and download. @@ -82,8 +82,8 @@ def to_advisory(advisory_data): bugzilla_data = requests.get(f"https://bugzilla.redhat.com/rest/bug/{bugzilla}").json() bugzilla_severity_val = bugzilla_data["bugs"][0]["severity"] bugzilla_severity = VulnerabilitySeverity( - severity_type="REDHAT_BUGZILLA_SEVERITY", - severity_value=bugzilla_severity_val, + system="REDHAT_BUGZILLA_SEVERITY", + value=bugzilla_severity_val, ) references.append( @@ -102,11 +102,13 @@ def to_advisory(advisory_data): # See https://access.redhat.com/articles/2130961 for more details. 
if "RHSA" in rh_adv: - rhsa_data = requests.get(f"https://access.redhat.com/hydra/rest/securitydata/cvrf/{rh_adv}.json").json() # nopep8 - severity_value = rhsa_data["cvrfdoc"]["aggregate_severity"] + rhsa_data = requests.get( + f"https://access.redhat.com/hydra/rest/securitydata/cvrf/{rh_adv}.json" + ).json() # nopep8 + value = rhsa_data["cvrfdoc"]["aggregate_severity"] rhsa_aggregate_severity = VulnerabilitySeverity( - severity_type="RHSA_AGGREGATE_SEVERITY", - severity_value=severity_value, + system="RHSA_AGGREGATE_SEVERITY", + value=value, ) references.append( @@ -123,8 +125,8 @@ def to_advisory(advisory_data): redhat_cve_entry = requests.get(advisory_data["resource_url"]).json() redhat_cvss = redhat_cve_entry["cvss3"]["cvss3_base_score"] redhat_cvss3 = VulnerabilitySeverity( - severity_type="REDHAT_CVSS3", - severity_value=redhat_cvss, + system="REDHAT_CVSS3", + value=redhat_cvss, ) references.append(Reference(severities=[redhat_cvss3], url=advisory_data["resource_url"])) @@ -148,4 +150,4 @@ def rpm_to_purl(rpm_string): name, version = components if version[0].isdigit(): - return PackageURL(name=name, type="rpm", version=version, namespace="redhat") \ No newline at end of file + return PackageURL(name=name, type="rpm", version=version, namespace="redhat") diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index e7fb53b09..a37b62f02 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -207,11 +207,8 @@ class VulnerabilitySeverity(models.Model): vulnerability = models.ForeignKey(Vulnerability, on_delete=models.CASCADE) value = models.CharField(max_length=50, help_text="Example: 9.0, Important, High") - scoring_system_identifier = models.CharField( - max_length=50, help_text="Example: cvssv2" - ) + scoring_system_identifier = models.CharField(max_length=50, help_text="Example: cvssv2") reference = models.ForeignKey(VulnerabilityReference, on_delete=models.CASCADE) - class Meta: # TBD diff --git 
a/vulnerabilities/tests/test_nvd.py b/vulnerabilities/tests/test_nvd.py index ee5488fde..71ac81ea5 100644 --- a/vulnerabilities/tests/test_nvd.py +++ b/vulnerabilities/tests/test_nvd.py @@ -140,23 +140,14 @@ def test_to_advisories(self): [ Reference( url="http://code.google.com/p/gperftools/source/browse/tags/perftools-0.4/ChangeLog", # nopep8 - scores=[], ), Reference( url="http://kqueue.org/blog/2012/03/05/memory-allocator-security-revisited/", # nopep8 - scores=[], - ), - Reference( - url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", # nopep8 - scores=[ - VulnerabilitySeverity(system="cvssV2", value="5.0") - ], - reference_id="CVE-2005-4895", ), Reference( url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", # nopep8 severities=[ - VulnerabilitySeverity(severity_type="cvssV2", severity_value="5.0") + VulnerabilitySeverity(system="cvssV2", value="5.0") ], reference_id="CVE-2005-4895", ), diff --git a/vulnerabilities/tests/test_redhat_importer.py b/vulnerabilities/tests/test_redhat_importer.py index 1723735b1..839c240ab 100644 --- a/vulnerabilities/tests/test_redhat_importer.py +++ b/vulnerabilities/tests/test_redhat_importer.py @@ -84,11 +84,7 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1396383", reference_id="1396383", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( system="REDHAT_BUGZILLA_SEVERITY", value=2.0 ) @@ -97,11 +93,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:1931", reference_id="RHSA-2017:1931", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) @@ -110,11 +102,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:0725", reference_id="RHSA-2017:0725", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by 
RedHat VulnerabilitySeverity( system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) @@ -123,11 +111,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-9401.json", # nopep8 reference_id="", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( system="REDHAT_CVSS3", value=6.0 ) @@ -175,11 +159,7 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1430347", reference_id="1430347", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( system="REDHAT_BUGZILLA_SEVERITY", value=2.0 ) @@ -188,11 +168,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:1842", reference_id="RHSA-2017:1842", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) @@ -201,11 +177,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:2437", reference_id="RHSA-2017:2437", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) @@ -214,11 +186,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:2077", reference_id="RHSA-2017:2077", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) @@ -227,11 +195,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/errata/RHSA-2017:2444", reference_id="RHSA-2017:2444", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( system="RHSA_AGGREGATE_SEVERITY", value=2.2 ) @@ -240,11 +204,7 @@ def test_to_advisory(self): Reference( 
url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-10200.json", # nopep8 reference_id="", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( system="REDHAT_CVSS3", value=6.0 ) @@ -267,11 +227,7 @@ def test_to_advisory(self): Reference( url="https://bugzilla.redhat.com/show_bug.cgi?id=1492984", reference_id="1492984", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( system="REDHAT_BUGZILLA_SEVERITY", value=2.0 ) @@ -280,11 +236,7 @@ def test_to_advisory(self): Reference( url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2017-12168.json", # nopep8 reference_id="", -<<<<<<< HEAD severities=[ -======= - scores=[ ->>>>>>> Store severity scores given by RedHat VulnerabilitySeverity( system="REDHAT_CVSS3", value=6.0 ) From c9df2f9758a1e874288e123122a7b0c55dbe0964 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Fri, 18 Dec 2020 14:52:42 +0530 Subject: [PATCH 15/21] Rebase and resolve conflicts Signed-off-by: Shivam Sandbhor --- vulnerabilities/importers/nvd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/importers/nvd.py b/vulnerabilities/importers/nvd.py index 157580764..91efb5776 100644 --- a/vulnerabilities/importers/nvd.py +++ b/vulnerabilities/importers/nvd.py @@ -83,7 +83,7 @@ def to_advisories(self, nvd_data): Reference( url=f"https://nvd.nist.gov/vuln/detail/{cve_id}", reference_id=cve_id, - severities=severity_severities, + severities=severity_scores, ) ) summary = self.extract_summary(cve_item) From 0312cfbfbac020fb20481635f9152ea42c802d1d Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Thu, 24 Dec 2020 15:02:16 +0530 Subject: [PATCH 16/21] Create and use ScoringSystem objects for handling severity Signed-off-by: Shivam Sandbhor --- vulnerabilities/data_source.py | 3 +- vulnerabilities/import_runner.py | 2 +- vulnerabilities/importers/nvd.py | 7 +-- 
vulnerabilities/importers/redhat.py | 17 +++--- vulnerabilities/severity_systems.py | 54 +++++++++++++++++++ vulnerabilities/tests/test_nvd.py | 3 +- vulnerabilities/tests/test_redhat_importer.py | 25 ++++----- 7 files changed, 82 insertions(+), 29 deletions(-) create mode 100644 vulnerabilities/severity_systems.py diff --git a/vulnerabilities/data_source.py b/vulnerabilities/data_source.py index a639cb9e9..1bb4d2594 100644 --- a/vulnerabilities/data_source.py +++ b/vulnerabilities/data_source.py @@ -42,13 +42,14 @@ from packageurl import PackageURL from vulnerabilities.oval_parser import OvalParser +from vulnerabilities.severity_systems import ScoringSystem logger = logging.getLogger(__name__) @dataclasses.dataclass class VulnerabilitySeverity: - system: str + system: ScoringSystem value: str diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index 1399c2f79..f06ba41ca 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -131,7 +131,7 @@ def process_advisories(data_source: DataSource) -> None: for score in vuln_ref.severities: models.VulnerabilitySeverity.objects.update_or_create( vulnerability=vuln, - scoring_system_identifier=score.system, + scoring_system_identifier=score.system.identifier, reference=ref, defaults={"value": score.value}, ) diff --git a/vulnerabilities/importers/nvd.py b/vulnerabilities/importers/nvd.py index 91efb5776..6e2e66973 100644 --- a/vulnerabilities/importers/nvd.py +++ b/vulnerabilities/importers/nvd.py @@ -32,8 +32,9 @@ from vulnerabilities.data_source import DataSource from vulnerabilities.data_source import DataSourceConfiguration from vulnerabilities.data_source import Reference -from vulnerabilities.helpers import create_etag from vulnerabilities.data_source import VulnerabilitySeverity +from vulnerabilities.helpers import create_etag +from vulnerabilities.severity_systems import scoring_systems @dataclasses.dataclass @@ -106,7 +107,7 @@ def 
extract_severity_scores(cve_item): if cve_item["impact"].get("baseMetricV3"): severity_scores.append( VulnerabilitySeverity( - system="cvssV3", + system=scoring_systems["cvssv3"], value=str(cve_item["impact"]["baseMetricV3"]["cvssV3"]["baseScore"]), ) ) @@ -114,7 +115,7 @@ def extract_severity_scores(cve_item): if cve_item["impact"].get("baseMetricV2"): severity_scores.append( VulnerabilitySeverity( - system="cvssV2", + system=scoring_systems["cvssv2"], value=str(cve_item["impact"]["baseMetricV2"]["cvssV2"]["baseScore"]), ) ) diff --git a/vulnerabilities/importers/redhat.py b/vulnerabilities/importers/redhat.py index 3f44d46f1..2c25a3d4e 100644 --- a/vulnerabilities/importers/redhat.py +++ b/vulnerabilities/importers/redhat.py @@ -31,13 +31,13 @@ from vulnerabilities.data_source import DataSourceConfiguration from vulnerabilities.data_source import Reference from vulnerabilities.data_source import VulnerabilitySeverity +from vulnerabilities.severity_systems import scoring_systems class RedhatDataSource(DataSource): CONFIG_CLASS = DataSourceConfiguration def __enter__(self): - self.redhat_response = fetch() def updated_advisories(self): @@ -82,7 +82,7 @@ def to_advisory(advisory_data): bugzilla_data = requests.get(f"https://bugzilla.redhat.com/rest/bug/{bugzilla}").json() bugzilla_severity_val = bugzilla_data["bugs"][0]["severity"] bugzilla_severity = VulnerabilitySeverity( - system="REDHAT_BUGZILLA_SEVERITY", + system=scoring_systems["rhbs"], value=bugzilla_severity_val, ) @@ -95,26 +95,21 @@ def to_advisory(advisory_data): ) for rh_adv in advisory_data["advisories"]: - - url = "https://access.redhat.com/errata/{}".format(rh_adv) - # RH provides 3 types of advisories RHSA, RHBA, RHEA. Only RHSA's contain severity score. # See https://access.redhat.com/articles/2130961 for more details. 
if "RHSA" in rh_adv: - rhsa_data = requests.get( - f"https://access.redhat.com/hydra/rest/securitydata/cvrf/{rh_adv}.json" - ).json() # nopep8 + rhsa_data = requests.get(f"https://access.redhat.com/hydra/rest/securitydata/cvrf/{rh_adv}.json").json() # nopep8 value = rhsa_data["cvrfdoc"]["aggregate_severity"] rhsa_aggregate_severity = VulnerabilitySeverity( - system="RHSA_AGGREGATE_SEVERITY", + system=scoring_systems["rhas"], value=value, ) references.append( Reference( severities=[rhsa_aggregate_severity], - url=url, + url="https://access.redhat.com/errata/{}".format(rh_adv), reference_id=rh_adv, ) ) @@ -125,7 +120,7 @@ def to_advisory(advisory_data): redhat_cve_entry = requests.get(advisory_data["resource_url"]).json() redhat_cvss = redhat_cve_entry["cvss3"]["cvss3_base_score"] redhat_cvss3 = VulnerabilitySeverity( - system="REDHAT_CVSS3", + system=scoring_systems["rh_cvssv3"], value=redhat_cvss, ) diff --git a/vulnerabilities/severity_systems.py b/vulnerabilities/severity_systems.py new file mode 100644 index 000000000..758a78c9a --- /dev/null +++ b/vulnerabilities/severity_systems.py @@ -0,0 +1,54 @@ +import dataclasses + + +@dataclasses.dataclass +class ScoringSystem: + + # a short identifier for the scoring system. + identifier: str + # a name which represents the scoring system such as `RedHat bug severity`. + # This is for human understanding + name: str + # a url to documentation about that scoring system + url: str + # notes about that scoring system + notes: str = "" + + def as_score(self, value): + """ + Return a normalized numeric score for this scoring system given a raw + value. For instance this can be used to convert a CVSS vector to a base + score.
+ """ + raise NotImplementedError + + +scoring_systems = { + "cvssv2": ScoringSystem( + identifier="cvssv2", + name="CVSSv2", + url="https://www.first.org/cvss/v2/", + notes="We store the vector as value and compute scores from that.", + ), + "cvssv3": ScoringSystem( + identifier="cvssv3", + name="CVSSv3", + url="https://www.first.org/cvss/v3-0/", + notes="We store the vector as value and compute scores from that.", + ), + "rhbs": ScoringSystem( + identifier="rhbs", + name="RedHat Bugzilla severity", + url="https://bugzilla.redhat.com/page.cgi?id=fields.html#bug_severity", + ), + "rhas": ScoringSystem( + identifier="rhas", + name="RedHat Aggregate severity", + url="https://access.redhat.com/security/updates/classification/", + ), + "rh_cvssv3": ScoringSystem( + identifier="rh_cvssv3", + name="RedHat CVSSv3", + url="https://access.redhat.com/security/updates/classification/", + ), +} diff --git a/vulnerabilities/tests/test_nvd.py b/vulnerabilities/tests/test_nvd.py index 71ac81ea5..f7a8c8534 100644 --- a/vulnerabilities/tests/test_nvd.py +++ b/vulnerabilities/tests/test_nvd.py @@ -29,6 +29,7 @@ from vulnerabilities.data_source import Reference from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import VulnerabilitySeverity +from vulnerabilities.severity_systems import scoring_systems BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data/nvd/nvd_test.json") @@ -147,7 +148,7 @@ def test_to_advisories(self): Reference( url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", # nopep8 severities=[ - VulnerabilitySeverity(system="cvssV2", value="5.0") + VulnerabilitySeverity(system=scoring_systems["cvssv2"], value="5.0") ], reference_id="CVE-2005-4895", ), diff --git a/vulnerabilities/tests/test_redhat_importer.py b/vulnerabilities/tests/test_redhat_importer.py index 839c240ab..78ac53b83 100644 --- a/vulnerabilities/tests/test_redhat_importer.py +++ b/vulnerabilities/tests/test_redhat_importer.py @@ 
-31,6 +31,7 @@ from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import Reference from vulnerabilities.data_source import VulnerabilitySeverity +from vulnerabilities.severity_systems import scoring_systems BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data/", "redhat.json") @@ -86,7 +87,7 @@ def test_to_advisory(self): reference_id="1396383", severities=[ VulnerabilitySeverity( - system="REDHAT_BUGZILLA_SEVERITY", value=2.0 + system=scoring_systems["rhbs"], value=2.0 ) ], ), @@ -95,7 +96,7 @@ def test_to_advisory(self): reference_id="RHSA-2017:1931", severities=[ VulnerabilitySeverity( - system="RHSA_AGGREGATE_SEVERITY", value=2.2 + system=scoring_systems["rhas"], value=2.2 ) ], ), @@ -104,7 +105,7 @@ def test_to_advisory(self): reference_id="RHSA-2017:0725", severities=[ VulnerabilitySeverity( - system="RHSA_AGGREGATE_SEVERITY", value=2.2 + system=scoring_systems["rhas"], value=2.2 ) ], ), @@ -113,7 +114,7 @@ def test_to_advisory(self): reference_id="", severities=[ VulnerabilitySeverity( - system="REDHAT_CVSS3", value=6.0 + system=scoring_systems["rh_cvssv3"], value=6.0 ) ], ), @@ -161,7 +162,7 @@ def test_to_advisory(self): reference_id="1430347", severities=[ VulnerabilitySeverity( - system="REDHAT_BUGZILLA_SEVERITY", value=2.0 + system=scoring_systems["rhbs"], value=2.0 ) ], ), @@ -170,7 +171,7 @@ def test_to_advisory(self): reference_id="RHSA-2017:1842", severities=[ VulnerabilitySeverity( - system="RHSA_AGGREGATE_SEVERITY", value=2.2 + system=scoring_systems["rhas"], value=2.2 ) ], ), @@ -179,7 +180,7 @@ def test_to_advisory(self): reference_id="RHSA-2017:2437", severities=[ VulnerabilitySeverity( - system="RHSA_AGGREGATE_SEVERITY", value=2.2 + system=scoring_systems["rhas"], value=2.2 ) ], ), @@ -188,7 +189,7 @@ def test_to_advisory(self): reference_id="RHSA-2017:2077", severities=[ VulnerabilitySeverity( - system="RHSA_AGGREGATE_SEVERITY", value=2.2 + 
system=scoring_systems["rhas"], value=2.2 ) ], ), @@ -197,7 +198,7 @@ def test_to_advisory(self): reference_id="RHSA-2017:2444", severities=[ VulnerabilitySeverity( - system="RHSA_AGGREGATE_SEVERITY", value=2.2 + system=scoring_systems["rhas"], value=2.2 ) ], ), @@ -206,7 +207,7 @@ def test_to_advisory(self): reference_id="", severities=[ VulnerabilitySeverity( - system="REDHAT_CVSS3", value=6.0 + system=scoring_systems["rh_cvssv3"], value=6.0 ) ], ), @@ -229,7 +230,7 @@ def test_to_advisory(self): reference_id="1492984", severities=[ VulnerabilitySeverity( - system="REDHAT_BUGZILLA_SEVERITY", value=2.0 + system=scoring_systems["rhbs"], value=2.0 ) ], ), @@ -238,7 +239,7 @@ def test_to_advisory(self): reference_id="", severities=[ VulnerabilitySeverity( - system="REDHAT_CVSS3", value=6.0 + system=scoring_systems["rh_cvssv3"], value=6.0 ) ], ), From 641b4577d6c0d79cffbcc8ec9ec3f24c737a9d8b Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Thu, 24 Dec 2020 15:25:03 +0530 Subject: [PATCH 17/21] Cast score value to string while inserting it into db Signed-off-by: Shivam Sandbhor --- vulnerabilities/import_runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index f06ba41ca..bdefb2a99 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -133,7 +133,7 @@ def process_advisories(data_source: DataSource) -> None: vulnerability=vuln, scoring_system_identifier=score.system.identifier, reference=ref, - defaults={"value": score.value}, + defaults={"value": str(score.value)}, ) for purl in chain(advisory.impacted_package_urls, advisory.resolved_package_urls): From be24df02f5726edf8ae8493a6bd3e80644be60d0 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sun, 24 Jan 2021 18:30:27 +0530 Subject: [PATCH 18/21] Change field naming in VulnerabilitySeverity model scoring_system_identifier is changed to scoring_system Signed-off-by: Shivam Sandbhor --- 
vulnerabilities/fixtures/debian.json | 12 +- vulnerabilities/fixtures/openssl.json | 537 ++++++------------ vulnerabilities/import_runner.py | 2 +- .../migrations/0006_auto_20210123_0616.py | 27 + vulnerabilities/models.py | 19 +- 5 files changed, 227 insertions(+), 370 deletions(-) create mode 100644 vulnerabilities/migrations/0006_auto_20210123_0616.py diff --git a/vulnerabilities/fixtures/debian.json b/vulnerabilities/fixtures/debian.json index 27fdd0e72..bb2218434 100644 --- a/vulnerabilities/fixtures/debian.json +++ b/vulnerabilities/fixtures/debian.json @@ -4,8 +4,8 @@ "pk": 1, "fields": { "cve_id": "CVE-2014-8242", - "summary": "", - "cvss": null + "summary": "" + } }, { @@ -13,8 +13,8 @@ "pk": 2, "fields": { "cve_id": "CVE-2009-1382", - "summary": "", - "cvss": null + "summary": "" + } }, { @@ -22,8 +22,8 @@ "pk": 3, "fields": { "cve_id": "CVE-2009-2459", - "summary": "", - "cvss": null + "summary": "" + } }, { diff --git a/vulnerabilities/fixtures/openssl.json b/vulnerabilities/fixtures/openssl.json index 0ce2e9dab..6f6929919 100644 --- a/vulnerabilities/fixtures/openssl.json +++ b/vulnerabilities/fixtures/openssl.json @@ -1,12 +1,10 @@ -[ - +[ { "model": "vulnerabilities.vulnerability", "pk": 293, "fields": { "cve_id": "CVE-2018-5407", - "summary": "OpenSSL ECC scalar multiplication, used in e.g. ECDSA and ECDH, has been shown to be vulnerable to a microarchitecture timing side channel attack. An attacker with sufficient access to mount local timing attacks during ECDSA signature generation could recover the private key.", - "cvss": null + "summary": "OpenSSL ECC scalar multiplication, used in e.g. ECDSA and ECDH, has been shown to be vulnerable to a microarchitecture timing side channel attack. An attacker with sufficient access to mount local timing attacks during ECDSA signature generation could recover the private key." 
} }, { @@ -14,8 +12,7 @@ "pk": 294, "fields": { "cve_id": "CVE-2019-1549", - "summary": "OpenSSL 1.1.1 introduced a rewritten random number generator (RNG). This was intended to include protection in the event of a fork() system call in order to ensure that the parent and child processes did not share the same RNG state. However this protection was not being used in the default case. A partial mitigation for this issue is that the output from a high precision timer is mixed into the RNG state so the likelihood of a parent and child process sharing state is significantly reduced. If an application already calls OPENSSL_init_crypto() explicitly using OPENSSL_INIT_ATFORK then this problem does not occur at all.", - "cvss": null + "summary": "OpenSSL 1.1.1 introduced a rewritten random number generator (RNG). This was intended to include protection in the event of a fork() system call in order to ensure that the parent and child processes did not share the same RNG state. However this protection was not being used in the default case. A partial mitigation for this issue is that the output from a high precision timer is mixed into the RNG state so the likelihood of a parent and child process sharing state i significantly reduced. If an application already calls OPENSSL_init_crypto() explicitly using OPENSSL_INIT_ATFORK then this problem does not occur at all." } }, { @@ -23,8 +20,7 @@ "pk": 295, "fields": { "cve_id": "CVE-2020-1967", - "summary": "Server or client applications that call the SSL_check_chain() function during or after a TLS 1.3 handshake may crash due to a NULL pointer dereference as a result of incorrect handling of the \"signature_algorithms_cert\" TLS extension. The crash occurs if an invalid or unrecognised signature algorithm is received from the peer. This could be exploited by a malicious peer in a Denial of Service attack. OpenSSL version 1.1.1d, 1.1.1e, and 1.1.1f are affected by this issue. 
This issue did not affect OpenSSL versions prior to 1.1.1d.", - "cvss": null + "summary": "Server or client applications that call the SSL_check_chain() function during or after a TLS 1.3 handshake may crash due to a NULL pointer dereference as a result of incorrect handling of the \"signature_algorithms_cert\" TLS extension. The crash occurs if an invalid or unrecognised signature algorithm is received from the peer. This could be exploited by a malicious peer in a Denial of Service attack. OpenSSL version 1.1.1d, 1.1.1e, and 1.1.1f are affected by thi issue. This issue did not affect OpenSSL versions prior to 1.1.1d." } }, { @@ -32,8 +28,7 @@ "pk": 296, "fields": { "cve_id": "CVE-2019-1552", - "summary": "OpenSSL has internal defaults for a directory tree where it can find a configuration file as well as certificates used for verification in TLS. This directory is most commonly referred to as OPENSSLDIR, and is configurable with the --prefix / --openssldir configuration options. For OpenSSL versions 1.1.0 and 1.1.1, the mingw configuration targets assume that resulting programs and libraries are installed in a Unix-like environment and the default prefix for program installation as well as for OPENSSLDIR should be '/usr/local'. However, mingw programs are Windows programs, and as such, find themselves looking at sub-directories of 'C:/usr/local', which may be world writable, which enables untrusted users to modify OpenSSL's default configuration, insert CA certificates, modify (or even replace) existing engine modules, etc. For OpenSSL 1.0.2, '/usr/local/ssl' is used as default for OPENSSLDIR on all Unix and Windows targets, including Visual C builds. However, some build instructions for the diverse Windows targets on 1.0.2 encourage you to specify your own --prefix. OpenSSL versions 1.1.1, 1.1.0 and 1.0.2 are affected by this issue. 
Due to the limited scope of affected deployments this has been assessed as low severity and therefore we are not creating new releases at this time.", - "cvss": null + "summary": "OpenSSL has internal defaults for a directory tree where it can find a configuration file as well as certificates used for verification in TLS. This directory is most commonly referred to as OPENSSLDIR, and is configurable with the --prefix / --openssldir configuration options. For OpenSSL versions 1.1.0 and 1.1.1, the mingw configuration targets assume that resulting programs and libraries are installed in a Unix-like environment and the default prefix for progra installation as well as for OPENSSLDIR should be '/usr/local'. However, mingw programs are Windows programs, and as such, find themselves looking at sub-directories of 'C:/usr/local', which may be world writable, which enables untrusted users to modify OpenSSL's default configuration, insert CA certificates, modify (or even replace) existing engine modules, etc. For OpenSSL 1.0.2, '/usr/local/ssl' is used as default for OPENSSLDIR on all Unix and Windows targets, including Visual C builds. However, some build instructions for the diverse Windows targets on 1.0.2 encourage you to specify your own --prefix. OpenSSL versions 1.1.1, 1.1.0 and 1.0.2 are affected by this issue. Due to the limited scope of affected deployments this has been assessed as low severity and therefore we are not creating new releases at this time." } }, { @@ -41,8 +36,7 @@ "pk": 297, "fields": { "cve_id": "CVE-2019-1551", - "summary": "There is an overflow bug in the x64_64 Montgomery squaring procedure used in exponentiation with 512-bit moduli. No EC algorithms are affected. Analysis suggests that attacks against 2-prime RSA1024, 3-prime RSA1536, and DSA1024 as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH512 are considered just feasible. 
However, for an attack the target would have to re-use the DH512 private key, which is not recommended anyway. Also applications directly using the low level API BN_mod_exp may be affected if they use BN_FLG_CONSTTIME.", - "cvss": null + "summary": "There is an overflow bug in the x64_64 Montgomery squaring procedure used in exponentiation with 512-bit moduli. No EC algorithms are affected. Analysis suggests that attacks against 2-prime RSA1024, 3-prime RSA1536, and DSA1024 as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH512 are considered just feasible. However, for an attack the target would have to re-use the DH512 private key, which is not recommended anyway. Also applications directly using the low level API BN_mod_exp may be affected if they use BN_FLG_CONSTTIME." } }, { @@ -50,8 +44,7 @@ "pk": 298, "fields": { "cve_id": "CVE-2019-1543", - "summary": "ChaCha20-Poly1305 is an AEAD cipher, and requires a unique nonce input for every encryption operation. RFC 7539 specifies that the nonce value (IV) should be 96 bits (12 bytes). OpenSSL allows a variable nonce length and front pads the nonce with 0 bytes if it is less than 12 bytes. However it also incorrectly allows a nonce to be set of up to 16 bytes. In this case only the last 12 bytes are significant and any additional leading bytes are ignored. It is a requirement of using this cipher that nonce values are unique. Messages encrypted using a reused nonce value are susceptible to serious confidentiality and integrity attacks. If an application changes the default nonce length to be longer than 12 bytes and then makes a change to the leading bytes of the nonce expecting the new value to be a new unique nonce then such an application could inadvertently encrypt messages with a reused nonce. Additionally the ignored bytes in a long nonce are not covered by the integrity guarantee of this cipher. 
Any application that relies on the integrity of these ignored leading bytes of a long nonce may be further affected. Any OpenSSL internal use of this cipher, including in SSL/TLS, is safe because no such use sets such a long nonce value. However user applications that use this cipher directly and set a non-default nonce length to be longer than 12 bytes may be vulnerable. OpenSSL versions 1.1.1 and 1.1.0 are affected by this issue. Due to the limited scope of affected deployments this has been assessed as low severity and therefore we are not creating new releases at this time.", - "cvss": null + "summary": "ChaCha20-Poly1305 is an AEAD cipher, and requires a unique nonce input for every encryption operation. RFC 7539 specifies that the nonce value (IV) should be 96 bits (12 bytes). OpenSSL allows a variable nonce length and front pads the nonce with 0 bytes if it is less than 12 bytes. However it also incorrectly allows a nonce to be set of up to 16 bytes. In this case only the last 12 bytes are significant and any additional leading bytes are ignored. It is a requiremen of using this cipher that nonce values are unique. Messages encrypted using a reused nonce value are susceptible to serious confidentiality and integrity attacks. If an application changes the default nonce length to be longer than 12 bytes and then makes a change to the leading bytes of the nonce expecting the new value to be a new unique nonce then such an application could inadvertently encrypt messages with a reused nonce. Additionally the ignored bytes in a long nonce are not covered by the integrity guarantee of this cipher. Any application that relies on the integrity of these ignored leading bytes of a long nonce may be further affected. Any OpenSSL internal use of this cipher, including in SSL/TLS, is safe because no such use sets such a long nonce value. 
However user applications that use this cipher directly and set a non-default nonce length to be longer than 12 bytes may be vulnerable. OpenSSL versions 1.1.1 and 1.1.0 are affected by this issue. Due to the limited scope of affected deployments this has been assessed as low severity and therefore we are not creating new releases at this time." } }, { @@ -59,8 +52,7 @@ "pk": 299, "fields": { "cve_id": "CVE-2020-1968", - "summary": "The Raccoon attack exploits a flaw in the TLS specification which can lead to an attacker being able to compute the pre-master secret in connections which have used a Diffie-Hellman (DH) based ciphersuite. In such a case this would result in the attacker being able to eavesdrop on all encrypted communications sent over that TLS connection. The attack can only be exploited if an implementation re-uses a DH secret across multiple TLS connections. Note that this issue only impacts DH ciphersuites and not ECDH ciphersuites. This issue affects OpenSSL 1.0.2 which is out of support and no longer receiving public updates. OpenSSL 1.1.1 is not vulnerable to this issue.", - "cvss": null + "summary": "The Raccoon attack exploits a flaw in the TLS specification which can lead to an attacker being able to compute the pre-master secret in connections which have used a Diffie-Hellman (DH) based ciphersuite. In such a case this would result in the attacker being able to eavesdrop on all encrypted communications sent over that TLS connection. The attack can only be exploited if an implementation re-uses a DH secret across multiple TLS connections. Note that this issue only impacts DH ciphersuites and not ECDH ciphersuites. This issue affects OpenSSL 1.0.2 which is out of support and no longer receiving public updates. OpenSSL 1.1.1 is not vulnerable to this issue." 
} }, { @@ -68,8 +60,7 @@ "pk": 300, "fields": { "cve_id": "CVE-2019-1547", - "summary": "Normally in OpenSSL EC groups always have a co-factor present and this is used in side channel resistant code paths. However, in some cases, it is possible to construct a group using explicit parameters (instead of using a named curve). In those cases it is possible that such a group does not have the cofactor present. This can occur even where all the parameters match a known named curve. If such a curve is used then OpenSSL falls back to non-side channel resistant code paths which may result in full key recovery during an ECDSA signature operation. In order to be vulnerable an attacker would have to have the ability to time the creation of a large number of signatures where explicit parameters with no co-factor present are in use by an application using libcrypto. For the avoidance of doubt libssl is not vulnerable because explicit parameters are never used.", - "cvss": null + "summary": "Normally in OpenSSL EC groups always have a co-factor present and this is used in side channel resistant code paths. However, in some cases, it is possible to construct a group using explicit parameters (instead of using a named curve). In those cases it is possible that such a group does not have the cofactor present. This can occur even where all the parameters match a known named curve. If such a curve is used then OpenSSL falls back to non-side channel resistant cod paths which may result in full key recovery during an ECDSA signature operation. In order to be vulnerable an attacker would have to have the ability to time the creation of a large number of signatures where explicit parameters with no co-factor present are in use by an application using libcrypto. For the avoidance of doubt libssl is not vulnerable because explicit parameters are never used." 
} }, { @@ -77,8 +68,7 @@ "pk": 301, "fields": { "cve_id": "CVE-2019-1563", - "summary": "In situations where an attacker receives automated notification of the success or failure of a decryption attempt an attacker, after sending a very large number of messages to be decrypted, can recover a CMS/PKCS7 transported encryption key or decrypt any RSA encrypted message that was encrypted with the public RSA key, using a Bleichenbacher padding oracle attack. Applications are not affected if they use a certificate together with the private RSA key to the CMS_decrypt or PKCS7_decrypt functions to select the correct recipient info to decrypt.", - "cvss": null + "summary": "In situations where an attacker receives automated notification of the success or failure of a decryption attempt an attacker, after sending a very large number of messages to be decrypted, can recover a CMS/PKCS7 transported encryption key or decrypt any RSA encrypted message that was encrypted with the public RSA key, using a Bleichenbacher padding oracle attack. Applications are not affected if they use a certificate together with the private RSA key to th CMS_decrypt or PKCS7_decrypt functions to select the correct recipient info to decrypt." } }, { @@ -86,8 +76,7 @@ "pk": 302, "fields": { "cve_id": "CVE-2019-1559", - "summary": "If an application encounters a fatal protocol error and then calls SSL_shutdown() twice (once to send a close_notify, and once to receive one) then OpenSSL can respond differently to the calling application if a 0 byte record is received with invalid padding compared to if a 0 byte record is received with an invalid MAC. If the application then behaves differently based on that in a way that is detectable to the remote peer, then this amounts to a padding oracle that could be used to decrypt data. In order for this to be exploitable \"non-stitched\" ciphersuites must be in use. Stitched ciphersuites are optimised implementations of certain commonly used ciphersuites. 
Also the application must call SSL_shutdown() twice even if a protocol error has occurred (applications should not do this but some do anyway). AEAD ciphersuites are not impacted.", - "cvss": null + "summary": "If an application encounters a fatal protocol error and then calls SSL_shutdown() twice (once to send a close_notify, and once to receive one) then OpenSSL can respond differently to the calling application if a 0 byte record is received with invalid padding compared to if a 0 byte record is received with an invalid MAC. If the application then behaves differently based on that in a way that is detectable to the remote peer, then this amounts to a padding oracle tha could be used to decrypt data. In order for this to be exploitable \"non-stitched\" ciphersuites must be in use. Stitched ciphersuites are optimised implementations of certain commonly used ciphersuites. Also the application must call SSL_shutdown() twice even if a protocol error has occurred (applications should not do this but some do anyway). AEAD ciphersuites are not impacted." } }, { @@ -95,8 +84,7 @@ "pk": 303, "fields": { "cve_id": "CVE-2017-3738", - "summary": "There is an overflow bug in the AVX2 Montgomery multiplication procedure used in exponentiation with 1024-bit moduli. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH1024 are considered just feasible, because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be significant. However, for an attack on TLS to be meaningful, the server would have to share the DH1024 private key among multiple clients, which is no longer an option since CVE-2016-0701. This only affects processors that support the AVX2 but not ADX extensions like Intel Haswell (4th generation). 
Note: The impact from this issue is similar to CVE-2017-3736, CVE-2017-3732 and CVE-2015-3193. Due to the low severity of this issue we are not issuing a new release of OpenSSL 1.1.0 at this time. The fix will be included in OpenSSL 1.1.0h when it becomes available. The fix is also available in commit e502cc86d in the OpenSSL git repository.", - "cvss": null + "summary": "There is an overflow bug in the AVX2 Montgomery multiplication procedure used in exponentiation with 1024-bit moduli. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH1024 are considered just feasible, because most of the work necessary to deduce information about a private key may be performed offline. The amount of resource required for such an attack would be significant. However, for an attack on TLS to be meaningful, the server would have to share the DH1024 private key among multiple clients, which is no longer an option since CVE-2016-0701. This only affects processors that support the AVX2 but not ADX extensions like Intel Haswell (4th generation). Note: The impact from this issue is similar to CVE-2017-3736, CVE-2017-3732 and CVE-2015-3193. Due to the low severity of this issue we are not issuing a new release of OpenSSL 1.1.0 at this time. The fix will be included in OpenSSL 1.1.0h when it becomes available. The fix is also available in commit e502cc86d in the OpenSSL git repository." } }, { @@ -104,8 +92,7 @@ "pk": 304, "fields": { "cve_id": "CVE-2017-3735", - "summary": "While parsing an IPAdressFamily extension in an X.509 certificate, it is possible to do a one-byte overread. This would result in an incorrect text display of the certificate.", - "cvss": null + "summary": "While parsing an IPAdressFamily extension in an X.509 certificate, it is possible to do a one-byte overread. 
This would result in an incorrect text display of the certificate." } }, { @@ -113,8 +100,7 @@ "pk": 305, "fields": { "cve_id": "CVE-2018-0733", - "summary": "Because of an implementation bug the PA-RISC CRYPTO_memcmp function is effectively reduced to only comparing the least significant bit of each byte. This allows an attacker to forge messages that would be considered as authenticated in an amount of tries lower than that guaranteed by the security claims of the scheme. The module can only be compiled by the HP-UX assembler, so that only HP-UX PA-RISC targets are affected.", - "cvss": null + "summary": "Because of an implementation bug the PA-RISC CRYPTO_memcmp function is effectively reduced to only comparing the least significant bit of each byte. This allows an attacker to forge messages that would be considered as authenticated in an amount of tries lower than that guaranteed by the security claims of the scheme. The module can only be compiled by the HP-UX assembler, so that only HP-UX PA-RISC targets are affected." } }, { @@ -122,8 +108,7 @@ "pk": 306, "fields": { "cve_id": "CVE-2017-3737", - "summary": "OpenSSL 1.0.2 (starting from version 1.0.2b) introduced an \"error state\" mechanism. The intent was that if a fatal error occurred during a handshake then OpenSSL would move into the error state and would immediately fail if you attempted to continue the handshake. This works as designed for the explicit handshake functions (SSL_do_handshake(), SSL_accept() and SSL_connect()), however due to a bug it does not work correctly if SSL_read() or SSL_write() is called directly. In that scenario, if the handshake fails then a fatal error will be returned in the initial function call. If SSL_read()/SSL_write() is subsequently called by the application for the same SSL object then it will succeed and the data is passed without being decrypted/encrypted directly from the SSL/TLS record layer. 
In order to exploit this issue an application bug would have to be present that resulted in a call to SSL_read()/SSL_write() being issued after having already received a fatal error.", - "cvss": null + "summary": "OpenSSL 1.0.2 (starting from version 1.0.2b) introduced an \"error state\" mechanism. The intent was that if a fatal error occurred during a handshake then OpenSSL would move into the error state and would immediately fail if you attempted to continue the handshake. This works as designed for the explicit handshake functions (SSL_do_handshake(), SSL_accept() and SSL_connect()), however due to a bug it does not work correctly if SSL_read() or SSL_write() is calle directly. In that scenario, if the handshake fails then a fatal error will be returned in the initial function call. If SSL_read()/SSL_write() is subsequently called by the application for the same SSL object then it will succeed and the data is passed without being decrypted/encrypted directly from the SSL/TLS record layer. In order to exploit this issue an application bug would have to be present that resulted in a call to SSL_read()/SSL_write() being issued after having already received a fatal error." } }, { @@ -131,8 +116,7 @@ "pk": 307, "fields": { "cve_id": "CVE-2018-0735", - "summary": "The OpenSSL ECDSA signature algorithm has been shown to be vulnerable to a timing side channel attack. An attacker could use variations in the signing algorithm to recover the private key.", - "cvss": null + "summary": "The OpenSSL ECDSA signature algorithm has been shown to be vulnerable to a timing side channel attack. An attacker could use variations in the signing algorithm to recover the private key." } }, { @@ -140,8 +124,7 @@ "pk": 308, "fields": { "cve_id": "CVE-2017-3736", - "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. 
Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. This only affects processors that support the BMI1, BMI2 and ADX extensions like Intel Broadwell (5th generation) and later or AMD Ryzen.", - "cvss": null + "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such a attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. This only affects processors that support the BMI1, BMI2 and ADX extensions like Intel Broadwell (5th generation) and later or AMD Ryzen." } }, { @@ -149,8 +132,7 @@ "pk": 309, "fields": { "cve_id": "CVE-2018-0737", - "summary": "The OpenSSL RSA Key generation algorithm has been shown to be vulnerable to a cache timing side channel attack. 
An attacker with sufficient access to mount cache timing attacks during the RSA key generation process could recover the private key.", - "cvss": null + "summary": "The OpenSSL RSA Key generation algorithm has been shown to be vulnerable to a cache timing side channel attack. An attacker with sufficient access to mount cache timing attacks during the RSA key generation process could recover the private key." } }, { @@ -158,8 +140,7 @@ "pk": 310, "fields": { "cve_id": "CVE-2018-0739", - "summary": "Constructed ASN.1 types with a recursive definition (such as can be found in PKCS7) could eventually exceed the stack given malicious input with excessive recursion. This could result in a Denial Of Service attack. There are no such structures used within SSL/TLS that come from untrusted sources so this is considered safe.", - "cvss": null + "summary": "Constructed ASN.1 types with a recursive definition (such as can be found in PKCS7) could eventually exceed the stack given malicious input with excessive recursion. This could result in a Denial Of Service attack. There are no such structures used within SSL/TLS that come from untrusted sources so this is considered safe." } }, { @@ -167,8 +148,7 @@ "pk": 311, "fields": { "cve_id": "CVE-2018-0734", - "summary": "The OpenSSL DSA signature algorithm has been shown to be vulnerable to a timing side channel attack. An attacker could use variations in the signing algorithm to recover the private key.", - "cvss": null + "summary": "The OpenSSL DSA signature algorithm has been shown to be vulnerable to a timing side channel attack. An attacker could use variations in the signing algorithm to recover the private key." } }, { @@ -176,8 +156,7 @@ "pk": 312, "fields": { "cve_id": "CVE-2018-0732", - "summary": "During key agreement in a TLS handshake using a DH(E) based ciphersuite a malicious server can send a very large prime value to the client. 
This will cause the client to spend an unreasonably long period of time generating a key for this prime resulting in a hang until the client has finished. This could be exploited in a Denial Of Service attack.", - "cvss": null + "summary": "During key agreement in a TLS handshake using a DH(E) based ciphersuite a malicious server can send a very large prime value to the client. This will cause the client to spend an unreasonably long period of time generating a key for this prime resulting in a hang until the client has finished. This could be exploited in a Denial Of Service attack." } }, { @@ -185,8 +164,7 @@ "pk": 313, "fields": { "cve_id": "CVE-2017-3732", - "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites. Note: This issue is very similar to CVE-2015-3193 but must be treated as a separate problem.", - "cvss": null + "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. 
Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such a attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites. Note: This issue is very similar to CVE-2015-3193 but must be treated as a separate problem." } }, { @@ -194,8 +172,7 @@ "pk": 314, "fields": { "cve_id": "CVE-2017-3733", - "summary": "During a renegotiation handshake if the Encrypt-Then-Mac extension is negotiated where it was not in the original handshake (or vice-versa) then this can cause OpenSSL to crash (dependent on ciphersuite). Both clients and servers are affected.", - "cvss": null + "summary": "During a renegotiation handshake if the Encrypt-Then-Mac extension is negotiated where it was not in the original handshake (or vice-versa) then this can cause OpenSSL to crash (dependent on ciphersuite). Both clients and servers are affected." } }, { @@ -203,8 +180,7 @@ "pk": 315, "fields": { "cve_id": "CVE-2016-7053", - "summary": "Applications parsing invalid CMS structures can crash with a NULL pointer dereference. This is caused by a bug in the handling of the ASN.1 CHOICE type in OpenSSL 1.1.0 which can result in a NULL value being passed to the structure callback if an attempt is made to free certain invalid encodings. Only CHOICE structures using a callback which do not handle NULL value are affected.", - "cvss": null + "summary": "Applications parsing invalid CMS structures can crash with a NULL pointer dereference. 
This is caused by a bug in the handling of the ASN.1 CHOICE type in OpenSSL 1.1.0 which can result in a NULL value being passed to the structure callback if an attempt is made to free certain invalid encodings. Only CHOICE structures using a callback which do not handle NULL value are affected." } }, { @@ -212,8 +188,7 @@ "pk": 316, "fields": { "cve_id": "CVE-2017-3730", - "summary": "If a malicious server supplies bad parameters for a DHE or ECDHE key exchange then this can result in the client attempting to dereference a NULL pointer leading to a client crash. This could be exploited in a Denial of Service attack.", - "cvss": null + "summary": "If a malicious server supplies bad parameters for a DHE or ECDHE key exchange then this can result in the client attempting to dereference a NULL pointer leading to a client crash. This could be exploited in a Denial of Service attack." } }, { @@ -221,8 +196,7 @@ "pk": 317, "fields": { "cve_id": "CVE-2016-7052", - "summary": "This issue only affects OpenSSL 1.0.2i, released on 22nd September 2016. A bug fix which included a CRL sanity check was added to OpenSSL 1.1.0 but was omitted from OpenSSL 1.0.2i. As a result any attempt to use CRLs in OpenSSL 1.0.2i will crash with a null pointer exception.", - "cvss": null + "summary": "This issue only affects OpenSSL 1.0.2i, released on 22nd September 2016. A bug fix which included a CRL sanity check was added to OpenSSL 1.1.0 but was omitted from OpenSSL 1.0.2i. As a result any attempt to use CRLs in OpenSSL 1.0.2i will crash with a null pointer exception." } }, { @@ -230,8 +204,7 @@ "pk": 318, "fields": { "cve_id": "CVE-2016-6304", - "summary": "A malicious client can send an excessively large OCSP Status Request extension. If that client continually requests renegotiation, sending a large OCSP Status Request extension each time, then there will be unbounded memory growth on the server. This will eventually lead to a Denial Of Service attack through memory exhaustion. 
Servers with a default configuration are vulnerable even if they do not support OCSP. Builds using the \"no-ocsp\" build time option are not affected. Servers using OpenSSL versions prior to 1.0.1g are not vulnerable in a default configuration, instead only if an application explicitly enables OCSP stapling support.", - "cvss": null + "summary": "A malicious client can send an excessively large OCSP Status Request extension. If that client continually requests renegotiation, sending a large OCSP Status Request extension each time, then there will be unbounded memory growth on the server. This will eventually lead to a Denial Of Service attack through memory exhaustion. Servers with a default configuration are vulnerable even if they do not support OCSP. Builds using the \"no-ocsp\" build time option are not affected. Servers using OpenSSL versions prior to 1.0.1g are not vulnerable in a default configuration, instead only if an application explicitly enables OCSP stapling support." } }, { @@ -239,8 +212,7 @@ "pk": 319, "fields": { "cve_id": "CVE-2016-7054", - "summary": "TLS connections using *-CHACHA20-POLY1305 ciphersuites are susceptible to a DoS attack by corrupting larger payloads. This can result in an OpenSSL crash. This issue is not considered to be exploitable beyond a DoS.", - "cvss": null + "summary": "TLS connections using *-CHACHA20-POLY1305 ciphersuites are susceptible to a DoS attack by corrupting larger payloads. This can result in an OpenSSL crash. This issue is not considered to be exploitable beyond a DoS." } }, { @@ -248,8 +220,7 @@ "pk": 320, "fields": { "cve_id": "CVE-2016-6309", - "summary": "This issue only affects OpenSSL 1.1.0a, released on 22nd September 2016. The patch applied to address CVE-2016-6307 resulted in an issue where if a message larger than approx 16k is received then the underlying buffer to store the incoming message is reallocated and moved. 
Unfortunately a dangling pointer to the old location is left which results in an attempt to write to the previously freed location. This is likely to result in a crash, however it could potentially lead to execution of arbitrary code.", - "cvss": null + "summary": "This issue only affects OpenSSL 1.1.0a, released on 22nd September 2016. The patch applied to address CVE-2016-6307 resulted in an issue where if a message larger than approx 16k is received then the underlying buffer to store the incoming message is reallocated and moved. Unfortunately a dangling pointer to the old location is left which results in an attempt to write to the previously freed location. This is likely to result in a crash, however it could potentially lead to execution of arbitrary code." } }, { @@ -257,8 +228,7 @@ "pk": 321, "fields": { "cve_id": "CVE-2017-3731", - "summary": "If an SSL/TLS server or client is running on a 32-bit host, and a specific cipher is being used, then a truncated packet can cause that server or client to perform an out-of-bounds read, usually resulting in a crash. For OpenSSL 1.1.0, the crash can be triggered when using CHACHA20/POLY1305; users should upgrade to 1.1.0d. For Openssl 1.0.2, the crash can be triggered when using RC4-MD5; users who have not disabled that algorithm should update to 1.0.2k", - "cvss": null + "summary": "If an SSL/TLS server or client is running on a 32-bit host, and a specific cipher is being used, then a truncated packet can cause that server or client to perform an out-of-bounds read, usually resulting in a crash. For OpenSSL 1.1.0, the crash can be triggered when using CHACHA20/POLY1305; users should upgrade to 1.1.0d. 
For Openssl 1.0.2, the crash can be triggered when using RC4-MD5; users who have not disabled that algorithm should update to 1.0.2k" } }, { @@ -266,8 +236,7 @@ "pk": 322, "fields": { "cve_id": "CVE-2016-7055", - "summary": "There is a carry propagating bug in the Broadwell-specific Montgomery multiplication procedure that handles input lengths divisible by, but longer than 256 bits. Analysis suggests that attacks against RSA, DSA and DH private keys are impossible. This is because the subroutine in question is not used in operations with the private key itself and an input of the attacker's direct choice. Otherwise the bug can manifest itself as transient authentication and key negotiation failures or reproducible erroneous outcome of public-key operations with specially crafted input. Among EC algorithms only Brainpool P-512 curves are affected and one presumably can attack ECDH key negotiation. Impact was not analyzed in detail, because pre-requisites for attack are considered unlikely. Namely multiple clients have to choose the curve in question and the server has to share the private key among them, neither of which is default behaviour. Even then only clients that chose the curve will be affected.", - "cvss": null + "summary": "There is a carry propagating bug in the Broadwell-specific Montgomery multiplication procedure that handles input lengths divisible by, but longer than 256 bits. Analysis suggests that attacks against RSA, DSA and DH private keys are impossible. This is because the subroutine in question is not used in operations with the private key itself and an input of the attacker's direct choice. Otherwise the bug can manifest itself as transient authentication and key negotiation failures or reproducible erroneous outcome of public-key operations with specially crafted input. Among EC algorithms only Brainpool P-512 curves are affected and one presumably can attack ECDH key negotiation. 
Impact was not analyzed in detail, because pre-requisites for attack are considered unlikely. Namely multiple clients have to choose the curve in question and the server has to share the private key among them, neither of which is default behaviour. Even then only clients that chose the curve will be affected." } }, { @@ -275,8 +244,7 @@ "pk": 323, "fields": { "cve_id": "CVE-2016-6302", - "summary": "If a server uses SHA512 for TLS session ticket HMAC it is vulnerable to a DoS attack where a malformed ticket will result in an OOB read which will ultimately crash. The use of SHA512 in TLS session tickets is comparatively rare as it requires a custom server callback and ticket lookup mechanism.", - "cvss": null + "summary": "If a server uses SHA512 for TLS session ticket HMAC it is vulnerable to a DoS attack where a malformed ticket will result in an OOB read which will ultimately crash. The use of SHA512 in TLS session tickets is comparatively rare as it requires a custom server callback and ticket lookup mechanism." } }, { @@ -284,8 +252,7 @@ "pk": 324, "fields": { "cve_id": "CVE-2016-2182", - "summary": "The function BN_bn2dec() does not check the return value of BN_div_word(). This can cause an OOB write if an application uses this function with an overly large BIGNUM. This could be a problem if an overly large certificate or CRL is printed out from an untrusted source. TLS is not affected because record limits will reject an oversized certificate before it is parsed.", - "cvss": null + "summary": "The function BN_bn2dec() does not check the return value of BN_div_word(). This can cause an OOB write if an application uses this function with an overly large BIGNUM. This could be a problem if an overly large certificate or CRL is printed out from an untrusted source. TLS is not affected because record limits will reject an oversized certificate before it is parsed." 
} }, { @@ -293,8 +260,7 @@ "pk": 325, "fields": { "cve_id": "CVE-2016-2180", - "summary": "The function TS_OBJ_print_bio() misuses OBJ_obj2txt(): the return value is the total length the OID text representation would use and not the amount of data written. This will result in OOB reads when large OIDs are presented.", - "cvss": null + "summary": "The function TS_OBJ_print_bio() misuses OBJ_obj2txt(): the return value is the total length the OID text representation would use and not the amount of data written. This will result in OOB reads when large OIDs are presented." } }, { @@ -302,8 +268,7 @@ "pk": 326, "fields": { "cve_id": "CVE-2016-2178", - "summary": "Operations in the DSA signing algorithm should run in constant time in order to avoid side channel attacks. A flaw in the OpenSSL DSA implementation means that a non-constant time codepath is followed for certain operations. This has been demonstrated through a cache-timing attack to be sufficient for an attacker to recover the private DSA key.", - "cvss": null + "summary": "Operations in the DSA signing algorithm should run in constant time in order to avoid side channel attacks. A flaw in the OpenSSL DSA implementation means that a non-constant time codepath is followed for certain operations. This has been demonstrated through a cache-timing attack to be sufficient for an attacker to recover the private DSA key." } }, { @@ -311,8 +276,7 @@ "pk": 327, "fields": { "cve_id": "CVE-2016-6305", - "summary": "OpenSSL 1.1.0 SSL/TLS will hang during a call to SSL_peek() if the peer sends an empty record. This could be exploited by a malicious peer in a Denial Of Service attack.", - "cvss": null + "summary": "OpenSSL 1.1.0 SSL/TLS will hang during a call to SSL_peek() if the peer sends an empty record. This could be exploited by a malicious peer in a Denial Of Service attack." 
} }, { @@ -320,8 +284,7 @@ "pk": 328, "fields": { "cve_id": "CVE-2016-6306", - "summary": "In OpenSSL 1.0.2 and earlier some missing message length checks can result in OOB reads of up to 2 bytes beyond an allocated buffer. There is a theoretical DoS risk but this has not been observed in practice on common platforms. The messages affected are client certificate, client certificate request and server certificate. As a result the attack can only be performed against a client or a server which enables client authentication.", - "cvss": null + "summary": "In OpenSSL 1.0.2 and earlier some missing message length checks can result in OOB reads of up to 2 bytes beyond an allocated buffer. There is a theoretical DoS risk but this has not been observed in practice on common platforms. The messages affected are client certificate, client certificate request and server certificate. As a result the attack can only be performed against a client or a server which enables client authentication." } }, { @@ -329,8 +292,7 @@ "pk": 329, "fields": { "cve_id": "CVE-2016-2181", - "summary": "A flaw in the DTLS replay attack protection mechanism means that records that arrive for future epochs update the replay protection \"window\" before the MAC for the record has been validated. This could be exploited by an attacker by sending a record for the next epoch (which does not have to decrypt or have a valid MAC), with a very large sequence number. This means that all subsequent legitimate packets are dropped causing a denial of service for a specific DTLS connection.", - "cvss": null + "summary": "A flaw in the DTLS replay attack protection mechanism means that records that arrive for future epochs update the replay protection \"window\" before the MAC for the record has been validated. This could be exploited by an attacker by sending a record for the next epoch (which does not have to decrypt or have a valid MAC), with a very large sequence number. 
This means that all subsequent legitimate packets are dropped causing a denial of service for a specific DTLS connection." } }, { @@ -338,8 +300,7 @@ "pk": 330, "fields": { "cve_id": "CVE-2016-2179", - "summary": "In a DTLS connection where handshake messages are delivered out-of-order those messages that OpenSSL is not yet ready to process will be buffered for later use. Under certain circumstances, a flaw in the logic means that those messages do not get removed from the buffer even though the handshake has been completed. An attacker could force up to approx. 15 messages to remain in the buffer when they are no longer required. These messages will be cleared when the DTLS connection is closed. The default maximum size for a message is 100k. Therefore the attacker could force an additional 1500k to be consumed per connection. By opening many simulataneous connections an attacker could cause a DoS attack through memory exhaustion.", - "cvss": null + "summary": "In a DTLS connection where handshake messages are delivered out-of-order those messages that OpenSSL is not yet ready to process will be buffered for later use. Under certain circumstances, a flaw in the logic means that those messages do not get removed from the buffer even though the handshake has been completed. An attacker could force up to approx. 15 messages to remain in the buffer when they are no longer required. These messages will be cleared when the DTLS connection is closed. The default maximum size for a message is 100k. Therefore the attacker could force an additional 1500k to be consumed per connection. By opening many simulataneous connections an attacker could cause a DoS attack through memory exhaustion." 
} }, { @@ -347,8 +308,7 @@ "pk": 331, "fields": { "cve_id": "CVE-2016-2177", - "summary": "Avoid some undefined pointer arithmetic A common idiom in the codebase is to check limits in the following manner: \"p + len > limit\" Where \"p\" points to some malloc'd data of SIZE bytes and limit == p + SIZE \"len\" here could be from some externally supplied data (e.g. from a TLS message). The rules of C pointer arithmetic are such that \"p + len\" is only well defined where len <= SIZE. Therefore the above idiom is actually undefined behaviour. For example this could cause problems if some malloc implementation provides an address for \"p\" such that \"p + len\" actually overflows for values of len that are too big and therefore p + len < limit.", - "cvss": null + "summary": "Avoid some undefined pointer arithmetic A common idiom in the codebase is to check limits in the following manner: \"p + len > limit\" Where \"p\" points to some malloc'd data of SIZE bytes and limit == p + SIZE \"len\" here could be from some externally supplied data (e.g. from a TLS message). The rules of C pointer arithmetic are such that \"p + len\" is only well defined where len <= SIZE. Therefore the above idiom is actually undefined behaviour. For example this could cause problems if some malloc implementation provides an address for \"p\" such that \"p + len\" actually overflows for values of len that are too big and therefore p + len < limit." } }, { @@ -356,8 +316,7 @@ "pk": 332, "fields": { "cve_id": "CVE-2016-6303", - "summary": "An overflow can occur in MDC2_Update() either if called directly or through the EVP_DigestUpdate() function using MDC2. If an attacker is able to supply very large amounts of input data after a previous call to EVP_EncryptUpdate() with a partial block then a length check can overflow resulting in a heap corruption. 
The amount of data needed is comparable to SIZE_MAX which is impractical on most platforms.", - "cvss": null + "summary": "An overflow can occur in MDC2_Update() either if called directly or through the EVP_DigestUpdate() function using MDC2. If an attacker is able to supply very large amounts of input data after a previous call to EVP_EncryptUpdate() with a partial block then a length check can overflow resulting in a heap corruption. The amount of data needed is comparable to SIZE_MAX which is impractical on most platforms." } }, { @@ -365,8 +324,7 @@ "pk": 333, "fields": { "cve_id": "CVE-2016-2106", - "summary": "An overflow can occur in the EVP_EncryptUpdate() function. If an attacker is able to supply very large amounts of input data after a previous call to EVP_EncryptUpdate() with a partial block then a length check can overflow resulting in a heap corruption. Following an analysis of all OpenSSL internal usage of the EVP_EncryptUpdate() function all usage is one of two forms. The first form is where the EVP_EncryptUpdate() call is known to be the first called function after an EVP_EncryptInit(), and therefore that specific call must be safe. The second form is where the length passed to EVP_EncryptUpdate() can be seen from the code to be some small value and therefore there is no possibility of an overflow. Since all instances are one of these two forms, it is believed that there can be no overflows in internal code due to this problem. It should be noted that EVP_DecryptUpdate() can call EVP_EncryptUpdate() in certain code paths. Also EVP_CipherUpdate() is a synonym for EVP_EncryptUpdate(). All instances of these calls have also been analysed too and it is believed there are no instances in internal usage where an overflow could occur. This could still represent a security issue for end user code that calls this function directly.", - "cvss": null + "summary": "An overflow can occur in the EVP_EncryptUpdate() function. 
If an attacker is able to supply very large amounts of input data after a previous call to EVP_EncryptUpdate() with a partial block then a length check can overflow resulting in a heap corruption. Following an analysis of all OpenSSL internal usage of the EVP_EncryptUpdate() function all usage is one of two forms. The first form is where the EVP_EncryptUpdate() call is known to be the first called function after an EVP_EncryptInit(), and therefore that specific call must be safe. The second form is where the length passed to EVP_EncryptUpdate() can be seen from the code to be some small value and therefore there is no possibility of an overflow. Since all instances are one of these two forms, it is believed that there can be no overflows in internal code due to this problem. It should be noted that EVP_DecryptUpdate() can call EVP_EncryptUpdate() in certain code paths. Also EVP_CipherUpdate() is a synonym for EVP_EncryptUpdate(). All instances of these calls have also been analysed too and it is believed there are no instances in internal usage where an overflow could occur. This could still represent a security issue for end user code that calls this function directly." } }, { @@ -374,8 +332,7 @@ "pk": 334, "fields": { "cve_id": "CVE-2016-6308", - "summary": "A DTLS message includes 3 bytes for its length in the header for the message. This would allow for messages up to 16Mb in length. Messages of this length are excessive and OpenSSL includes a check to ensure that a peer is sending reasonably sized messages in order to avoid too much memory being consumed to service a connection. A flaw in the logic of version 1.1.0 means that memory for the message is allocated too early, prior to the excessive message length check. Due to way memory is allocated in OpenSSL this could mean an attacker could force up to 21Mb to be allocated to service a connection. This could lead to a Denial of Service through memory exhaustion. 
However, the excessive message length check still takes place, and this would cause the connection to immediately fail. Assuming that the application calls SSL_free() on the failed conneciton in a timely manner then the 21Mb of allocated memory will then be immediately freed again. Therefore the excessive memory allocation will be transitory in nature. This then means that there is only a security impact if: 1) The application does not call SSL_free() in a timely manner in the event that the connection fails or 2) The application is working in a constrained environment where there is very little free memory or 3) The attacker initiates multiple connection attempts such that there are multiple connections in a state where memory has been allocated for the connection; SSL_free() has not yet been called; and there is insufficient memory to service the multiple requests. Except in the instance of (1) above any Denial Of Service is likely to be transitory because as soon as the connection fails the memory is subsequently freed again in the SSL_free() call. However there is an increased risk during this period of application crashes due to the lack of memory - which would then mean a more serious Denial of Service.", - "cvss": null + "summary": "A DTLS message includes 3 bytes for its length in the header for the message. This would allow for messages up to 16Mb in length. Messages of this length are excessive and OpenSSL includes a check to ensure that a peer is sending reasonably sized messages in order to avoid too much memory being consumed to service a connection. A flaw in the logic of version 1.1.0 means that memory for the message is allocated too early, prior to the excessive message length check. Due to way memory is allocated in OpenSSL this could mean an attacker could force up to 21Mb to be allocated to service a connection. This could lead to a Denial of Service through memory exhaustion. 
However, the excessive message length check still takes place, and this would cause the connection to immediately fail. Assuming that the application calls SSL_free() on the failed conneciton in a timely manner then the 21Mb of allocated memory will then be immediately freed again. Therefore the excessive memory allocation will be transitory in nature. This then means that there is only a security impact if: 1) The application does not call SSL_free() in a timely manner in the event that the connection fails or 2) The application is working in a constrained environment where there is very little free memory or 3) The attacker initiates multiple connection attempts such that there are multiple connections in a state where memory has been allocated for the connection; SSL_free() has not yet been called; and there is insufficient memory to service the multiple requests. Except in the instance of (1) above any Denial Of Service is likely to be transitory because as soon as the connection fails the memory is subsequently freed again in the SSL_free() call. However there is an increased risk during this period of application crashes due to the lack of memory - which would then mean a more serious Denial of Service." } }, { @@ -383,8 +340,7 @@ "pk": 335, "fields": { "cve_id": "CVE-2016-6307", - "summary": "A TLS message includes 3 bytes for its length in the header for the message. This would allow for messages up to 16Mb in length. Messages of this length are excessive and OpenSSL includes a check to ensure that a peer is sending reasonably sized messages in order to avoid too much memory being consumed to service a connection. A flaw in the logic of version 1.1.0 means that memory for the message is allocated too early, prior to the excessive message length check. Due to way memory is allocated in OpenSSL this could mean an attacker could force up to 21Mb to be allocated to service a connection. This could lead to a Denial of Service through memory exhaustion. 
However, the excessive message length check still takes place, and this would cause the connection to immediately fail. Assuming that the application calls SSL_free() on the failed conneciton in a timely manner then the 21Mb of allocated memory will then be immediately freed again. Therefore the excessive memory allocation will be transitory in nature. This then means that there is only a security impact if: 1) The application does not call SSL_free() in a timely manner in the event that the connection fails or 2) The application is working in a constrained environment where there is very little free memory or 3) The attacker initiates multiple connection attempts such that there are multiple connections in a state where memory has been allocated for the connection; SSL_free() has not yet been called; and there is insufficient memory to service the multiple requests. Except in the instance of (1) above any Denial Of Service is likely to be transitory because as soon as the connection fails the memory is subsequently freed again in the SSL_free() call. However there is an increased risk during this period of application crashes due to the lack of memory - which would then mean a more serious Denial of Service.", - "cvss": null + "summary": "A TLS message includes 3 bytes for its length in the header for the message. This would allow for messages up to 16Mb in length. Messages of this length are excessive and OpenSSL includes a check to ensure that a peer is sending reasonably sized messages in order to avoid too much memory being consumed to service a connection. A flaw in the logic of version 1.1.0 means that memory for the message is allocated too early, prior to the excessive message length check. Due to way memory is allocated in OpenSSL this could mean an attacker could force up to 21Mb to be allocated to service a connection. This could lead to a Denial of Service through memory exhaustion. 
However, the excessive message length check still takes place, and this would cause the connection to immediately fail. Assuming that the application calls SSL_free() on the failed conneciton in a timely manner then the 21Mb of allocated memory will then be immediately freed again. Therefore the excessive memory allocation will be transitory in nature. This then means that there is only a security impact if: 1) The application does not call SSL_free() in a timely manner in the event that the connection fails or 2) The application is working in a constrained environment where there is very little free memory or 3) The attacker initiates multiple connection attempts such that there are multiple connections in a state where memory has been allocated for the connection; SSL_free() has not yet been called; and there is insufficient memory to service the multiple requests. Except in the instance of (1) above any Denial Of Service is likely to be transitory because as soon as the connection fails the memory is subsequently freed again in the SSL_free() call. However there is an increased risk during this period of application crashes due to the lack of memory - which would then mean a more serious Denial of Service." } }, { @@ -392,8 +348,7 @@ "pk": 336, "fields": { "cve_id": "CVE-2016-2109", - "summary": "When ASN.1 data is read from a BIO using functions such as d2i_CMS_bio() a short invalid encoding can casuse allocation of large amounts of memory potentially consuming excessive resources or exhausting memory. Any application parsing untrusted data through d2i BIO functions is affected. The memory based functions such as d2i_X509() are *not* affected. 
Since the memory based functions are used by the TLS library, TLS applications are not affected.", - "cvss": null + "summary": "When ASN.1 data is read from a BIO using functions such as d2i_CMS_bio() a short invalid encoding can casuse allocation of large amounts of memory potentially consuming excessive resources or exhausting memory. Any application parsing untrusted data through d2i BIO functions is affected. The memory based functions such as d2i_X509() are *not* affected. Since the memory based functions are used by the TLS library, TLS applications are not affected." } }, { @@ -401,8 +356,7 @@ "pk": 337, "fields": { "cve_id": "CVE-2016-2107", - "summary": "A MITM attacker can use a padding oracle attack to decrypt traffic when the connection uses an AES CBC cipher and the server support AES-NI. This issue was introduced as part of the fix for Lucky 13 padding attack (CVE-2013-0169). The padding check was rewritten to be in constant time by making sure that always the same bytes are read and compared against either the MAC or padding bytes. But it no longer checked that there was enough data to have both the MAC and padding bytes.", - "cvss": null + "summary": "A MITM attacker can use a padding oracle attack to decrypt traffic when the connection uses an AES CBC cipher and the server support AES-NI. This issue was introduced as part of the fix for Lucky 13 padding attack (CVE-2013-0169). The padding check was rewritten to be in constant time by making sure that always the same bytes are read and compared against either the MAC or padding bytes. But it no longer checked that there was enough data to have both the MAC and padding bytes." 
This scenario is considered rare.", - "cvss": null + "summary": "A double free bug was discovered when OpenSSL parses malformed DSA private keys and could lead to a DoS attack or memory corruption for applications that receive DSA private keys from untrusted sources. This scenario is considered rare." } }, { @@ -419,8 +372,7 @@ "pk": 339, "fields": { "cve_id": "CVE-2016-2108", - "summary": "This issue affected versions of OpenSSL prior to April 2015. The bug causing the vulnerability was fixed on April 18th 2015, and released as part of the June 11th 2015 security releases. The security impact of the bug was not known at the time. In previous versions of OpenSSL, ASN.1 encoding the value zero represented as a negative integer can cause a buffer underflow with an out-of-bounds write in i2c_ASN1_INTEGER. The ASN.1 parser does not normally create \"negative zeroes\" when parsing ASN.1 input, and therefore, an attacker cannot trigger this bug. However, a second, independent bug revealed that the ASN.1 parser (specifically, d2i_ASN1_TYPE) can misinterpret a large universal tag as a negative zero value. Large universal tags are not present in any common ASN.1 structures (such as X509) but are accepted as part of ANY structures. Therefore, if an application deserializes untrusted ASN.1 structures containing an ANY field, and later reserializes them, an attacker may be able to trigger an out-of-bounds write. This has been shown to cause memory corruption that is potentially exploitable with some malloc implementations. Applications that parse and re-encode X509 certificates are known to be vulnerable. Applications that verify RSA signatures on X509 certificates may also be vulnerable; however, only certificates with valid signatures trigger ASN.1 re-encoding and hence the bug. 
Specifically, since OpenSSL's default TLS X509 chain verification code verifies the certificate chain from root to leaf, TLS handshakes could only be targeted with valid certificates issued by trusted Certification Authorities.", - "cvss": null + "summary": "This issue affected versions of OpenSSL prior to April 2015. The bug causing the vulnerability was fixed on April 18th 2015, and released as part of the June 11th 2015 security releases. The security impact of the bug was not known at the time. In previous versions of OpenSSL, ASN.1 encoding the value zero represented as a negative integer can cause a buffer underflow with an out-of-bounds write in i2c_ASN1_INTEGER. The ASN.1 parser does not normally create \"negative zeroes\" when parsing ASN.1 input, and therefore, an attacker cannot trigger this bug. However, a second, independent bug revealed that the ASN.1 parser (specifically, d2i_ASN1_TYPE) can misinterpret a large universal tag as a negative zero value. Large universal tags are not present in any common ASN.1 structures (such as X509) but are accepted as part of ANY structures. Therefore, if an application deserializes untrusted ASN.1 structures containing an ANY field, and later reserializes them, an attacker may be able to trigger an out-of-bounds write. This has been shown to cause memory corruption that is potentially exploitable with some malloc implementations. Applications that parse and re-encode X509 certificates are known to be vulnerable. Applications that verify RSA signatures on X509 certificates may also be vulnerable; however, only certificates with valid signatures trigger ASN.1 re-encoding and hence the bug. Specifically, since OpenSSL's default TLS X509 chain verification code verifies the certificate chain from root to leaf, TLS handshakes could only be targeted with valid certificates issued by trusted Certification Authorities." 
} }, { @@ -428,8 +380,7 @@ "pk": 340, "fields": { "cve_id": "CVE-2016-2105", - "summary": "An overflow can occur in the EVP_EncodeUpdate() function which is used for Base64 encoding of binary data. If an attacker is able to supply very large amounts of input data then a length check can overflow resulting in a heap corruption. Internally to OpenSSL the EVP_EncodeUpdate() function is primarly used by the PEM_write_bio* family of functions. These are mainly used within the OpenSSL command line applications. These internal uses are not considered vulnerable because all calls are bounded with length checks so no overflow is possible. User applications that call these APIs directly with large amounts of untrusted data may be vulnerable. (Note: Initial analysis suggested that the PEM_write_bio* were vulnerable, and this is reflected in the patch commit message. This is no longer believed to be the case).", - "cvss": null + "summary": "An overflow can occur in the EVP_EncodeUpdate() function which is used for Base64 encoding of binary data. If an attacker is able to supply very large amounts of input data then a length check can overflow resulting in a heap corruption. Internally to OpenSSL the EVP_EncodeUpdate() function is primarly used by the PEM_write_bio* family of functions. These are mainly used within the OpenSSL command line applications. These internal uses are not considered vulnerable because all calls are bounded with length checks so no overflow is possible. User applications that call these APIs directly with large amounts of untrusted data may be vulnerable. (Note: Initial analysis suggested that the PEM_write_bio* were vulnerable, and this is reflected in the patch commit message. This is no longer believed to be the case)." 
This could result in arbitrary stack data being returned in the buffer.", - "cvss": null + "summary": "ASN1 Strings that are over 1024 bytes can cause an overread in applications using the X509_NAME_oneline() function on EBCDIC systems. This could result in arbitrary stack data being returned in the buffer." } }, { @@ -446,8 +396,7 @@ "pk": 342, "fields": { "cve_id": "CVE-2016-0800", - "summary": "A cross-protocol attack was discovered that could lead to decryption of TLS sessions by using a server supporting SSLv2 and EXPORT cipher suites as a Bleichenbacher RSA padding oracle. Note that traffic between clients and non-vulnerable servers can be decrypted provided another server supporting SSLv2 and EXPORT ciphers (even with a different protocol such as SMTP, IMAP or POP) shares the RSA keys of the non-vulnerable server. This vulnerability is known as DROWN (CVE-2016-0800). Recovering one session key requires the attacker to perform approximately 2^50 computation, as well as thousands of connections to the affected server. A more efficient variant of the DROWN attack exists against unpatched OpenSSL servers using versions that predate 1.0.2a, 1.0.1m, 1.0.0r and 0.9.8zf released on 19/Mar/2015 (see CVE-2016-0703 below). Users can avoid this issue by disabling the SSLv2 protocol in all their SSL/TLS servers, if they've not done so already. Disabling all SSLv2 ciphers is also sufficient, provided the patches for CVE-2015-3197 (fixed in OpenSSL 1.0.1r and 1.0.2f) have been deployed. Servers that have not disabled the SSLv2 protocol, and are not patched for CVE-2015-3197 are vulnerable to DROWN even if all SSLv2 ciphers are nominally disabled, because malicious clients can force the use of SSLv2 with EXPORT ciphers. OpenSSL 1.0.2g and 1.0.1s deploy the following mitigation against DROWN: SSLv2 is now by default disabled at build-time. Builds that are not configured with \"enable-ssl2\" will not support SSLv2. 
Even if \"enable-ssl2\" is used, users who want to negotiate SSLv2 via the version-flexible SSLv23_method() will need to explicitly call either of: SSL_CTX_clear_options(ctx, SSL_OP_NO_SSLv2); or SSL_clear_options(ssl, SSL_OP_NO_SSLv2); as appropriate. Even if either of those is used, or the application explicitly uses the version-specific SSLv2_method() or its client or server variants, SSLv2 ciphers vulnerable to exhaustive search key recovery have been removed. Specifically, the SSLv2 40-bit EXPORT ciphers, and SSLv2 56-bit DES are no longer available. In addition, weak ciphers in SSLv3 and up are now disabled in default builds of OpenSSL. Builds that are not configured with \"enable-weak-ssl-ciphers\" will not provide any \"EXPORT\" or \"LOW\" strength ciphers.", - "cvss": null + "summary": "A cross-protocol attack was discovered that could lead to decryption of TLS sessions by using a server supporting SSLv2 and EXPORT cipher suites as a Bleichenbacher RSA padding oracle. Note that traffic between clients and non-vulnerable servers can be decrypted provided another server supporting SSLv2 and EXPORT ciphers (even with a different protocol such as SMTP, IMAP or POP) shares the RSA keys of the non-vulnerable server. This vulnerability is known as DROWN (CVE-2016-0800). Recovering one session key requires the attacker to perform approximately 2^50 computation, as well as thousands of connections to the affected server. A more efficient variant of the DROWN attack exists against unpatched OpenSSL servers using versions that predate 1.0.2a, 1.0.1m, 1.0.0r and 0.9.8zf released on 19/Mar/2015 (see CVE-2016-0703 below). Users can avoid this issue by disabling the SSLv2 protocol in all their SSL/TLS servers, if they've not done so already. Disabling all SSLv2 ciphers is also sufficient, provided the patches for CVE-2015-3197 (fixed in OpenSSL 1.0.1r and 1.0.2f) have been deployed. 
Servers that have not disabled the SSLv2 protocol, and are not patched for CVE-2015-3197 are vulnerable to DROWN even if all SSLv2 ciphers are nominally disabled, because malicious clients can force the use of SSLv2 with EXPORT ciphers. OpenSSL 1.0.2g and 1.0.1s deploy the following mitigation against DROWN: SSLv2 is now by default disabled at build-time. Builds that are not configured with \"enable-ssl2\" will not support SSLv2. Even if \"enable-ssl2\" is used, users who want to negotiate SSLv2 via the version-flexible SSLv23_method() will need to explicitly call either of: SSL_CTX_clear_options(ctx, SSL_OP_NO_SSLv2); or SSL_clear_options(ssl, SSL_OP_NO_SSLv2); as appropriate. Even if either of those is used, or the application explicitly uses the version-specific SSLv2_method() or its client or server variants, SSLv2 ciphers vulnerable to exhaustive search key recovery have been removed. Specifically, the SSLv2 40-bit EXPORT ciphers, and SSLv2 56-bit DES are no longer available. In addition, weak ciphers in SSLv3 and up are now disabled in default builds of OpenSSL. Builds that are not configured with \"enable-weak-ssl-ciphers\" will not provide any \"EXPORT\" or \"LOW\" strength ciphers." } }, { @@ -455,8 +404,7 @@ "pk": 343, "fields": { "cve_id": "CVE-2016-0703", - "summary": "This issue only affected versions of OpenSSL prior to March 19th 2015 at which time the code was refactored to address vulnerability CVE-2015-0293. s2_srvr.c did not enforce that clear-key-length is 0 for non-export ciphers. If clear-key bytes are present for these ciphers, they *displace* encrypted-key bytes. This leads to an efficient divide-and-conquer key recovery attack: if an eavesdropper has intercepted an SSLv2 handshake, they can use the server as an oracle to determine the SSLv2 master-key, using only 16 connections to the server and negligible computation. 
More importantly, this leads to a more efficient version of DROWN that is effective against non-export ciphersuites, and requires no significant computation.", - "cvss": null + "summary": "This issue only affected versions of OpenSSL prior to March 19th 2015 at which time the code was refactored to address vulnerability CVE-2015-0293. s2_srvr.c did not enforce that clear-key-length is 0 for non-export ciphers. If clear-key bytes are present for these ciphers, they *displace* encrypted-key bytes. This leads to an efficient divide-and-conquer key recovery attack: if an eavesdropper has intercepted an SSLv2 handshake, they can use the server as an oracle to determine the SSLv2 master-key, using only 16 connections to the server and negligible computation. More importantly, this leads to a more efficient version of DROWN that is effective against non-export ciphersuites, and requires no significant computation." } }, { @@ -464,8 +412,7 @@ "pk": 344, "fields": { "cve_id": "CVE-2016-0799", - "summary": "The internal |fmtstr| function used in processing a \"%s\" format string in the BIO_*printf functions could overflow while calculating the length of a string and cause an OOB read when printing very long strings. Additionally the internal |doapr_outch| function can attempt to write to an OOB memory location (at an offset from the NULL pointer) in the event of a memory allocation failure. In 1.0.2 and below this could be caused where the size of a buffer to be allocated is greater than INT_MAX. E.g. this could be in processing a very long \"%s\" format string. Memory leaks can also occur. The first issue may mask the second issue dependent on compiler behaviour. These problems could enable attacks where large amounts of untrusted data is passed to the BIO_*printf functions. If applications use these functions in this way then they could be vulnerable. OpenSSL itself uses these functions when printing out human-readable dumps of ASN.1 data. 
Therefore applications that print this data could be vulnerable if the data is from untrusted sources. OpenSSL command line applications could also be vulnerable where they print out ASN.1 data, or if untrusted data is passed as command line arguments. Libssl is not considered directly vulnerable. Additionally certificates etc received via remote connections via libssl are also unlikely to be able to trigger these issues because of message size limits enforced within libssl.", - "cvss": null + "summary": "The internal |fmtstr| function used in processing a \"%s\" format string in the BIO_*printf functions could overflow while calculating the length of a string and cause an OOB read when printing very long strings. Additionally the internal |doapr_outch| function can attempt to write to an OOB memory location (at an offset from the NULL pointer) in the event of a memory allocation failure. In 1.0.2 and below this could be caused where the size of a buffer to be allocated is greater than INT_MAX. E.g. this could be in processing a very long \"%s\" format string. Memory leaks can also occur. The first issue may mask the second issue dependent on compiler behaviour. These problems could enable attacks where large amounts of untrusted data is passed to the BIO_*printf functions. If applications use these functions in this way then they could be vulnerable. OpenSSL itself uses these functions when printing out human-readable dumps of ASN.1 data. Therefore applications that print this data could be vulnerable if the data is from untrusted sources. OpenSSL command line applications could also be vulnerable where they print out ASN.1 data, or if untrusted data is passed as command line arguments. Libssl is not considered directly vulnerable. Additionally certificates etc received via remote connections via libssl are also unlikely to be able to trigger these issues because of message size limits enforced within libssl." 
} }, { @@ -473,8 +420,7 @@ "pk": 345, "fields": { "cve_id": "CVE-2015-3193", - "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites.", - "cvss": null + "summary": "There is a carry propagating bug in the x86_64 Montgomery squaring procedure. No EC algorithms are affected. Analysis suggests that attacks against RSA and DSA as a result of this defect would be very difficult to perform and are not believed likely. Attacks against DH are considered just feasible (although very difficult) because most of the work necessary to deduce information about a private key may be performed offline. The amount of resources required for such an attack would be very significant and likely only accessible to a limited number of attackers. An attacker would additionally need online access to an unpatched system using the target private key in a scenario with persistent DH parameters and a private key that is shared between multiple clients. For example this can occur by default in OpenSSL DHE based SSL/TLS ciphersuites." 
} }, { @@ -482,8 +428,7 @@ "pk": 346, "fields": { "cve_id": "CVE-2016-0704", - "summary": "This issue only affected versions of OpenSSL prior to March 19th 2015 at which time the code was refactored to address the vulnerability CVE-2015-0293. s2_srvr.c overwrite the wrong bytes in the master-key when applying Bleichenbacher protection for export cipher suites. This provides a Bleichenbacher oracle, and could potentially allow more efficient variants of the DROWN attack.", - "cvss": null + "summary": "This issue only affected versions of OpenSSL prior to March 19th 2015 at which time the code was refactored to address the vulnerability CVE-2015-0293. s2_srvr.c overwrite the wrong bytes in the master-key when applying Bleichenbacher protection for export cipher suites. This provides a Bleichenbacher oracle, and could potentially allow more efficient variants of the DROWN attack." } }, { @@ -491,8 +436,7 @@ "pk": 347, "fields": { "cve_id": "CVE-2015-3197", - "summary": "A malicious client can negotiate SSLv2 ciphers that have been disabled on the server and complete SSLv2 handshakes even if all SSLv2 ciphers have been disabled, provided that the SSLv2 protocol was not also disabled via SSL_OP_NO_SSLv2.", - "cvss": null + "summary": "A malicious client can negotiate SSLv2 ciphers that have been disabled on the server and complete SSLv2 handshakes even if all SSLv2 ciphers have been disabled, provided that the SSLv2 protocol was not also disabled via SSL_OP_NO_SSLv2." } }, { @@ -500,8 +444,7 @@ "pk": 348, "fields": { "cve_id": "CVE-2015-1794", - "summary": "If a client receives a ServerKeyExchange for an anonymous DH ciphersuite with the value of p set to 0 then a seg fault can occur leading to a possible denial of service attack.", - "cvss": null + "summary": "If a client receives a ServerKeyExchange for an anonymous DH ciphersuite with the value of p set to 0 then a seg fault can occur leading to a possible denial of service attack." 
} }, { @@ -509,8 +452,7 @@ "pk": 349, "fields": { "cve_id": "CVE-2016-0798", - "summary": "The SRP user database lookup method SRP_VBASE_get_by_user had confusing memory management semantics; the returned pointer was sometimes newly allocated, and sometimes owned by the callee. The calling code has no way of distinguishing these two cases. Specifically, SRP servers that configure a secret seed to hide valid login information are vulnerable to a memory leak: an attacker connecting with an invalid username can cause a memory leak of around 300 bytes per connection. Servers that do not configure SRP, or configure SRP but do not configure a seed are not vulnerable. In Apache, the seed directive is known as SSLSRPUnknownUserSeed. To mitigate the memory leak, the seed handling in SRP_VBASE_get_by_user is now disabled even if the user has configured a seed. Applications are advised to migrate to SRP_VBASE_get1_by_user. However, note that OpenSSL makes no strong guarantees about the indistinguishability of valid and invalid logins. In particular, computations are currently not carried out in constant time.", - "cvss": null + "summary": "The SRP user database lookup method SRP_VBASE_get_by_user had confusing memory management semantics; the returned pointer was sometimes newly allocated, and sometimes owned by the callee. The calling code has no way of distinguishing these two cases. Specifically, SRP servers that configure a secret seed to hide valid login information are vulnerable to a memory leak: an attacker connecting with an invalid username can cause a memory leak of around 300 bytes per connection. Servers that do not configure SRP, or configure SRP but do not configure a seed are not vulnerable. In Apache, the seed directive is known as SSLSRPUnknownUserSeed. To mitigate the memory leak, the seed handling in SRP_VBASE_get_by_user is now disabled even if the user has configured a seed. Applications are advised to migrate to SRP_VBASE_get1_by_user. 
However, note that OpenSSL makes no strong guarantees about the indistinguishability of valid and invalid logins. In particular, computations are currently not carried out in constant time." } }, { @@ -518,8 +460,7 @@ "pk": 350, "fields": { "cve_id": "CVE-2016-0797", - "summary": "In the BN_hex2bn function the number of hex digits is calculated using an int value |i|. Later |bn_expand| is called with a value of |i * 4|. For large values of |i| this can result in |bn_expand| not allocating any memory because |i * 4| is negative. This can leave the internal BIGNUM data field as NULL leading to a subsequent NULL ptr deref. For very large values of |i|, the calculation |i * 4| could be a positive value smaller than |i|. In this case memory is allocated to the internal BIGNUM data field, but it is insufficiently sized leading to heap corruption. A similar issue exists in BN_dec2bn. This could have security consequences if BN_hex2bn/BN_dec2bn is ever called by user applications with very large untrusted hex/dec data. This is anticipated to be a rare occurrence. All OpenSSL internal usage of these functions use data that is not expected to be untrusted, e.g. config file data or application command line arguments. If user developed applications generate config file data based on untrusted data then it is possible that this could also lead to security consequences. This is also anticipated to be rare.", - "cvss": null + "summary": "In the BN_hex2bn function the number of hex digits is calculated using an int value |i|. Later |bn_expand| is called with a value of |i * 4|. For large values of |i| this can result in |bn_expand| not allocating any memory because |i * 4| is negative. This can leave the internal BIGNUM data field as NULL leading to a subsequent NULL ptr deref. For very large values of |i|, the calculation |i * 4| could be a positive value smaller than |i|. 
In this case memory is allocated to the internal BIGNUM data field, but it is insufficiently sized leading to heap corruption. A similar issue exists in BN_dec2bn. This could have security consequences if BN_hex2bn/BN_dec2bn is ever called by user applications with very large untrusted hex/dec data. This is anticipated to be a rare occurrence. All OpenSSL internal usage of these functions use data that is not expected to be untrusted, e.g. config file data or application command line arguments. If user developed applications generate config file data based on untrusted data then it is possible that this could also lead to security consequences. This is also anticipated to be rare." } }, { @@ -527,8 +468,7 @@ "pk": 351, "fields": { "cve_id": "CVE-2016-0701", - "summary": "Historically OpenSSL usually only ever generated DH parameters based on \"safe\" primes. More recently (in version 1.0.2) support was provided for generating X9.42 style parameter files such as those required for RFC 5114 support. The primes used in such files may not be \"safe\". Where an application is using DH configured with parameters based on primes that are not \"safe\" then an attacker could use this fact to find a peer's private DH exponent. This attack requires that the attacker complete multiple handshakes in which the peer uses the same private DH exponent. For example this could be used to discover a TLS server's private DH exponent if it's reusing the private DH exponent or it's using a static DH ciphersuite. OpenSSL provides the option SSL_OP_SINGLE_DH_USE for ephemeral DH (DHE) in TLS. It is not on by default. If the option is not set then the server reuses the same private DH exponent for the life of the server process and would be vulnerable to this attack. It is believed that many popular applications do set this option and would therefore not be at risk. 
OpenSSL before 1.0.2f will reuse the key if: - SSL_CTX_set_tmp_dh()/SSL_set_tmp_dh() is used and SSL_OP_SINGLE_DH_USE is not set. - SSL_CTX_set_tmp_dh_callback()/SSL_set_tmp_dh_callback() is used, and both the parameters and the key are set and SSL_OP_SINGLE_DH_USE is not used. This is an undocumted feature and parameter files don't contain the key. - Static DH ciphersuites are used. The key is part of the certificate and so it will always reuse it. This is only supported in 1.0.2. It will not reuse the key for DHE ciphers suites if: - SSL_OP_SINGLE_DH_USE is set - SSL_CTX_set_tmp_dh_callback()/SSL_set_tmp_dh_callback() is used and the callback does not provide the key, only the parameters. The callback is almost always used like this. Non-safe primes are generated by OpenSSL when using: - genpkey with the dh_rfc5114 option. This will write an X9.42 style file including the prime-order subgroup size \"q\". This is supported since the 1.0.2 version. Older versions can't read files generated in this way. - dhparam with the -dsaparam option. This has always been documented as requiring the single use. The fix for this issue adds an additional check where a \"q\" parameter is available (as is the case in X9.42 based parameters). This detects the only known attack, and is the only possible defense for static DH ciphersuites. This could have some performance impact. Additionally the SSL_OP_SINGLE_DH_USE option has been switched on by default and cannot be disabled. This could have some performance impact.", - "cvss": null + "summary": "Historically OpenSSL usually only ever generated DH parameters based on \"safe\" primes. More recently (in version 1.0.2) support was provided for generating X9.42 style parameter files such as those required for RFC 5114 support. The primes used in such files may not be \"safe\". 
Where an application is using DH configured with parameters based on primes that are not \"safe\" then an attacker could use this fact to find a peer's private DH exponent. This attack requires that the attacker complete multiple handshakes in which the peer uses the same private DH exponent. For example this could be used to discover a TLS server's private DH exponent if it's reusing the private DH exponent or it's using a static DH ciphersuite. OpenSSL provides the option SSL_OP_SINGLE_DH_USE for ephemeral DH (DHE) in TLS. It is not on by default. If the option is not set then the server reuses the same private DH exponent for the life of the server process and would be vulnerable to this attack. It is believed that many popular applications do set this option and would therefore not be at risk. OpenSSL before 1.0.2f will reuse the key if: - SSL_CTX_set_tmp_dh()/SSL_set_tmp_dh() is used and SSL_OP_SINGLE_DH_USE is not set. - SSL_CTX_set_tmp_dh_callback()/SSL_set_tmp_dh_callback() is used, and both the parameters and the key are set and SSL_OP_SINGLE_DH_USE is not used. This is an undocumted feature and parameter files don't contain the key. - Static DH ciphersuites are used. The key is part of the certificate and so it will always reuse it. This is only supported in 1.0.2. It will not reuse the key for DHE ciphers suites if: - SSL_OP_SINGLE_DH_USE is set - SSL_CTX_set_tmp_dh_callback()/SSL_set_tmp_dh_callback() is used and the callback does not provide the key, only the parameters. The callback is almost always used like this. Non-safe primes are generated by OpenSSL when using: - genpkey with the dh_rfc5114 option. This will write an X9.42 style file including the prime-order subgroup size \"q\". This is supported since the 1.0.2 version. Older versions can't read files generated in this way. - dhparam with the -dsaparam option. This has always been documented as requiring the single use. 
The fix for this issue adds an additional check where a \"q\" parameter is available (as is the case in X9.42 based parameters). This detects the only known attack, and is the only possible defense for static DH ciphersuites. This could have some performance impact. Additionally the SSL_OP_SINGLE_DH_USE option has been switched on by default and cannot be disabled. This could have some performance impact." } }, { @@ -536,8 +476,7 @@ "pk": 352, "fields": { "cve_id": "CVE-2016-0702", - "summary": "A side-channel attack was found which makes use of cache-bank conflicts on the Intel Sandy-Bridge microarchitecture which could lead to the recovery of RSA keys. The ability to exploit this issue is limited as it relies on an attacker who has control of code in a thread running on the same hyper-threaded core as the victim thread which is performing decryptions.", - "cvss": null + "summary": "A side-channel attack was found which makes use of cache-bank conflicts on the Intel Sandy-Bridge microarchitecture which could lead to the recovery of RSA keys. The ability to exploit this issue is limited as it relies on an attacker who has control of code in a thread running on the same hyper-threaded core as the victim thread which is performing decryptions." } }, { @@ -545,8 +484,7 @@ "pk": 353, "fields": { "cve_id": "CVE-2015-3196", - "summary": "If PSK identity hints are received by a multi-threaded client then the values are wrongly updated in the parent SSL_CTX structure. This can result in a race condition potentially leading to a double free of the identify hint data.", - "cvss": null + "summary": "If PSK identity hints are received by a multi-threaded client then the values are wrongly updated in the parent SSL_CTX structure. This can result in a race condition potentially leading to a double free of the identify hint data." 
} }, { @@ -554,8 +492,7 @@ "pk": 354, "fields": { "cve_id": "CVE-2015-1793", - "summary": "An error in the implementation of the alternative certificate chain logic could allow an attacker to cause certain checks on untrusted certificates to be bypassed, such as the CA flag, enabling them to use a valid leaf certificate to act as a CA and \"issue\" an invalid certificate.", - "cvss": null + "summary": "An error in the implementation of the alternative certificate chain logic could allow an attacker to cause certain checks on untrusted certificates to be bypassed, such as the CA flag, enabling them to use a valid leaf certificate to act as a CA and \"issue\" an invalid certificate." } }, { @@ -563,8 +500,7 @@ "pk": 355, "fields": { "cve_id": "CVE-2015-3195", - "summary": "When presented with a malformed X509_ATTRIBUTE structure OpenSSL will leak memory. This structure is used by the PKCS#7 and CMS routines so any application which reads PKCS#7 or CMS data from untrusted sources is affected. SSL/TLS is not affected.", - "cvss": null + "summary": "When presented with a malformed X509_ATTRIBUTE structure OpenSSL will leak memory. This structure is used by the PKCS#7 and CMS routines so any application which reads PKCS#7 or CMS data from untrusted sources is affected. SSL/TLS is not affected." } }, { @@ -572,8 +508,7 @@ "pk": 356, "fields": { "cve_id": "CVE-2015-1792", - "summary": "When verifying a signedData message the CMS code can enter an infinite loop if presented with an unknown hash function OID. This can be used to perform denial of service against any system which verifies signedData messages using the CMS code.", - "cvss": null + "summary": "When verifying a signedData message the CMS code can enter an infinite loop if presented with an unknown hash function OID. This can be used to perform denial of service against any system which verifies signedData messages using the CMS code." 
} }, { @@ -581,8 +516,7 @@ "pk": 357, "fields": { "cve_id": "CVE-2015-1790", - "summary": "The PKCS#7 parsing code does not handle missing inner EncryptedContent correctly. An attacker can craft malformed ASN.1-encoded PKCS#7 blobs with missing content and trigger a NULL pointer dereference on parsing. Applications that decrypt PKCS#7 data or otherwise parse PKCS#7 structures from untrusted sources are affected. OpenSSL clients and servers are not affected.", - "cvss": null + "summary": "The PKCS#7 parsing code does not handle missing inner EncryptedContent correctly. An attacker can craft malformed ASN.1-encoded PKCS#7 blobs with missing content and trigger a NULL pointer dereference on parsing. Applications that decrypt PKCS#7 data or otherwise parse PKCS#7 structures from untrusted sources are affected. OpenSSL clients and servers are not affected." } }, { @@ -590,8 +524,7 @@ "pk": 358, "fields": { "cve_id": "CVE-2015-1788", - "summary": "When processing an ECParameters structure OpenSSL enters an infinite loop if the curve specified is over a specially malformed binary polynomial field. This can be used to perform denial of service against any system which processes public keys, certificate requests or certificates. This includes TLS clients and TLS servers with client authentication enabled.", - "cvss": null + "summary": "When processing an ECParameters structure OpenSSL enters an infinite loop if the curve specified is over a specially malformed binary polynomial field. This can be used to perform denial of service against any system which processes public keys, certificate requests or certificates. This includes TLS clients and TLS servers with client authentication enabled." } }, { @@ -599,8 +532,7 @@ "pk": 359, "fields": { "cve_id": "CVE-2015-1789", - "summary": "X509_cmp_time does not properly check the length of the ASN1_TIME string and can read a few bytes out of bounds. 
In addition, X509_cmp_time accepts an arbitrary number of fractional seconds in the time string. An attacker can use this to craft malformed certificates and CRLs of various sizes and potentially cause a segmentation fault, resulting in a DoS on applications that verify certificates or CRLs. TLS clients that verify CRLs are affected. TLS clients and servers with client authentication enabled may be affected if they use custom verification callbacks.", - "cvss": null + "summary": "X509_cmp_time does not properly check the length of the ASN1_TIME string and can read a few bytes out of bounds. In addition, X509_cmp_time accepts an arbitrary number of fractional seconds in the time string. An attacker can use this to craft malformed certificates and CRLs of various sizes and potentially cause a segmentation fault, resulting in a DoS on applications that verify certificates or CRLs. TLS clients that verify CRLs are affected. TLS clients and servers with client authentication enabled may be affected if they use custom verification callbacks." } }, { @@ -608,8 +540,7 @@ "pk": 360, "fields": { "cve_id": "CVE-2014-8176", - "summary": "This vulnerability does not affect current versions of OpenSSL. It existed in previous OpenSSL versions and was fixed in June 2014. If a DTLS peer receives application data between the ChangeCipherSpec and Finished messages, buffering of such data may cause an invalid free, resulting in a segmentation fault or potentially, memory corruption.", - "cvss": null + "summary": "This vulnerability does not affect current versions of OpenSSL. It existed in previous OpenSSL versions and was fixed in June 2014. If a DTLS peer receives application data between the ChangeCipherSpec and Finished messages, buffering of such data may cause an invalid free, resulting in a segmentation fault or potentially, memory corruption." 
} }, { @@ -617,8 +548,7 @@ "pk": 361, "fields": { "cve_id": "CVE-2015-1791", - "summary": "If a NewSessionTicket is received by a multi-threaded client when attempting to reuse a previous ticket then a race condition can occur potentially leading to a double free of the ticket data.", - "cvss": null + "summary": "If a NewSessionTicket is received by a multi-threaded client when attempting to reuse a previous ticket then a race condition can occur potentially leading to a double free of the ticket data." } }, { @@ -626,8 +556,7 @@ "pk": 362, "fields": { "cve_id": "CVE-2015-3194", - "summary": "The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and absent mask generation function parameter. Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", - "cvss": null + "summary": "The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and absent mask generation function parameter. Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication." } }, { @@ -635,8 +564,7 @@ "pk": 363, "fields": { "cve_id": "CVE-2015-0293", - "summary": "DoS via reachable assert in SSLv2 servers. 
A malicious client can trigger an OPENSSL_assert in servers that both support SSLv2 and enable export cipher suites by sending a specially crafted SSLv2 CLIENT-MASTER-KEY message.", - "cvss": null + "summary": "DoS via reachable assert in SSLv2 servers. A malicious client can trigger an OPENSSL_assert in servers that both support SSLv2 and enable export cipher suites by sending a specially crafted SSLv2 CLIENT-MASTER-KEY message." } }, { @@ -644,8 +572,7 @@ "pk": 364, "fields": { "cve_id": "CVE-2015-0292", - "summary": "A vulnerability existed in previous versions of OpenSSL related to the processing of base64 encoded data. Any code path that reads base64 data from an untrusted source could be affected (such as the PEM processing routines). Maliciously crafted base 64 data could trigger a segmenation fault or memory corruption.", - "cvss": null + "summary": "A vulnerability existed in previous versions of OpenSSL related to the processing of base64 encoded data. Any code path that reads base64 data from an untrusted source could be affected (such as the PEM processing routines). Maliciously crafted base 64 data could trigger a segmenation fault or memory corruption." } }, { @@ -653,8 +580,7 @@ "pk": 365, "fields": { "cve_id": "CVE-2015-0287", - "summary": "ASN.1 structure reuse memory corruption. Reusing a structure in ASN.1 parsing may allow an attacker to cause memory corruption via an invalid write. Such reuse is and has been strongly discouraged and is believed to be rare.", - "cvss": null + "summary": "ASN.1 structure reuse memory corruption. Reusing a structure in ASN.1 parsing may allow an attacker to cause memory corruption via an invalid write. Such reuse is and has been strongly discouraged and is believed to be rare." } }, { @@ -662,8 +588,7 @@ "pk": 366, "fields": { "cve_id": "CVE-2015-0286", - "summary": "Segmentation fault in ASN1_TYPE_cmp. The function ASN1_TYPE_cmp will crash with an invalid read if an attempt is made to compare ASN.1 boolean types. 
Since ASN1_TYPE_cmp is used to check certificate signature algorithm consistency this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", - "cvss": null + "summary": "Segmentation fault in ASN1_TYPE_cmp. The function ASN1_TYPE_cmp will crash with an invalid read if an attempt is made to compare ASN.1 boolean types. Since ASN1_TYPE_cmp is used to check certificate signature algorithm consistency this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication." } }, { @@ -671,8 +596,7 @@ "pk": 367, "fields": { "cve_id": "CVE-2015-0208", - "summary": "Segmentation fault for invalid PSS parameters. The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and invalid parameters. Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication.", - "cvss": null + "summary": "Segmentation fault for invalid PSS parameters. The signature verification routines will crash with a NULL pointer dereference if presented with an ASN.1 signature using the RSA PSS algorithm and invalid parameters. Since these routines are used to verify certificate signature algorithms this can be used to crash any certificate verification operation and exploited in a DoS attack. 
Any application which performs certificate verification is vulnerable including OpenSSL clients and servers which enable client authentication." } }, { @@ -680,8 +604,7 @@ "pk": 368, "fields": { "cve_id": "CVE-2015-0207", - "summary": "Segmentation fault in DTLSv1_listen. A defect in the implementation of DTLSv1_listen means that state is preserved in the SSL object from one invocation to the next that can lead to a segmentation fault. Errors processing the initial ClientHello can trigger this scenario. An example of such an error could be that a DTLS1.0 only client is attempting to connect to a DTLS1.2 only server.", - "cvss": null + "summary": "Segmentation fault in DTLSv1_listen. A defect in the implementation of DTLSv1_listen means that state is preserved in the SSL object from one invocation to the next that can lead to a segmentation fault. Errors processing the initial ClientHello can trigger this scenario. An example of such an error could be that a DTLS1.0 only client is attempting to connect to a DTLS1.2 only server." } }, { @@ -689,8 +612,7 @@ "pk": 369, "fields": { "cve_id": "CVE-2015-1787", - "summary": "Empty CKE with client auth and DHE. If client auth is used then a server can seg fault in the event of a DHE ciphersuite being selected and a zero length ClientKeyExchange message being sent by the client. This could be exploited in a DoS attack.", - "cvss": null + "summary": "Empty CKE with client auth and DHE. If client auth is used then a server can seg fault in the event of a DHE ciphersuite being selected and a zero length ClientKeyExchange message being sent by the client. This could be exploited in a DoS attack." } }, { @@ -698,8 +620,7 @@ "pk": 370, "fields": { "cve_id": "CVE-2015-0289", - "summary": "PKCS#7 NULL pointer dereference. The PKCS#7 parsing code does not handle missing outer ContentInfo correctly. An attacker can craft malformed ASN.1-encoded PKCS#7 blobs with missing content and trigger a NULL pointer dereference on parsing. 
Applications that verify PKCS#7 signatures, decrypt PKCS#7 data or otherwise parse PKCS#7 structures from untrusted sources are affected. OpenSSL clients and servers are not affected.", - "cvss": null + "summary": "PKCS#7 NULL pointer dereference. The PKCS#7 parsing code does not handle missing outer ContentInfo correctly. An attacker can craft malformed ASN.1-encoded PKCS#7 blobs with missing content and trigger a NULL pointer dereference on parsing. Applications that verify PKCS#7 signatures, decrypt PKCS#7 data or otherwise parse PKCS#7 structures from untrusted sources are affected. OpenSSL clients and servers are not affected." } }, { @@ -707,8 +628,7 @@ "pk": 371, "fields": { "cve_id": "CVE-2015-0290", - "summary": "Multiblock corrupted pointer. OpenSSL 1.0.2 introduced the \"multiblock\" performance improvement. This feature only applies on 64 bit x86 architecture platforms that support AES NI instructions. A defect in the implementation of \"multiblock\" can cause OpenSSL's internal write buffer to become incorrectly set to NULL when using non-blocking IO. Typically, when the user application is using a socket BIO for writing, this will only result in a failed connection. However if some other BIO is used then it is likely that a segmentation fault will be triggered, thus enabling a potential DoS attack.", - "cvss": null + "summary": "Multiblock corrupted pointer. OpenSSL 1.0.2 introduced the \"multiblock\" performance improvement. This feature only applies on 64 bit x86 architecture platforms that support AES NI instructions. A defect in the implementation of \"multiblock\" can cause OpenSSL's internal write buffer to become incorrectly set to NULL when using non-blocking IO. Typically, when the user application is using a socket BIO for writing, this will only result in a failed connection. However if some other BIO is used then it is likely that a segmentation fault will be triggered, thus enabling a potential DoS attack." 
} }, { @@ -716,8 +636,7 @@ "pk": 372, "fields": { "cve_id": "CVE-2015-0291", - "summary": "ClientHello sigalgs DoS. If a client connects to an OpenSSL 1.0.2 server and renegotiates with an invalid signature algorithms extension a NULL pointer dereference will occur. This can be exploited in a DoS attack against the server.", - "cvss": null + "summary": "ClientHello sigalgs DoS. If a client connects to an OpenSSL 1.0.2 server and renegotiates with an invalid signature algorithms extension a NULL pointer dereference will occur. This can be exploited in a DoS attack against the server." } }, { @@ -725,8 +644,7 @@ "pk": 373, "fields": { "cve_id": "CVE-2015-0205", - "summary": "An OpenSSL server will accept a DH certificate for client authentication without the certificate verify message. This effectively allows a client to authenticate without the use of a private key. This only affects servers which trust a client certificate authority which issues certificates containing DH keys: these are extremely rare and hardly ever encountered.", - "cvss": null + "summary": "An OpenSSL server will accept a DH certificate for client authentication without the certificate verify message. This effectively allows a client to authenticate without the use of a private key. This only affects servers which trust a client certificate authority which issues certificates containing DH keys: these are extremely rare and hardly ever encountered." } }, { @@ -734,8 +652,7 @@ "pk": 374, "fields": { "cve_id": "CVE-2015-0206", - "summary": "A memory leak can occur in the dtls1_buffer_record function under certain conditions. In particular this could occur if an attacker sent repeated DTLS records with the same sequence number but for the next epoch. The memory leak could be exploited by an attacker in a Denial of Service attack through memory exhaustion.", - "cvss": null + "summary": "A memory leak can occur in the dtls1_buffer_record function under certain conditions. 
In particular this could occur if an attacker sent repeated DTLS records with the same sequence number but for the next epoch. The memory leak could be exploited by an attacker in a Denial of Service attack through memory exhaustion." } }, { @@ -743,8 +660,7 @@ "pk": 375, "fields": { "cve_id": "CVE-2015-0204", - "summary": "An OpenSSL client will accept the use of an RSA temporary key in a non-export RSA key exchange ciphersuite. A server could present a weak temporary key and downgrade the security of the session.", - "cvss": null + "summary": "An OpenSSL client will accept the use of an RSA temporary key in a non-export RSA key exchange ciphersuite. A server could present a weak temporary key and downgrade the security of the session." } }, { @@ -752,8 +668,7 @@ "pk": 376, "fields": { "cve_id": "CVE-2015-0209", - "summary": "Use After Free following d2i_ECPrivatekey error. A malformed EC private key file consumed via the d2i_ECPrivateKey function could cause a use after free condition. This, in turn, could cause a double free in several private key parsing functions (such as d2i_PrivateKey or EVP_PKCS82PKEY) and could lead to a DoS attack or memory corruption for applications that receive EC private keys from untrusted sources. This scenario is considered rare.", - "cvss": null + "summary": "Use After Free following d2i_ECPrivatekey error. A malformed EC private key file consumed via the d2i_ECPrivateKey function could cause a use after free condition. This, in turn, could cause a double free in several private key parsing functions (such as d2i_PrivateKey or EVP_PKCS82PKEY) and could lead to a DoS attack or memory corruption for applications that receive EC private keys from untrusted sources. This scenario is considered rare." } }, { @@ -761,8 +676,7 @@ "pk": 377, "fields": { "cve_id": "CVE-2015-0288", - "summary": "X509_to_X509_REQ NULL pointer deref. The function X509_to_X509_REQ will crash with a NULL pointer dereference if the certificate key is invalid. 
This function is rarely used in practice.", - "cvss": null + "summary": "X509_to_X509_REQ NULL pointer deref. The function X509_to_X509_REQ will crash with a NULL pointer dereference if the certificate key is invalid. This function is rarely used in practice." } }, { @@ -770,8 +684,7 @@ "pk": 378, "fields": { "cve_id": "CVE-2014-3572", - "summary": "An OpenSSL client will accept a handshake using an ephemeral ECDH ciphersuite using an ECDSA certificate if the server key exchange message is omitted. This effectively removes forward secrecy from the ciphersuite.", - "cvss": null + "summary": "An OpenSSL client will accept a handshake using an ephemeral ECDH ciphersuite using an ECDSA certificate if the server key exchange message is omitted. This effectively removes forward secrecy from the ciphersuite." } }, { @@ -779,8 +692,7 @@ "pk": 379, "fields": { "cve_id": "CVE-2015-0285", - "summary": "Under certain conditions an OpenSSL 1.0.2 client can complete a handshake with an unseeded PRNG. If the handshake succeeds then the client random that has been used will have been generated from a PRNG with insufficient entropy and therefore the output may be predictable.", - "cvss": null + "summary": "Under certain conditions an OpenSSL 1.0.2 client can complete a handshake with an unseeded PRNG. If the handshake succeeds then the client random that has been used will have been generated from a PRNG with insufficient entropy and therefore the output may be predictable." } }, { @@ -788,8 +700,7 @@ "pk": 380, "fields": { "cve_id": "CVE-2014-8275", - "summary": "OpenSSL accepts several non-DER-variations of certificate signature algorithm and signature encodings. OpenSSL also does not enforce a match between the signature algorithm between the signed and unsigned portions of the certificate. By modifying the contents of the signature algorithm or the encoding of the signature, it is possible to change the certificate's fingerprint. 
This does not allow an attacker to forge certificates, and does not affect certificate verification or OpenSSL servers/clients in any other way. It also does not affect common revocation mechanisms. Only custom applications that rely on the uniqueness of the fingerprint (e.g. certificate blacklists) may be affected.", - "cvss": null + "summary": "OpenSSL accepts several non-DER-variations of certificate signature algorithm and signature encodings. OpenSSL also does not enforce a match between the signature algorithm between the signed and unsigned portions of the certificate. By modifying the contents of the signature algorithm or the encoding of the signature, it is possible to change the certificate's fingerprint. This does not allow an attacker to forge certificates, and does not affect certificate verification or OpenSSL servers/clients in any other way. It also does not affect common revocation mechanisms. Only custom applications that rely on the uniqueness of the fingerprint (e.g. certificate blacklists) may be affected." } }, { @@ -797,8 +708,7 @@ "pk": 381, "fields": { "cve_id": "CVE-2014-3571", - "summary": "A carefully crafted DTLS message can cause a segmentation fault in OpenSSL due to a NULL pointer dereference. This could lead to a Denial Of Service attack.", - "cvss": null + "summary": "A carefully crafted DTLS message can cause a segmentation fault in OpenSSL due to a NULL pointer dereference. This could lead to a Denial Of Service attack." } }, { @@ -806,8 +716,7 @@ "pk": 382, "fields": { "cve_id": "CVE-2014-3569", - "summary": "When openssl is built with the no-ssl3 option and a SSL v3 ClientHello is received the ssl method would be set to NULL which could later result in a NULL pointer dereference.", - "cvss": null + "summary": "When openssl is built with the no-ssl3 option and a SSL v3 ClientHello is received the ssl method would be set to NULL which could later result in a NULL pointer dereference." 
} }, { @@ -815,8 +724,7 @@ "pk": 383, "fields": { "cve_id": "CVE-2014-5139", - "summary": "A crash was found affecting SRP ciphersuites used in a Server Hello message. The issue affects OpenSSL clients and allows a malicious server to crash the client with a null pointer dereference (read) by specifying an SRP ciphersuite even though it was not properly negotiated with the client. This could lead to a Denial of Service.", - "cvss": null + "summary": "A crash was found affecting SRP ciphersuites used in a Server Hello message. The issue affects OpenSSL clients and allows a malicious server to crash the client with a null pointer dereference (read) by specifying an SRP ciphersuite even though it was not properly negotiated with the client. This could lead to a Denial of Service." } }, { @@ -824,8 +732,7 @@ "pk": 384, "fields": { "cve_id": "CVE-2014-3508", - "summary": "A flaw in OBJ_obj2txt may cause pretty printing functions such as X509_name_oneline, X509_name_print_ex, to leak some information from the stack. Applications may be affected if they echo pretty printing output to the attacker. OpenSSL SSL/TLS clients and servers themselves are not affected.", - "cvss": null + "summary": "A flaw in OBJ_obj2txt may cause pretty printing functions such as X509_name_oneline, X509_name_print_ex, to leak some information from the stack. Applications may be affected if they echo pretty printing output to the attacker. OpenSSL SSL/TLS clients and servers themselves are not affected." } }, { @@ -833,8 +740,7 @@ "pk": 385, "fields": { "cve_id": "CVE-2014-3505", - "summary": "A Double Free was found when processing DTLS packets. An attacker can force an error condition which causes openssl to crash whilst processing DTLS packets due to memory being freed twice. This could lead to a Denial of Service attack.", - "cvss": null + "summary": "A Double Free was found when processing DTLS packets. 
An attacker can force an error condition which causes openssl to crash whilst processing DTLS packets due to memory being freed twice. This could lead to a Denial of Service attack." } }, { @@ -842,8 +748,7 @@ "pk": 386, "fields": { "cve_id": "CVE-2014-3509", - "summary": "A race condition was found in ssl_parse_serverhello_tlsext. If a multithreaded client connects to a malicious server using a resumed session and the server sends an ec point format extension, it could write up to 255 bytes to freed memory.", - "cvss": null + "summary": "A race condition was found in ssl_parse_serverhello_tlsext. If a multithreaded client connects to a malicious server using a resumed session and the server sends an ec point format extension, it could write up to 255 bytes to freed memory." } }, { @@ -851,8 +756,7 @@ "pk": 387, "fields": { "cve_id": null, - "summary": "OpenSSL has added support for TLS_FALLBACK_SCSV to allow applications to block the ability for a MITM attacker to force a protocol downgrade. Some client applications (such as browsers) will reconnect using a downgraded protocol to work around interoperability bugs in older servers. This could be exploited by an active man-in-the-middle to downgrade connections to SSL 3.0 even if both sides of the connection support higher protocols. SSL 3.0 contains a number of weaknesses including POODLE (CVE-2014-3566). See also https://tools.ietf.org/html/draft-ietf-tls-downgrade-scsv-00 and https://www.openssl.org/~bodo/ssl-poodle.pdf", - "cvss": null + "summary": "OpenSSL has added support for TLS_FALLBACK_SCSV to allow applications to block the ability for a MITM attacker to force a protocol downgrade. Some client applications (such as browsers) will reconnect using a downgraded protocol to work around interoperability bugs in older servers. This could be exploited by an active man-in-the-middle to downgrade connections to SSL 3.0 even if both sides of the connection support higher protocols. 
SSL 3.0 contains a number of weaknesses including POODLE (CVE-2014-3566). See also https://tools.ietf.org/html/draft-ietf-tls-downgrade-scsv-00 and https://www.openssl.org/~bodo/ssl-poodle.pdf" } }, { @@ -860,8 +764,7 @@ "pk": 388, "fields": { "cve_id": "CVE-2014-3568", - "summary": "When OpenSSL is configured with \"no-ssl3\" as a build option, servers could accept and complete a SSL 3.0 handshake, and clients could be configured to send them.", - "cvss": null + "summary": "When OpenSSL is configured with \"no-ssl3\" as a build option, servers could accept and complete a SSL 3.0 handshake, and clients could be configured to send them." } }, { @@ -869,8 +772,7 @@ "pk": 389, "fields": { "cve_id": "CVE-2014-3506", - "summary": "A DTLS flaw leading to memory exhaustion was found. An attacker can force openssl to consume large amounts of memory whilst processing DTLS handshake messages. This could lead to a Denial of Service attack.", - "cvss": null + "summary": "A DTLS flaw leading to memory exhaustion was found. An attacker can force openssl to consume large amounts of memory whilst processing DTLS handshake messages. This could lead to a Denial of Service attack." } }, { @@ -878,8 +780,7 @@ "pk": 390, "fields": { "cve_id": "CVE-2014-3567", - "summary": "When an OpenSSL SSL/TLS/DTLS server receives a session ticket the integrity of that ticket is first verified. In the event of a session ticket integrity check failing, OpenSSL will fail to free memory causing a memory leak. By sending a large number of invalid session tickets an attacker could exploit this issue in a Denial Of Service attack.", - "cvss": null + "summary": "When an OpenSSL SSL/TLS/DTLS server receives a session ticket the integrity of that ticket is first verified. In the event of a session ticket integrity check failing, OpenSSL will fail to free memory causing a memory leak. By sending a large number of invalid session tickets an attacker could exploit this issue in a Denial Of Service attack." 
} }, { @@ -887,8 +788,7 @@ "pk": 391, "fields": { "cve_id": "CVE-2014-3570", - "summary": "Bignum squaring (BN_sqr) may produce incorrect results on some platforms, including x86_64. This bug occurs at random with a very low probability, and is not known to be exploitable in any way, though its exact impact is difficult to determine. The following has been determined: *) The probability of BN_sqr producing an incorrect result at random is very low: 1/2^64 on the single affected 32-bit platform (MIPS) and 1/2^128 on affected 64-bit platforms. *) On most platforms, RSA follows a different code path and RSA operations are not affected at all. For the remaining platforms (e.g. OpenSSL built without assembly support), pre-existing countermeasures thwart bug attacks [1]. *) Static ECDH is theoretically affected: it is possible to construct elliptic curve points that would falsely appear to be on the given curve. However, there is no known computationally feasible way to construct such points with low order, and so the security of static ECDH private keys is believed to be unaffected. *) Other routines known to be theoretically affected are modular exponentiation, primality testing, DSA, RSA blinding, JPAKE and SRP. No exploits are known and straightforward bug attacks fail - either the attacker cannot control when the bug triggers, or no private key material is involved.", - "cvss": null + "summary": "Bignum squaring (BN_sqr) may produce incorrect results on some platforms, including x86_64. This bug occurs at random with a very low probability, and is not known to be exploitable in any way, though its exact impact is difficult to determine. The following has been determined: *) The probability of BN_sqr producing an incorrect result at random is very low: 1/2^64 on the single affected 32-bit platform (MIPS) and 1/2^128 on affected 64-bit platforms. *) On most platforms, RSA follows a different code path and RSA operations are not affected at all. 
For the remaining platforms (e.g. OpenSSL built without assembly support), pre-existing countermeasures thwart bug attacks [1]. *) Static ECDH is theoretically affected: it is possible to construct elliptic curve points that would falsely appear to be on the given curve. However, there is no known computationally feasible way to construct such points with low order, and so the security of static ECDH private keys is believed to be unaffected. *) Other routines known to be theoretically affected are modular exponentiation, primality testing, DSA, RSA blinding, JPAKE and SRP. No exploits are known and straightforward bug attacks fail - either the attacker cannot control when the bug triggers, or no private key material is involved." } }, { @@ -896,8 +796,7 @@ "pk": 392, "fields": { "cve_id": "CVE-2014-3513", - "summary": "A flaw in the DTLS SRTP extension parsing code allows an attacker, who sends a carefully crafted handshake message, to cause OpenSSL to fail to free up to 64k of memory causing a memory leak. This could be exploited in a Denial Of Service attack. This issue affects OpenSSL 1.0.1 server implementations for both SSL/TLS and DTLS regardless of whether SRTP is used or configured. Implementations of OpenSSL that have been compiled with OPENSSL_NO_SRTP defined are not affected.", - "cvss": null + "summary": "A flaw in the DTLS SRTP extension parsing code allows an attacker, who sends a carefully crafted handshake message, to cause OpenSSL to fail to free up to 64k of memory causing a memory leak. This could be exploited in a Denial Of Service attack. This issue affects OpenSSL 1.0.1 server implementations for both SSL/TLS and DTLS regardless of whether SRTP is used or configured. Implementations of OpenSSL that have been compiled with OPENSSL_NO_SRTP defined are not affected." 
} }, { @@ -905,8 +804,7 @@ "pk": 393, "fields": { "cve_id": "CVE-2002-0659", - "summary": "A flaw in the ASN1 library allowed remote attackers to cause a denial of service by sending invalid encodings.", - "cvss": null + "summary": "A flaw in the ASN1 library allowed remote attackers to cause a denial of service by sending invalid encodings." } }, { @@ -914,8 +812,7 @@ "pk": 394, "fields": { "cve_id": "CVE-2014-3507", - "summary": "A DTLS memory leak from zero-length fragments was found. By sending carefully crafted DTLS packets an attacker could cause OpenSSL to leak memory. This could lead to a Denial of Service attack.", - "cvss": null + "summary": "A DTLS memory leak from zero-length fragments was found. By sending carefully crafted DTLS packets an attacker could cause OpenSSL to leak memory. This could lead to a Denial of Service attack." } }, { @@ -923,8 +820,7 @@ "pk": 395, "fields": { "cve_id": "CVE-2014-3512", - "summary": "A SRP buffer overrun was found. A malicious client or server can send invalid SRP parameters and overrun an internal buffer. Only applications which are explicitly set up for SRP use are affected.", - "cvss": null + "summary": "A SRP buffer overrun was found. A malicious client or server can send invalid SRP parameters and overrun an internal buffer. Only applications which are explicitly set up for SRP use are affected." 
} }, { @@ -932,8 +828,7 @@ "pk": 396, "fields": { "cve_id": "CVE-2002-1568", - "summary": "The use of assertions when detecting buffer overflow attacks allowed remote attackers to cause a denial of service (crash) by sending certain messages to cause OpenSSL to abort from a failed assertion, as demonstrated using SSLv2 CLIENT_MASTER_KEY messages, which were not properly handled in s2_srvr.c.", - "cvss": null + "summary": "The use of assertions when detecting buffer overflow attacks allowed remote attackers to cause a denial of service (crash) by sending certain messages to cause OpenSSL to abort from a failed assertion, as demonstrated using SSLv2 CLIENT_MASTER_KEY messages, which were not properly handled in s2_srvr.c." } }, { @@ -941,8 +836,7 @@ "pk": 397, "fields": { "cve_id": "CVE-2002-0656", - "summary": "A buffer overflow allowed remote attackers to execute arbitrary code by sending a large client master key in SSL2 or a large session ID in SSL3.", - "cvss": null + "summary": "A buffer overflow allowed remote attackers to execute arbitrary code by sending a large client master key in SSL2 or a large session ID in SSL3." } }, { @@ -950,8 +844,7 @@ "pk": 398, "fields": { "cve_id": "CVE-2014-3511", - "summary": "A flaw in the OpenSSL SSL/TLS server code causes the server to negotiate TLS 1.0 instead of higher protocol versions when the ClientHello message is badly fragmented. This allows a man-in-the-middle attacker to force a downgrade to TLS 1.0 even if both the server and the client support a higher protocol version, by modifying the client's TLS records.", - "cvss": null + "summary": "A flaw in the OpenSSL SSL/TLS server code causes the server to negotiate TLS 1.0 instead of higher protocol versions when the ClientHello message is badly fragmented. This allows a man-in-the-middle attacker to force a downgrade to TLS 1.0 even if both the server and the client support a higher protocol version, by modifying the client's TLS records." 
} }, { @@ -959,8 +852,7 @@ "pk": 399, "fields": { "cve_id": "CVE-2002-0655", - "summary": "Inproper handling of ASCII representations of integers on 64 bit platforms allowed remote attackers to cause a denial of service or possibly execute arbitrary code.", - "cvss": null + "summary": "Inproper handling of ASCII representations of integers on 64 bit platforms allowed remote attackers to cause a denial of service or possibly execute arbitrary code." } }, { @@ -968,8 +860,7 @@ "pk": 400, "fields": { "cve_id": "CVE-2002-0657", - "summary": "A buffer overflow when Kerberos is enabled allowed attackers to execute arbitrary code by sending a long master key. Note that this flaw did not affect any released version of 0.9.6 or 0.9.7", - "cvss": null + "summary": "A buffer overflow when Kerberos is enabled allowed attackers to execute arbitrary code by sending a long master key. Note that this flaw did not affect any released version of 0.9.6 or 0.9.7" } }, { @@ -977,8 +868,7 @@ "pk": 401, "fields": { "cve_id": "CVE-2003-0078", - "summary": "sl3_get_record in s3_pkt.c did not perform a MAC computation if an incorrect block cipher padding was used, causing an information leak (timing discrepancy) that may make it easier to launch cryptographic attacks that rely on distinguishing between padding and MAC verification errors, possibly leading to extraction of the original plaintext, aka the \"Vaudenay timing attack.\"", - "cvss": null + "summary": "sl3_get_record in s3_pkt.c did not perform a MAC computation if an incorrect block cipher padding was used, causing an information leak (timing discrepancy) that may make it easier to launch cryptographic attacks that rely on distinguishing between padding and MAC verification errors, possibly leading to extraction of the original plaintext, aka the \"Vaudenay timing attack.\"" } }, { @@ -986,8 +876,7 @@ "pk": 402, "fields": { "cve_id": "CVE-2014-3510", - "summary": "A flaw in handling DTLS anonymous EC(DH) ciphersuites was found. 
OpenSSL DTLS clients enabling anonymous (EC)DH ciphersuites are subject to a denial of service attack. A malicious server can crash the client with a null pointer dereference (read) by specifying an anonymous (EC)DH ciphersuite and sending carefully crafted handshake messages.", - "cvss": null + "summary": "A flaw in handling DTLS anonymous EC(DH) ciphersuites was found. OpenSSL DTLS clients enabling anonymous (EC)DH ciphersuites are subject to a denial of service attack. A malicious server can crash the client with a null pointer dereference (read) by specifying an anonymous (EC)DH ciphersuite and sending carefully crafted handshake messages." } }, { @@ -995,8 +884,7 @@ "pk": 403, "fields": { "cve_id": "CVE-2003-0545", - "summary": "Certain ASN.1 encodings that were rejected as invalid by the parser could trigger a bug in the deallocation of the corresponding data structure, corrupting the stack, leading to a crash.", - "cvss": null + "summary": "Certain ASN.1 encodings that were rejected as invalid by the parser could trigger a bug in the deallocation of the corresponding data structure, corrupting the stack, leading to a crash." } }, { @@ -1004,8 +892,7 @@ "pk": 404, "fields": { "cve_id": "CVE-2004-0079", - "summary": "The Codenomicon TLS Test Tool uncovered a null-pointer assignment in the do_change_cipher_spec() function. A remote attacker could perform a carefully crafted SSL/TLS handshake against a server that used the OpenSSL library in such a way as to cause a crash.", - "cvss": null + "summary": "The Codenomicon TLS Test Tool uncovered a null-pointer assignment in the do_change_cipher_spec() function. A remote attacker could perform a carefully crafted SSL/TLS handshake against a server that used the OpenSSL library in such a way as to cause a crash." } }, { @@ -1013,8 +900,7 @@ "pk": 405, "fields": { "cve_id": "CVE-2004-0112", - "summary": "A flaw in SSL/TLS handshaking code when using Kerberos ciphersuites. 
A remote attacker could perform a carefully crafted SSL/TLS handshake against a server configured to use Kerberos ciphersuites in such a way as to cause OpenSSL to crash. Most applications have no ability to use Kerberos ciphersuites and will therefore be unaffected.", - "cvss": null + "summary": "A flaw in SSL/TLS handshaking code when using Kerberos ciphersuites. A remote attacker could perform a carefully crafted SSL/TLS handshake against a server configured to use Kerberos ciphersuites in such a way as to cause OpenSSL to crash. Most applications have no ability to use Kerberos ciphersuites and will therefore be unaffected." } }, { @@ -1022,8 +908,7 @@ "pk": 406, "fields": { "cve_id": "CVE-2004-0975", - "summary": "The der_chop script created temporary files insecurely which could allow local users to overwrite files via a symlink attack on temporary files. Note that it is quite unlikely that a user would be using the redundant der_chop script, and this script was removed from the OpenSSL distribution.", - "cvss": null + "summary": "The der_chop script created temporary files insecurely which could allow local users to overwrite files via a symlink attack on temporary files. Note that it is quite unlikely that a user would be using the redundant der_chop script, and this script was removed from the OpenSSL distribution." } }, { @@ -1031,8 +916,7 @@ "pk": 407, "fields": { "cve_id": "CVE-2003-0851", - "summary": "A flaw in OpenSSL 0.9.6k (only) would cause certain ASN.1 sequences to trigger a large recursion. On platforms such as Windows this large recursion cannot be handled correctly and so the bug causes OpenSSL to crash. A remote attacker could exploit this flaw if they can send arbitrary ASN.1 sequences which would cause OpenSSL to crash. 
This could be performed for example by sending a client certificate to a SSL/TLS enabled server which is configured to accept them.", - "cvss": null + "summary": "A flaw in OpenSSL 0.9.6k (only) would cause certain ASN.1 sequences to trigger a large recursion. On platforms such as Windows this large recursion cannot be handled correctly and so the bug causes OpenSSL to crash. A remote attacker could exploit this flaw if they can send arbitrary ASN.1 sequences which would cause OpenSSL to crash. This could be performed for example by sending a client certificate to a SSL/TLS enabled server which is configured to accept them." } }, { @@ -1040,8 +924,7 @@ "pk": 408, "fields": { "cve_id": "CVE-2003-0147", - "summary": "RSA blinding was not enabled by default, which could allow local and remote attackers to obtain a server's private key by determining factors using timing differences on (1) the number of extra reductions during Montgomery reduction, and (2) the use of different integer multiplication algorithms (\"Karatsuba\" and normal).", - "cvss": null + "summary": "RSA blinding was not enabled by default, which could allow local and remote attackers to obtain a server's private key by determining factors using timing differences on (1) the number of extra reductions during Montgomery reduction, and (2) the use of different integer multiplication algorithms (\"Karatsuba\" and normal)." } }, { @@ -1049,8 +932,7 @@ "pk": 409, "fields": { "cve_id": "CVE-2003-0543", - "summary": "An integer overflow could allow remote attackers to cause a denial of service (crash) via an SSL client certificate with certain ASN.1 tag values.", - "cvss": null + "summary": "An integer overflow could allow remote attackers to cause a denial of service (crash) via an SSL client certificate with certain ASN.1 tag values." 
} }, { @@ -1058,8 +940,7 @@ "pk": 410, "fields": { "cve_id": "CVE-2003-0131", - "summary": "The SSL and TLS components allowed remote attackers to perform an unauthorized RSA private key operation via a modified Bleichenbacher attack that uses a large number of SSL or TLS connections using PKCS #1 v1.5 padding that caused OpenSSL to leak information regarding the relationship between ciphertext and the associated plaintext, aka the \"Klima-Pokorny-Rosa attack\"", - "cvss": null + "summary": "The SSL and TLS components allowed remote attackers to perform an unauthorized RSA private key operation via a modified Bleichenbacher attack that uses a large number of SSL or TLS connections using PKCS #1 v1.5 padding that caused OpenSSL to leak information regarding the relationship between ciphertext and the associated plaintext, aka the \"Klima-Pokorny-Rosa attack\"" } }, { @@ -1067,8 +948,7 @@ "pk": 411, "fields": { "cve_id": "CVE-2004-0081", - "summary": "The Codenomicon TLS Test Tool found that some unknown message types were handled incorrectly, allowing a remote attacker to cause a denial of service (infinite loop).", - "cvss": null + "summary": "The Codenomicon TLS Test Tool found that some unknown message types were handled incorrectly, allowing a remote attacker to cause a denial of service (infinite loop)." } }, { @@ -1076,8 +956,7 @@ "pk": 412, "fields": { "cve_id": "CVE-2003-0544", - "summary": "Incorrect tracking of the number of characters in certain ASN.1 inputs could allow remote attackers to cause a denial of service (crash) by sending an SSL client certificate that causes OpenSSL to read past the end of a buffer when the long form is used.", - "cvss": null + "summary": "Incorrect tracking of the number of characters in certain ASN.1 inputs could allow remote attackers to cause a denial of service (crash) by sending an SSL client certificate that causes OpenSSL to read past the end of a buffer when the long form is used." 
} }, { @@ -1085,8 +964,7 @@ "pk": 413, "fields": { "cve_id": "CVE-2006-4343", - "summary": "A flaw in the SSLv2 client code was discovered. When a client application used OpenSSL to create an SSLv2 connection to a malicious server, that server could cause the client to crash.", - "cvss": null + "summary": "A flaw in the SSLv2 client code was discovered. When a client application used OpenSSL to create an SSLv2 connection to a malicious server, that server could cause the client to crash." } }, { @@ -1094,8 +972,7 @@ "pk": 414, "fields": { "cve_id": "CVE-2006-2937", - "summary": "During the parsing of certain invalid ASN.1 structures an error condition is mishandled. This can result in an infinite loop which consumes system memory", - "cvss": null + "summary": "During the parsing of certain invalid ASN.1 structures an error condition is mishandled. This can result in an infinite loop which consumes system memory" } }, { @@ -1103,8 +980,7 @@ "pk": 415, "fields": { "cve_id": "CVE-2007-5502", - "summary": "The PRNG implementation for the OpenSSL FIPS Object Module 1.1.1 does not perform auto-seeding during the FIPS self-test, which generates random data that is more predictable than expected and makes it easier for attackers to bypass protection mechanisms that rely on the randomness.", - "cvss": null + "summary": "The PRNG implementation for the OpenSSL FIPS Object Module 1.1.1 does not perform auto-seeding during the FIPS self-test, which generates random data that is more predictable than expected and makes it easier for attackers to bypass protection mechanisms that rely on the randomness." } }, { @@ -1112,8 +988,7 @@ "pk": 416, "fields": { "cve_id": "CVE-2007-5135", - "summary": "A flaw was found in the SSL_get_shared_ciphers() utility function. An attacker could send a list of ciphers to an application that used this function and overrun a buffer with a single byte. 
Few applications make use of this vulnerable function and generally it is used only when applications are compiled for debugging.", - "cvss": null + "summary": "A flaw was found in the SSL_get_shared_ciphers() utility function. An attacker could send a list of ciphers to an application that used this function and overrun a buffer with a single byte. Few applications make use of this vulnerable function and generally it is used only when applications are compiled for debugging." } }, { @@ -1121,8 +996,7 @@ "pk": 417, "fields": { "cve_id": "CVE-2007-4995", - "summary": "A flaw in DTLS support. An attacker could create a malicious client or server that could trigger a heap overflow. This is possibly exploitable to run arbitrary code, but it has not been verified.", - "cvss": null + "summary": "A flaw in DTLS support. An attacker could create a malicious client or server that could trigger a heap overflow. This is possibly exploitable to run arbitrary code, but it has not been verified." } }, { @@ -1130,8 +1004,7 @@ "pk": 418, "fields": { "cve_id": "CVE-2006-4339", - "summary": "Daniel Bleichenbacher discovered an attack on PKCS #1 v1.5 signatures where under certain circumstances it may be possible for an attacker to forge a PKCS #1 v1.5 signature that would be incorrectly verified by OpenSSL.", - "cvss": null + "summary": "Daniel Bleichenbacher discovered an attack on PKCS #1 v1.5 signatures where under certain circumstances it may be possible for an attacker to forge a PKCS #1 v1.5 signature that would be incorrectly verified by OpenSSL." 
} }, { @@ -1139,8 +1012,7 @@ "pk": 419, "fields": { "cve_id": "CVE-2005-2969", - "summary": "A deprecated option, SSL_OP_MISE_SSLV2_RSA_PADDING, could allow an attacker acting as a \"man in the middle\" to force a connection to downgrade to SSL 2.0 even if both parties support better protocols.", - "cvss": null + "summary": "A deprecated option, SSL_OP_MISE_SSLV2_RSA_PADDING, could allow an attacker acting as a \"man in the middle\" to force a connection to downgrade to SSL 2.0 even if both parties support better protocols." } }, { @@ -1148,8 +1020,7 @@ "pk": 420, "fields": { "cve_id": "CVE-2008-0891", - "summary": "Testing using the Codenomicon TLS test suite discovered a flaw in the handling of server name extension data in OpenSSL 0.9.8f and OpenSSL 0.9.8g. If OpenSSL has been compiled using the non-default TLS server name extensions, a remote attacker could send a carefully crafted packet to a server application using OpenSSL and cause it to crash.", - "cvss": null + "summary": "Testing using the Codenomicon TLS test suite discovered a flaw in the handling of server name extension data in OpenSSL 0.9.8f and OpenSSL 0.9.8g. If OpenSSL has been compiled using the non-default TLS server name extensions, a remote attacker could send a carefully crafted packet to a server application using OpenSSL and cause it to crash." } }, { @@ -1157,8 +1028,7 @@ "pk": 421, "fields": { "cve_id": "CVE-2006-2940", - "summary": "Certain types of public key can take disproportionate amounts of time to process. This could be used by an attacker in a denial of service attack.", - "cvss": null + "summary": "Certain types of public key can take disproportionate amounts of time to process. This could be used by an attacker in a denial of service attack." } }, { @@ -1166,8 +1036,7 @@ "pk": 422, "fields": { "cve_id": "CVE-2006-3738", - "summary": "A buffer overflow was discovered in the SSL_get_shared_ciphers() utility function. 
An attacker could send a list of ciphers to an application that uses this function and overrun a buffer.", - "cvss": null + "summary": "A buffer overflow was discovered in the SSL_get_shared_ciphers() utility function. An attacker could send a list of ciphers to an application that uses this function and overrun a buffer." } }, { @@ -1175,8 +1044,7 @@ "pk": 423, "fields": { "cve_id": "CVE-2009-3555", - "summary": "Implement RFC5746 to address vulnerabilities in SSL/TLS renegotiation.", - "cvss": null + "summary": "Implement RFC5746 to address vulnerabilities in SSL/TLS renegotiation." } }, { @@ -1184,8 +1052,7 @@ "pk": 424, "fields": { "cve_id": "CVE-2009-0590", - "summary": "The function ASN1_STRING_print_ex() when used to print a BMPString or UniversalString will crash with an invalid memory access if the encoded length of the string is illegal. Any OpenSSL application which prints out the contents of a certificate could be affected by this bug, including SSL servers, clients and S/MIME software.", - "cvss": null + "summary": "The function ASN1_STRING_print_ex() when used to print a BMPString or UniversalString will crash with an invalid memory access if the encoded length of the string is illegal. Any OpenSSL application which prints out the contents of a certificate could be affected by this bug, including SSL servers, clients and S/MIME software." } }, { @@ -1193,8 +1060,7 @@ "pk": 425, "fields": { "cve_id": "CVE-2009-1378", - "summary": "Fix a denial of service flaw in the DTLS implementation. In dtls1_process_out_of_seq_message() the check if the current message is already buffered was missing. For every new message was memory allocated, allowing an attacker to perform an denial of service attack against a DTLS server by sending out of seq handshake messages until there is no memory left.", - "cvss": null + "summary": "Fix a denial of service flaw in the DTLS implementation. 
In dtls1_process_out_of_seq_message() the check if the current message is already buffered was missing. For every new message was memory allocated, allowing an attacker to perform an denial of service attack against a DTLS server by sending out of seq handshake messages until there is no memory left." } }, { @@ -1202,8 +1068,7 @@ "pk": 426, "fields": { "cve_id": "CVE-2009-0789", - "summary": "When a malformed ASN1 structure is received it's contents are freed up and zeroed and an error condition returned. On a small number of platforms where sizeof(long) < sizeof(void *) (for example WIN64) this can cause an invalid memory access later resulting in a crash when some invalid structures are read, for example RSA public keys.", - "cvss": null + "summary": "When a malformed ASN1 structure is received it's contents are freed up and zeroed and an error condition returned. On a small number of platforms where sizeof(long) < sizeof(void *) (for example WIN64) this can cause an invalid memory access later resulting in a crash when some invalid structures are read, for example RSA public keys." } }, { @@ -1211,8 +1076,7 @@ "pk": 427, "fields": { "cve_id": "CVE-2008-1672", - "summary": "Testing using the Codenomicon TLS test suite discovered a flaw if the 'Server Key exchange message' is omitted from a TLS handshake in OpenSSL 0.9.8f and OpenSSL 0.9.8g. If a client connects to a malicious server with particular cipher suites, the server could cause the client to crash.", - "cvss": null + "summary": "Testing using the Codenomicon TLS test suite discovered a flaw if the 'Server Key exchange message' is omitted from a TLS handshake in OpenSSL 0.9.8f and OpenSSL 0.9.8g. If a client connects to a malicious server with particular cipher suites, the server could cause the client to crash." } }, { @@ -1220,8 +1084,7 @@ "pk": 428, "fields": { "cve_id": "CVE-2009-1386", - "summary": "Fix a NULL pointer dereference if a DTLS server recieved ChangeCipherSpec as first record. 
A remote attacker could use this flaw to cause a DTLS server to crash", - "cvss": null + "summary": "Fix a NULL pointer dereference if a DTLS server recieved ChangeCipherSpec as first record. A remote attacker could use this flaw to cause a DTLS server to crash" } }, { @@ -1229,8 +1092,7 @@ "pk": 429, "fields": { "cve_id": "CVE-2008-5077", - "summary": "The Google Security Team discovered several functions inside OpenSSL incorrectly checked the result after calling the EVP_VerifyFinal function, allowing a malformed signature to be treated as a good signature rather than as an error. This issue affected the signature checks on DSA and ECDSA keys used with SSL/TLS. One way to exploit this flaw would be for a remote attacker who is in control of a malicious server or who can use a 'man in the middle' attack to present a malformed SSL/TLS signature from a certificate chain to a vulnerable client, bypassing validation.", - "cvss": null + "summary": "The Google Security Team discovered several functions inside OpenSSL incorrectly checked the result after calling the EVP_VerifyFinal function, allowing a malformed signature to be treated as a good signature rather than as an error. This issue affected the signature checks on DSA and ECDSA keys used with SSL/TLS. One way to exploit this flaw would be for a remote attacker who is in control of a malicious server or who can use a 'man in the middle' attack to present malformed SSL/TLS signature from a certificate chain to a vulnerable client, bypassing validation." } }, { @@ -1238,8 +1100,7 @@ "pk": 430, "fields": { "cve_id": "CVE-2009-1377", - "summary": "Fix a denial of service flaw in the DTLS implementation. Records are buffered if they arrive with a future epoch to be processed after finishing the corresponding handshake. 
There is currently no limitation to this buffer allowing an attacker to perform a DOS attack to a DTLS server by sending records with future epochs until there is no memory left.", - "cvss": null + "summary": "Fix a denial of service flaw in the DTLS implementation. Records are buffered if they arrive with a future epoch to be processed after finishing the corresponding handshake. There is currently no limitation to this buffer allowing an attacker to perform a DOS attack to a DTLS server by sending records with future epochs until there is no memory left." } }, { @@ -1247,8 +1108,7 @@ "pk": 431, "fields": { "cve_id": "CVE-2009-1387", - "summary": "Fix denial of service flaw due in the DTLS implementation. A remote attacker could use this flaw to cause a DTLS server to crash.", - "cvss": null + "summary": "Fix denial of service flaw due in the DTLS implementation. A remote attacker could use this flaw to cause a DTLS server to crash." } }, { @@ -1256,8 +1116,7 @@ "pk": 432, "fields": { "cve_id": "CVE-2009-0591", - "summary": "The function CMS_verify() does not correctly handle an error condition involving malformed signed attributes. This will cause an invalid set of signed attributes to appear valid and content digests will not be checked.", - "cvss": null + "summary": "The function CMS_verify() does not correctly handle an error condition involving malformed signed attributes. This will cause an invalid set of signed attributes to appear valid and content digests will not be checked." } }, { @@ -1265,8 +1124,7 @@ "pk": 433, "fields": { "cve_id": "CVE-2010-0740", - "summary": "In TLS connections, certain incorrectly formatted records can cause an OpenSSL client or server to crash due to a read attempt at NULL.", - "cvss": null + "summary": "In TLS connections, certain incorrectly formatted records can cause an OpenSSL client or server to crash due to a read attempt at NULL." 
} }, { @@ -1274,8 +1132,7 @@ "pk": 434, "fields": { "cve_id": "CVE-2009-3245", - "summary": "It was discovered that OpenSSL did not always check the return value of the bn_wexpand() function. An attacker able to trigger a memory allocation failure in that function could cause an application using the OpenSSL library to crash or, possibly, execute arbitrary code", - "cvss": null + "summary": "It was discovered that OpenSSL did not always check the return value of the bn_wexpand() function. An attacker able to trigger a memory allocation failure in that function could cause an application using the OpenSSL library to crash or, possibly, execute arbitrary code" } }, { @@ -1283,8 +1140,7 @@ "pk": 435, "fields": { "cve_id": "CVE-2009-1379", - "summary": "Use-after-free vulnerability in the dtls1_retrieve_buffered_fragment function could cause a client accessing a malicious DTLS server to crash.", - "cvss": null + "summary": "Use-after-free vulnerability in the dtls1_retrieve_buffered_fragment function could cause a client accessing a malicious DTLS server to crash." } }, { @@ -1292,8 +1148,7 @@ "pk": 436, "fields": { "cve_id": "CVE-2009-4355", - "summary": "A memory leak in the zlib_stateful_finish function in crypto/comp/c_zlib.c allows remote attackers to cause a denial of service via vectors that trigger incorrect calls to the CRYPTO_cleanup_all_ex_data function.", - "cvss": null + "summary": "A memory leak in the zlib_stateful_finish function in crypto/comp/c_zlib.c allows remote attackers to cause a denial of service via vectors that trigger incorrect calls to the CRYPTO_cleanup_all_ex_data function." } }, { @@ -1301,8 +1156,7 @@ "pk": 437, "fields": { "cve_id": "CVE-2010-0742", - "summary": "A flaw in the handling of CMS structures containing OriginatorInfo was found which could lead to a write to invalid memory address or double free. 
CMS support is disabled by default in OpenSSL 0.9.8 versions.", - "cvss": null + "summary": "A flaw in the handling of CMS structures containing OriginatorInfo was found which could lead to a write to invalid memory address or double free. CMS support is disabled by default in OpenSSL 0.9.8 versions." } }, { @@ -1310,8 +1164,7 @@ "pk": 438, "fields": { "cve_id": "CVE-2010-0433", - "summary": "A missing return value check flaw was discovered in OpenSSL, that could possibly cause OpenSSL to call a Kerberos library function with invalid arguments, resulting in a NULL pointer dereference crash in the MIT Kerberos library. In certain configurations, a remote attacker could use this flaw to crash a TLS/SSL server using OpenSSL by requesting Kerberos cipher suites during the TLS handshake", - "cvss": null + "summary": "A missing return value check flaw was discovered in OpenSSL, that could possibly cause OpenSSL to call a Kerberos library function with invalid arguments, resulting in a NULL pointer dereference crash in the MIT Kerberos library. In certain configurations, a remote attacker could use this flaw to crash a TLS/SSL server using OpenSSL by requesting Kerberos cipher suites during the TLS handshake" } }, { @@ -1319,8 +1172,7 @@ "pk": 439, "fields": { "cve_id": "CVE-2010-3864", - "summary": "A flaw in the OpenSSL TLS server extension code parsing which on affected servers can be exploited in a buffer overrun attack. Any OpenSSL based TLS server is vulnerable if it is multi-threaded and uses OpenSSL's internal caching mechanism. Servers that are multi-process and/or disable internal session caching are NOT affected.", - "cvss": null + "summary": "A flaw in the OpenSSL TLS server extension code parsing which on affected servers can be exploited in a buffer overrun attack. Any OpenSSL based TLS server is vulnerable if it is multi-threaded and uses OpenSSL's internal caching mechanism. 
Servers that are multi-process and/or disable internal session caching are NOT affected." } }, { @@ -1328,8 +1180,7 @@ "pk": 440, "fields": { "cve_id": "CVE-2010-4252", - "summary": "An error in OpenSSL's experimental J-PAKE implementation which could lead to successful validation by someone with no knowledge of the shared secret. The OpenSSL Team still consider the implementation of J-PAKE to be experimental and is not compiled by default.", - "cvss": null + "summary": "An error in OpenSSL's experimental J-PAKE implementation which could lead to successful validation by someone with no knowledge of the shared secret. The OpenSSL Team still consider the implementation of J-PAKE to be experimental and is not compiled by default." } }, { @@ -1337,8 +1188,7 @@ "pk": 441, "fields": { "cve_id": "CVE-2010-4180", - "summary": "A flaw in the OpenSSL SSL/TLS server code where an old bug workaround allows malicious clients to modify the stored session cache ciphersuite. In some cases the ciphersuite can be downgraded to a weaker one on subsequent connections. This issue only affects OpenSSL based SSL/TLS server if it uses OpenSSL's internal caching mechanisms and the SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG flag (many applications enable this by using the SSL_OP_ALL option).", - "cvss": null + "summary": "A flaw in the OpenSSL SSL/TLS server code where an old bug workaround allows malicious clients to modify the stored session cache ciphersuite. In some cases the ciphersuite can be downgraded to a weaker one on subsequent connections. This issue only affects OpenSSL based SSL/TLS server if it uses OpenSSL's internal caching mechanisms and the SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG flag (many applications enable this by using the SSL_OP_ALL option)." } }, { @@ -1346,8 +1196,7 @@ "pk": 442, "fields": { "cve_id": "CVE-2010-1633", - "summary": "An invalid Return value check in pkey_rsa_verifyrecover was discovered. 
When verification recovery fails for RSA keys an uninitialised buffer with an undefined length is returned instead of an error code. This could lead to an information leak.", - "cvss": null + "summary": "An invalid Return value check in pkey_rsa_verifyrecover was discovered. When verification recovery fails for RSA keys an uninitialised buffer with an undefined length is returned instead of an error code. This could lead to an information leak." } }, { @@ -1355,8 +1204,7 @@ "pk": 443, "fields": { "cve_id": "CVE-2011-4108", - "summary": "OpenSSL was susceptable an extension of the Vaudenay padding oracle attack on CBC mode encryption which enables an efficient plaintext recovery attack against the OpenSSL implementation of DTLS by exploiting timing differences arising during decryption processing.", - "cvss": null + "summary": "OpenSSL was susceptable an extension of the Vaudenay padding oracle attack on CBC mode encryption which enables an efficient plaintext recovery attack against the OpenSSL implementation of DTLS by exploiting timing differences arising during decryption processing." } }, { @@ -1364,8 +1212,7 @@ "pk": 444, "fields": { "cve_id": "CVE-2011-4576", - "summary": "OpenSSL failed to clear the bytes used as block cipher padding in SSL 3.0 records which could leak the contents of memory in some circumstances.", - "cvss": null + "summary": "OpenSSL failed to clear the bytes used as block cipher padding in SSL 3.0 records which could leak the contents of memory in some circumstances." } }, { @@ -1373,8 +1220,7 @@ "pk": 445, "fields": { "cve_id": "CVE-2011-3207", - "summary": "Under certain circumstances OpenSSL's internal certificate verification routines can incorrectly accept a CRL whose nextUpdate field is in the past. Applications are only affected by the CRL checking vulnerability if they enable OpenSSL's internal CRL checking which is off by default. 
Applications which use their own custom CRL checking (such as Apache) are not affected.", - "cvss": null + "summary": "Under certain circumstances OpenSSL's internal certificate verification routines can incorrectly accept a CRL whose nextUpdate field is in the past. Applications are only affected by the CRL checking vulnerability if they enable OpenSSL's internal CRL checking which is off by default. Applications which use their own custom CRL checking (such as Apache) are not affected." } }, { @@ -1382,8 +1228,7 @@ "pk": 446, "fields": { "cve_id": "CVE-2012-0027", - "summary": "A malicious TLS client can send an invalid set of GOST parameters which will cause the server to crash due to lack of error checking. This could be used in a denial-of-service attack. Only users of the OpenSSL GOST ENGINE are affected by this bug.", - "cvss": null + "summary": "A malicious TLS client can send an invalid set of GOST parameters which will cause the server to crash due to lack of error checking. This could be used in a denial-of-service attack. Only users of the OpenSSL GOST ENGINE are affected by this bug." } }, { @@ -1391,8 +1236,7 @@ "pk": 447, "fields": { "cve_id": "CVE-2011-4109", - "summary": "If X509_V_FLAG_POLICY_CHECK is set in OpenSSL 0.9.8, then a policy check failure can lead to a double-free. The bug does not occur unless this flag is set. Users of OpenSSL 1.0.0 are not affected", - "cvss": null + "summary": "If X509_V_FLAG_POLICY_CHECK is set in OpenSSL 0.9.8, then a policy check failure can lead to a double-free. The bug does not occur unless this flag is set. Users of OpenSSL 1.0.0 are not affected" } }, { @@ -1400,8 +1244,7 @@ "pk": 448, "fields": { "cve_id": "CVE-2012-0050", - "summary": "A flaw in the fix to CVE-2011-4108 can be exploited in a denial of service attack. Only DTLS applications are affected.", - "cvss": null + "summary": "A flaw in the fix to CVE-2011-4108 can be exploited in a denial of service attack. Only DTLS applications are affected." 
} }, { @@ -1409,8 +1252,7 @@ "pk": 449, "fields": { "cve_id": "CVE-2011-3210", - "summary": "OpenSSL server code for ephemeral ECDH ciphersuites is not thread-safe, and furthermore can crash if a client violates the protocol by sending handshake messages in incorrect order. Only server-side applications that specifically support ephemeral ECDH ciphersuites are affected, and only if ephemeral ECDH ciphersuites are enabled in the configuration.", - "cvss": null + "summary": "OpenSSL server code for ephemeral ECDH ciphersuites is not thread-safe, and furthermore can crash if a client violates the protocol by sending handshake messages in incorrect order. Only server-side applications that specifically support ephemeral ECDH ciphersuites are affected, and only if ephemeral ECDH ciphersuites are enabled in the configuration." } }, { @@ -1418,8 +1260,7 @@ "pk": 450, "fields": { "cve_id": "CVE-2012-0884", - "summary": "A weakness in the OpenSSL CMS and PKCS #7 code can be exploited using Bleichenbacher's attack on PKCS #1 v1.5 RSA padding also known as the million message attack (MMA). Only users of CMS, PKCS #7, or S/MIME decryption operations are affected, SSL/TLS applications are not affected by this issue.", - "cvss": null + "summary": "A weakness in the OpenSSL CMS and PKCS #7 code can be exploited using Bleichenbacher's attack on PKCS #1 v1.5 RSA padding also known as the million message attack (MMA). Only users of CMS, PKCS #7, or S/MIME decryption operations are affected, SSL/TLS applications are not affected by this issue." } }, { @@ -1427,8 +1268,7 @@ "pk": 451, "fields": { "cve_id": "CVE-2011-4577", - "summary": "RFC 3779 data can be included in certificates, and if it is malformed, may trigger an assertion failure. This could be used in a denial-of-service attack. 
Builds of OpenSSL are only vulnerable if configured with \"enable-rfc3779\", which is not a default.", - "cvss": null + "summary": "RFC 3779 data can be included in certificates, and if it is malformed, may trigger an assertion failure. This could be used in a denial-of-service attack. Builds of OpenSSL are only vulnerable if configured with \"enable-rfc3779\", which is not a default." } }, { @@ -1436,8 +1276,7 @@ "pk": 452, "fields": { "cve_id": "CVE-2011-4619", - "summary": "Support for handshake restarts for server gated cryptograpy (SGC) can be used in a denial-of-service attack.", - "cvss": null + "summary": "Support for handshake restarts for server gated cryptograpy (SGC) can be used in a denial-of-service attack." } }, { @@ -1445,8 +1284,7 @@ "pk": 453, "fields": { "cve_id": "CVE-2012-2686", - "summary": "A flaw in the OpenSSL handling of CBC ciphersuites in TLS 1.1 and TLS 1.2 on AES-NI supporting platforms can be exploited in a DoS attack.", - "cvss": null + "summary": "A flaw in the OpenSSL handling of CBC ciphersuites in TLS 1.1 and TLS 1.2 on AES-NI supporting platforms can be exploited in a DoS attack." } }, { @@ -1454,8 +1292,7 @@ "pk": 454, "fields": { "cve_id": "CVE-2011-0014", - "summary": "A buffer over-read flaw was discovered in the way OpenSSL parsed the Certificate Status Request TLS extensions in ClientHello TLS handshake messages. A remote attacker could possibly use this flaw to crash an SSL server using the affected OpenSSL functionality.", - "cvss": null + "summary": "A buffer over-read flaw was discovered in the way OpenSSL parsed the Certificate Status Request TLS extensions in ClientHello TLS handshake messages. A remote attacker could possibly use this flaw to crash an SSL server using the affected OpenSSL functionality." } }, { @@ -1463,8 +1300,7 @@ "pk": 455, "fields": { "cve_id": "CVE-2013-6450", - "summary": "A flaw in DTLS handling can cause an application using OpenSSL and DTLS to crash. 
This is not a vulnerability for OpenSSL prior to 1.0.0.", - "cvss": null + "summary": "A flaw in DTLS handling can cause an application using OpenSSL and DTLS to crash. This is not a vulnerability for OpenSSL prior to 1.0.0." } }, { @@ -1472,8 +1308,7 @@ "pk": 456, "fields": { "cve_id": "CVE-2013-6449", - "summary": "A flaw in OpenSSL can cause an application using OpenSSL to crash when using TLS version 1.2. This issue only affected OpenSSL 1.0.1 versions.", - "cvss": null + "summary": "A flaw in OpenSSL can cause an application using OpenSSL to crash when using TLS version 1.2. This issue only affected OpenSSL 1.0.1 versions." } }, { @@ -1481,8 +1316,7 @@ "pk": 457, "fields": { "cve_id": "CVE-2013-0169", - "summary": "A weakness in the handling of CBC ciphersuites in SSL, TLS and DTLS which could lead to plaintext recovery by exploiting timing differences arising during MAC processing.", - "cvss": null + "summary": "A weakness in the handling of CBC ciphersuites in SSL, TLS and DTLS which could lead to plaintext recovery by exploiting timing differences arising during MAC processing." } }, { @@ -1490,8 +1324,7 @@ "pk": 458, "fields": { "cve_id": "CVE-2012-2110", - "summary": "Multiple numeric conversion errors, leading to a buffer overflow, were found in the way OpenSSL parsed ASN.1 (Abstract Syntax Notation One) data from BIO (OpenSSL's I/O abstraction) inputs. Specially-crafted DER (Distinguished Encoding Rules) encoded data read from a file or other BIO input could cause an application using the OpenSSL library to crash or, potentially, execute arbitrary code.", - "cvss": null + "summary": "Multiple numeric conversion errors, leading to a buffer overflow, were found in the way OpenSSL parsed ASN.1 (Abstract Syntax Notation One) data from BIO (OpenSSL's I/O abstraction) inputs. 
Specially-crafted DER (Distinguished Encoding Rules) encoded data read from a file or other BIO input could cause an application using the OpenSSL library to crash or, potentially, execute arbitrary code." } }, { @@ -1499,8 +1332,7 @@ "pk": 459, "fields": { "cve_id": "CVE-2012-2333", - "summary": "An integer underflow flaw, leading to a buffer over-read, was found in the way OpenSSL handled TLS 1.1, TLS 1.2, and DTLS (Datagram Transport Layer Security) application data record lengths when using a block cipher in CBC (cipher-block chaining) mode. A malicious TLS 1.1, TLS 1.2, or DTLS client or server could use this flaw to crash its connection peer.", - "cvss": null + "summary": "An integer underflow flaw, leading to a buffer over-read, was found in the way OpenSSL handled TLS 1.1, TLS 1.2, and DTLS (Datagram Transport Layer Security) application data record lengths when using a block cipher in CBC (cipher-block chaining) mode. A malicious TLS 1.1, TLS 1.2, or DTLS client or server could use this flaw to crash its connection peer." } }, { @@ -1508,8 +1340,7 @@ "pk": 460, "fields": { "cve_id": "CVE-2013-4353", - "summary": "A carefully crafted invalid TLS handshake could crash OpenSSL with a NULL pointer exception. A malicious server could use this flaw to crash a connecting client. This issue only affected OpenSSL 1.0.1 versions.", - "cvss": null + "summary": "A carefully crafted invalid TLS handshake could crash OpenSSL with a NULL pointer exception. A malicious server could use this flaw to crash a connecting client. This issue only affected OpenSSL 1.0.1 versions." } }, { @@ -1517,8 +1348,7 @@ "pk": 461, "fields": { "cve_id": "CVE-2013-0166", - "summary": "A flaw in the OpenSSL handling of OCSP response verification can be exploited in a denial of service attack.", - "cvss": null + "summary": "A flaw in the OpenSSL handling of OCSP response verification can be exploited in a denial of service attack." 
} }, { @@ -1526,8 +1356,7 @@ "pk": 462, "fields": { "cve_id": "CVE-2012-2131", - "summary": "It was discovered that the fix for CVE-2012-2110 released on 19 Apr 2012 was not sufficient to correct the issue for OpenSSL 0.9.8. This issue only affects OpenSSL 0.9.8v. OpenSSL 1.0.1a and 1.0.0i already contain a patch sufficient to correct CVE-2012-2110.", - "cvss": null + "summary": "It was discovered that the fix for CVE-2012-2110 released on 19 Apr 2012 was not sufficient to correct the issue for OpenSSL 0.9.8. This issue only affects OpenSSL 0.9.8v. OpenSSL 1.0.1a and 1.0.0i already contain a patch sufficient to correct CVE-2012-2110." } }, { @@ -1535,8 +1364,7 @@ "pk": 463, "fields": { "cve_id": "CVE-2014-0224", - "summary": "An attacker can force the use of weak keying material in OpenSSL SSL/TLS clients and servers. This can be exploited by a Man-in-the-middle (MITM) attack where the attacker can decrypt and modify traffic from the attacked client and server.", - "cvss": null + "summary": "An attacker can force the use of weak keying material in OpenSSL SSL/TLS clients and servers. This can be exploited by a Man-in-the-middle (MITM) attack where the attacker can decrypt and modify traffic from the attacked client and server." } }, { @@ -1544,8 +1372,7 @@ "pk": 464, "fields": { "cve_id": "CVE-2014-0198", - "summary": "A flaw in the do_ssl3_write function can allow remote attackers to cause a denial of service via a NULL pointer dereference. This flaw only affects OpenSSL 1.0.0 and 1.0.1 where SSL_MODE_RELEASE_BUFFERS is enabled, which is not the default and not common.", - "cvss": null + "summary": "A flaw in the do_ssl3_write function can allow remote attackers to cause a denial of service via a NULL pointer dereference. This flaw only affects OpenSSL 1.0.0 and 1.0.1 where SSL_MODE_RELEASE_BUFFERS is enabled, which is not the default and not common." 
} }, { @@ -1553,8 +1380,7 @@ "pk": 465, "fields": { "cve_id": "CVE-2014-0221", - "summary": "By sending an invalid DTLS handshake to an OpenSSL DTLS client the code can be made to recurse eventually crashing in a DoS attack. Only applications using OpenSSL as a DTLS client are affected.", - "cvss": null + "summary": "By sending an invalid DTLS handshake to an OpenSSL DTLS client the code can be made to recurse eventually crashing in a DoS attack. Only applications using OpenSSL as a DTLS client are affected." } }, { @@ -1562,8 +1388,7 @@ "pk": 466, "fields": { "cve_id": "CVE-2010-5298", - "summary": "A race condition in the ssl3_read_bytes function can allow remote attackers to inject data across sessions or cause a denial of service. This flaw only affects multithreaded applications using OpenSSL 1.0.0 and 1.0.1, where SSL_MODE_RELEASE_BUFFERS is enabled, which is not the default and not common.", - "cvss": null + "summary": "A race condition in the ssl3_read_bytes function can allow remote attackers to inject data across sessions or cause a denial of service. This flaw only affects multithreaded applications using OpenSSL 1.0.0 and 1.0.1, where SSL_MODE_RELEASE_BUFFERS is enabled, which is not the default and not common." } }, { @@ -1571,8 +1396,7 @@ "pk": 467, "fields": { "cve_id": "CVE-2014-3470", - "summary": "OpenSSL TLS clients enabling anonymous ECDH ciphersuites are subject to a denial of service attack.", - "cvss": null + "summary": "OpenSSL TLS clients enabling anonymous ECDH ciphersuites are subject to a denial of service attack." 
} }, { @@ -1580,8 +1404,7 @@ "pk": 468, "fields": { "cve_id": "CVE-2014-0076", - "summary": "Fix for the attack described in the paper \"Recovering OpenSSL ECDSA Nonces Using the FLUSH+RELOAD Cache Side-channel Attack\"", - "cvss": null + "summary": "Fix for the attack described in the paper \"Recovering OpenSSL ECDSA Nonces Using the FLUSH+RELOAD Cache Side-channel Attack\"" } }, { @@ -1589,8 +1412,7 @@ "pk": 469, "fields": { "cve_id": "CVE-2014-0160", - "summary": "A missing bounds check in the handling of the TLS heartbeat extension can be used to reveal up to 64kB of memory to a connected client or server (a.k.a. Heartbleed). This issue did not affect versions of OpenSSL prior to 1.0.1.", - "cvss": null + "summary": "A missing bounds check in the handling of the TLS heartbeat extension can be used to reveal up to 64kB of memory to a connected client or server (a.k.a. Heartbleed). This issue did not affect versions of OpenSSL prior to 1.0.1." } }, { @@ -1598,8 +1420,7 @@ "pk": 470, "fields": { "cve_id": "CVE-2014-0195", - "summary": "A buffer overrun attack can be triggered by sending invalid DTLS fragments to an OpenSSL DTLS client or server. This is potentially exploitable to run arbitrary code on a vulnerable client or server. Only applications using OpenSSL as a DTLS client or server affected.", - "cvss": null + "summary": "A buffer overrun attack can be triggered by sending invalid DTLS fragments to an OpenSSL DTLS client or server. This is potentially exploitable to run arbitrary code on a vulnerable client or server. Only applications using OpenSSL as a DTLS client or server affected." 
} }, { diff --git a/vulnerabilities/import_runner.py b/vulnerabilities/import_runner.py index bdefb2a99..1b150bbf2 100644 --- a/vulnerabilities/import_runner.py +++ b/vulnerabilities/import_runner.py @@ -131,7 +131,7 @@ def process_advisories(data_source: DataSource) -> None: for score in vuln_ref.severities: models.VulnerabilitySeverity.objects.update_or_create( vulnerability=vuln, - scoring_system_identifier=score.system.identifier, + scoring_system=score.system.identifier, reference=ref, defaults={"value": str(score.value)}, ) diff --git a/vulnerabilities/migrations/0006_auto_20210123_0616.py b/vulnerabilities/migrations/0006_auto_20210123_0616.py new file mode 100644 index 000000000..101e7860b --- /dev/null +++ b/vulnerabilities/migrations/0006_auto_20210123_0616.py @@ -0,0 +1,27 @@ +# Generated by Django 3.0.7 on 2021-01-23 06:16 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('vulnerabilities', '0005_auto_20201218_0824'), + ] + + operations = [ + migrations.AddField( + model_name='vulnerabilityseverity', + name='scoring_system', + field=models.CharField(choices=[('cvssv2', 'CVSSv2'), ('cvssv3', 'CVSSv3'), ('rhbs', 'RedHat Bugzilla severity'), ('rhas', 'RedHat Aggregate severity'), ('rh_cvssv3', 'RedHat CVSSv3')], default='', help_text='Scoring system used for this score', max_length=50), + preserve_default=False, + ), + migrations.AlterUniqueTogether( + name='vulnerabilityseverity', + unique_together={('vulnerability', 'reference', 'scoring_system')}, + ), + migrations.RemoveField( + model_name='vulnerabilityseverity', + name='scoring_system_identifier', + ), + ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index a37b62f02..569b6a6c1 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -1,4 +1,3 @@ -# # Copyright (c) nexB Inc. and others. All rights reserved. 
# http://nexb.com and https://github.com/nexB/vulnerablecode/ # The VulnerableCode software is licensed under the Apache License version 2.0. @@ -31,6 +30,7 @@ from packageurl import PackageURL from vulnerabilities.data_source import DataSource +from vulnerabilities.severity_systems import scoring_systems class Vulnerability(models.Model): @@ -41,7 +41,6 @@ class Vulnerability(models.Model): cve_id = models.CharField(max_length=50, help_text="CVE ID", unique=True, null=True) summary = models.TextField(help_text="Summary of the vulnerability", blank=True) - cvss = models.FloatField(max_length=100, help_text="CVSS Score", null=True) @property def vulnerable_to(self): @@ -205,11 +204,21 @@ def __str__(self): class VulnerabilitySeverity(models.Model): + scoring_system_choices = [(system.identifier, system.name) for system in scoring_systems.values()] # nopep8 vulnerability = models.ForeignKey(Vulnerability, on_delete=models.CASCADE) value = models.CharField(max_length=50, help_text="Example: 9.0, Important, High") - scoring_system_identifier = models.CharField(max_length=50, help_text="Example: cvssv2") + scoring_system = models.CharField( + max_length=50, + choices=scoring_system_choices, + help_text="Identifier for the scoring system used. 
Avaiable choices are: {} ".format( + ", ".join( + [ + f"{ss.identifier} is identifier for {ss.name} system" + for ss in scoring_systems.values() + ] + )) + ) reference = models.ForeignKey(VulnerabilityReference, on_delete=models.CASCADE) class Meta: - # TBD - unique_together = ("vulnerability", "reference", "scoring_system_identifier") + unique_together = ("vulnerability", "reference", "scoring_system") From 5428b372fb76919033d33606fb9a8e23c64e1a8a Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Sun, 31 Jan 2021 16:32:32 +0530 Subject: [PATCH 19/21] Make PR review changes for PR #290 * Correct typo in severity_systems.py * Use typo for scoring fields instead of list in models.py * Handle absence of bugzilla and RHSA better in redhat.py Signed-off-by: Shivam Sandbhor --- vulnerabilities/importers/redhat.py | 6 +++--- vulnerabilities/models.py | 2 +- vulnerabilities/severity_systems.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/vulnerabilities/importers/redhat.py b/vulnerabilities/importers/redhat.py index 2c25a3d4e..ad4272557 100644 --- a/vulnerabilities/importers/redhat.py +++ b/vulnerabilities/importers/redhat.py @@ -76,8 +76,8 @@ def to_advisory(advisory_data): affected_purls.append(rpm_to_purl(rpm)) references = [] - if advisory_data.get("bugzilla"): - bugzilla = advisory_data.get("bugzilla") + bugzilla = advisory_data.get("bugzilla") + if bugzilla: url = "https://bugzilla.redhat.com/show_bug.cgi?id={}".format(bugzilla) bugzilla_data = requests.get(f"https://bugzilla.redhat.com/rest/bug/{bugzilla}").json() bugzilla_severity_val = bugzilla_data["bugs"][0]["severity"] @@ -98,7 +98,7 @@ def to_advisory(advisory_data): # RH provides 3 types of advisories RHSA, RHBA, RHEA. Only RHSA's contain severity score. # See https://access.redhat.com/articles/2130961 for more details. 
- if "RHSA" in rh_adv: + if "RHSA" in rh_adv.upper(): rhsa_data = requests.get(f"https://access.redhat.com/hydra/rest/securitydata/cvrf/{rh_adv}.json").json() # nopep8 value = rhsa_data["cvrfdoc"]["aggregate_severity"] rhsa_aggregate_severity = VulnerabilitySeverity( diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 569b6a6c1..8269d8a1f 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -204,7 +204,7 @@ def __str__(self): class VulnerabilitySeverity(models.Model): - scoring_system_choices = [(system.identifier, system.name) for system in scoring_systems.values()] # nopep8 + scoring_system_choices = ((system.identifier, system.name) for system in scoring_systems.values()) # nopep8 vulnerability = models.ForeignKey(Vulnerability, on_delete=models.CASCADE) value = models.CharField(max_length=50, help_text="Example: 9.0, Important, High") scoring_system = models.CharField( diff --git a/vulnerabilities/severity_systems.py b/vulnerabilities/severity_systems.py index 758a78c9a..7b09ece63 100644 --- a/vulnerabilities/severity_systems.py +++ b/vulnerabilities/severity_systems.py @@ -17,7 +17,7 @@ class ScoringSystem: def as_score(self, value): """ Return a normalized numeric score for this scoring system given a raw - value. For instance htis can be used to convert a CVSS vector to a base + value. For instance this can be used to convert a CVSS vector to a base score. 
""" raise NotImplementedError From d78cd0f2541d069839165f012a50087443130797 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Wed, 3 Feb 2021 14:06:54 +0530 Subject: [PATCH 20/21] Add scoring system for vectors Signed-off-by: Shivam Sandbhor --- vulnerabilities/admin.py | 5 + vulnerabilities/importers/nvd.py | 12 + vulnerabilities/importers/redhat.py | 30 ++- vulnerabilities/migrations/0001_initial.py | 26 ++- .../0002_vulnerabilityseverityscore.py | 24 -- .../migrations/0003_auto_20201127_1423.py | 27 --- .../migrations/0004_auto_20201129_1024.py | 17 -- .../migrations/0005_auto_20201218_0824.py | 27 --- .../migrations/0006_auto_20210123_0616.py | 27 --- vulnerabilities/models.py | 2 +- vulnerabilities/severity_systems.py | 25 +- vulnerabilities/tests/test_data/redhat.json | 49 +--- vulnerabilities/tests/test_nvd.py | 27 ++- vulnerabilities/tests/test_redhat_importer.py | 217 ++++-------------- 14 files changed, 145 insertions(+), 370 deletions(-) delete mode 100644 vulnerabilities/migrations/0002_vulnerabilityseverityscore.py delete mode 100644 vulnerabilities/migrations/0003_auto_20201127_1423.py delete mode 100644 vulnerabilities/migrations/0004_auto_20201129_1024.py delete mode 100644 vulnerabilities/migrations/0005_auto_20201218_0824.py delete mode 100644 vulnerabilities/migrations/0006_auto_20210123_0616.py diff --git a/vulnerabilities/admin.py b/vulnerabilities/admin.py index d2271d1fc..3aee83bf5 100644 --- a/vulnerabilities/admin.py +++ b/vulnerabilities/admin.py @@ -29,6 +29,7 @@ Package, Vulnerability, VulnerabilityReference, + VulnerabilitySeverity ) @@ -55,3 +56,7 @@ class PackageRelatedVulnerabilityAdmin(admin.ModelAdmin): @admin.register(Importer) class ImporterAdmin(admin.ModelAdmin): pass + +@admin.register(VulnerabilitySeverity) +class VulnerabilitySeverityAdmin(admin.ModelAdmin): + pass diff --git a/vulnerabilities/importers/nvd.py b/vulnerabilities/importers/nvd.py index 6e2e66973..9ada9d452 100644 --- a/vulnerabilities/importers/nvd.py +++ 
b/vulnerabilities/importers/nvd.py @@ -111,6 +111,12 @@ def extract_severity_scores(cve_item): value=str(cve_item["impact"]["baseMetricV3"]["cvssV3"]["baseScore"]), ) ) + severity_scores.append( + VulnerabilitySeverity( + system=scoring_systems["cvssv3_vector"], + value=str(cve_item["impact"]["baseMetricV3"]["cvssV3"]["vectorString"]), + ) + ) if cve_item["impact"].get("baseMetricV2"): severity_scores.append( @@ -119,6 +125,12 @@ def extract_severity_scores(cve_item): value=str(cve_item["impact"]["baseMetricV2"]["cvssV2"]["baseScore"]), ) ) + severity_scores.append( + VulnerabilitySeverity( + system=scoring_systems["cvssv2_vector"], + value=str(cve_item["impact"]["baseMetricV2"]["cvssV2"]["vectorString"]), + ) + ) return severity_scores diff --git a/vulnerabilities/importers/redhat.py b/vulnerabilities/importers/redhat.py index ad4272557..9b9a2b045 100644 --- a/vulnerabilities/importers/redhat.py +++ b/vulnerabilities/importers/redhat.py @@ -43,7 +43,7 @@ def __enter__(self): def updated_advisories(self): processed_advisories = [] for advisory_data in self.redhat_response: - processed_advisories.extend(to_advisory(advisory_data)) + processed_advisories.append(to_advisory(advisory_data)) return self.batch_advisories(processed_advisories) @@ -55,7 +55,6 @@ def fetch(): url = "https://access.redhat.com/hydra/rest/securitydata/cve.json?page={}" while True: - resp_json = requests.get(url.format(page_no)).json() page_no += 1 if not resp_json: @@ -68,7 +67,6 @@ def fetch(): def to_advisory(advisory_data): - affected_purls = [] if advisory_data.get("affected_packages"): for rpm in advisory_data["affected_packages"]: @@ -117,14 +115,26 @@ def to_advisory(advisory_data): else: references.append(Reference(severities=[], url=url, reference_id=rh_adv)) - redhat_cve_entry = requests.get(advisory_data["resource_url"]).json() - redhat_cvss = redhat_cve_entry["cvss3"]["cvss3_base_score"] - redhat_cvss3 = VulnerabilitySeverity( - system=scoring_systems["rh_cvssv3"], - 
value=redhat_cvss, - ) + redhat_scores = [] + cvssv3_score = advisory_data.get("cvss3_score") + if cvssv3_score: + redhat_scores.append( + VulnerabilitySeverity( + system=scoring_systems["cvssv3"], + value=cvssv3_score, + ) + ) + + cvssv3_vector = advisory_data.get("cvss3_scoring_vector") + if cvssv3_vector: + redhat_scores.append( + VulnerabilitySeverity( + system=scoring_systems["cvssv3_vector"], + value=cvssv3_vector, + ) + ) - references.append(Reference(severities=[redhat_cvss3], url=advisory_data["resource_url"])) + references.append(Reference(severities=redhat_scores, url=advisory_data["resource_url"])) return Advisory( summary=advisory_data["bugzilla_description"], diff --git a/vulnerabilities/migrations/0001_initial.py b/vulnerabilities/migrations/0001_initial.py index 43c60f21d..4a3e25b6b 100644 --- a/vulnerabilities/migrations/0001_initial.py +++ b/vulnerabilities/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.7 on 2021-01-21 16:32 +# Generated by Django 3.0.7 on 2021-02-03 07:30 import django.contrib.postgres.fields.jsonb from django.db import migrations, models @@ -49,12 +49,24 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('cve_id', models.CharField(help_text='CVE ID', max_length=50, null=True, unique=True)), ('summary', models.TextField(blank=True, help_text='Summary of the vulnerability')), - ('cvss', models.FloatField(help_text='CVSS Score', max_length=100, null=True)), ], options={ 'verbose_name_plural': 'Vulnerabilities', }, ), + migrations.CreateModel( + name='VulnerabilityReference', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('source', models.CharField(blank=True, help_text='Source(s) name eg:NVD', max_length=50)), + ('reference_id', models.CharField(blank=True, help_text='Reference ID, eg:DSA-4465-1', max_length=50)), + ('url', models.URLField(blank=True, 
help_text='URL of Vulnerability data', max_length=1024)), + ('vulnerability', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.Vulnerability')), + ], + options={ + 'unique_together': {('vulnerability', 'source', 'reference_id', 'url')}, + }, + ), migrations.CreateModel( name='PackageRelatedVulnerability', fields=[ @@ -74,16 +86,16 @@ class Migration(migrations.Migration): field=models.ManyToManyField(through='vulnerabilities.PackageRelatedVulnerability', to='vulnerabilities.Vulnerability'), ), migrations.CreateModel( - name='VulnerabilityReference', + name='VulnerabilitySeverity', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('source', models.CharField(blank=True, help_text='Source(s) name eg:NVD', max_length=50)), - ('reference_id', models.CharField(blank=True, help_text='Reference ID, eg:DSA-4465-1', max_length=50)), - ('url', models.URLField(blank=True, help_text='URL of Vulnerability data', max_length=1024)), + ('value', models.CharField(help_text='Example: 9.0, Important, High', max_length=50)), + ('scoring_system', models.CharField(choices=[('cvssv2', 'CVSSv2'), ('cvssv3', 'CVSSv3'), ('rhbs', 'RedHat Bugzilla severity'), ('rhas', 'RedHat Aggregate severity')], help_text='Identifier for the scoring system used. 
Avaiable choices are: cvssv2 is identifier for CVSSv2 system, cvssv3 is identifier for CVSSv3 system, rhbs is identifier for RedHat Bugzilla severity system, rhas is identifier for RedHat Aggregate severity system ', max_length=50)), + ('reference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.VulnerabilityReference')), ('vulnerability', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.Vulnerability')), ], options={ - 'unique_together': {('vulnerability', 'source', 'reference_id', 'url')}, + 'unique_together': {('vulnerability', 'reference', 'scoring_system')}, }, ), migrations.AlterUniqueTogether( diff --git a/vulnerabilities/migrations/0002_vulnerabilityseverityscore.py b/vulnerabilities/migrations/0002_vulnerabilityseverityscore.py deleted file mode 100644 index 2c75f9810..000000000 --- a/vulnerabilities/migrations/0002_vulnerabilityseverityscore.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by Django 3.0.7 on 2020-11-27 13:43 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('vulnerabilities', '0001_initial'), - ] - - operations = [ - migrations.CreateModel( - name='VulnerabilitySeverityScore', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('system', models.CharField(help_text='Example: CVSS v2, Redhat Impact Score', max_length=50)), - ('severity_score', models.CharField(help_text='Example: 9.0, Important, High', max_length=50)), - ('reference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.VulnerabilityReference')), - ('vulnerability', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.Vulnerability')), - ], - ), - ] diff --git a/vulnerabilities/migrations/0003_auto_20201127_1423.py b/vulnerabilities/migrations/0003_auto_20201127_1423.py deleted file mode 
100644 index d1b200852..000000000 --- a/vulnerabilities/migrations/0003_auto_20201127_1423.py +++ /dev/null @@ -1,27 +0,0 @@ -# Generated by Django 3.0.7 on 2020-11-27 14:23 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('vulnerabilities', '0002_vulnerabilityseverityscore'), - ] - - operations = [ - migrations.CreateModel( - name='VulnerabilitySeverity', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('system', models.CharField(help_text='Example: CVSS v2, Redhat Impact Score', max_length=50)), - ('value', models.CharField(help_text='Example: 9.0, Important, High', max_length=50)), - ('reference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.VulnerabilityReference')), - ('vulnerability', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vulnerabilities.Vulnerability')), - ], - ), - migrations.DeleteModel( - name='VulnerabilitySeverityScore', - ), - ] diff --git a/vulnerabilities/migrations/0004_auto_20201129_1024.py b/vulnerabilities/migrations/0004_auto_20201129_1024.py deleted file mode 100644 index 2b8f2e912..000000000 --- a/vulnerabilities/migrations/0004_auto_20201129_1024.py +++ /dev/null @@ -1,17 +0,0 @@ -# Generated by Django 3.0.7 on 2020-11-29 10:24 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('vulnerabilities', '0003_auto_20201127_1423'), - ] - - operations = [ - migrations.AlterUniqueTogether( - name='vulnerabilityseverity', - unique_together={('vulnerability', 'reference', 'system')}, - ), - ] diff --git a/vulnerabilities/migrations/0005_auto_20201218_0824.py b/vulnerabilities/migrations/0005_auto_20201218_0824.py deleted file mode 100644 index 690a2e3b2..000000000 --- a/vulnerabilities/migrations/0005_auto_20201218_0824.py +++ /dev/null @@ -1,27 +0,0 @@ -# Generated by Django 
3.0.7 on 2020-12-18 08:24 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('vulnerabilities', '0004_auto_20201129_1024'), - ] - - operations = [ - migrations.AddField( - model_name='vulnerabilityseverity', - name='scoring_system_identifier', - field=models.CharField(default='', help_text='Example: cvssv2', max_length=50), - preserve_default=False, - ), - migrations.AlterUniqueTogether( - name='vulnerabilityseverity', - unique_together={('vulnerability', 'reference', 'scoring_system_identifier')}, - ), - migrations.RemoveField( - model_name='vulnerabilityseverity', - name='system', - ), - ] diff --git a/vulnerabilities/migrations/0006_auto_20210123_0616.py b/vulnerabilities/migrations/0006_auto_20210123_0616.py deleted file mode 100644 index 101e7860b..000000000 --- a/vulnerabilities/migrations/0006_auto_20210123_0616.py +++ /dev/null @@ -1,27 +0,0 @@ -# Generated by Django 3.0.7 on 2021-01-23 06:16 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('vulnerabilities', '0005_auto_20201218_0824'), - ] - - operations = [ - migrations.AddField( - model_name='vulnerabilityseverity', - name='scoring_system', - field=models.CharField(choices=[('cvssv2', 'CVSSv2'), ('cvssv3', 'CVSSv3'), ('rhbs', 'RedHat Bugzilla severity'), ('rhas', 'RedHat Aggregate severity'), ('rh_cvssv3', 'RedHat CVSSv3')], default='', help_text='Scoring system used for this score', max_length=50), - preserve_default=False, - ), - migrations.AlterUniqueTogether( - name='vulnerabilityseverity', - unique_together={('vulnerability', 'reference', 'scoring_system')}, - ), - migrations.RemoveField( - model_name='vulnerabilityseverity', - name='scoring_system_identifier', - ), - ] diff --git a/vulnerabilities/models.py b/vulnerabilities/models.py index 8269d8a1f..4e0075fd4 100644 --- a/vulnerabilities/models.py +++ b/vulnerabilities/models.py @@ -210,7 +210,7 @@ class 
VulnerabilitySeverity(models.Model): scoring_system = models.CharField( max_length=50, choices=scoring_system_choices, - help_text="Identifier for the scoring system used. Avaiable choices are: {} ".format( + help_text="Identifier for the scoring system used. Available choices are: {} ".format( ", ".join( [ f"{ss.identifier} is identifier for {ss.name} system" diff --git a/vulnerabilities/severity_systems.py b/vulnerabilities/severity_systems.py index 7b09ece63..5849d8688 100644 --- a/vulnerabilities/severity_systems.py +++ b/vulnerabilities/severity_systems.py @@ -26,15 +26,27 @@ def as_score(self, value): scoring_systems = { "cvssv2": ScoringSystem( identifier="cvssv2", - name="CVSSv2", + name="CVSSv2 Base Score", url="https://www.first.org/cvss/v2/", - notes="We store the vector as value and compute scores from that.", + notes="cvssv2 base score", + ), + "cvssv2_vector": ScoringSystem( + identifier="cvssv2_vector", + name="CVSSv2 Vector", + url="https://www.first.org/cvss/v2/", + notes="cvssv2 vector, used to get additional info about nature and severity of vulnerability", ), "cvssv3": ScoringSystem( identifier="cvssv3", - name="CVSSv3", + name="CVSSv3 Base Score", + url="https://www.first.org/cvss/v3-0/", + notes="cvssv3 base score", + ), + "cvssv3_vector": ScoringSystem( + identifier="cvssv3_vector", + name="CVSSv3 Vector", url="https://www.first.org/cvss/v3-0/", - notes="We store the vector as value and compute scores from that.", + notes="cvssv3 vector, used to get additional info about nature and severity of vulnerability", ), "rhbs": ScoringSystem( identifier="rhbs", @@ -46,9 +58,4 @@ def as_score(self, value): name="RedHat Aggregate severity", url="https://access.redhat.com/security/updates/classification/", ), - "rh_cvssv3": ScoringSystem( - identifier="rh_cvssv3", - name="RedHat CVSSv3", - url="https://access.redhat.com/security/updates/classification/", - ), } diff --git a/vulnerabilities/tests/test_data/redhat.json 
b/vulnerabilities/tests/test_data/redhat.json index 0c47cc706..9659eacce 100644 --- a/vulnerabilities/tests/test_data/redhat.json +++ b/vulnerabilities/tests/test_data/redhat.json @@ -1,51 +1,4 @@ - [ { "CVE":"CVE-2016-10200", - "severity":"important", - "public_date":"2016-11-18T00:00:00Z", - "advisories":[ - "RHSA-2017:1842", - "RHSA-2017:2437", - "RHSA-2017:2077", - "RHSA-2017:2444" - - ], - "bugzilla":"1430347", - "bugzilla_description":"CVE-2016-10200 kernel: l2tp: Race condition in the L2TPv3 IP encapsulation feature", - "cvss_score":null, - "cvss_scoring_vector":null, - "CWE":"CWE-362", - "affected_packages":[ - "kernel-rt-0:3.10.0-693.rt56.617.el7", - "kernel-0:3.10.0-693.el7", - "kernel-0:3.10.0-514.28.1.el7", - "kernel-rt-1:3.10.0-514.rt56.231.el6rt" - - ], - "resource_url":"https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-10200.json", - "cvss3_scoring_vector":"CVSS:3.0/AV:L/AC:H/PR:L/UI:N/S:U/C:H/I:H/A:H", - "cvss3_score":"7.0" - - }, - { "CVE":"CVE-2017-12168", - "severity":"moderate", - "public_date":"2016-11-18T00:00:00Z", - "advisories":[ - - - ], - "bugzilla":"1492984", - "bugzilla_description":"CVE-2017-12168 Kernel: kvm: ARM64: assert failure when accessing PMCCNTR register", - "cvss_score":5.2, - "cvss_scoring_vector":"AV:A/AC:M/Au:S/C:N/I:N/A:C", - "CWE":"CWE-617", - "affected_packages":[ - - - ], - "resource_url":"https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2017-12168.json", - "cvss3_scoring_vector":"CVSS:3.0/AV:A/AC:L/PR:H/UI:N/S:C/C:N/I:N/A:H", - "cvss3_score":"6.2" - - }, + [ { "CVE":"CVE-2016-9401", "severity":"low", "public_date":"2016-11-17T00:00:00Z", diff --git a/vulnerabilities/tests/test_nvd.py b/vulnerabilities/tests/test_nvd.py index f7a8c8534..5a89918b6 100644 --- a/vulnerabilities/tests/test_nvd.py +++ b/vulnerabilities/tests/test_nvd.py @@ -29,7 +29,7 @@ from vulnerabilities.data_source import Reference from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import 
VulnerabilitySeverity -from vulnerabilities.severity_systems import scoring_systems +from vulnerabilities.severity_systems import ScoringSystem BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data/nvd/nvd_test.json") @@ -146,9 +146,26 @@ def test_to_advisories(self): url="http://kqueue.org/blog/2012/03/05/memory-allocator-security-revisited/", # nopep8 ), Reference( - url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", # nopep8 + url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", severities=[ - VulnerabilitySeverity(system=scoring_systems["cvssv2"], value="5.0") + VulnerabilitySeverity( + system=ScoringSystem( + identifier="cvssv2", + name="CVSSv2 Base Score", + url="https://www.first.org/cvss/v2/", + notes="cvssv2 base score", + ), + value="5.0", + ), + VulnerabilitySeverity( + system=ScoringSystem( + identifier="cvssv2_vector", + name="CVSSv2 Vector", + url="https://www.first.org/cvss/v2/", + notes="cvssv2 vector, used to get additional info about nature and severity of vulnerability", + ), + value="AV:N/AC:L/Au:N/C:N/I:N/A:P", + ), ], reference_id="CVE-2005-4895", ), @@ -163,5 +180,7 @@ def test_to_advisories(self): found_advisories = list(self.data_src.to_advisories(self.nvd_data)) # Only 1 advisory because other advisory is hardware related assert len(found_advisories) == 1 - found_advisories[0].vuln_references = sorted(found_advisories[0].vuln_references, key=lambda x: x.url) # nopep8 + found_advisories[0].vuln_references = sorted( + found_advisories[0].vuln_references, key=lambda x: x.url + ) # nopep8 assert expected_advisories == found_advisories diff --git a/vulnerabilities/tests/test_redhat_importer.py b/vulnerabilities/tests/test_redhat_importer.py index 78ac53b83..bf4326efc 100644 --- a/vulnerabilities/tests/test_redhat_importer.py +++ b/vulnerabilities/tests/test_redhat_importer.py @@ -1,4 +1,4 @@ -# Copyright (c) 2017 nexB Inc. and others. All rights reserved. +# Copyright (c) nexB Inc. and others. 
All rights reserved. # http://nexb.com and https://github.com/nexB/vulnerablecode/ # The VulnerableCode software is licensed under the Apache License version 2.0. # Data generated with VulnerableCode require an acknowledgment. @@ -31,7 +31,7 @@ from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import Reference from vulnerabilities.data_source import VulnerabilitySeverity -from vulnerabilities.severity_systems import scoring_systems +from vulnerabilities.severity_systems import scoring_systems, ScoringSystem BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data/", "redhat.json") @@ -67,7 +67,7 @@ def test_to_advisory(self): namespace="redhat", name="bash", version="4.2.46-28.el7", - qualifiers=OrderedDict(), + qualifiers={}, subpath=None, ), PackageURL( @@ -75,179 +75,59 @@ def test_to_advisory(self): namespace="redhat", name="bash", version="4.1.2-48.el6", - qualifiers=OrderedDict(), + qualifiers={}, subpath=None, ), ], resolved_package_urls=[], - vuln_references=sorted( - [ - Reference( - url="https://bugzilla.redhat.com/show_bug.cgi?id=1396383", - reference_id="1396383", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rhbs"], value=2.0 - ) - ], - ), - Reference( - url="https://access.redhat.com/errata/RHSA-2017:1931", - reference_id="RHSA-2017:1931", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rhas"], value=2.2 - ) - ], - ), - Reference( - url="https://access.redhat.com/errata/RHSA-2017:0725", - reference_id="RHSA-2017:0725", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rhas"], value=2.2 - ) - ], - ), - Reference( - url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-9401.json", # nopep8 - reference_id="", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rh_cvssv3"], value=6.0 - ) - ], - ), - ], - key=lambda x: x.url, - ), - cve_id="CVE-2016-9401", - ), - Advisory( - summary=( - "CVE-2016-10200 
kernel: l2tp: Race condition " - "in the L2TPv3 IP encapsulation feature" - ), - impacted_package_urls=[ - PackageURL( - type="rpm", - namespace="redhat", - name="kernel-rt", - version="3.10.0-693.rt56.617.el7", - qualifiers=OrderedDict(), - subpath=None, + vuln_references=[ + Reference( + reference_id="RHSA-2017:0725", + url="https://access.redhat.com/errata/RHSA-2017:0725", + severities=[ + VulnerabilitySeverity( + system=scoring_systems["rhas"], + value=2.2, + ) + ], ), - PackageURL( - type="rpm", - namespace="redhat", - name="kernel", - version="3.10.0-693.el7", - qualifiers=OrderedDict(), - subpath=None, + Reference( + reference_id="RHSA-2017:1931", + url="https://access.redhat.com/errata/RHSA-2017:1931", + severities=[ + VulnerabilitySeverity( + system=scoring_systems["rhas"], + value=2.2, + ) + ], ), - PackageURL( - type="rpm", - namespace="redhat", - name="kernel", - version="3.10.0-514.28.1.el7", - qualifiers=OrderedDict(), - subpath=None, + Reference( + reference_id="", + url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-9401.json", + severities=[ + VulnerabilitySeverity( + system=scoring_systems["cvssv3"], + value="3.3", + ), + VulnerabilitySeverity( + system=scoring_systems["cvssv3_vector"], + value="CVSS:3.0/AV:L/AC:L/PR:L/UI:N/S:U/C:N/I:N/A:L", + ), + ], + ), + Reference( + reference_id="1396383", + url="https://bugzilla.redhat.com/show_bug.cgi?id=1396383", + severities=[ + VulnerabilitySeverity( + system=scoring_systems["rhbs"], + value=2.0, + ) + ], ), ], - resolved_package_urls=[], - vuln_references=sorted( - [ - Reference( - url="https://bugzilla.redhat.com/show_bug.cgi?id=1430347", - reference_id="1430347", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rhbs"], value=2.0 - ) - ], - ), - Reference( - url="https://access.redhat.com/errata/RHSA-2017:1842", - reference_id="RHSA-2017:1842", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rhas"], value=2.2 - ) - ], - ), - Reference( - 
url="https://access.redhat.com/errata/RHSA-2017:2437", - reference_id="RHSA-2017:2437", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rhas"], value=2.2 - ) - ], - ), - Reference( - url="https://access.redhat.com/errata/RHSA-2017:2077", - reference_id="RHSA-2017:2077", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rhas"], value=2.2 - ) - ], - ), - Reference( - url="https://access.redhat.com/errata/RHSA-2017:2444", - reference_id="RHSA-2017:2444", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rhas"], value=2.2 - ) - ], - ), - Reference( - url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-10200.json", # nopep8 - reference_id="", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rh_cvssv3"], value=6.0 - ) - ], - ), - ], - key=lambda x: x.url, - ), - cve_id="CVE-2016-10200", - ), - Advisory( - summary=( - "CVE-2017-12168 Kernel: kvm: ARM64: " - "assert failure when accessing PMCCNTR register" - ), - impacted_package_urls=[], - resolved_package_urls=[], - vuln_references=sorted( - [ - Reference( - url="https://bugzilla.redhat.com/show_bug.cgi?id=1492984", - reference_id="1492984", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rhbs"], value=2.0 - ) - ], - ), - Reference( - url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2017-12168.json", # nopep8 - reference_id="", - severities=[ - VulnerabilitySeverity( - system=scoring_systems["rh_cvssv3"], value=6.0 - ) - ], - ), - ], - key=lambda x: x.url, - ), - cve_id="CVE-2017-12168", - ), + cve_id="CVE-2016-9401", + ) } found_data = set() @@ -255,7 +135,6 @@ def test_to_advisory(self): mock_resp.json = lambda: { "bugs": [{"severity": 2.0}], "cvrfdoc": {"aggregate_severity": 2.2}, - "cvss3": {"cvss3_base_score": 6.0}, } for adv in data: with unittest.mock.patch( From 8d194632d40178c279f70ca8ac960e288bde7df4 Mon Sep 17 00:00:00 2001 From: Shivam Sandbhor Date: Wed, 3 Feb 2021 14:15:38 +0530 Subject: [PATCH 
21/21] Fix tests Signed-off-by: Shivam Sandbhor --- vulnerabilities/admin.py | 3 ++- vulnerabilities/severity_systems.py | 4 ++-- vulnerabilities/tests/test_nvd.py | 16 +++------------- vulnerabilities/tests/test_redhat_importer.py | 2 +- 4 files changed, 8 insertions(+), 17 deletions(-) diff --git a/vulnerabilities/admin.py b/vulnerabilities/admin.py index 3aee83bf5..5f69c1a68 100644 --- a/vulnerabilities/admin.py +++ b/vulnerabilities/admin.py @@ -29,7 +29,7 @@ Package, Vulnerability, VulnerabilityReference, - VulnerabilitySeverity + VulnerabilitySeverity, ) @@ -57,6 +57,7 @@ class PackageRelatedVulnerabilityAdmin(admin.ModelAdmin): class ImporterAdmin(admin.ModelAdmin): pass + @admin.register(VulnerabilitySeverity) class VulnerabilitySeverityAdmin(admin.ModelAdmin): pass diff --git a/vulnerabilities/severity_systems.py b/vulnerabilities/severity_systems.py index 5849d8688..cfa76740d 100644 --- a/vulnerabilities/severity_systems.py +++ b/vulnerabilities/severity_systems.py @@ -34,7 +34,7 @@ def as_score(self, value): identifier="cvssv2_vector", name="CVSSv2 Vector", url="https://www.first.org/cvss/v2/", - notes="cvssv2 vector, used to get additional info about nature and severity of vulnerability", + notes="cvssv2 vector, used to get additional info about nature and severity of vulnerability", # nopep8 ), "cvssv3": ScoringSystem( identifier="cvssv3", @@ -46,7 +46,7 @@ def as_score(self, value): identifier="cvssv3_vector", name="CVSSv3 Vector", url="https://www.first.org/cvss/v3-0/", - notes="cvssv3 vector, used to get additional info about nature and severity of vulnerability", + notes="cvssv3 vector, used to get additional info about nature and severity of vulnerability", # nopep8 ), "rhbs": ScoringSystem( identifier="rhbs", diff --git a/vulnerabilities/tests/test_nvd.py b/vulnerabilities/tests/test_nvd.py index 5a89918b6..d385ee3ac 100644 --- a/vulnerabilities/tests/test_nvd.py +++ b/vulnerabilities/tests/test_nvd.py @@ -29,7 +29,7 @@ from 
vulnerabilities.data_source import Reference from vulnerabilities.data_source import Advisory from vulnerabilities.data_source import VulnerabilitySeverity -from vulnerabilities.severity_systems import ScoringSystem +from vulnerabilities.severity_systems import scoring_systems BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data/nvd/nvd_test.json") @@ -149,21 +149,11 @@ def test_to_advisories(self): url="https://nvd.nist.gov/vuln/detail/CVE-2005-4895", severities=[ VulnerabilitySeverity( - system=ScoringSystem( - identifier="cvssv2", - name="CVSSv2 Base Score", - url="https://www.first.org/cvss/v2/", - notes="cvssv2 base score", - ), + system=scoring_systems["cvssv2"], value="5.0", ), VulnerabilitySeverity( - system=ScoringSystem( - identifier="cvssv2_vector", - name="CVSSv2 Vector", - url="https://www.first.org/cvss/v2/", - notes="cvssv2 vector, used to get additional info about nature and severity of vulnerability", - ), + system=scoring_systems["cvssv2_vector"], value="AV:N/AC:L/Au:N/C:N/I:N/A:P", ), ], diff --git a/vulnerabilities/tests/test_redhat_importer.py b/vulnerabilities/tests/test_redhat_importer.py index bf4326efc..d39d4efc1 100644 --- a/vulnerabilities/tests/test_redhat_importer.py +++ b/vulnerabilities/tests/test_redhat_importer.py @@ -103,7 +103,7 @@ def test_to_advisory(self): ), Reference( reference_id="", - url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-9401.json", + url="https://access.redhat.com/hydra/rest/securitydata/cve/CVE-2016-9401.json", # nopep8 severities=[ VulnerabilitySeverity( system=scoring_systems["cvssv3"],