Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
157 changes: 0 additions & 157 deletions vulnerabilities/importers/apache_httpd.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,9 @@

import logging
import urllib
from datetime import datetime
from typing import Iterable
from typing import List
from typing import Mapping
from typing import Optional

import requests
from bs4 import BeautifulSoup
from django.db.models.query import QuerySet
from packageurl import PackageURL
from univers.version_constraint import VersionConstraint
from univers.version_range import ApacheVersionRange
Expand All @@ -27,18 +21,8 @@
from vulnerabilities.importer import AffectedPackage
from vulnerabilities.importer import Importer
from vulnerabilities.importer import Reference
from vulnerabilities.importer import UnMergeablePackageError
from vulnerabilities.importer import VulnerabilitySeverity
from vulnerabilities.improver import Improver
from vulnerabilities.improver import Inference
from vulnerabilities.models import Advisory
from vulnerabilities.package_managers import GitHubTagsAPI
from vulnerabilities.package_managers import VersionAPI
from vulnerabilities.severity_systems import APACHE_HTTPD
from vulnerabilities.utils import AffectedPackage as LegacyAffectedPackage
from vulnerabilities.utils import get_affected_packages_by_patched_package
from vulnerabilities.utils import nearest_patched_package
from vulnerabilities.utils import resolve_version_range

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -163,144 +147,3 @@ def fetch_links(url):
continue
links.append(urllib.parse.urljoin(url, link))
return links


# Tags present in the apache/httpd git repository that do not correspond to
# release versions (betas, alphas, merge markers, internal milestones, …).
# These are passed as ``ignorable_versions`` to ``resolve_version_range`` so
# they are excluded when mapping a version range onto concrete versions.
IGNORE_TAGS = {
    "AGB_BEFORE_AAA_CHANGES",
    "APACHE_1_2b1",
    "APACHE_1_2b10",
    "APACHE_1_2b11",
    "APACHE_1_2b2",
    "APACHE_1_2b3",
    "APACHE_1_2b4",
    "APACHE_1_2b5",
    "APACHE_1_2b6",
    "APACHE_1_2b7",
    "APACHE_1_2b8",
    "APACHE_1_2b9",
    "APACHE_1_3_PRE_NT",
    "APACHE_1_3a1",
    "APACHE_1_3b1",
    "APACHE_1_3b2",
    "APACHE_1_3b3",
    "APACHE_1_3b5",
    "APACHE_1_3b6",
    "APACHE_1_3b7",
    "APACHE_2_0_2001_02_09",
    "APACHE_2_0_52_WROWE_RC1",
    "APACHE_2_0_ALPHA",
    "APACHE_2_0_ALPHA_2",
    "APACHE_2_0_ALPHA_3",
    "APACHE_2_0_ALPHA_4",
    "APACHE_2_0_ALPHA_5",
    "APACHE_2_0_ALPHA_6",
    "APACHE_2_0_ALPHA_7",
    "APACHE_2_0_ALPHA_8",
    "APACHE_2_0_ALPHA_9",
    "APACHE_2_0_BETA_CANDIDATE_1",
    "APACHE_BIG_SYMBOL_RENAME_POST",
    "APACHE_BIG_SYMBOL_RENAME_PRE",
    "CHANGES",
    "HTTPD_LDAP_1_0_0",
    "INITIAL",
    "MOD_SSL_2_8_3",
    "PCRE_3_9",
    "POST_APR_SPLIT",
    "PRE_APR_CHANGES",
    "STRIKER_2_0_51_RC1",
    "STRIKER_2_0_51_RC2",
    "STRIKER_2_1_0_RC1",
    "WROWE_2_0_43_PRE1",
    "apache-1_3-merge-1-post",
    "apache-1_3-merge-1-pre",
    "apache-1_3-merge-2-post",
    "apache-1_3-merge-2-pre",
    "apache-apr-merge-3",
    "apache-doc-split-01",
    "dg_last_1_2_doc_merge",
    "djg-apache-nspr-07",
    "djg_nspr_split",
    "moving_to_httpd_module",
    "mpm-3",
    "mpm-merge-1",
    "mpm-merge-2",
    "post_ajp_proxy",
    "pre_ajp_proxy",
}


class ApacheHTTPDImprover(Improver):
    """
    Improver that turns Apache HTTPD advisories into concrete Inferences by
    resolving each affected version range against the set of version tags of
    the apache/httpd GitHub repository.
    """

    def __init__(self) -> None:
        # Per-purl caches so the GitHub tags are fetched at most once per purl.
        # NOTE(review): versions_fetcher_by_purl is never read in this class;
        # kept for interface compatibility — confirm against other improvers.
        self.versions_fetcher_by_purl: Mapping[str, VersionAPI] = {}
        # Fixed the misspelled ``vesions_by_purl`` attribute name.
        self.versions_by_purl = {}

    @property
    def interesting_advisories(self) -> QuerySet:
        """Return the Advisories created by the Apache HTTPD importer."""
        return Advisory.objects.filter(created_by=ApacheHTTPDImporter.qualified_name)

    def get_package_versions(
        self, package_url: PackageURL, until: Optional[datetime] = None
    ) -> List[str]:
        """
        Return a list of `valid_versions` for the `package_url` published
        up to `until`.

        Note: versions are always fetched from the "apache/httpd" GitHub
        repository regardless of ``package_url``; the parameter is kept for
        interface compatibility.
        """
        versions_fetcher = GitHubTagsAPI()
        return versions_fetcher.get_until(
            package_name="apache/httpd", until=until
        ).valid_versions

    def get_inferences(self, advisory_data: AdvisoryData) -> Iterable[Inference]:
        """
        Yield Inferences for the given advisory data.
        """
        if not advisory_data.affected_packages:
            return
        try:
            purl, affected_version_ranges, _ = AffectedPackage.merge(
                advisory_data.affected_packages
            )
        except UnMergeablePackageError:
            logger.error(f"Cannot merge with different purls {advisory_data.affected_packages!r}")
            # A bare ``return`` is all that is needed to end a generator;
            # ``return iter([])`` was redundant.
            return

        pkg_type = purl.type
        pkg_namespace = purl.namespace
        pkg_name = purl.name

        # Cache valid versions per purl. Membership test (instead of a
        # truthiness check on .get()) also caches empty results, avoiding a
        # refetch for purls that legitimately have no valid versions.
        if str(purl) not in self.versions_by_purl:
            self.versions_by_purl[str(purl)] = self.get_package_versions(
                package_url=purl, until=advisory_data.date_published
            )
        valid_versions = self.versions_by_purl[str(purl)]

        for affected_version_range in affected_version_ranges:
            aff_vers, unaff_vers = resolve_version_range(
                affected_version_range=affected_version_range,
                package_versions=valid_versions,
                ignorable_versions=IGNORE_TAGS,
            )
            affected_purls = [
                PackageURL(type=pkg_type, namespace=pkg_namespace, name=pkg_name, version=version)
                for version in aff_vers
            ]

            unaffected_purls = [
                PackageURL(type=pkg_type, namespace=pkg_namespace, name=pkg_name, version=version)
                for version in unaff_vers
            ]

            affected_packages: List[LegacyAffectedPackage] = nearest_patched_package(
                vulnerable_packages=affected_purls, resolved_packages=unaffected_purls
            )

            # Use a distinct name for the per-fixed-package group instead of
            # shadowing ``affected_packages`` with the loop variable.
            for (
                fixed_package,
                group_affected_packages,
            ) in get_affected_packages_by_patched_package(affected_packages).items():
                yield Inference.from_advisory_data(
                    advisory_data,
                    confidence=100,  # We are getting all valid versions to get this inference
                    affected_purls=group_affected_packages,
                    fixed_purl=fixed_package,
                )
80 changes: 0 additions & 80 deletions vulnerabilities/importers/debian.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
from typing import Mapping

import requests
from django.db.models.query import QuerySet
from packageurl import PackageURL
from univers.version_range import DebianVersionRange
from univers.versions import DebianVersion
Expand All @@ -23,16 +22,8 @@
from vulnerabilities.importer import AffectedPackage
from vulnerabilities.importer import Importer
from vulnerabilities.importer import Reference
from vulnerabilities.importer import UnMergeablePackageError
from vulnerabilities.improver import MAX_CONFIDENCE
from vulnerabilities.improver import Improver
from vulnerabilities.improver import Inference
from vulnerabilities.models import Advisory
from vulnerabilities.utils import AffectedPackage as LegacyAffectedPackage
from vulnerabilities.utils import dedupe
from vulnerabilities.utils import get_affected_packages_by_patched_package
from vulnerabilities.utils import get_item
from vulnerabilities.utils import nearest_patched_package

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -164,74 +155,3 @@ def parse(self, pkg_name: str, records: Mapping[str, Any]) -> Iterable[AdvisoryD
affected_packages=affected_packages,
references=references,
)


class DebianBasicImprover(Improver):
    """
    Improver that turns Debian advisories into concrete Inferences by pairing
    the affected versions stated in the advisory with the nearest fixed
    versions.
    """

    @property
    def interesting_advisories(self) -> QuerySet:
        """Return the Advisories created by the Debian importer."""
        return Advisory.objects.filter(created_by=DebianImporter.qualified_name)

    def get_inferences(self, advisory_data: AdvisoryData) -> Iterable[Inference]:
        """
        Yield Inferences for the given advisory data.
        """
        if not advisory_data.affected_packages:
            return
        try:
            purl, affected_version_ranges, fixed_versions = AffectedPackage.merge(
                advisory_data.affected_packages
            )
        except UnMergeablePackageError:
            logger.error(f"Cannot merge with different purls {advisory_data.affected_packages!r}")
            return

        pkg_type = purl.type
        pkg_namespace = purl.namespace
        pkg_name = purl.name
        pkg_qualifiers = purl.qualifiers
        fixed_purls = [
            PackageURL(
                type=pkg_type,
                namespace=pkg_namespace,
                name=pkg_name,
                version=str(version),
                qualifiers=pkg_qualifiers,
            )
            for version in fixed_versions
        ]
        if not affected_version_ranges:
            # No affected ranges: emit one Inference per fixed version with
            # an empty affected list.
            for fixed_purl in fixed_purls:
                yield Inference.from_advisory_data(
                    advisory_data,
                    confidence=MAX_CONFIDENCE,  # We are getting all valid versions to get this inference
                    affected_purls=[],
                    fixed_purl=fixed_purl,
                )
        else:
            # Collect every concrete version mentioned by the constraints of
            # the affected version ranges.
            aff_versions = set()
            for affected_version_range in affected_version_ranges:
                for constraint in affected_version_range.constraints:
                    aff_versions.add(constraint.version.string)
            affected_purls = [
                PackageURL(
                    type=pkg_type,
                    namespace=pkg_namespace,
                    name=pkg_name,
                    version=version,
                    qualifiers=pkg_qualifiers,
                )
                for version in aff_versions
            ]
            affected_packages: List[LegacyAffectedPackage] = nearest_patched_package(
                vulnerable_packages=affected_purls, resolved_packages=fixed_purls
            )

            # Use a distinct name for the per-fixed-package group instead of
            # shadowing ``affected_packages`` with the loop variable.
            for (
                fixed_package,
                group_affected_packages,
            ) in get_affected_packages_by_patched_package(
                affected_packages=affected_packages
            ).items():
                yield Inference.from_advisory_data(
                    advisory_data,
                    confidence=MAX_CONFIDENCE,  # We are getting all valid versions to get this inference
                    affected_purls=group_affected_packages,
                    fixed_purl=fixed_package,
                )
Loading