Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions vulnerabilities/importers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
from vulnerabilities.pipelines.v2_importers import aosp_importer as aosp_importer_v2
from vulnerabilities.pipelines.v2_importers import apache_httpd_importer as apache_httpd_v2
from vulnerabilities.pipelines.v2_importers import archlinux_importer as archlinux_importer_v2
from vulnerabilities.pipelines.v2_importers import collect_fix_commits as collect_fix_commits_v2
from vulnerabilities.pipelines.v2_importers import curl_importer as curl_importer_v2
from vulnerabilities.pipelines.v2_importers import (
elixir_security_importer as elixir_security_importer_v2,
Expand Down Expand Up @@ -135,5 +136,19 @@
ubuntu_usn.UbuntuUSNImporter,
fireeye.FireyeImporter,
oss_fuzz.OSSFuzzImporter,
collect_fix_commits_v2.CollectNodejsFixCommitsPipeline,
collect_fix_commits_v2.CollectCpythonFixCommitsPipeline,
collect_fix_commits_v2.CollectGoFixCommitsPipeline,
collect_fix_commits_v2.CollectRustFixCommitsPipeline,
collect_fix_commits_v2.CollectPhpFixCommitsPipeline,
collect_fix_commits_v2.CollectRubyFixCommitsPipeline,
collect_fix_commits_v2.CollectNginxFixCommitsPipeline,
collect_fix_commits_v2.CollectPostgresFixCommitsPipeline,
collect_fix_commits_v2.CollectMysqlFixCommitsPipeline,
collect_fix_commits_v2.CollectGitFixCommitsPipeline,
collect_fix_commits_v2.CollectTensorflowFixCommitsPipeline,
collect_fix_commits_v2.CollectFirefoxFixCommitsPipeline,
collect_fix_commits_v2.CollectQEMUFixCommitsPipeline,
collect_fix_commits_v2.CollectDenoFixCommitsPipeline,
]
)
114 changes: 114 additions & 0 deletions vulnerabilities/pipelines/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,11 @@
#

import logging
import re
import shutil
import tempfile
import traceback
from collections import defaultdict
from datetime import datetime
from datetime import timezone
from timeit import default_timer as timer
Expand All @@ -19,8 +23,12 @@
from aboutcode.pipeline import LoopProgress
from aboutcode.pipeline import PipelineDefinition
from aboutcode.pipeline import humanize_time
from git import Repo
from packageurl.contrib.url2purl import url2purl

from vulnerabilities.importer import AdvisoryData
from vulnerabilities.importer import AffectedPackageV2
from vulnerabilities.importer import PackageCommitPatchData
from vulnerabilities.improver import MAX_CONFIDENCE
from vulnerabilities.models import Advisory
from vulnerabilities.models import PipelineRun
Expand Down Expand Up @@ -321,3 +329,109 @@ def collect_and_store_advisories(self):
continue

self.log(f"Successfully collected {collected_advisory_count:,d} advisories")


class CollectVCSFixCommitPipeline(VulnerableCodeBaseImporterPipelineV2):
    """
    Pipeline to collect fix commits from any git repository.

    Subclasses set ``repo_url`` (and a ``pipeline_id``). The pipeline clones the
    repository, scans every commit message for vulnerability identifiers, and
    yields one AdvisoryData per identifier listing the commits that fix it.
    """

    # Git URL of the repository to scan; set by concrete subclasses.
    repo_url: str
    # Regexes matched (case-insensitively) against commit messages to detect
    # vulnerability identifiers.
    patterns: list[str] = [
        r"\bCVE-\d{4}-\d{4,19}\b",
        r"GHSA-[2-9cfghjmpqrvwx]{4}-[2-9cfghjmpqrvwx]{4}-[2-9cfghjmpqrvwx]{4}",
    ]

    @classmethod
    def steps(cls):
        return (
            cls.clone,
            cls.collect_and_store_advisories,
            cls.clean_downloads,
        )

    def clone(self):
        """Clone the repository as a bare, blob-less copy into a temp directory."""
        # Remember the clone location explicitly: a bare Repo is documented to
        # have no working_dir, so cleanup must not rely on that attribute.
        self.clone_path = tempfile.mkdtemp()
        self.repo = Repo.clone_from(
            url=self.repo_url,
            to_path=self.clone_path,
            bare=True,
            no_checkout=True,
            multi_options=["--filter=blob:none"],
        )

    def advisories_count(self) -> int:
        # The number of advisories is unknown until all commits are scanned.
        return 0

    def extract_vulnerability_id(self, commit) -> list[str]:
        """
        Extract vulnerability ids from a commit message.
        Returns a list of matched vulnerability IDs, as found in the text.
        """
        matches = []
        for pattern in self.patterns:
            found = re.findall(pattern, commit.message, flags=re.IGNORECASE)
            matches.extend(found)
        return matches

    def collect_fix_commits(self):
        """
        Iterate through repository commits and group them by vulnerability identifiers.
        Return a mapping of {vuln_id: [(commit_id, commit_message), ...]}.
        """
        self.log("Processing git repository fix commits (grouped by vulnerability IDs).")

        grouped_commits = defaultdict(list)
        for commit in self.repo.iter_commits("--all"):
            matched_ids = self.extract_vulnerability_id(commit)
            if not matched_ids:
                continue

            commit_id = commit.hexsha
            commit_message = commit.message.strip()

            for vuln_id in matched_ids:
                grouped_commits[vuln_id].append((commit_id, commit_message))

        self.log(f"Found {len(grouped_commits)} vulnerabilities with related commits.")
        self.log("Finished processing all commits.")
        return grouped_commits

    def collect_advisories(self):
        """
        Generate AdvisoryData objects for each vulnerability ID grouped with its related commits.
        """
        self.log("Generating AdvisoryData objects from grouped commits.")
        grouped_commits = self.collect_fix_commits()
        purl = url2purl(self.repo_url)

        for vuln_id, commits_data in grouped_commits.items():
            if not commits_data or not vuln_id:
                continue

            # Deduplicate and sort commit hashes so advisory content is
            # deterministic across runs (set iteration order is not).
            commit_hashes = sorted({commit_hash for commit_hash, _ in commits_data})
            affected_packages = [
                AffectedPackageV2(
                    package=purl,
                    fixed_by_commit_patches=[
                        PackageCommitPatchData(vcs_url=self.repo_url, commit_hash=commit_hash)
                        for commit_hash in commit_hashes
                    ],
                )
            ]

            yield AdvisoryData(
                advisory_id=vuln_id,
                affected_packages=affected_packages,
                url=self.repo_url,
            )

    def clean_downloads(self):
        """Remove the cloned repository data from disk."""
        self.log("Cleaning up local repository resources.")
        repo = getattr(self, "repo", None)
        if repo is not None:
            # Release git handles before deleting files (required on Windows).
            repo.close()
        # Delete the directory created in clone(); the original check on
        # repo.working_dir silently skipped cleanup for bare clones, leaking
        # the temp directory.
        clone_path = getattr(self, "clone_path", None)
        if clone_path:
            shutil.rmtree(path=clone_path, ignore_errors=True)

    def on_failure(self):
        """Ensure cleanup is always performed on failure."""
        self.clean_downloads()
71 changes: 71 additions & 0 deletions vulnerabilities/pipelines/v2_importers/collect_fix_commits.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
from vulnerabilities.pipelines import CollectVCSFixCommitPipeline


class CollectNodejsFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the Node.js repository."""

    pipeline_id = "collect_nodejs_fix_commits"
    repo_url = "https://github.com/nodejs/node"


class CollectCpythonFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the CPython repository."""

    pipeline_id = "collect_cpython_fix_commits"
    repo_url = "https://github.com/python/cpython"


class CollectGoFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the Go language repository."""

    pipeline_id = "collect_go_fix_commits"
    repo_url = "https://github.com/golang/go"


class CollectRustFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the Rust language repository."""

    pipeline_id = "collect_rust_lang_fix_commits"
    repo_url = "https://github.com/rust-lang/rust"


class CollectPhpFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the PHP source repository."""

    pipeline_id = "collect_php_fix_commits"
    repo_url = "https://github.com/php/php-src"


class CollectRubyFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the Ruby repository."""

    pipeline_id = "collect_ruby_fix_commits"
    repo_url = "https://github.com/ruby/ruby"


class CollectNginxFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the nginx repository."""

    pipeline_id = "collect_nginx_fix_commits"
    repo_url = "https://github.com/nginx/nginx"


class CollectPostgresFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the PostgreSQL repository."""

    pipeline_id = "collect_postgres_fix_commits"
    repo_url = "https://github.com/postgres/postgres"


class CollectMysqlFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the MySQL server repository."""

    pipeline_id = "collect_mysql_fix_commits"
    repo_url = "https://github.com/mysql/mysql-server"


class CollectGitFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the git repository."""

    pipeline_id = "collect_git_fix_commits"
    repo_url = "https://github.com/git/git"


class CollectTensorflowFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the TensorFlow repository."""

    pipeline_id = "collect_tensorflow_fix_commits"
    repo_url = "https://github.com/tensorflow/tensorflow"


class CollectFirefoxFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the Firefox repository."""

    pipeline_id = "collect_firefox_fix_commits"
    repo_url = "https://github.com/mozilla-firefox/firefox"


class CollectQEMUFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the QEMU repository."""

    pipeline_id = "collect_qemu_fix_commits"
    repo_url = "https://github.com/qemu/qemu"


class CollectDenoFixCommitsPipeline(CollectVCSFixCommitPipeline):
    """Collect CVE/GHSA fix commits from the Deno repository."""

    pipeline_id = "collect_deno_fix_commits"
    repo_url = "https://github.com/denoland/deno"
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
#
# Copyright (c) nexB Inc. and others. All rights reserved.
# VulnerableCode is a trademark of nexB Inc.
# SPDX-License-Identifier: Apache-2.0
# See http://www.apache.org/licenses/LICENSE-2.0 for the license text.
# See https://github.com/aboutcode-org/vulnerablecode for support or download.
# See https://aboutcode.org for more information about nexB OSS projects.
#

import json
from pathlib import Path
from unittest import TestCase
from unittest.mock import MagicMock
from unittest.mock import patch

import pytest

from vulnerabilities.pipelines import CollectVCSFixCommitPipeline
from vulnerabilities.tests import util_tests


@pytest.fixture
def pipeline():
    """Provide a CollectVCSFixCommitPipeline wired to a dummy repo and a mocked log."""
    instance = CollectVCSFixCommitPipeline()
    instance.repo_url = "https://github.com/test/repo"
    instance.pipeline_id = "collect_repo_fix_commits"
    instance.log = MagicMock()
    return instance


def test_classify_commit_type_extracts_ids(pipeline):
    """extract_vulnerability_id picks up both CVE and GHSA identifiers."""

    class FakeCommit:
        message = "Fix for CVE-2023-1234 and GHSA-2479-qvv7-47qq"

    assert pipeline.extract_vulnerability_id(FakeCommit) == [
        "CVE-2023-1234",
        "GHSA-2479-qvv7-47qq",
    ]


@patch("vulnerabilities.pipelines.Repo")
def test_collect_fix_commits_groups_by_vuln(mock_repo, pipeline):
    """collect_fix_commits groups (hash, message) pairs by vulnerability ID."""
    commit1 = MagicMock(message="Fix CVE-2021-0001", hexsha="abc123")
    commit2 = MagicMock(message="Patch GHSA-f72r-2h5j-7639", hexsha="def456")
    commit3 = MagicMock(message="Unrelated change", hexsha="ghi789")

    pipeline.repo = MagicMock()
    pipeline.repo.iter_commits.return_value = [commit1, commit2, commit3]

    # NOTE: a previous version mocked `pipeline.classify_commit_type`, a method
    # that does not exist on the pipeline; the real extract_vulnerability_id
    # (regex-based) ran instead and the mock was dead code whose side_effect
    # disagreed with `expected`. Rely on the real extraction directly.
    grouped = pipeline.collect_fix_commits()

    expected = {
        "CVE-2021-0001": [("abc123", "Fix CVE-2021-0001")],
        "GHSA-f72r-2h5j-7639": [("def456", "Patch GHSA-f72r-2h5j-7639")],
    }

    assert grouped == expected


# Directory holding JSON fixtures for the fix-commit pipeline tests.
TEST_DATA = Path(__file__).parent.parent.parent / "test_data" / "fix_commits"


class TestRepoFixCommitPipeline(TestCase):
    def test_collect_advisories_from_json(self):
        """Advisories built from pre-grouped commits match the stored expectation."""
        grouped = json.loads(
            (TEST_DATA / "grouped_commits_input.json").read_text(encoding="utf-8")
        )

        pipeline = CollectVCSFixCommitPipeline()
        pipeline.repo_url = "https://github.com/test/repo"
        pipeline.log = MagicMock()
        # Bypass the git clone/scan: feed the grouped commits directly.
        pipeline.collect_fix_commits = MagicMock(return_value=grouped)

        advisories = [advisory.to_dict() for advisory in pipeline.collect_advisories()]

        util_tests.check_results_against_json(
            advisories, TEST_DATA / "expected_linux_advisory_output.json", True
        )


@pytest.mark.parametrize(
    "commit_message, expected_ids",
    [
        ("Fix CVE-2023-12345 buffer overflow", ["CVE-2023-12345"]),
        ("Address GHSA-4486-gxhx-5mg7 report", ["GHSA-4486-gxhx-5mg7"]),
        (
            "Fix CVE-2023-1111 and GHSA-gch2-phqh-fg9q in kernel",
            ["CVE-2023-1111", "GHSA-gch2-phqh-fg9q"],
        ),
        ("Refactor logging system with no security ID", []),
    ],
)
def test_classify_commit_type_detects_vuln_ids(pipeline, commit_message, expected_ids):
    """Ensure extract_vulnerability_id correctly extracts vulnerability IDs."""

    class DummyCommit:
        def __init__(self, message):
            self.message = message

    commit = DummyCommit(commit_message)
    result = pipeline.extract_vulnerability_id(commit)

    assert result == expected_ids, f"Unexpected result for message: {commit_message}"


def test_classify_commit_type_case_insensitive(pipeline):
    """Ensure pattern matching is case-insensitive."""

    class DummyCommit:
        # Identifiers are lowercased on purpose: they only match because the
        # patterns are applied with re.IGNORECASE. (The previous message used
        # uppercase IDs and so never exercised case-insensitivity.)
        message = "fix cve-2022-9999 and ghsa-gqgv-6jq5-jjj9"

    result = pipeline.extract_vulnerability_id(DummyCommit)
    # re.findall returns the matched text verbatim, hence lowercase here.
    assert result == ["cve-2022-9999", "ghsa-gqgv-6jq5-jjj9"]
Loading