GSD-2021-41125
Vulnerability from GSD
Modified: 2023-12-13 01:23
Details
Scrapy is a high-level web crawling and scraping framework for Python. If you use `HttpAuthMiddleware` (i.e. the `http_user` and `http_pass` spider attributes) for HTTP authentication, all requests will expose your credentials to the request target. This includes requests generated by Scrapy components, such as `robots.txt` requests sent by Scrapy when the `ROBOTSTXT_OBEY` setting is set to `True`, or as requests reached through redirects. Upgrade to Scrapy 2.5.1 and use the new `http_auth_domain` spider attribute to control which domains are allowed to receive the configured HTTP authentication credentials. If you are using Scrapy 1.8 or a lower version, and upgrading to Scrapy 2.5.1 is not an option, you may upgrade to Scrapy 1.8.1 instead. If you cannot upgrade, set your HTTP authentication credentials on a per-request basis, using for example the `w3lib.http.basic_auth_header` function to convert your credentials into a value that you can assign to the `Authorization` header of your request, instead of defining your credentials globally using `HttpAuthMiddleware`.
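For illustration, below is a minimal Python sketch of the two mitigations described above. Only the `http_user`, `http_pass`, and `http_auth_domain` spider attributes and the `w3lib.http.basic_auth_header` function come from the advisory; the spider names, URLs, and credentials are placeholders, not part of the record.

# Sketch of the mitigations from the advisory; names, URLs, and credentials are placeholders.
from w3lib.http import basic_auth_header
import scrapy


class ScopedAuthSpider(scrapy.Spider):
    """Scrapy 2.5.1 / 1.8.1 and later: scope HttpAuthMiddleware credentials to one domain."""
    name = "scoped_auth"
    start_urls = ["https://example.com/protected/"]

    # Credentials picked up by HttpAuthMiddleware.
    http_user = "user"
    http_pass = "pass"
    # New in the patched releases: only requests to this domain receive the
    # Authorization header, instead of every request the spider sends.
    http_auth_domain = "example.com"


class PerRequestAuthSpider(scrapy.Spider):
    """Pre-upgrade workaround: attach credentials per request instead of globally."""
    name = "per_request_auth"

    def start_requests(self):
        # basic_auth_header() builds the "Basic ..." value for the Authorization header.
        auth = basic_auth_header("user", "pass")
        yield scrapy.Request(
            "https://example.com/protected/",
            # Only requests built this way carry the credentials; Scrapy-generated
            # requests such as robots.txt fetches do not.
            headers={"Authorization": auth},
        )

In the patched releases the middleware only adds the Authorization header when the request host matches `http_auth_domain`; in the per-request variant the credentials are attached explicitly to each request rather than being applied spider-wide.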
Aliases
CVE-2021-41125
{
  "GSD": {
    "alias": "CVE-2021-41125",
    "description": "Scrapy is a high-level web crawling and scraping framework for Python. If you use `HttpAuthMiddleware` (i.e. the `http_user` and `http_pass` spider attributes) for HTTP authentication, all requests will expose your credentials to the request target. This includes requests generated by Scrapy components, such as `robots.txt` requests sent by Scrapy when the `ROBOTSTXT_OBEY` setting is set to `True`, or as requests reached through redirects. Upgrade to Scrapy 2.5.1 and use the new `http_auth_domain` spider attribute to control which domains are allowed to receive the configured HTTP authentication credentials. If you are using Scrapy 1.8 or a lower version, and upgrading to Scrapy 2.5.1 is not an option, you may upgrade to Scrapy 1.8.1 instead. If you cannot upgrade, set your HTTP authentication credentials on a per-request basis, using for example the `w3lib.http.basic_auth_header` function to convert your credentials into a value that you can assign to the `Authorization` header of your request, instead of defining your credentials globally using `HttpAuthMiddleware`.",
    "id": "GSD-2021-41125",
    "references": [
      "https://www.suse.com/security/cve/CVE-2021-41125.html",
      "https://security.archlinux.org/CVE-2021-41125"
    ]
  },
  "gsd": {
    "metadata": {
      "exploitCode": "unknown",
      "remediation": "unknown",
      "reportConfidence": "confirmed",
      "type": "vulnerability"
    },
    "osvSchema": {
      "aliases": [
        "CVE-2021-41125"
      ],
      "details": "Scrapy is a high-level web crawling and scraping framework for Python. If you use `HttpAuthMiddleware` (i.e. the `http_user` and `http_pass` spider attributes) for HTTP authentication, all requests will expose your credentials to the request target. This includes requests generated by Scrapy components, such as `robots.txt` requests sent by Scrapy when the `ROBOTSTXT_OBEY` setting is set to `True`, or as requests reached through redirects. Upgrade to Scrapy 2.5.1 and use the new `http_auth_domain` spider attribute to control which domains are allowed to receive the configured HTTP authentication credentials. If you are using Scrapy 1.8 or a lower version, and upgrading to Scrapy 2.5.1 is not an option, you may upgrade to Scrapy 1.8.1 instead. If you cannot upgrade, set your HTTP authentication credentials on a per-request basis, using for example the `w3lib.http.basic_auth_header` function to convert your credentials into a value that you can assign to the `Authorization` header of your request, instead of defining your credentials globally using `HttpAuthMiddleware`.",
      "id": "GSD-2021-41125",
      "modified": "2023-12-13T01:23:26.896470Z",
      "schema_version": "1.4.0"
    }
  },
  "namespaces": {
    "cve.org": {
      "CVE_data_meta": {
        "ASSIGNER": "security-advisories@github.com",
        "ID": "CVE-2021-41125",
        "STATE": "PUBLIC",
        "TITLE": "HTTP authentication credential leak to target websites in scrapy"
      },
      "affects": {
        "vendor": {
          "vendor_data": [
            {
              "product": {
                "product_data": [
                  {
                    "product_name": "scrapy",
                    "version": {
                      "version_data": [
                        {
                          "version_value": "\u003c 1.8.1"
                        },
                        {
                          "version_value": "\u003e= 2.0.0, \u003c 2.5.1"
                        }
                      ]
                    }
                  }
                ]
              },
              "vendor_name": "scrapy"
            }
          ]
        }
      },
      "data_format": "MITRE",
      "data_type": "CVE",
      "data_version": "4.0",
      "description": {
        "description_data": [
          {
            "lang": "eng",
            "value": "Scrapy is a high-level web crawling and scraping framework for Python. If you use `HttpAuthMiddleware` (i.e. the `http_user` and `http_pass` spider attributes) for HTTP authentication, all requests will expose your credentials to the request target. This includes requests generated by Scrapy components, such as `robots.txt` requests sent by Scrapy when the `ROBOTSTXT_OBEY` setting is set to `True`, or as requests reached through redirects. Upgrade to Scrapy 2.5.1 and use the new `http_auth_domain` spider attribute to control which domains are allowed to receive the configured HTTP authentication credentials. If you are using Scrapy 1.8 or a lower version, and upgrading to Scrapy 2.5.1 is not an option, you may upgrade to Scrapy 1.8.1 instead. If you cannot upgrade, set your HTTP authentication credentials on a per-request basis, using for example the `w3lib.http.basic_auth_header` function to convert your credentials into a value that you can assign to the `Authorization` header of your request, instead of defining your credentials globally using `HttpAuthMiddleware`."
          }
        ]
      },
      "impact": {
        "cvss": {
          "attackComplexity": "LOW",
          "attackVector": "NETWORK",
          "availabilityImpact": "NONE",
          "baseScore": 5.7,
          "baseSeverity": "MEDIUM",
          "confidentialityImpact": "HIGH",
          "integrityImpact": "NONE",
          "privilegesRequired": "LOW",
          "scope": "UNCHANGED",
          "userInteraction": "REQUIRED",
          "vectorString": "CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:N/A:N",
          "version": "3.1"
        }
      },
      "problemtype": {
        "problemtype_data": [
          {
            "description": [
              {
                "lang": "eng",
                "value": "CWE-200: Exposure of Sensitive Information to an Unauthorized Actor"
              }
            ]
          }
        ]
      },
      "references": {
        "reference_data": [
          {
            "name": "https://github.com/scrapy/scrapy/security/advisories/GHSA-jwqp-28gf-p498",
            "refsource": "CONFIRM",
            "url": "https://github.com/scrapy/scrapy/security/advisories/GHSA-jwqp-28gf-p498"
          },
          {
            "name": "https://github.com/scrapy/scrapy/commit/b01d69a1bf48060daec8f751368622352d8b85a6",
            "refsource": "MISC",
            "url": "https://github.com/scrapy/scrapy/commit/b01d69a1bf48060daec8f751368622352d8b85a6"
          },
          {
            "name": "https://w3lib.readthedocs.io/en/latest/w3lib.html#w3lib.http.basic_auth_header",
            "refsource": "MISC",
            "url": "https://w3lib.readthedocs.io/en/latest/w3lib.html#w3lib.http.basic_auth_header"
          },
          {
            "name": "http://doc.scrapy.org/en/latest/topics/downloader-middleware.html#module-scrapy.downloadermiddlewares.httpauth",
            "refsource": "MISC",
            "url": "http://doc.scrapy.org/en/latest/topics/downloader-middleware.html#module-scrapy.downloadermiddlewares.httpauth"
          },
          {
            "name": "[debian-lts-announce] 20220316 [SECURITY] [DLA 2950-1] python-scrapy security update",
            "refsource": "MLIST",
            "url": "https://lists.debian.org/debian-lts-announce/2022/03/msg00021.html"
          }
        ]
      },
      "source": {
        "advisory": "GHSA-jwqp-28gf-p498",
        "discovery": "UNKNOWN"
      }
    },
    "gitlab.com": {
      "advisories": [
        {
          "affected_range": "\u003c1.8.1||\u003e=2.0.0,\u003c2.5.1",
          "affected_versions": "All versions before 1.8.1, all versions starting from 2.0.0 before 2.5.1",
          "cvss_v2": "AV:N/AC:L/Au:S/C:P/I:N/A:N",
          "cvss_v3": "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:N/A:N",
          "cwe_ids": [
            "CWE-1035",
            "CWE-522",
            "CWE-937"
          ],
          "date": "2022-04-22",
          "description": "Scrapy is a high-level web crawling and scraping framework for Python. If you use `HttpAuthMiddleware` (i.e. the `http_user` and `http_pass` spider attributes) for HTTP authentication, all requests will expose your credentials to the request target. This includes requests generated by Scrapy components, such as `robots.txt` requests sent by Scrapy when the `ROBOTSTXT_OBEY` setting is set to `True`, or as requests reached through redirects. If you cannot upgrade to a patched version, set your HTTP authentication credentials on a per-request basis, using for example the `w3lib.http.basic_auth_header` function to convert your credentials into a value that you can assign to the `Authorization` header of your request, instead of defining your credentials globally using `HttpAuthMiddleware`.",
          "fixed_versions": [
            "1.8.1",
            "2.5.1"
          ],
          "identifier": "CVE-2021-41125",
          "identifiers": [
            "CVE-2021-41125",
            "GHSA-jwqp-28gf-p498"
          ],
          "not_impacted": "All versions starting from 1.8.1 before 2.0.0, all versions starting from 2.5.1",
          "package_slug": "pypi/scrapy",
          "pubdate": "2021-10-06",
          "solution": "Upgrade to versions 1.8.1, 2.5.1 or above.",
          "title": "Exposure of Sensitive Information to an Unauthorized Actor",
          "urls": [
            "https://nvd.nist.gov/vuln/detail/CVE-2021-41125",
            "http://doc.scrapy.org/en/latest/topics/downloader-middleware.html#module-scrapy.downloadermiddlewares.httpauth"
          ],
          "uuid": "01c6abc5-98e7-48a7-b83d-7c31ffeeca56"
        }
      ]
    },
    "nvd.nist.gov": {
      "configurations": {
        "CVE_data_version": "4.0",
        "nodes": [
          {
            "children": [],
            "cpe_match": [
              {
                "cpe23Uri": "cpe:2.3:a:scrapy:scrapy:*:*:*:*:*:*:*:*",
                "cpe_name": [],
                "versionEndExcluding": "1.8.1",
                "vulnerable": true
              },
              {
                "cpe23Uri": "cpe:2.3:a:scrapy:scrapy:*:*:*:*:*:*:*:*",
                "cpe_name": [],
                "versionEndExcluding": "2.5.1",
                "versionStartIncluding": "2.0.0",
                "vulnerable": true
              }
            ],
            "operator": "OR"
          },
          {
            "children": [],
            "cpe_match": [
              {
                "cpe23Uri": "cpe:2.3:o:debian:debian_linux:9.0:*:*:*:*:*:*:*",
                "cpe_name": [],
                "vulnerable": true
              }
            ],
            "operator": "OR"
          }
        ]
      },
      "cve": {
        "CVE_data_meta": {
          "ASSIGNER": "security-advisories@github.com",
          "ID": "CVE-2021-41125"
        },
        "data_format": "MITRE",
        "data_type": "CVE",
        "data_version": "4.0",
        "description": {
          "description_data": [
            {
              "lang": "en",
              "value": "Scrapy is a high-level web crawling and scraping framework for Python. If you use `HttpAuthMiddleware` (i.e. the `http_user` and `http_pass` spider attributes) for HTTP authentication, all requests will expose your credentials to the request target. This includes requests generated by Scrapy components, such as `robots.txt` requests sent by Scrapy when the `ROBOTSTXT_OBEY` setting is set to `True`, or as requests reached through redirects. Upgrade to Scrapy 2.5.1 and use the new `http_auth_domain` spider attribute to control which domains are allowed to receive the configured HTTP authentication credentials. If you are using Scrapy 1.8 or a lower version, and upgrading to Scrapy 2.5.1 is not an option, you may upgrade to Scrapy 1.8.1 instead. If you cannot upgrade, set your HTTP authentication credentials on a per-request basis, using for example the `w3lib.http.basic_auth_header` function to convert your credentials into a value that you can assign to the `Authorization` header of your request, instead of defining your credentials globally using `HttpAuthMiddleware`."
            }
          ]
        },
        "problemtype": {
          "problemtype_data": [
            {
              "description": [
                {
                  "lang": "en",
                  "value": "CWE-522"
                },
                {
                  "lang": "en",
                  "value": "CWE-200"
                }
              ]
            }
          ]
        },
        "references": {
          "reference_data": [
            {
              "name": "https://github.com/scrapy/scrapy/commit/b01d69a1bf48060daec8f751368622352d8b85a6",
              "refsource": "MISC",
              "tags": [
                "Patch",
                "Third Party Advisory"
              ],
              "url": "https://github.com/scrapy/scrapy/commit/b01d69a1bf48060daec8f751368622352d8b85a6"
            },
            {
              "name": "https://w3lib.readthedocs.io/en/latest/w3lib.html#w3lib.http.basic_auth_header",
              "refsource": "MISC",
              "tags": [
                "Third Party Advisory"
              ],
              "url": "https://w3lib.readthedocs.io/en/latest/w3lib.html#w3lib.http.basic_auth_header"
            },
            {
              "name": "https://github.com/scrapy/scrapy/security/advisories/GHSA-jwqp-28gf-p498",
              "refsource": "CONFIRM",
              "tags": [
                "Third Party Advisory"
              ],
              "url": "https://github.com/scrapy/scrapy/security/advisories/GHSA-jwqp-28gf-p498"
            },
            {
              "name": "http://doc.scrapy.org/en/latest/topics/downloader-middleware.html#module-scrapy.downloadermiddlewares.httpauth",
              "refsource": "MISC",
              "tags": [
                "Vendor Advisory"
              ],
              "url": "http://doc.scrapy.org/en/latest/topics/downloader-middleware.html#module-scrapy.downloadermiddlewares.httpauth"
            },
            {
              "name": "[debian-lts-announce] 20220316 [SECURITY] [DLA 2950-1] python-scrapy security update",
              "refsource": "MLIST",
              "tags": [
                "Mailing List",
                "Third Party Advisory"
              ],
              "url": "https://lists.debian.org/debian-lts-announce/2022/03/msg00021.html"
            }
          ]
        }
      },
      "impact": {
        "baseMetricV2": {
          "acInsufInfo": false,
          "cvssV2": {
            "accessComplexity": "LOW",
            "accessVector": "NETWORK",
            "authentication": "SINGLE",
            "availabilityImpact": "NONE",
            "baseScore": 4.0,
            "confidentialityImpact": "PARTIAL",
            "integrityImpact": "NONE",
            "vectorString": "AV:N/AC:L/Au:S/C:P/I:N/A:N",
            "version": "2.0"
          },
          "exploitabilityScore": 8.0,
          "impactScore": 2.9,
          "obtainAllPrivilege": false,
          "obtainOtherPrivilege": false,
          "obtainUserPrivilege": false,
          "severity": "MEDIUM",
          "userInteractionRequired": false
        },
        "baseMetricV3": {
          "cvssV3": {
            "attackComplexity": "LOW",
            "attackVector": "NETWORK",
            "availabilityImpact": "NONE",
            "baseScore": 6.5,
            "baseSeverity": "MEDIUM",
            "confidentialityImpact": "HIGH",
            "integrityImpact": "NONE",
            "privilegesRequired": "LOW",
            "scope": "UNCHANGED",
            "userInteraction": "NONE",
            "vectorString": "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:N/A:N",
            "version": "3.1"
          },
          "exploitabilityScore": 2.8,
          "impactScore": 3.6
        }
      },
      "lastModifiedDate": "2022-04-22T16:00Z",
      "publishedDate": "2021-10-06T18:15Z"
    }
  }
}


