From 30166d28b80a8f64bdd235e98e3f72521131ed0d Mon Sep 17 00:00:00 2001 From: GeekMasher Date: Tue, 10 Jun 2025 11:08:44 +0100 Subject: [PATCH 1/4] feat(pip): Update Pipenv Lock --- Pipfile.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Pipfile.lock b/Pipfile.lock index ab27f03..47a7643 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -122,12 +122,12 @@ }, "ghastoolkit": { "hashes": [ - "sha256:02307e3293184336350ea4fe4e622478ae1ca6a7ae6baea918a5f702be7d33df", - "sha256:28bb710fabe8315c86054e0bbb472d5b1fdede0e5f032b4505d144e97da1022f" + "sha256:0dd2199a8809c0f7edd98b8f4dec84159dc1e77ef8ec7e11741dda7f74c9533c", + "sha256:f5e35fc8f8ee8bb60dd9ec81889a61c5b53caadf232d366c89f16baec5513c39" ], "index": "pypi", - "markers": "python_version >= '3.9'", - "version": "==0.15.1" + "markers": "python_version >= '3.10'", + "version": "==0.17.7" }, "idna": { "hashes": [ From 991e898badd0ea099f0f4b7a1d4c4aaa29db7704 Mon Sep 17 00:00:00 2001 From: GeekMasher Date: Tue, 10 Jun 2025 11:09:02 +0100 Subject: [PATCH 2/4] feat: Update vendored deps --- vendor/bin/normalizer | 6 +- vendor/certifi/__init__.py | 2 +- vendor/certifi/cacert.pem | 497 +++++------------- vendor/charset_normalizer/__init__.py | 4 +- vendor/charset_normalizer/__main__.py | 2 + vendor/charset_normalizer/api.py | 60 +-- vendor/charset_normalizer/cd.py | 68 +-- vendor/charset_normalizer/cli/__init__.py | 2 + vendor/charset_normalizer/cli/__main__.py | 75 ++- vendor/charset_normalizer/constant.py | 44 +- vendor/charset_normalizer/legacy.py | 9 +- vendor/charset_normalizer/md.py | 79 +-- vendor/charset_normalizer/models.py | 79 +-- vendor/charset_normalizer/utils.py | 81 ++- vendor/charset_normalizer/version.py | 4 +- vendor/ghastoolkit/__init__.py | 7 +- vendor/ghastoolkit/__main__.py | 6 +- vendor/ghastoolkit/billing/__main__.py | 114 ++++ vendor/ghastoolkit/codeql/consts.py | 4 + vendor/ghastoolkit/codeql/databases.py | 4 +- vendor/ghastoolkit/octokit/billing.py | 185 +++++++ vendor/ghastoolkit/octokit/dependabot.py | 236 +++++++-- vendor/ghastoolkit/octokit/dependencygraph.py | 271 +++++++--- vendor/ghastoolkit/octokit/enterprise.py | 1 + vendor/ghastoolkit/octokit/github.py | 102 +++- vendor/ghastoolkit/octokit/octokit.py | 6 +- vendor/ghastoolkit/supplychain/__init__.py | 5 + vendor/ghastoolkit/supplychain/advisories.py | 19 +- .../ghastoolkit/supplychain/dependencies.py | 292 ++++++---- vendor/ghastoolkit/supplychain/dependency.py | 153 ++++++ vendor/ghastoolkit/utils/cache.py | 128 +++++ vendor/requests/__version__.py | 4 +- vendor/requests/compat.py | 12 + vendor/requests/models.py | 4 +- vendor/requests/utils.py | 22 +- vendor/semantic_version/__init__.py | 10 +- vendor/urllib3/_base_connection.py | 21 +- vendor/urllib3/_collections.py | 20 +- vendor/urllib3/_request_methods.py | 2 +- vendor/urllib3/_version.py | 13 +- vendor/urllib3/connection.py | 33 +- vendor/urllib3/connectionpool.py | 14 +- .../urllib3/contrib/emscripten/connection.py | 5 +- vendor/urllib3/contrib/emscripten/fetch.py | 320 ++++++++++- vendor/urllib3/contrib/emscripten/response.py | 30 +- vendor/urllib3/contrib/pyopenssl.py | 18 +- vendor/urllib3/exceptions.py | 24 +- vendor/urllib3/fields.py | 4 +- vendor/urllib3/filepost.py | 2 +- vendor/urllib3/http2/connection.py | 2 +- vendor/urllib3/response.py | 21 +- vendor/urllib3/util/connection.py | 2 +- vendor/urllib3/util/request.py | 18 +- vendor/urllib3/util/ssl_.py | 51 +- vendor/urllib3/util/ssl_match_hostname.py | 2 +- vendor/urllib3/util/ssltransport.py | 15 +- 
vendor/urllib3/util/url.py | 6 +- 57 files changed, 2223 insertions(+), 997 deletions(-) create mode 100644 vendor/ghastoolkit/billing/__main__.py create mode 100644 vendor/ghastoolkit/octokit/billing.py create mode 100644 vendor/ghastoolkit/supplychain/dependency.py create mode 100644 vendor/ghastoolkit/utils/cache.py diff --git a/vendor/bin/normalizer b/vendor/bin/normalizer index 77a2cce..69f1188 100755 --- a/vendor/bin/normalizer +++ b/vendor/bin/normalizer @@ -1,8 +1,8 @@ -#!/usr/local/python/3.12.1/bin/python3 +#!/opt/homebrew/opt/python@3.13/bin/python3.13 # -*- coding: utf-8 -*- import re import sys -from charset_normalizer.cli import cli_detect +from charset_normalizer import cli if __name__ == '__main__': sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(cli_detect()) + sys.exit(cli.cli_detect()) diff --git a/vendor/certifi/__init__.py b/vendor/certifi/__init__.py index f61d77f..bf83fa9 100644 --- a/vendor/certifi/__init__.py +++ b/vendor/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2024.08.30" +__version__ = "2025.04.26" diff --git a/vendor/certifi/cacert.pem b/vendor/certifi/cacert.pem index 3c165a1..b1d0cfd 100644 --- a/vendor/certifi/cacert.pem +++ b/vendor/certifi/cacert.pem @@ -1,95 +1,4 @@ -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - # Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Label: "Entrust Root Certification Authority" @@ -125,39 +34,6 @@ eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m 0vdXcDazv/wor3ElhVsT/h5/WrQ8 -----END CERTIFICATE----- -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - # Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited # Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited # Label: "QuoVadis Root CA 2" @@ -245,103 +121,6 @@ mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK 4SVhM7JZG+Ju1zdXtg2pEto= -----END CERTIFICATE----- -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB 
-gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs -jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - # Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com # Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com # Label: "DigiCert Assured ID Root CA" @@ -474,47 +253,6 @@ ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ -----END CERTIFICATE----- -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- -MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH 
-HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c -wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - # Issuer: CN=SecureTrust CA O=SecureTrust Corporation # Subject: CN=SecureTrust CA O=SecureTrust Corporation # Label: "SecureTrust CA" @@ -763,35 +501,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - # Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. # Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
# Label: "Microsec e-Szigno Root CA 2009" @@ -3100,50 +2809,6 @@ LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG mpv0 -----END CERTIFICATE----- -# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Label: "Entrust Root Certification Authority - G4" -# Serial: 289383649854506086828220374796556676440 -# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 -# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 -# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw -gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL -Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg -MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw -BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 -MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 -c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ -bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg -Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B -AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ -2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E -T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j -5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM -C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T -DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX -wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A -2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm -nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 -dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl -N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj -c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS -5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS -Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr -hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ -B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI -AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw -H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ -b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk -2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol -IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk -5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY -n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== ------END CERTIFICATE----- - # Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation # Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation # Label: "Microsoft ECC Root Certificate Authority 2017" @@ -3485,6 +3150,46 @@ DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= -----END CERTIFICATE----- +# Issuer: 
CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + # Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz # Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz # Label: "ANF Secure Server Root CA" @@ -4214,46 +3919,6 @@ ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR -----END CERTIFICATE----- -# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. 
-# Label: "Security Communication RootCA3" -# Serial: 16247922307909811815 -# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 -# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a -# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 ------BEGIN CERTIFICATE----- -MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV -BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw -JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 -MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc -U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg -Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r -CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA -lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG -TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 -9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 -8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 -g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we -GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst -+3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M -0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ -T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw -HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS -YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA -FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd -9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI -UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ -OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke -gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf -iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV -nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD -2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// -1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad -TdJ0MN1kURXbg4NR16/9M51NZg== ------END CERTIFICATE----- - # Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. # Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. 
# Label: "Security Communication ECC RootCA1" @@ -4927,3 +4592,85 @@ Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT 4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= -----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 2 2023" +# Serial: 153168538924886464690566649552453098598 +# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85 +# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87 +# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1 +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw +OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr +i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE +gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8 +k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT +Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl +2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U +cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP +/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS +uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+ +0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N +DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+ +XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61 +GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI +FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n +riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR +VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc +LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn +4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD +hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG +koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46 +ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS +Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80 +knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ +hJ65bvspmZDogNOfJA== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 2 2023" +# Serial: 139766439402180512324132425437959641711 +# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6 +# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b +# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw +OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi 
+MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK +F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE +7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe +EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6 +lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb +RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV +jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc +jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx +TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+ +ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk +hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF +NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH +kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14 +QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4 +pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q +3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU +t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX +cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8 +ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT +2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs +7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP +gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst +Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh +XBxvWHZks/wCuPWdCg== +-----END CERTIFICATE----- diff --git a/vendor/charset_normalizer/__init__.py b/vendor/charset_normalizer/__init__.py index 55991fc..0d3a379 100644 --- a/vendor/charset_normalizer/__init__.py +++ b/vendor/charset_normalizer/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Charset-Normalizer ~~~~~~~~~~~~~~ @@ -19,6 +18,9 @@ :copyright: (c) 2021 by Ahmed TAHRI :license: MIT, see LICENSE for more details. 
""" + +from __future__ import annotations + import logging from .api import from_bytes, from_fp, from_path, is_binary diff --git a/vendor/charset_normalizer/__main__.py b/vendor/charset_normalizer/__main__.py index beae2ef..e0e76f7 100644 --- a/vendor/charset_normalizer/__main__.py +++ b/vendor/charset_normalizer/__main__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from .cli import cli_detect if __name__ == "__main__": diff --git a/vendor/charset_normalizer/api.py b/vendor/charset_normalizer/api.py index e3f2283..2c8c061 100644 --- a/vendor/charset_normalizer/api.py +++ b/vendor/charset_normalizer/api.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import logging from os import PathLike -from typing import BinaryIO, List, Optional, Set, Union +from typing import BinaryIO from .cd import ( coherence_ratio, @@ -21,8 +23,6 @@ should_strip_sig_or_bom, ) -# Will most likely be controversial -# logging.addLevelName(TRACE, "TRACE") logger = logging.getLogger("charset_normalizer") explain_handler = logging.StreamHandler() explain_handler.setFormatter( @@ -31,12 +31,12 @@ def from_bytes( - sequences: Union[bytes, bytearray], + sequences: bytes | bytearray, steps: int = 5, chunk_size: int = 512, threshold: float = 0.2, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, + cp_isolation: list[str] | None = None, + cp_exclusion: list[str] | None = None, preemptive_behaviour: bool = True, explain: bool = False, language_threshold: float = 0.1, @@ -62,7 +62,7 @@ def from_bytes( if not isinstance(sequences, (bytearray, bytes)): raise TypeError( - "Expected object of type bytes or bytearray, got: {0}".format( + "Expected object of type bytes or bytearray, got: {}".format( type(sequences) ) ) @@ -76,7 +76,7 @@ def from_bytes( if length == 0: logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") - if explain: + if explain: # Defensive: ensure exit path clean handler logger.removeHandler(explain_handler) logger.setLevel(previous_logger_level or logging.WARNING) return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) @@ -135,9 +135,9 @@ def from_bytes( ), ) - prioritized_encodings: List[str] = [] + prioritized_encodings: list[str] = [] - specified_encoding: Optional[str] = ( + specified_encoding: str | None = ( any_specified_encoding(sequences) if preemptive_behaviour else None ) @@ -149,13 +149,13 @@ def from_bytes( specified_encoding, ) - tested: Set[str] = set() - tested_but_hard_failure: List[str] = [] - tested_but_soft_failure: List[str] = [] + tested: set[str] = set() + tested_but_hard_failure: list[str] = [] + tested_but_soft_failure: list[str] = [] - fallback_ascii: Optional[CharsetMatch] = None - fallback_u8: Optional[CharsetMatch] = None - fallback_specified: Optional[CharsetMatch] = None + fallback_ascii: CharsetMatch | None = None + fallback_u8: CharsetMatch | None = None + fallback_specified: CharsetMatch | None = None results: CharsetMatches = CharsetMatches() @@ -189,7 +189,7 @@ def from_bytes( tested.add(encoding_iana) - decoded_payload: Optional[str] = None + decoded_payload: str | None = None bom_or_sig_available: bool = sig_encoding == encoding_iana strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( encoding_iana @@ -292,7 +292,7 @@ def from_bytes( early_stop_count: int = 0 lazy_str_hard_failure = False - md_chunks: List[str] = [] + md_chunks: list[str] = [] md_ratios = [] try: @@ -397,7 +397,7 @@ def from_bytes( ) if not is_multi_byte_decoder: - target_languages: 
List[str] = encoding_languages(encoding_iana) + target_languages: list[str] = encoding_languages(encoding_iana) else: target_languages = mb_encoding_languages(encoding_iana) @@ -462,7 +462,7 @@ def from_bytes( "Encoding detection: %s is most likely the one.", current_match.encoding, ) - if explain: + if explain: # Defensive: ensure exit path clean handler logger.removeHandler(explain_handler) logger.setLevel(previous_logger_level) return CharsetMatches([current_match]) @@ -480,7 +480,7 @@ def from_bytes( "Encoding detection: %s is most likely the one.", probable_result.encoding, ) - if explain: + if explain: # Defensive: ensure exit path clean handler logger.removeHandler(explain_handler) logger.setLevel(previous_logger_level) @@ -492,7 +492,7 @@ def from_bytes( "the beginning of the sequence.", encoding_iana, ) - if explain: + if explain: # Defensive: ensure exit path clean handler logger.removeHandler(explain_handler) logger.setLevel(previous_logger_level) return CharsetMatches([results[encoding_iana]]) @@ -546,8 +546,8 @@ def from_fp( steps: int = 5, chunk_size: int = 512, threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, + cp_isolation: list[str] | None = None, + cp_exclusion: list[str] | None = None, preemptive_behaviour: bool = True, explain: bool = False, language_threshold: float = 0.1, @@ -572,12 +572,12 @@ def from_fp( def from_path( - path: Union[str, bytes, PathLike], # type: ignore[type-arg] + path: str | bytes | PathLike, # type: ignore[type-arg] steps: int = 5, chunk_size: int = 512, threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, + cp_isolation: list[str] | None = None, + cp_exclusion: list[str] | None = None, preemptive_behaviour: bool = True, explain: bool = False, language_threshold: float = 0.1, @@ -603,12 +603,12 @@ def from_path( def is_binary( - fp_or_path_or_payload: Union[PathLike, str, BinaryIO, bytes], # type: ignore[type-arg] + fp_or_path_or_payload: PathLike | str | BinaryIO | bytes, # type: ignore[type-arg] steps: int = 5, chunk_size: int = 512, threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, + cp_isolation: list[str] | None = None, + cp_exclusion: list[str] | None = None, preemptive_behaviour: bool = True, explain: bool = False, language_threshold: float = 0.1, diff --git a/vendor/charset_normalizer/cd.py b/vendor/charset_normalizer/cd.py index 4ea6760..71a3ed5 100644 --- a/vendor/charset_normalizer/cd.py +++ b/vendor/charset_normalizer/cd.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import importlib from codecs import IncrementalDecoder from collections import Counter from functools import lru_cache -from typing import Counter as TypeCounter, Dict, List, Optional, Tuple +from typing import Counter as TypeCounter from .constant import ( FREQUENCIES, @@ -22,26 +24,24 @@ ) -def encoding_unicode_range(iana_name: str) -> List[str]: +def encoding_unicode_range(iana_name: str) -> list[str]: """ Return associated unicode ranges in a single byte code page. 
""" if is_multi_byte_encoding(iana_name): - raise IOError("Function not supported on multi-byte code page") + raise OSError("Function not supported on multi-byte code page") - decoder = importlib.import_module( - "encodings.{}".format(iana_name) - ).IncrementalDecoder + decoder = importlib.import_module(f"encodings.{iana_name}").IncrementalDecoder p: IncrementalDecoder = decoder(errors="ignore") - seen_ranges: Dict[str, int] = {} + seen_ranges: dict[str, int] = {} character_count: int = 0 for i in range(0x40, 0xFF): chunk: str = p.decode(bytes([i])) if chunk: - character_range: Optional[str] = unicode_range(chunk) + character_range: str | None = unicode_range(chunk) if character_range is None: continue @@ -61,11 +61,11 @@ def encoding_unicode_range(iana_name: str) -> List[str]: ) -def unicode_range_languages(primary_range: str) -> List[str]: +def unicode_range_languages(primary_range: str) -> list[str]: """ Return inferred languages used with a unicode range. """ - languages: List[str] = [] + languages: list[str] = [] for language, characters in FREQUENCIES.items(): for character in characters: @@ -77,13 +77,13 @@ def unicode_range_languages(primary_range: str) -> List[str]: @lru_cache() -def encoding_languages(iana_name: str) -> List[str]: +def encoding_languages(iana_name: str) -> list[str]: """ Single-byte encoding language association. Some code page are heavily linked to particular language(s). This function does the correspondence. """ - unicode_ranges: List[str] = encoding_unicode_range(iana_name) - primary_range: Optional[str] = None + unicode_ranges: list[str] = encoding_unicode_range(iana_name) + primary_range: str | None = None for specified_range in unicode_ranges: if "Latin" not in specified_range: @@ -97,7 +97,7 @@ def encoding_languages(iana_name: str) -> List[str]: @lru_cache() -def mb_encoding_languages(iana_name: str) -> List[str]: +def mb_encoding_languages(iana_name: str) -> list[str]: """ Multi-byte encoding language association. Some code page are heavily linked to particular language(s). This function does the correspondence. @@ -118,7 +118,7 @@ def mb_encoding_languages(iana_name: str) -> List[str]: @lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) -def get_target_features(language: str) -> Tuple[bool, bool]: +def get_target_features(language: str) -> tuple[bool, bool]: """ Determine main aspects from a supported language if it contains accents and if is pure Latin. """ @@ -135,12 +135,12 @@ def get_target_features(language: str) -> Tuple[bool, bool]: def alphabet_languages( - characters: List[str], ignore_non_latin: bool = False -) -> List[str]: + characters: list[str], ignore_non_latin: bool = False +) -> list[str]: """ Return associated languages associated to given characters. """ - languages: List[Tuple[str, float]] = [] + languages: list[tuple[str, float]] = [] source_have_accents = any(is_accentuated(character) for character in characters) @@ -170,7 +170,7 @@ def alphabet_languages( def characters_popularity_compare( - language: str, ordered_characters: List[str] + language: str, ordered_characters: list[str] ) -> float: """ Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. @@ -178,7 +178,7 @@ def characters_popularity_compare( Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) 
""" if language not in FREQUENCIES: - raise ValueError("{} not available".format(language)) + raise ValueError(f"{language} not available") character_approved_count: int = 0 FREQUENCIES_language_set = set(FREQUENCIES[language]) @@ -214,14 +214,14 @@ def characters_popularity_compare( character_approved_count += 1 continue - characters_before_source: List[str] = FREQUENCIES[language][ + characters_before_source: list[str] = FREQUENCIES[language][ 0:character_rank_in_language ] - characters_after_source: List[str] = FREQUENCIES[language][ + characters_after_source: list[str] = FREQUENCIES[language][ character_rank_in_language: ] - characters_before: List[str] = ordered_characters[0:character_rank] - characters_after: List[str] = ordered_characters[character_rank:] + characters_before: list[str] = ordered_characters[0:character_rank] + characters_after: list[str] = ordered_characters[character_rank:] before_match_count: int = len( set(characters_before) & set(characters_before_source) @@ -249,24 +249,24 @@ def characters_popularity_compare( return character_approved_count / len(ordered_characters) -def alpha_unicode_split(decoded_sequence: str) -> List[str]: +def alpha_unicode_split(decoded_sequence: str) -> list[str]: """ Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; One containing the latin letters and the other hebrew. """ - layers: Dict[str, str] = {} + layers: dict[str, str] = {} for character in decoded_sequence: if character.isalpha() is False: continue - character_range: Optional[str] = unicode_range(character) + character_range: str | None = unicode_range(character) if character_range is None: continue - layer_target_range: Optional[str] = None + layer_target_range: str | None = None for discovered_range in layers: if ( @@ -288,12 +288,12 @@ def alpha_unicode_split(decoded_sequence: str) -> List[str]: return list(layers.values()) -def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: +def merge_coherence_ratios(results: list[CoherenceMatches]) -> CoherenceMatches: """ This function merge results previously given by the function coherence_ratio. The return type is the same as coherence_ratio. """ - per_language_ratios: Dict[str, List[float]] = {} + per_language_ratios: dict[str, list[float]] = {} for result in results: for sub_result in result: language, ratio = sub_result @@ -321,7 +321,7 @@ def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: We shall NOT return "English—" in CoherenceMatches because it is an alternative of "English". This function only keeps the best match and remove the em-dash in it. """ - index_results: Dict[str, List[float]] = dict() + index_results: dict[str, list[float]] = dict() for result in results: language, ratio = result @@ -345,14 +345,14 @@ def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: @lru_cache(maxsize=2048) def coherence_ratio( - decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: str | None = None ) -> CoherenceMatches: """ Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. A layer = Character extraction by alphabets/ranges. 
""" - results: List[Tuple[str, float]] = [] + results: list[tuple[str, float]] = [] ignore_non_latin: bool = False sufficient_match_count: int = 0 @@ -371,7 +371,7 @@ def coherence_ratio( if character_count <= TOO_SMALL_SEQUENCE: continue - popular_character_ordered: List[str] = [c for c, o in most_common] + popular_character_ordered: list[str] = [c for c, o in most_common] for language in lg_inclusion_list or alphabet_languages( popular_character_ordered, ignore_non_latin diff --git a/vendor/charset_normalizer/cli/__init__.py b/vendor/charset_normalizer/cli/__init__.py index d95fedf..543a5a4 100644 --- a/vendor/charset_normalizer/cli/__init__.py +++ b/vendor/charset_normalizer/cli/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from .__main__ import cli_detect, query_yes_no __all__ = ( diff --git a/vendor/charset_normalizer/cli/__main__.py b/vendor/charset_normalizer/cli/__main__.py index e7edd0f..cb64156 100644 --- a/vendor/charset_normalizer/cli/__main__.py +++ b/vendor/charset_normalizer/cli/__main__.py @@ -1,9 +1,11 @@ +from __future__ import annotations + import argparse import sys +import typing from json import dumps from os.path import abspath, basename, dirname, join, realpath from platform import python_version -from typing import List, Optional from unicodedata import unidata_version import charset_normalizer.md as md_module @@ -42,10 +44,69 @@ def query_yes_no(question: str, default: str = "yes") -> bool: elif choice in valid: return valid[choice] else: - sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n") + + +class FileType: + """Factory for creating file object types + + Instances of FileType are typically passed as type= arguments to the + ArgumentParser add_argument() method. + + Keyword Arguments: + - mode -- A string indicating how the file is to be opened. Accepts the + same values as the builtin open() function. + - bufsize -- The file's desired buffer size. Accepts the same values as + the builtin open() function. + - encoding -- The file's encoding. Accepts the same values as the + builtin open() function. + - errors -- A string indicating how encoding and decoding errors are to + be handled. Accepts the same value as the builtin open() function. 
+ + Backported from CPython 3.12 + """ + + def __init__( + self, + mode: str = "r", + bufsize: int = -1, + encoding: str | None = None, + errors: str | None = None, + ): + self._mode = mode + self._bufsize = bufsize + self._encoding = encoding + self._errors = errors + + def __call__(self, string: str) -> typing.IO: # type: ignore[type-arg] + # the special argument "-" means sys.std{in,out} + if string == "-": + if "r" in self._mode: + return sys.stdin.buffer if "b" in self._mode else sys.stdin + elif any(c in self._mode for c in "wax"): + return sys.stdout.buffer if "b" in self._mode else sys.stdout + else: + msg = f'argument "-" with mode {self._mode}' + raise ValueError(msg) + + # all other arguments are used as file names + try: + return open(string, self._mode, self._bufsize, self._encoding, self._errors) + except OSError as e: + message = f"can't open '{string}': {e}" + raise argparse.ArgumentTypeError(message) + + def __repr__(self) -> str: + args = self._mode, self._bufsize + kwargs = [("encoding", self._encoding), ("errors", self._errors)] + args_str = ", ".join( + [repr(arg) for arg in args if arg != -1] + + [f"{kw}={arg!r}" for kw, arg in kwargs if arg is not None] + ) + return f"{type(self).__name__}({args_str})" -def cli_detect(argv: Optional[List[str]] = None) -> int: +def cli_detect(argv: list[str] | None = None) -> int: """ CLI assistant using ARGV and ArgumentParser :param argv: @@ -58,7 +119,7 @@ def cli_detect(argv: Optional[List[str]] = None) -> int: ) parser.add_argument( - "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" + "files", type=FileType("rb"), nargs="+", help="File(s) to be analysed" ) parser.add_argument( "-v", @@ -124,7 +185,7 @@ def cli_detect(argv: Optional[List[str]] = None) -> int: default=0.2, type=float, dest="threshold", - help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.", + help="Define a custom maximum amount of noise allowed in decoded content. 0. 
<= noise <= 1.", ) parser.add_argument( "--version", @@ -259,7 +320,7 @@ def cli_detect(argv: Optional[List[str]] = None) -> int: dir_path = dirname(realpath(my_file.name)) file_name = basename(realpath(my_file.name)) - o_: List[str] = file_name.split(".") + o_: list[str] = file_name.split(".") if args.replace is False: o_.insert(-1, best_guess.encoding) @@ -284,7 +345,7 @@ def cli_detect(argv: Optional[List[str]] = None) -> int: with open(x_[0].unicode_path, "wb") as fp: fp.write(best_guess.output()) - except IOError as e: + except OSError as e: print(str(e), file=sys.stderr) if my_file.closed is False: my_file.close() diff --git a/vendor/charset_normalizer/constant.py b/vendor/charset_normalizer/constant.py index f8f2a81..cc71a01 100644 --- a/vendor/charset_normalizer/constant.py +++ b/vendor/charset_normalizer/constant.py @@ -1,11 +1,12 @@ -# -*- coding: utf-8 -*- +from __future__ import annotations + from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE from encodings.aliases import aliases -from re import IGNORECASE, compile as re_compile -from typing import Dict, List, Set, Union +from re import IGNORECASE +from re import compile as re_compile # Contain for each eligible encoding a list of/item bytes SIG/BOM -ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = { +ENCODING_MARKS: dict[str, bytes | list[bytes]] = { "utf_8": BOM_UTF8, "utf_7": [ b"\x2b\x2f\x76\x38", @@ -25,7 +26,7 @@ UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 # Up-to-date Unicode ucd/15.0.0 -UNICODE_RANGES_COMBINED: Dict[str, range] = { +UNICODE_RANGES_COMBINED: dict[str, range] = { "Control character": range(32), "Basic Latin": range(32, 128), "Latin-1 Supplement": range(128, 256), @@ -357,7 +358,7 @@ } -UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [ +UNICODE_SECONDARY_RANGE_KEYWORD: list[str] = [ "Supplement", "Extended", "Extensions", @@ -392,7 +393,7 @@ "koi8_u", ] -IANA_SUPPORTED: List[str] = sorted( +IANA_SUPPORTED: list[str] = sorted( filter( lambda x: x.endswith("_codec") is False and x not in {"rot_13", "tactis", "mbcs"}, @@ -403,7 +404,7 @@ IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) # pre-computed code page that are similar using the function cp_similarity. 
-IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = { +IANA_SUPPORTED_SIMILAR: dict[str, list[str]] = { "cp037": ["cp1026", "cp1140", "cp273", "cp500"], "cp1026": ["cp037", "cp1140", "cp273", "cp500"], "cp1125": ["cp866"], @@ -492,7 +493,7 @@ } -CHARDET_CORRESPONDENCE: Dict[str, str] = { +CHARDET_CORRESPONDENCE: dict[str, str] = { "iso2022_kr": "ISO-2022-KR", "iso2022_jp": "ISO-2022-JP", "euc_kr": "EUC-KR", @@ -528,7 +529,7 @@ } -COMMON_SAFE_ASCII_CHARACTERS: Set[str] = { +COMMON_SAFE_ASCII_CHARACTERS: set[str] = { "<", ">", "=", @@ -548,9 +549,26 @@ ")", } +# Sample character sets — replace with full lists if needed +COMMON_CHINESE_CHARACTERS = "的一是在不了有和人这中大为上个国我以要他时来用们生到作地于出就分对成会可主发年动同工也能下过子说产种面而方后多定行学法所民得经十三之进着等部度家电力里如水化高自二理起小物现实加量都两体制机当使点从业本去把性好应开它合还因由其些然前外天政四日那社义事平形相全表间样与关各重新线内数正心反你明看原又么利比或但质气第向道命此变条只没结解问意建月公无系军很情者最立代想已通并提直题党程展五果料象员革位入常文总次品式活设及管特件长求老头基资边流路级少图山统接知较将组见计别她手角期根论运农指几九区强放决西被干做必战先回则任取据处队南给色光门即保治北造百规热领七海口东导器压志世金增争济阶油思术极交受联什认六共权收证改清己美再采转更单风切打白教速花带安场身车例真务具万每目至达走积示议声报斗完类八离华名确才科张信马节话米整空元况今集温传土许步群广石记需段研界拉林律叫且究观越织装影算低持音众书布复容儿须际商非验连断深难近矿千周委素技备半办青省列习响约支般史感劳便团往酸历市克何除消构府太准精值号率族维划选标写存候毛亲快效斯院查江型眼王按格养易置派层片始却专状育厂京识适属圆包火住调满县局照参红细引听该铁价严龙飞" + +COMMON_JAPANESE_CHARACTERS = "日一国年大十二本中長出三時行見月分後前生五間上東四今金九入学高円子外八六下来気小七山話女北午百書先名川千水半男西電校語土木聞食車何南万毎白天母火右読友左休父雨" + +COMMON_KOREAN_CHARACTERS = "一二三四五六七八九十百千萬上下左右中人女子大小山川日月火水木金土父母天地國名年時文校學生" + +# Combine all into a set +COMMON_CJK_CHARACTERS = set( + "".join( + [ + COMMON_CHINESE_CHARACTERS, + COMMON_JAPANESE_CHARACTERS, + COMMON_KOREAN_CHARACTERS, + ] + ) +) -KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"} -ZH_NAMES: Set[str] = {"big5", "cp950", "big5hkscs", "hz"} +KO_NAMES: set[str] = {"johab", "cp949", "euc_kr"} +ZH_NAMES: set[str] = {"big5", "cp950", "big5hkscs", "hz"} # Logging LEVEL below DEBUG TRACE: int = 5 @@ -558,7 +576,7 @@ # Language label that contain the em dash "—" # character are to be considered alternative seq to origin -FREQUENCIES: Dict[str, List[str]] = { +FREQUENCIES: dict[str, list[str]] = { "English": [ "e", "a", diff --git a/vendor/charset_normalizer/legacy.py b/vendor/charset_normalizer/legacy.py index 3f6d490..e221bec 100644 --- a/vendor/charset_normalizer/legacy.py +++ b/vendor/charset_normalizer/legacy.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from warnings import warn from .api import from_bytes @@ -11,9 +11,9 @@ from typing_extensions import TypedDict class ResultDict(TypedDict): - encoding: Optional[str] + encoding: str | None language: str - confidence: Optional[float] + confidence: float | None def detect( @@ -37,8 +37,7 @@ def detect( if not isinstance(byte_str, (bytearray, bytes)): raise TypeError( # pragma: nocover - "Expected object of type bytes or bytearray, got: " - "{0}".format(type(byte_str)) + f"Expected object of type bytes or bytearray, got: {type(byte_str)}" ) if isinstance(byte_str, bytearray): diff --git a/vendor/charset_normalizer/md.py b/vendor/charset_normalizer/md.py index d834db0..12ce024 100644 --- a/vendor/charset_normalizer/md.py +++ b/vendor/charset_normalizer/md.py @@ -1,6 +1,7 @@ +from __future__ import annotations + from functools import lru_cache from logging import getLogger -from typing import List, Optional from .constant import ( COMMON_SAFE_ASCII_CHARACTERS, @@ -25,6 +26,7 @@ is_unprintable, remove_accent, unicode_range, + is_cjk_uncommon, ) @@ -68,7 +70,7 @@ def __init__(self) -> None: self._symbol_count: int = 0 self._character_count: int = 0 - self._last_printable_char: Optional[str] = None 
+ self._last_printable_char: str | None = None self._frenzy_symbol_in_word: bool = False def eligible(self, character: str) -> bool: @@ -92,7 +94,7 @@ def feed(self, character: str) -> None: self._last_printable_char = character - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._punctuation_count = 0 self._character_count = 0 self._symbol_count = 0 @@ -123,7 +125,7 @@ def feed(self, character: str) -> None: if is_accentuated(character): self._accentuated_count += 1 - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._character_count = 0 self._accentuated_count = 0 @@ -149,7 +151,7 @@ def feed(self, character: str) -> None: self._unprintable_count += 1 self._character_count += 1 - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._unprintable_count = 0 @property @@ -165,7 +167,7 @@ def __init__(self) -> None: self._successive_count: int = 0 self._character_count: int = 0 - self._last_latin_character: Optional[str] = None + self._last_latin_character: str | None = None def eligible(self, character: str) -> bool: return character.isalpha() and is_latin(character) @@ -184,7 +186,7 @@ def feed(self, character: str) -> None: self._successive_count += 1 self._last_latin_character = character - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._successive_count = 0 self._character_count = 0 self._last_latin_character = None @@ -201,7 +203,7 @@ class SuspiciousRange(MessDetectorPlugin): def __init__(self) -> None: self._suspicious_successive_range_count: int = 0 self._character_count: int = 0 - self._last_printable_seen: Optional[str] = None + self._last_printable_seen: str | None = None def eligible(self, character: str) -> bool: return character.isprintable() @@ -221,15 +223,15 @@ def feed(self, character: str) -> None: self._last_printable_seen = character return - unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen) - unicode_range_b: Optional[str] = unicode_range(character) + unicode_range_a: str | None = unicode_range(self._last_printable_seen) + unicode_range_b: str | None = unicode_range(character) if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): self._suspicious_successive_range_count += 1 self._last_printable_seen = character - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._character_count = 0 self._suspicious_successive_range_count = 0 self._last_printable_seen = None @@ -346,7 +348,7 @@ def feed(self, character: str) -> None: self._is_current_word_bad = True self._buffer += character - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._buffer = "" self._is_current_word_bad = False self._foreign_long_watch = False @@ -364,35 +366,39 @@ def ratio(self) -> float: return self._bad_character_count / self._character_count -class CjkInvalidStopPlugin(MessDetectorPlugin): +class CjkUncommonPlugin(MessDetectorPlugin): """ - GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and - can be easily detected. Searching for the overuse of '丅' and '丄'. + Detect messy CJK text that probably means nothing. 
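+
+    A rough doctest-style sketch of the scoring. The sample characters and
+    the 0.5 / divide-by-ten thresholds are the ones introduced in this
+    patch; "common" means present in COMMON_CJK_CHARACTERS:
+
+        >>> plugin = CjkUncommonPlugin()
+        >>> for ch in "的一是在不了有和":  # eight common characters
+        ...     plugin.feed(ch)
+        >>> plugin.ratio  # 0% uncommon usage, under the 0.5 cut-off
+        0.0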
""" def __init__(self) -> None: - self._wrong_stop_count: int = 0 - self._cjk_character_count: int = 0 + self._character_count: int = 0 + self._uncommon_count: int = 0 def eligible(self, character: str) -> bool: - return True + return is_cjk(character) def feed(self, character: str) -> None: - if character in {"丅", "丄"}: - self._wrong_stop_count += 1 + self._character_count += 1 + + if is_cjk_uncommon(character): + self._uncommon_count += 1 return - if is_cjk(character): - self._cjk_character_count += 1 - def reset(self) -> None: # pragma: no cover - self._wrong_stop_count = 0 - self._cjk_character_count = 0 + def reset(self) -> None: # Abstract + self._character_count = 0 + self._uncommon_count = 0 @property def ratio(self) -> float: - if self._cjk_character_count < 16: + if self._character_count < 8: return 0.0 - return self._wrong_stop_count / self._cjk_character_count + + uncommon_form_usage: float = self._uncommon_count / self._character_count + + # we can be pretty sure it's garbage when uncommon characters are widely + # used. otherwise it could just be traditional chinese for example. + return uncommon_form_usage / 10 if uncommon_form_usage > 0.5 else 0.0 class ArchaicUpperLowerPlugin(MessDetectorPlugin): @@ -406,7 +412,7 @@ def __init__(self) -> None: self._character_count: int = 0 - self._last_alpha_seen: Optional[str] = None + self._last_alpha_seen: str | None = None self._current_ascii_only: bool = True def eligible(self, character: str) -> bool: @@ -454,7 +460,7 @@ def feed(self, character: str) -> None: self._character_count_since_last_sep += 1 self._last_alpha_seen = character - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._character_count = 0 self._character_count_since_last_sep = 0 self._successive_upper_lower_count = 0 @@ -476,7 +482,7 @@ def __init__(self) -> None: self._character_count: int = 0 self._isolated_form_count: int = 0 - def reset(self) -> None: # pragma: no cover + def reset(self) -> None: # Abstract self._character_count = 0 self._isolated_form_count = 0 @@ -501,7 +507,7 @@ def ratio(self) -> float: @lru_cache(maxsize=1024) def is_suspiciously_successive_range( - unicode_range_a: Optional[str], unicode_range_b: Optional[str] + unicode_range_a: str | None, unicode_range_b: str | None ) -> bool: """ Determine if two Unicode range seen next to each other can be considered as suspicious. @@ -525,9 +531,10 @@ def is_suspiciously_successive_range( ): return False - keywords_range_a, keywords_range_b = unicode_range_a.split( - " " - ), unicode_range_b.split(" ") + keywords_range_a, keywords_range_b = ( + unicode_range_a.split(" "), + unicode_range_b.split(" "), + ) for el in keywords_range_a: if el in UNICODE_SECONDARY_RANGE_KEYWORD: @@ -580,7 +587,7 @@ def mess_ratio( Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. 
""" - detectors: List[MessDetectorPlugin] = [ + detectors: list[MessDetectorPlugin] = [ md_class() for md_class in MessDetectorPlugin.__subclasses__() ] @@ -622,7 +629,7 @@ def mess_ratio( logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") - for dt in detectors: # pragma: nocover + for dt in detectors: logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") return round(mean_mess_ratio, 3) diff --git a/vendor/charset_normalizer/models.py b/vendor/charset_normalizer/models.py index 6f6b86b..1042758 100644 --- a/vendor/charset_normalizer/models.py +++ b/vendor/charset_normalizer/models.py @@ -1,8 +1,10 @@ +from __future__ import annotations + from encodings.aliases import aliases from hashlib import sha256 from json import dumps from re import sub -from typing import Any, Dict, Iterator, List, Optional, Tuple, Union +from typing import Any, Iterator, List, Tuple from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE from .utils import iana_name, is_multi_byte_encoding, unicode_range @@ -15,9 +17,9 @@ def __init__( guessed_encoding: str, mean_mess_ratio: float, has_sig_or_bom: bool, - languages: "CoherenceMatches", - decoded_payload: Optional[str] = None, - preemptive_declaration: Optional[str] = None, + languages: CoherenceMatches, + decoded_payload: str | None = None, + preemptive_declaration: str | None = None, ): self._payload: bytes = payload @@ -25,17 +27,17 @@ def __init__( self._mean_mess_ratio: float = mean_mess_ratio self._languages: CoherenceMatches = languages self._has_sig_or_bom: bool = has_sig_or_bom - self._unicode_ranges: Optional[List[str]] = None + self._unicode_ranges: list[str] | None = None - self._leaves: List[CharsetMatch] = [] + self._leaves: list[CharsetMatch] = [] self._mean_coherence_ratio: float = 0.0 - self._output_payload: Optional[bytes] = None - self._output_encoding: Optional[str] = None + self._output_payload: bytes | None = None + self._output_encoding: str | None = None - self._string: Optional[str] = decoded_payload + self._string: str | None = decoded_payload - self._preemptive_declaration: Optional[str] = preemptive_declaration + self._preemptive_declaration: str | None = preemptive_declaration def __eq__(self, other: object) -> bool: if not isinstance(other, CharsetMatch): @@ -77,9 +79,9 @@ def __str__(self) -> str: return self._string def __repr__(self) -> str: - return "".format(self.encoding, self.fingerprint) + return f"" - def add_submatch(self, other: "CharsetMatch") -> None: + def add_submatch(self, other: CharsetMatch) -> None: if not isinstance(other, CharsetMatch) or other == self: raise ValueError( "Unable to add instance <{}> as a submatch of a CharsetMatch".format( @@ -95,11 +97,11 @@ def encoding(self) -> str: return self._encoding @property - def encoding_aliases(self) -> List[str]: + def encoding_aliases(self) -> list[str]: """ Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. """ - also_known_as: List[str] = [] + also_known_as: list[str] = [] for u, p in aliases.items(): if self.encoding == u: also_known_as.append(p) @@ -116,7 +118,7 @@ def byte_order_mark(self) -> bool: return self._has_sig_or_bom @property - def languages(self) -> List[str]: + def languages(self) -> list[str]: """ Return the complete list of possible languages found in decoded sequence. Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. 
@@ -177,7 +179,7 @@ def raw(self) -> bytes: return self._payload @property - def submatch(self) -> List["CharsetMatch"]: + def submatch(self) -> list[CharsetMatch]: return self._leaves @property @@ -185,19 +187,17 @@ def has_submatch(self) -> bool: return len(self._leaves) > 0 @property - def alphabets(self) -> List[str]: + def alphabets(self) -> list[str]: if self._unicode_ranges is not None: return self._unicode_ranges # list detected ranges - detected_ranges: List[Optional[str]] = [ - unicode_range(char) for char in str(self) - ] + detected_ranges: list[str | None] = [unicode_range(char) for char in str(self)] # filter and sort self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) return self._unicode_ranges @property - def could_be_from_charset(self) -> List[str]: + def could_be_from_charset(self) -> list[str]: """ The complete list of encoding that output the exact SAME str result and therefore could be the originating encoding. @@ -221,10 +221,11 @@ def output(self, encoding: str = "utf_8") -> bytes: patched_header = sub( RE_POSSIBLE_ENCODING_INDICATION, lambda m: m.string[m.span()[0] : m.span()[1]].replace( - m.groups()[0], iana_name(self._output_encoding) # type: ignore[arg-type] + m.groups()[0], + iana_name(self._output_encoding).replace("_", "-"), # type: ignore[arg-type] ), decoded_string[:8192], - 1, + count=1, ) decoded_string = patched_header + decoded_string[8192:] @@ -247,13 +248,13 @@ class CharsetMatches: Act like a list(iterable) but does not implements all related methods. """ - def __init__(self, results: Optional[List[CharsetMatch]] = None): - self._results: List[CharsetMatch] = sorted(results) if results else [] + def __init__(self, results: list[CharsetMatch] | None = None): + self._results: list[CharsetMatch] = sorted(results) if results else [] def __iter__(self) -> Iterator[CharsetMatch]: yield from self._results - def __getitem__(self, item: Union[int, str]) -> CharsetMatch: + def __getitem__(self, item: int | str) -> CharsetMatch: """ Retrieve a single item either by its position or encoding name (alias may be used here). Raise KeyError upon invalid index or encoding not present in results. @@ -293,7 +294,7 @@ def append(self, item: CharsetMatch) -> None: self._results.append(item) self._results = sorted(self._results) - def best(self) -> Optional["CharsetMatch"]: + def best(self) -> CharsetMatch | None: """ Simply return the first match. Strict equivalent to matches[0]. """ @@ -301,7 +302,7 @@ def best(self) -> Optional["CharsetMatch"]: return None return self._results[0] - def first(self) -> Optional["CharsetMatch"]: + def first(self) -> CharsetMatch | None: """ Redundant method, call the method best(). Kept for BC reasons. 
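+
+        A tiny sketch of the contract shared with best() (an empty result
+        set has no best match):
+
+            >>> CharsetMatches([]).first() is None
+            True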
""" @@ -316,31 +317,31 @@ class CliDetectionResult: def __init__( self, path: str, - encoding: Optional[str], - encoding_aliases: List[str], - alternative_encodings: List[str], + encoding: str | None, + encoding_aliases: list[str], + alternative_encodings: list[str], language: str, - alphabets: List[str], + alphabets: list[str], has_sig_or_bom: bool, chaos: float, coherence: float, - unicode_path: Optional[str], + unicode_path: str | None, is_preferred: bool, ): self.path: str = path - self.unicode_path: Optional[str] = unicode_path - self.encoding: Optional[str] = encoding - self.encoding_aliases: List[str] = encoding_aliases - self.alternative_encodings: List[str] = alternative_encodings + self.unicode_path: str | None = unicode_path + self.encoding: str | None = encoding + self.encoding_aliases: list[str] = encoding_aliases + self.alternative_encodings: list[str] = alternative_encodings self.language: str = language - self.alphabets: List[str] = alphabets + self.alphabets: list[str] = alphabets self.has_sig_or_bom: bool = has_sig_or_bom self.chaos: float = chaos self.coherence: float = coherence self.is_preferred: bool = is_preferred @property - def __dict__(self) -> Dict[str, Any]: # type: ignore + def __dict__(self) -> dict[str, Any]: # type: ignore return { "path": self.path, "encoding": self.encoding, diff --git a/vendor/charset_normalizer/utils.py b/vendor/charset_normalizer/utils.py index e5cbbf4..6bf0384 100644 --- a/vendor/charset_normalizer/utils.py +++ b/vendor/charset_normalizer/utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import importlib import logging import unicodedata @@ -5,9 +7,11 @@ from encodings.aliases import aliases from functools import lru_cache from re import findall -from typing import Generator, List, Optional, Set, Tuple, Union +from typing import Generator -from _multibytecodec import MultibyteIncrementalDecoder +from _multibytecodec import ( # type: ignore[import-not-found,import] + MultibyteIncrementalDecoder, +) from .constant import ( ENCODING_MARKS, @@ -16,6 +20,7 @@ UNICODE_RANGES_COMBINED, UNICODE_SECONDARY_RANGE_KEYWORD, UTF8_MAXIMAL_ALLOCATION, + COMMON_CJK_CHARACTERS, ) @@ -23,7 +28,7 @@ def is_accentuated(character: str) -> bool: try: description: str = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return ( "WITH GRAVE" in description @@ -43,13 +48,13 @@ def remove_accent(character: str) -> str: if not decomposed: return character - codes: List[str] = decomposed.split(" ") + codes: list[str] = decomposed.split(" ") return chr(int(codes[0], 16)) @lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def unicode_range(character: str) -> Optional[str]: +def unicode_range(character: str) -> str | None: """ Retrieve the Unicode range official name from a single character. """ @@ -66,7 +71,7 @@ def unicode_range(character: str) -> Optional[str]: def is_latin(character: str) -> bool: try: description: str = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? 
return False return "LATIN" in description @@ -78,7 +83,7 @@ def is_punctuation(character: str) -> bool: if "P" in character_category: return True - character_range: Optional[str] = unicode_range(character) + character_range: str | None = unicode_range(character) if character_range is None: return False @@ -93,7 +98,7 @@ def is_symbol(character: str) -> bool: if "S" in character_category or "N" in character_category: return True - character_range: Optional[str] = unicode_range(character) + character_range: str | None = unicode_range(character) if character_range is None: return False @@ -103,7 +108,7 @@ def is_symbol(character: str) -> bool: @lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) def is_emoticon(character: str) -> bool: - character_range: Optional[str] = unicode_range(character) + character_range: str | None = unicode_range(character) if character_range is None: return False @@ -130,7 +135,7 @@ def is_case_variable(character: str) -> bool: def is_cjk(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "CJK" in character_name @@ -140,7 +145,7 @@ def is_cjk(character: str) -> bool: def is_hiragana(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "HIRAGANA" in character_name @@ -150,7 +155,7 @@ def is_hiragana(character: str) -> bool: def is_katakana(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "KATAKANA" in character_name @@ -160,7 +165,7 @@ def is_katakana(character: str) -> bool: def is_hangul(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "HANGUL" in character_name @@ -170,7 +175,7 @@ def is_hangul(character: str) -> bool: def is_thai(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "THAI" in character_name @@ -180,7 +185,7 @@ def is_thai(character: str) -> bool: def is_arabic(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "ARABIC" in character_name @@ -190,12 +195,17 @@ def is_arabic(character: str) -> bool: def is_arabic_isolated_form(character: str) -> bool: try: character_name = unicodedata.name(character) - except ValueError: + except ValueError: # Defensive: unicode database outdated? return False return "ARABIC" in character_name and "ISOLATED FORM" in character_name +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk_uncommon(character: str) -> bool: + return character not in COMMON_CJK_CHARACTERS + + @lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) def is_unicode_range_secondary(range_name: str) -> bool: return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) @@ -206,13 +216,13 @@ def is_unprintable(character: str) -> bool: return ( character.isspace() is False # includes \n \t \r \v and character.isprintable() is False - and character != "\x1A" # Why? Its the ASCII substitute character. + and character != "\x1a" # Why? Its the ASCII substitute character. 
and character != "\ufeff" # bug discovered in Python, # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. ) -def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> Optional[str]: +def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> str | None: """ Extract using ASCII-only decoder any specified encoding in the first n-bytes. """ @@ -221,7 +231,7 @@ def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> Optional seq_len: int = len(sequence) - results: List[str] = findall( + results: list[str] = findall( RE_POSSIBLE_ENCODING_INDICATION, sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), ) @@ -260,18 +270,18 @@ def is_multi_byte_encoding(name: str) -> bool: "utf_32_be", "utf_7", } or issubclass( - importlib.import_module("encodings.{}".format(name)).IncrementalDecoder, + importlib.import_module(f"encodings.{name}").IncrementalDecoder, MultibyteIncrementalDecoder, ) -def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]: +def identify_sig_or_bom(sequence: bytes) -> tuple[str | None, bytes]: """ Identify and extract SIG/BOM in given sequence. """ for iana_encoding in ENCODING_MARKS: - marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding] + marks: bytes | list[bytes] = ENCODING_MARKS[iana_encoding] if isinstance(marks, bytes): marks = [marks] @@ -288,6 +298,7 @@ def should_strip_sig_or_bom(iana_encoding: str) -> bool: def iana_name(cp_name: str, strict: bool = True) -> str: + """Returns the Python normalized encoding name (Not the IANA official name).""" cp_name = cp_name.lower().replace("-", "_") encoding_alias: str @@ -298,35 +309,17 @@ def iana_name(cp_name: str, strict: bool = True) -> str: return encoding_iana if strict: - raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) + raise ValueError(f"Unable to retrieve IANA for '{cp_name}'") return cp_name -def range_scan(decoded_sequence: str) -> List[str]: - ranges: Set[str] = set() - - for character in decoded_sequence: - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - continue - - ranges.add(character_range) - - return list(ranges) - - def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): return 0.0 - decoder_a = importlib.import_module( - "encodings.{}".format(iana_name_a) - ).IncrementalDecoder - decoder_b = importlib.import_module( - "encodings.{}".format(iana_name_b) - ).IncrementalDecoder + decoder_a = importlib.import_module(f"encodings.{iana_name_a}").IncrementalDecoder + decoder_b = importlib.import_module(f"encodings.{iana_name_b}").IncrementalDecoder id_a: IncrementalDecoder = decoder_a(errors="ignore") id_b: IncrementalDecoder = decoder_b(errors="ignore") @@ -374,7 +367,7 @@ def cut_sequence_chunks( strip_sig_or_bom: bool, sig_payload: bytes, is_multi_byte_decoder: bool, - decoded_payload: Optional[str] = None, + decoded_payload: str | None = None, ) -> Generator[str, None, None]: if decoded_payload and is_multi_byte_decoder is False: for i in offsets: diff --git a/vendor/charset_normalizer/version.py b/vendor/charset_normalizer/version.py index 699990e..e5687e3 100644 --- a/vendor/charset_normalizer/version.py +++ b/vendor/charset_normalizer/version.py @@ -2,5 +2,7 @@ Expose version """ -__version__ = "3.4.0" +from __future__ import annotations + +__version__ = "3.4.2" VERSION = __version__.split(".") diff --git 
a/vendor/ghastoolkit/__init__.py b/vendor/ghastoolkit/__init__.py index 5518678..0f8bff8 100644 --- a/vendor/ghastoolkit/__init__.py +++ b/vendor/ghastoolkit/__init__.py @@ -3,7 +3,7 @@ __name__ = "ghastoolkit" __title__ = "GHAS Toolkit" -__version__ = "0.15.1" +__version__ = "0.17.7" __description__ = "GitHub Advanced Security Python Toolkit" __summary__ = """\ @@ -23,7 +23,7 @@ | | \\/| |_| |/ /_\\ \\\\ `--. | | ___ ___ | | | ___| |_ | | __ | _ || _ | `--. \\ | |/ _ \\ / _ \\| | |/ / | __| | |_\\ \\| | | || | | |/\\__/ / | | (_) | (_) | | <| | |_ - \\____/\\_| |_/\\_| |_/\\____/ \\_/\\___/ \\___/|_|_|\\_\\_|\\__| v{__version__} + \\____/\\_| |_/\\_| |_/\\____/ \\_/\\___/ \\___/|_|_|\\_\\_|\\__| v{__version__} by {__author__} """ @@ -43,7 +43,8 @@ # Supply Chain from ghastoolkit.supplychain.advisories import Advisory, Advisories from ghastoolkit.supplychain.dependencyalert import DependencyAlert -from ghastoolkit.supplychain.dependencies import Dependency, Dependencies +from ghastoolkit.supplychain.dependencies import Dependencies +from ghastoolkit.supplychain.dependency import Dependency from ghastoolkit.supplychain.licensing import Licenses # CodeQL diff --git a/vendor/ghastoolkit/__main__.py b/vendor/ghastoolkit/__main__.py index 5042517..0fd36e0 100644 --- a/vendor/ghastoolkit/__main__.py +++ b/vendor/ghastoolkit/__main__.py @@ -52,7 +52,11 @@ def run(self, arguments: Namespace): logging.info(f"v{__version__}") return - logging.info(__banner__) + print(__banner__) + + if not arguments.token or arguments.token == "": + logging.error("Missing GitHub token.") + return if arguments.mode in ["all", "codescanning"]: logging.info("") diff --git a/vendor/ghastoolkit/billing/__main__.py b/vendor/ghastoolkit/billing/__main__.py new file mode 100644 index 0000000..6fd765c --- /dev/null +++ b/vendor/ghastoolkit/billing/__main__.py @@ -0,0 +1,114 @@ +"""CodeQL CLI for ghastoolkit.""" + +import csv +import logging +from argparse import Namespace +from typing import List +from ghastoolkit.octokit.github import GitHub +from ghastoolkit.octokit.enterprise import Organization +from ghastoolkit.octokit.billing import Billing +from ghastoolkit.utils.cli import CommandLine + +logger = logging.getLogger("ghastoolkit-billing") + + +class CostCenter: + """Cost Center.""" + + def __init__(self, name: str, repositories: list[str] = []) -> None: + """Initialize Cost Center.""" + self.name = name + self.repositories = set(repositories) + + def addRepository(self, repo: str): + """Add a Repository.""" + self.repositories.add(repo) + + +def loadCostCenterCsv(path: str) -> List[CostCenter]: + cost_centers = {} + + with open(path, "r") as csv_file: + csv_reader = csv.DictReader(csv_file) + + for row in csv_reader: + cost_center = row["Cost Center"] + repo = row["Repository"] + + if cost_centers.get(cost_center): + cost_centers[cost_center].addRepository(repo) + else: + cost_centers[cost_center] = CostCenter(cost_center, [repo]) + + return cost_centers.values() + + +class BillingCommandLine(CommandLine): + """Billing CLI.""" + + def arguments(self): + """Billing arguments.""" + if self.subparser: + # self.addModes([""]) + + parser = self.parser.add_argument_group("billing") + parser.add_argument( + "--csv", + help="Input CSV Billing File", + ) + parser.add_argument( + "--cost-center", + help="Cost Center CSV File", + ) + + def run(self, arguments: Namespace): + self.default_logger() + + org = Organization(GitHub.owner) + + if arguments.csv: + logging.info(f"Loading GHAS Billing from {arguments.csv}") + + ghas = 
Billing.loadFromCsv(arguments.csv) + else: + if GitHub.token is None: + logger.error("No GitHub Token provided") + return + billing = Billing(org) + ghas = billing.getGhasBilling() + + if not ghas: + logger.error("No GHAS Billing found") + return + + print(f"GHAS Active Committers :: {ghas.active}") + print(f"GHAS Maximum Committers :: {ghas.maximum}") + print(f"GHAS Purchased Committers :: {ghas.purchased}") + + if arguments.cost_center: + cost_centers = loadCostCenterCsv(arguments.cost_center) + print(f"\nCost Centers :: {len(cost_centers)}\n") + total = 0 + + for center in cost_centers: + active = set() + repos = 0 + + for repo in center.repositories: + r = ghas.getRepository(repo, org.name) + if r: + repos += 1 + active.update(r.activeCommitterNames()) + else: + logger.warning(f"Repository cost center not found: {repo}") + + print(f" > {center.name} (active: {len(active)}, repos: {repos})") + total += len(active) + + print(f"\nShared Cost Center Licenses :: {total - ghas.active}") + + +if __name__ == "__main__": + parser = BillingCommandLine("ghastoolkit-billing") + parser.run(parser.parse_args()) + logging.info(f"Finished!") diff --git a/vendor/ghastoolkit/codeql/consts.py b/vendor/ghastoolkit/codeql/consts.py index 3802940..4158a67 100644 --- a/vendor/ghastoolkit/codeql/consts.py +++ b/vendor/ghastoolkit/codeql/consts.py @@ -1,5 +1,7 @@ # Dict of CodeQL supported Languages CODEQL_LANGUAGES = { + "actions": "actions", + "c": "cpp", "cpp": "cpp", "csharp": "csharp", "java": "java", @@ -8,5 +10,7 @@ "typescript": "javascript", "go": "go", "python": "python", + "rust": "rust", + "swift": "swift", "ruby": "ruby", } diff --git a/vendor/ghastoolkit/codeql/databases.py b/vendor/ghastoolkit/codeql/databases.py index 116d735..835ee6a 100644 --- a/vendor/ghastoolkit/codeql/databases.py +++ b/vendor/ghastoolkit/codeql/databases.py @@ -61,7 +61,9 @@ def __post_init__(self): self.path_download = self.createDownloadPath() if self.language not in CODEQL_LANGUAGES: - raise Exception("Language is not supported by CodeQL Summary Generator") + logger.warning( + f"Language `{self.language}` is not supported by CodeQL Summary Generator" + ) def __str__(self) -> str: name = str(self.repository) if self.repository else self.name diff --git a/vendor/ghastoolkit/octokit/billing.py b/vendor/ghastoolkit/octokit/billing.py new file mode 100644 index 0000000..38abcc8 --- /dev/null +++ b/vendor/ghastoolkit/octokit/billing.py @@ -0,0 +1,185 @@ +import logging +import csv +from dataclasses import dataclass, field +from typing import Optional, List, Set + +from ghastoolkit.errors import GHASToolkitError +from ghastoolkit.octokit.github import GitHub +from ghastoolkit.octokit.octokit import RestRequest, OctoItem, loadOctoItem +from ghastoolkit.octokit.enterprise import Organization + + +logger = logging.getLogger("ghastoolkit.octokit.github") + + +@dataclass +class BillingUser(OctoItem): + """Billing User.""" + + user_login: str + """Login.""" + last_pushed_date: str + """Last Pushed Date.""" + last_pushed_email: str + """Last Pushed Email.""" + + @property + def login(self) -> str: + """Login.""" + return self.user_login + + +@dataclass +class BillingRepository(OctoItem): + """Billing Repository.""" + + name: str + """Repository Name.""" + advanced_security_committers: int + """Advanced Security Committers.""" + advanced_security_committers_breakdown: List[BillingUser] = field( + default_factory=list + ) + """Advanced Security Committers Breakdown.""" + + def activeCommitterCount(self) -> int: + """Count of Active 
Committers.""" + return len(self.advanced_security_committers_breakdown) + + def activeCommitterNames(self) -> Set[str]: + """Active Committer Names.""" + results = set() + for commiter in self.advanced_security_committers_breakdown: + results.add(commiter.login) + return results + + def activeCommitterEmails(self) -> Set[str]: + """Active Committer Emails.""" + results = set() + for commiter in self.advanced_security_committers_breakdown: + results.add(commiter.last_pushed_email) + return results + + +@dataclass +class GhasBilling(OctoItem): + """Billing Response.""" + + repositories: List[BillingRepository] = field(default_factory=list) + """Repositories (required).""" + + total_advanced_security_committers: Optional[int] = None + """Total Advanced Security Committers.""" + total_count: Optional[int] = None + """Total Count.""" + maximum_advanced_security_committers: Optional[int] = None + """Maximum Advanced Security Committers.""" + purchased_advanced_security_committers: Optional[int] = None + """Purchased Advanced Security Committers.""" + + @property + def active(self) -> int: + """Active Advanced Security Committers.""" + return self.total_advanced_security_committers or 0 + + @property + def maximum(self) -> int: + """Maximum Advanced Security Committers.""" + return self.maximum_advanced_security_committers or 0 + + @property + def purchased(self) -> int: + """Purchased Advanced Security Committers.""" + return self.purchased_advanced_security_committers or 0 + + def getRepository( + self, name: str, org: Optional[str] = None + ) -> Optional[BillingRepository]: + """Get Repository by Name.""" + for repo in self.repositories: + org, repo_name = repo.name.split("/", 1) + if repo_name == name: + return repo + + return None + + def activeCommitterNames(self) -> Set[str]: + """Active Committer Names.""" + results = set() + for repo in self.repositories: + results.update(repo.activeCommitterNames()) + return results + + def activeCommitterEmails(self) -> Set[str]: + """Active Committer Emails.""" + results = set() + for repo in self.repositories: + results.update(repo.activeCommitterEmails()) + return results + + +class Billing: + """GitHub Billing API""" + + def __init__(self, organization: Optional[Organization] = None) -> None: + """Initialise Billing API.""" + if organization is not None: + self.org = organization.name + else: + self.org = GitHub.owner + self.rest = RestRequest() + self.state = None + + def getGhasBilling(self) -> GhasBilling: + """Get GitHub Advanced Security Billing.""" + if self.org is None: + logger.error("No organization provided") + raise GHASToolkitError( + "No organization provided", + ) + result = self.rest.get(f"/orgs/{self.org}/settings/billing/advanced-security") + + if isinstance(result, dict): + return loadOctoItem(GhasBilling, result) + + logger.error("Error getting billing") + raise GHASToolkitError( + "Error getting billing", + permissions=['"Administration" organization permissions (read)'], + docs="https://docs.github.com/en/enterprise-cloud@latest/rest/billing/billing#get-github-advanced-security-active-committers-for-an-organization", + ) + + @staticmethod + def loadFromCsv(path: str) -> GhasBilling: + """Load Billing from CSV.""" + # name: { + repositories: dict[str, List[BillingUser]] = {} + unique_committers = [] + + with open(path, mode="r") as csv_file: + csv_reader = csv.DictReader(csv_file) + + for row in csv_reader: + repo = row["Organization / repository"] + # if exists, add user to list + user = BillingUser( + row["User login"], + 
row["Last pushed date"], + row["Last pushed email"], + ) + if repositories.get(repo): + repositories[repo].append(user) + else: + repositories[repo] = [user] + + if user.login not in unique_committers: + unique_committers.append(user.login) + + result = GhasBilling([]) + result.total_count = len(unique_committers) + result.total_advanced_security_committers = len(unique_committers) + + for repo, usrs in repositories.items(): + result.repositories.append(BillingRepository(repo, len(usrs), usrs)) + + return result diff --git a/vendor/ghastoolkit/octokit/dependabot.py b/vendor/ghastoolkit/octokit/dependabot.py index 05e86fb..f3ebfd8 100644 --- a/vendor/ghastoolkit/octokit/dependabot.py +++ b/vendor/ghastoolkit/octokit/dependabot.py @@ -1,7 +1,7 @@ """Dependabot API.""" import logging -from typing import Optional +from typing import Optional, Any from ghastoolkit.errors import GHASToolkitError, GHASToolkitTypeError from ghastoolkit.octokit.github import GitHub, Repository @@ -55,6 +55,110 @@ def isSecurityUpdatesEnabled(self) -> bool: status = saa.get("dependabot_security_updates", {}).get("status", "disabled") return status == "enabled" + def getEnterpriseAlerts( + self, + state: str = "open", + severity: Optional[str] = None, + ecosystem: Optional[str] = None, + package: Optional[str] = None, + manifest: Optional[str] = None, + scope: Optional[str] = None, + ) -> list[DependencyAlert]: + """Get all Dependabot alerts from Enterprise. + + Arguments: + severity (str): Severity of the alert. + Options: low, moderate, high, critical + ecosystem (str): Ecosystem of the alert. + Options: npm, rubygems, maven, pip, etc. + package (str): Package name of the alert. + manifest (str): Manifest path of the alert. + scope (str): Scope of the alert. + Returns: + list[DependencyAlert]: List of Dependabot alerts. + Raises: + GHASToolkitAuthenticationError: If the request fails due to authentication. + GHASToolkitTypeError: If the state is not valid. + GHASToolkitError: If the request fails. + """ + parameters = self._validateInput( + { + "state": state, + "severity": severity, + "ecosystem": ecosystem, + "package": package, + "manifest": manifest, + "scope": scope, + } + ) + results = self.rest.get( + "/enterprises/{enterprise}/dependabot/alerts", parameters + ) + if isinstance(results, list): + return self._apiToAlerts(results) + + logger.debug(f"Failed to get Dependabot alerts :: {results}") + raise GHASToolkitTypeError( + "Error getting Dependabot organization alerts", + docs="https://docs.github.com/en/rest/dependabot/alerts#list-dependabot-alerts-for-an-enterprise", + permissions=[ + '"Dependabot alerts" repository permissions (read)', + '"Dependabot security updates" repository permissions (read)', + ], + ) + + def getOrganizationAlerts( + self, + state: str = "open", + severity: Optional[str] = None, + ecosystem: Optional[str] = None, + package: Optional[str] = None, + manifest: Optional[str] = None, + scope: Optional[str] = None, + ) -> list[DependencyAlert]: + """Get all Dependabot alerts from organization. + + Arguments: + state (str): State of the alert. Defaults to "open". + Options: auto_dismissed, dismissed, fixed, open + severity (str): Severity of the alert. + Options: low, moderate, high, critical + ecosystem (str): Ecosystem of the alert. + Options: npm, rubygems, maven, pip, etc. + package (str): Package name of the alert. + manifest (str): Manifest path of the alert. + scope (str): Scope of the alert. + Returns: + list[DependencyAlert]: List of Dependabot alerts. 
+ Raises: + GHASToolkitAuthenticationError: If the request fails due to authentication. + GHASToolkitTypeError: If the state is not valid. + GHASToolkitError: If the request fails. + """ + parameters = self._validateInput( + { + "state": state, + "severity": severity, + "ecosystem": ecosystem, + "package": package, + "manifest": manifest, + "scope": scope, + } + ) + results = self.rest.get("/orgs/{org}/dependabot/alerts", parameters) + if isinstance(results, list): + return self._apiToAlerts(results) + + logger.debug(f"Failed to get Dependabot alerts :: {results}") + raise GHASToolkitTypeError( + "Error getting Dependabot organization alerts", + docs="https://docs.github.com/en/rest/dependabot/alerts#list-dependabot-alerts-for-an-organization", + permissions=[ + '"Dependabot alerts" repository permissions (read)', + '"Dependabot security updates" repository permissions (read)', + ], + ) + def getAlerts( self, state: str = "open", @@ -66,18 +170,27 @@ def getAlerts( ) -> list[DependencyAlert]: """Get All Dependabot alerts from REST API. - https://docs.github.com/en/rest/dependabot/alerts - """ - if state not in ["auto_dismissed", "dismissed", "fixed", "open"]: - raise GHASToolkitError( - f"Invalid state provided: {state}", - docs="https://docs.github.com/en/rest/reference/repos#get-a-repository", - ) + Arguments: + state (str): State of the alert. Defaults to "open". + Options: auto_dismissed, dismissed, fixed, open + severity (str): Severity of the alert. + Options: low, moderate, high, critical + ecosystem (str): Ecosystem of the alert. + Options: npm, rubygems, maven, pip, etc. + package (str): Package name of the alert. + manifest (str): Manifest path of the alert. + scope (str): Scope of the alert. - logger.debug(f"Getting Dependabot alerts with state: {state}") + Returns: + list[DependencyAlert]: List of Dependabot alerts. - results = self.rest.get( - "/repos/{owner}/{repo}/dependabot/alerts", + Raises: + GHASToolkitTypeError: If the state is not valid. + GHASToolkitError: If the request fails. 
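+
+        A hypothetical usage sketch (the repository is resolved by
+        GitHub.init; the token placeholder is illustrative only):
+
+            >>> from ghastoolkit.octokit.github import GitHub
+            >>> from ghastoolkit.octokit.dependabot import Dependabot
+            >>> GitHub.init("octocat/hello-world", token="<token>")
+            >>> alerts = Dependabot().getAlerts(state="open", severity="critical")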
+ + https://docs.github.com/en/rest/dependabot/alerts + """ + parameters = self._validateInput( { "state": state, "severity": severity, @@ -85,35 +198,21 @@ def getAlerts( "package": package, "manifest": manifest, "scope": scope, - }, + } ) - if isinstance(results, list): - retval = [] - for alert in results: - advisory_data = alert.get("security_advisory", {}) - # Fix issues between GraphQL and Advisory class - advisory_data["affected"] = advisory_data.pop("vulnerabilities") - advisory = Advisory(**advisory_data) + results = self.rest.get("/repos/{owner}/{repo}/dependabot/alerts", parameters) - package = alert.get("dependency", {}).get("package", {}) - - retval.append( - DependencyAlert( - number=alert.get("number"), - state=alert.get("state"), - severity=alert.get("security_advisory", {}).get( - "severity", "unknown" - ), - advisory=advisory, - purl=f"pkg:{package.get('ecosystem')}/{package.get('name')}".lower(), - manifest=alert.get("manifest_path"), - ) - ) + if isinstance(results, list): + return self._apiToAlerts(results) - return retval + logger.debug(f"Failed to get Dependabot alerts :: {results}") raise GHASToolkitTypeError( f"Error getting Dependabot alerts", docs="https://docs.github.com/en/rest/dependabot/alerts", + permissions=[ + '"Dependabot alerts" repository permissions (read)', + '"Dependabot security updates" repository permissions (read)', + ], ) def getAlertsInPR(self) -> list[DependencyAlert]: @@ -160,19 +259,24 @@ def getAlertsGraphQL(self) -> list[DependencyAlert]: "This could be due to a lack of permissions or access token" ) raise GHASToolkitError(f"Failed to get GraphQL repository alerts") + logger.debug("GraphQL successfully got repository data") - alerts = repo.get("vulnerabilityAlerts", {}) + alerts = repo.get("vulnerabilityAlerts", {}).get("edges", []) + page_info = repo.get("vulnerabilityAlerts", {}).get("pageInfo", {}) - for alert in alerts.get("edges", []): + for alert in alerts: data = alert.get("node", {}) package = data.get("securityVulnerability", {}).get("package", {}) - purl = f"pkg:{package.get('ecosystem')}/{package.get('name')}".lower() + ecosystem = package.get("ecosystem", "") + name = package.get("name", "") + purl = f"pkg:{ecosystem}/{name}".lower() created_at = data.get("createdAt") advisory_data = data.get("securityAdvisory", {}) # Fix issues between GraphQL and Advisory class advisory_data["ghsa_id"] = advisory_data.pop("ghsaId") advisory = Advisory(**advisory_data) + logger.debug(f"Advisory :: {advisory}") dep_alert = DependencyAlert( number=data.get("number"), @@ -183,13 +287,67 @@ def getAlertsGraphQL(self) -> list[DependencyAlert]: created_at=created_at, ) dep_alert.__data__ = data + + logger.debug(f"Alert :: {dep_alert}") results.append(dep_alert) - if not alerts.get("pageInfo", {}).get("hasNextPage"): + if not page_info.get("pageInfo", {}).get("hasNextPage"): logger.debug(f"GraphQL cursor hit end page") break - self.graphql.cursor = alerts.get("pageInfo", {}).get("endCursor", "") + self.graphql.cursor = page_info.get("pageInfo", {}).get("endCursor", "") + logger.debug(f"GraphQL cursor :: {self.graphql.cursor}") logger.debug(f"Number of Dependabot Alerts :: {len(results)}") return results + + def _validateInput(self, parameters: dict[str, Any]) -> dict[str, Any]: + """Validates the input parameters for the API request.""" + if state := parameters.get("state"): + if state not in [ + "auto_dismissed", + "dismissed", + "fixed", + "open", + ]: + raise GHASToolkitTypeError( + f"Invalid state provided: {state}", + 
docs="https://docs.github.com/en/rest/dependabot/alerts", + ) + else: + parameters["state"] = "open" + logger.debug(f"Getting Dependabot alerts with state: {parameters.get('state')}") + + if severity := parameters.get("severity"): + if severity not in ["low", "moderate", "high", "critical"]: + raise GHASToolkitTypeError( + f"Invalid severity provided: {severity}", + docs="https://docs.github.com/en/rest/dependabot/alerts", + ) + return parameters + + def _apiToAlerts(self, alerts: list[dict[str, Any]]) -> list[DependencyAlert]: + retval = [] + for alert in alerts: + advisory_data = alert.get("security_advisory", {}) + # Fix issues between GraphQL and Advisory class + advisory_data["affected"] = advisory_data.pop("vulnerabilities") + advisory = Advisory(**advisory_data) + logger.debug(f"Advisory :: {advisory}") + + package = alert.get("dependency", {}).get("package", {}) + + alert = DependencyAlert( + number=alert.get("number"), + state=alert.get("state"), + severity=alert.get("security_advisory", {}).get("severity", "unknown"), + advisory=advisory, + purl=f"pkg:{package.get('ecosystem')}/{package.get('name')}".lower(), + manifest=alert.get("manifest_path"), + ) + logger.debug(f"Alert :: {alert}") + + retval.append(alert) + + logger.debug(f"Number of Dependabot Alerts :: {len(retval)}") + return retval diff --git a/vendor/ghastoolkit/octokit/dependencygraph.py b/vendor/ghastoolkit/octokit/dependencygraph.py index 42c2e98..6e5ed70 100644 --- a/vendor/ghastoolkit/octokit/dependencygraph.py +++ b/vendor/ghastoolkit/octokit/dependencygraph.py @@ -1,5 +1,6 @@ """Dependency Graph Octokit.""" +import json import logging from typing import Any, Dict import urllib.parse @@ -8,24 +9,43 @@ from ghastoolkit.errors import GHASToolkitError, GHASToolkitTypeError from ghastoolkit.octokit.github import GitHub, Repository -from ghastoolkit.supplychain.advisories import Advisory -from ghastoolkit.supplychain.dependencyalert import DependencyAlert -from ghastoolkit.supplychain.dependencies import Dependencies, Dependency +from ghastoolkit.supplychain import ( + Advisory, + Dependencies, + Dependency, + DependencyAlert, + uniqueDependencies, +) +from ghastoolkit.octokit.enterprise import Organization from ghastoolkit.octokit.octokit import GraphQLRequest, Optional, RestRequest +from ghastoolkit.utils.cache import Cache logger = logging.getLogger("ghastoolkit.octokit.dependencygraph") class DependencyGraph: - """Dependency Graph API.""" + """Dependency Graph API. + + This class is used to interact with the Dependency Graph API in GitHub. + """ def __init__( self, repository: Optional[Repository] = None, enable_graphql: bool = True, enable_clearlydefined: bool = False, + cache: bool = False, ) -> None: - """Initialise Dependency Graph.""" + """Initialise Dependency Graph. + + Arguments: + repository: The repository to use. If not provided, it will use the current + repository in `GitHub`. + enable_graphql: Enable GraphQL API. Defaults to True. + enable_clearlydefined: Enable ClearlyDefined API. Defaults to False. + cache: Enable caching. Defaults to False. 
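+
+        Example (the repository name is illustrative; cached SBOMs are kept
+        under the "dependencygraph" store configured below):
+
+            >>> from ghastoolkit.octokit.github import GitHub
+            >>> GitHub.init("octocat/hello-world")
+            >>> graph = DependencyGraph(cache=True)
+            >>> deps = graph.getDependenciesSbom()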
+ + """ self.repository = repository or GitHub.repository self.rest = RestRequest(repository) self.graphql = GraphQLRequest(repository) @@ -33,28 +53,66 @@ def __init__( self.enable_graphql = enable_graphql self.enable_clearlydefined = enable_clearlydefined - def getOrganizationDependencies(self) -> Dict[Repository, Dependencies]: - """Get Organization Dependencies.""" + self.cache_enabled = cache + self.cache = Cache(store="dependencygraph") + + def getOrganizationDependencies( + self, owner: Optional[str] = None + ) -> Dict[Repository, Dependencies]: + """Get Organization Dependencies for all repositories. + + This is done by iterating through all the repositories in the organization + and getting the dependencies for each repository. This is done as there is no + way to get all the dependencies for an organization in a single request. + + Arguments: + owner: The owner of the organization. If not provided, it will use the current + owner of the repository. + + Returns: + Dict[Repository, Dependencies]: A dictionary of repositories and their dependencies. + """ + org = Organization(organization=owner or GitHub.owner) + logger.debug(f"Processing organization :: {org}") + deps: Dict[Repository, Dependencies] = {} - repositories = self.rest.get("/orgs/{org}/repos") - if not isinstance(repositories, list): - raise Exception("Invalid organization") + repositories = org.getRepositories() + logger.debug(f"Found `{len(repositories)}` repositories in organization") for repo in repositories: - repo = Repository.parseRepository(repo.get("full_name")) logger.debug(f"Processing repository :: {repo}") try: - self.rest = RestRequest(repo) + depgraph = DependencyGraph(repo, enable_graphql=self.enable_graphql) + logger.debug(f"Using repository :: {depgraph.repository}") + + deps[repo] = depgraph.getDependenciesSbom() + + if depgraph.enable_graphql: + logger.debug("Enabled GraphQL Dependencies") + graph_deps = depgraph.getDependenciesGraphQL() - deps[repo] = self.getDependenciesSbom() + deps[repo].updateDependencies(graph_deps) + logger.debug("Updated dependencies with GraphQL") except Exception as err: - logger.warning(f"Failed to get dependencies :: {err}") + logger.warning(f"Failed to get `{repo}` dependencies :: {err}") deps[repo] = Dependencies() self.rest = RestRequest(self.repository) return deps + def getUniqueOrgDependencies( + self, + version: bool = False, + ) -> Dependencies: + """Create a unique list of dependencies, this is useful for merging multiple lists for example + from an organization. + + Arguments: + version: If True, include the version in the unique list. Defaults to False. + """ + return uniqueDependencies(self.getOrganizationDependencies(), version=version) + def getDependencies(self) -> Dependencies: """Get Dependencies.""" if GitHub.isEnterpriseServer(): @@ -88,44 +146,32 @@ def getDependencies(self) -> Dependencies: return deps def getDependenciesSbom(self) -> Dependencies: - """Get Dependencies from SBOM.""" - result = Dependencies() - spdx_bom = self.exportBOM() + """Get Dependencies from SBOM. 
- for package in spdx_bom.get("sbom", {}).get("packages", []): - extref = False - dep = Dependency("") - for ref in package.get("externalRefs", []): - if ref.get("referenceType", "") == "purl": - dep = Dependency.fromPurl(ref.get("referenceLocator")) - extref = True - else: - logger.warning(f"Unknown external reference :: {ref}") - - # if get find a PURL or not - if extref: - dep.license = package.get("licenseConcluded") + If cache is enabled, it will use the cached dependencies if they exist. + If not, it will download the SBOM and cache it. + """ + cache_key = self.rest.repository.__str__() + + if self.cache_enabled: + cache = self.cache.read(cache_key, file_type="spdx.json") + if cache: + logger.debug(f"Using cached dependencies for `{self.rest.repository}`") + data = json.loads(cache) + return Dependencies.loadSpdxSbom(data) else: - name = package.get("name", "").lower() - # manager ':' - if ":" in name: - dep.manager, name = name.split(":", 1) - - # HACK: Maven / NuGet - if dep.manager in ["maven", "nuget"]: - if "." in name: - dep.namespace, name = name.rsplit(".", 1) - # Namespace '/' - elif "/" in package: - dep.namespace, name = name.split("/", 1) + logger.debug( + f"Cache not found for {self.repository.repo}, downloading SBOM" + ) - dep.name = name - dep.version = package.get("versionInfo") - dep.license = package.get("licenseConcluded") + logger.debug(f"Downloading SBOM for {self.repository}") + spdx_bom = self.exportBOM() - result.append(dep) + if self.cache_enabled: + logger.debug(f"Caching dependencies for {self.repository.repo}") + self.cache.write(cache_key, spdx_bom, file_type="spdx.json") - return result + return Dependencies.loadSpdxSbom(spdx_bom) def getDependenciesGraphQL(self, dependencies_count: int = 100) -> Dependencies: """Get Dependencies from GraphQL. 
@@ -139,6 +185,17 @@ def getDependenciesGraphQL(self, dependencies_count: int = 100) -> Dependencies: """ deps = Dependencies() + if self.cache_enabled: + cache_key = self.rest.repository.__str__() + cache = self.cache.read(cache_key, file_type="graphql.json") + if cache: + logger.debug(f"Using cached dependencies for `{self.rest.repository}`") + data = json.loads(cache) + return self._parseGraphQL(data) + + # Build up a single list of dependencies + graphql_data = {} + manifests = True manifests_cursor = "" dependencies_cursor = "" @@ -167,10 +224,22 @@ def getDependenciesGraphQL(self, dependencies_count: int = 100) -> Dependencies: has_next_page = True while has_next_page: + if not graph_manifests.get("edges"): + logger.debug("No more manifests to be processed") + break + for manifest in graph_manifests.get("edges", []): node = manifest.get("node", {}) + + manifestfile = node.get("filename") or node.get("blobPath") + logger.debug(f"Processing :: '{manifestfile}'") + dependencies = node.get("dependencies", {}) - logger.debug(f"Processing :: '{node.get('filename')}'") + + if graphql_data.get(manifestfile): + graphql_data[manifestfile].update(dependencies) + else: + graphql_data[manifestfile] = dependencies # Pagination has_next_page = dependencies.get("pageInfo", {}).get( @@ -181,37 +250,6 @@ def getDependenciesGraphQL(self, dependencies_count: int = 100) -> Dependencies: else: dependencies_cursor = "" - for dep in dependencies.get("edges", []): - dep = dep.get("node", {}) - license = None - repository = None - - if dep.get("repository"): - if dep.get("repository", {}).get("licenseInfo"): - license = ( - dep.get("repository", {}) - .get("licenseInfo", {}) - .get("name") - ) - if dep.get("repository", {}).get("nameWithOwner"): - repository = dep.get("repository", {}).get( - "nameWithOwner" - ) - - version = dep.get("requirements") - if version: - version = version.replace("= ", "") - - deps.append( - Dependency( - name=dep.get("packageName"), - manager=dep.get("packageManager"), - version=version, - license=license, - repository=repository, - ) - ) - if has_next_page: logger.debug( f"Re-run and fetch next data page :: {manifests_cursor} ({dependencies_cursor})" @@ -243,10 +281,22 @@ def getDependenciesGraphQL(self, dependencies_count: int = 100) -> Dependencies: manifests_cursor = "" logger.debug("No more manifests to be processed") - return deps + if self.cache_enabled: + logger.debug(f"Caching dependencies for {self.repository.repo}") + self.cache.write(cache_key, graphql_data, file_type="graphql.json") + + return self._parseGraphQL(graphql_data) def getDependenciesInPR(self, base: str, head: str) -> Dependencies: - """Get all the dependencies from a Pull Request.""" + """Get all the dependencies from a Pull Request. + + Arguments: + base: The base branch of the Pull Request. + head: The head branch of the Pull Request. + Returns: + Dependencies: A list of dependencies. 
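+
+        Example (branch names are illustrative):
+
+            >>> graph = DependencyGraph()
+            >>> pr_deps = graph.getDependenciesInPR("main", "feature/update-deps")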
+ + """ if GitHub.isEnterpriseServer() and GitHub.server_version < Version("3.6.0"): raise GHASToolkitError("Enterprise Server version must be >= 3.6") @@ -276,8 +326,8 @@ def getDependenciesInPR(self, base: str, head: str) -> Dependencies: purl = depdata.get("package_url") if not purl or purl == "": - logger.warn("Package URL is not present, skipping...") - logger.warn(f"Package :: {depdata}") + logger.warning("Package URL is not present, skipping...") + logger.warning(f"Package :: {depdata}") continue dep = Dependency.fromPurl(purl) @@ -303,11 +353,12 @@ def getDependenciesInPR(self, base: str, head: str) -> Dependencies: return dependencies - def exportBOM(self) -> Dependencies: + def exportBOM(self) -> Dict: """Download / Export DependencyGraph SBOM. https://docs.github.com/en/rest/dependency-graph/sboms#export-a-software-bill-of-materials-sbom-for-a-repository """ + logger.debug(f"Exporting SBOM for {self.repository}") result = self.rest.get("/repos/{owner}/{repo}/dependency-graph/sbom") if result: return result @@ -315,6 +366,7 @@ def exportBOM(self) -> Dependencies: raise GHASToolkitTypeError( "Failed to download SBOM", docs="https://docs.github.com/en/rest/dependency-graph/sboms#export-a-software-bill-of-materials-sbom-for-a-repository", + permissions=['"Contents" repository permissions (read)'], ) def submitDependencies( @@ -329,6 +381,15 @@ def submitDependencies( ): """Submit dependencies to GitHub Dependency Graph snapshots API. + Arguments: + dependencies: The dependencies to submit. + tool: The tool used to generate the dependencies. + path: The path to the dependencies file. + sha: The SHA of the commit. + ref: The reference of the commit. + version: The version of the dependencies. + url: The URL of the dependencies file. + https://docs.github.com/en/rest/dependency-graph/dependency-submission?apiVersion=2022-11-28#create-a-snapshot-of-dependencies-for-a-repository """ self.rest.postJson( @@ -344,3 +405,45 @@ def submitSbom(self, sbom: dict[Any, Any]): sbom, expected=201, ) + + def _parseGraphQL(self, data: Dict[str, Any]) -> Dependencies: + """Parse GraphQL data. + + Arguments: + data: The data to parse. + + Returns: + Dependencies: A list of dependencies. 
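+
+        The expected input shape (inferred from the GraphQL handling
+        above) is roughly {manifest_path: {"edges": [{"node": {...}}]}},
+        e.g. {"requirements.txt": {"edges": [...]}}.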
+ """ + deps = Dependencies() + + for manifest, dependencies in data.items(): + for dep in dependencies.get("edges", []): + dep = dep.get("node", {}) + license = None + repository = None + + if dep.get("repository"): + if dep.get("repository", {}).get("licenseInfo"): + license = ( + dep.get("repository", {}).get("licenseInfo", {}).get("name") + ) + if dep.get("repository", {}).get("nameWithOwner"): + repository = dep.get("repository", {}).get("nameWithOwner") + + version = dep.get("requirements") + if version: + version = version.replace("= ", "") + + deps.append( + Dependency( + name=dep.get("packageName"), + manager=dep.get("packageManager"), + version=version, + license=license, + repository=repository, + path=manifest, + ) + ) + + return deps diff --git a/vendor/ghastoolkit/octokit/enterprise.py b/vendor/ghastoolkit/octokit/enterprise.py index d5dfb03..2f3af2d 100644 --- a/vendor/ghastoolkit/octokit/enterprise.py +++ b/vendor/ghastoolkit/octokit/enterprise.py @@ -41,6 +41,7 @@ def getRepositories(self) -> List[Repository]: for repository in result: repositories.append(Repository.parseRepository(repository.get("full_name"))) + logger.debug(f"Found {len(repositories)} repositories in {self.name}") return repositories def enableAllSecurityProduct(self) -> bool: diff --git a/vendor/ghastoolkit/octokit/github.py b/vendor/ghastoolkit/octokit/github.py index 5f2ae78..0a4d550 100644 --- a/vendor/ghastoolkit/octokit/github.py +++ b/vendor/ghastoolkit/octokit/github.py @@ -7,6 +7,7 @@ from semantic_version import Version +from ghastoolkit.errors import GHASToolkitError from ghastoolkit.octokit.repository import Repository @@ -30,7 +31,15 @@ class GitHub: """Enterprise Name""" token: Optional[str] = None - """GitHub Access Token""" + """GitHub Access Token + This is used to authenticate with the GitHub API. + + This can be set using the GITHUB_TOKEN environment variable or + passed in as a parameter. + """ + + token_type: Optional[str] = None + """GitHub Token Type""" # URLs instance: str = "https://github.com" @@ -43,9 +52,6 @@ class GitHub: server_version: Optional[Version] = None """GitHub Enterprise Server Version""" - github_app: bool = False - """GitHub App setting""" - @staticmethod def init( repository: Optional[str] = None, @@ -74,9 +80,12 @@ def init( if branch: GitHub.repository.branch = branch - if not token: - token = os.environ.get("GITHUB_TOKEN") - GitHub.token = token + # Set token or load from environment + if token: + GitHub.token = token + else: + GitHub.loadToken() + GitHub.token_type = GitHub.validateTokenType(GitHub.token) if not instance: instance = os.environ.get("GITHUB_SERVER_URL") @@ -135,3 +144,82 @@ def getMetaInformation() -> Dict: GitHub.server_version = Version(version) return response.json() + + @staticmethod + def loadToken(): + """Load the GitHub token from the environment variable.""" + if envvar := os.environ.get("GITHUB_TOKEN"): + GitHub.token = envvar + logger.debug("Loaded GITHUB_TOKEN from environment variable") + + GitHub.validateTokenType(GitHub.token) + elif envvar := os.environ.get("GH_TOKEN"): + # This is sometimes set by GitHub CLI + GitHub.token = envvar + logger.debug("Loaded GH_TOKEN from environment variable") + + else: + # TODO: Load from GH CLI? + logger.debug("Failed to load GitHub token") + + @staticmethod + def getToken(masked: bool = True) -> Optional[str]: + """Get the GitHub token. + + Masking the token will only show the first 5 and all the other + characters as `#`. + + Args: + masked (bool): Mask the token. Defaults to True. 
+ + Returns: + str: The GitHub token. + """ + if not GitHub.token: + return None + + if masked: + last = len(GitHub.token) - 5 + return f"{GitHub.token[0:5]}{'#' * last}" + return GitHub.token + + @property + def github_app(self) -> bool: + """Check if the token is a GitHub App token.""" + # This is for backwards compatibility + if ttype := self.token_type: + return ttype == "OAUTH" + return False + + @staticmethod + def validateTokenType(token: Optional[str]) -> Optional[str]: + """Check what type of token is being used. + + Returns: + str: The type of token being used. + - "PAT" for Personal Access Token + - "OAUTH" for GitHub App token / OAuth token + - "ACTIONS" for GitHub Actions token + - "SERVICES" for Server-to-Server token + - "UNKNOWN" for unknown token type + + https://github.blog/engineering/behind-githubs-new-authentication-token-formats/ + """ + if not token or not isinstance(token, str): + return None + + # GitHub Actions sets the GITHUB_SECRET_SOURCE environment variable + if secret_source := os.environ.get("GITHUB_SECRET_SOURCE"): + if secret_source != "None": + return secret_source.upper() + + if token.startswith("ghp_") or token.startswith("github_pat_"): + return "PAT" + elif token.startswith("gho_"): + # GitHub OAuth tokens are used for GitHub Apps or GH CLI + return "OAUTH" + elif token.startswith("ghs_"): + # GitHub Actions token are Server-to-Server tokens + if os.environ.get("CI") == "true": + return "ACTIONS" + return "SERVICES" diff --git a/vendor/ghastoolkit/octokit/octokit.py b/vendor/ghastoolkit/octokit/octokit.py index 53b19c1..9283e3f 100644 --- a/vendor/ghastoolkit/octokit/octokit.py +++ b/vendor/ghastoolkit/octokit/octokit.py @@ -25,6 +25,7 @@ __OCTOKIT_PATH__ = os.path.dirname(os.path.realpath(__file__)) __OCTOKIT_ERRORS__ = { + 400: GHASToolkitError("Bad Request", status=400), 401: GHASToolkitAuthenticationError( "Authentication / Permission Issue", status=401 ), @@ -32,6 +33,9 @@ "Authentication / Permission Issue", status=403 ), 404: GHASToolkitError("Not Found", status=404), + 422: GHASToolkitError( + "Validation failed, or the endpoint has been spammed.", status=422 + ), 429: GHASToolkitError("Rate limit hit", status=429), 500: GHASToolkitError("GitHub Server Error", status=500), } @@ -113,7 +117,7 @@ def __init__( self.session.headers = { "Accept": "application/vnd.github.v3+json", "X-GitHub-Api-Version": RestRequest.VERSION, - "Authorization": f"token {GitHub.token}", + "Authorization": f"Bearer {GitHub.getToken(masked=False)}", } if retries: diff --git a/vendor/ghastoolkit/supplychain/__init__.py b/vendor/ghastoolkit/supplychain/__init__.py index e69de29..9c9ee07 100644 --- a/vendor/ghastoolkit/supplychain/__init__.py +++ b/vendor/ghastoolkit/supplychain/__init__.py @@ -0,0 +1,5 @@ +from .advisories import Advisory, Advisories, AdvisoryAffect +from .dependencies import Dependencies, uniqueDependencies +from .dependency import Dependency +from .licensing import Licenses +from .dependencyalert import DependencyAlert diff --git a/vendor/ghastoolkit/supplychain/advisories.py b/vendor/ghastoolkit/supplychain/advisories.py index b5a7015..c620c94 100644 --- a/vendor/ghastoolkit/supplychain/advisories.py +++ b/vendor/ghastoolkit/supplychain/advisories.py @@ -1,7 +1,7 @@ import logging import os import json -from typing import List, Dict, Optional +from typing import List, Dict, Optional, Any from dataclasses import dataclass, field from semantic_version import SimpleSpec, Version @@ -145,6 +145,9 @@ class Advisory(OctoItem): cvss_severities: Dict[str, 
dict] = field(default_factory=dict) """CVSS Severities""" + epss: List[dict[str, Any]] = field(default_factory=list) + """EPS Score""" + identifiers: List[dict] = field(default_factory=list) """List of identifiers""" references: List[dict] = field(default_factory=list) @@ -227,6 +230,20 @@ def cvss_score(self, version: int = 3) -> Optional[float]: return cvss.get("score") return None + @property + def epss_percentile(self) -> Optional[str]: + """Get EPSS Percentile.""" + if epss := self.epss: + return epss[0].get("percentile", "0") + return None + + @property + def epss_percentage(self) -> Optional[float]: + """Get EPSS Percentage.""" + if epss := self.epss: + return float(epss[0].get("percentage", 0)) + return None + class Advisories: """GitHub Advisory List.""" diff --git a/vendor/ghastoolkit/supplychain/dependencies.py b/vendor/ghastoolkit/supplychain/dependencies.py index a834b51..76b661d 100644 --- a/vendor/ghastoolkit/supplychain/dependencies.py +++ b/vendor/ghastoolkit/supplychain/dependencies.py @@ -1,116 +1,106 @@ +import os +import json import logging -from dataclasses import dataclass, field + from datetime import datetime import re -from typing import Optional, Union +from typing import Optional, Union, Dict from ghastoolkit.octokit.github import Repository -from ghastoolkit.supplychain.dependencyalert import DependencyAlert +from ghastoolkit.supplychain.dependency import Dependency from ghastoolkit.supplychain.licensing import NO_LICENSES, Licenses logger = logging.getLogger("ghastoolkit.supplychain.dependencies") -@dataclass -class Dependency: - """Dependency.""" - - name: str - """Name of the Dependency""" - namespace: Optional[str] = None - """Namespace of the Dependency""" - version: Optional[str] = None - """Version of the Dependency""" - manager: Optional[str] = None - """Package Manager""" - path: Optional[str] = None - """Path to the Dependency""" - qualifiers: dict[str, str] = field(default_factory=dict) - """Qualifiers""" - license: Optional[str] = None - """License information""" - alerts: list[DependencyAlert] = field(default_factory=list) - """Security Alerts""" - - repository: Optional[Union[str, Repository]] = None - """GitHub Repository for the dependency""" - - def __post_init__(self): - # normalize manager - if self.manager: - self.manager = self.manager.lower() - if self.repository and isinstance(self.repository, str): - self.repository = Repository.parseRepository(self.repository) - - def getPurl(self, version: bool = True) -> str: - """Create a PURL from the Dependency. 
- - https://github.com/package-url/purl-spec - """ - result = f"pkg:" - if self.manager: - result += f"{self.manager.lower()}/" - if self.namespace: - result += f"{self.namespace}/" - result += f"{self.name}" - if version and self.version: - result += f"@{self.version}" - - return result - - @staticmethod - def fromPurl(purl: str) -> "Dependency": - """Create a Dependency from a PURL.""" - dep = Dependency("") - # version (at end) - if "@" in purl: - pkg, dep.version = purl.split("@", 1) +class Dependencies: + """Set-like collection of Dependencies with list compatibility.""" + + def __init__(self, iterable=None): + """Initialize with an optional iterable.""" + self._dependencies = set() + if iterable: + for dep in iterable: + self.add(dep) + + def add(self, dependency: Dependency, repository: Repository = None): + """Add a dependency to the set.""" + self._dependencies.add(dependency) + + if repository: + # Find and add repo + for dep in self: + if dep.name == dependency.name or dep.fullname == dependency.fullname: + dep.repositories.add(repository) + self.add(dep) + return + + def append(self, dependency: Dependency): + """Append is an alias for `.add`, for backwards compatibility.""" + self.add(dependency) + + def extend(self, dependencies: "Dependencies"): + """Extends Dependencies with another list of Dependencies.""" + self._dependencies.update(dependencies._dependencies) + + def remove(self, dependency: Dependency): + """Remove a dependency from the set.""" + if dependency in self._dependencies: + self._dependencies.remove(dependency) else: - pkg = purl - - slashes = pkg.count("/") - if slashes == 0 and pkg.count(":", 1): - # basic purl `npm:name` - manager, dep.name = pkg.split(":", 1) - elif slashes == 2: - manager, dep.namespace, dep.name = pkg.split("/", 3) - elif slashes == 1: - manager, dep.name = pkg.split("/", 2) - elif slashes > 2: - manager, dep.namespace, dep.name = pkg.split("/", 2) + raise KeyError(f"Dependency {dependency} not found in the collection.") + + def pop(self, value: Union[str, int, Dependency]) -> Dependency: + """Pop allows you to remove an element from the set and return it.""" + if isinstance(value, int): + logger.warning("Index-based access is deprecated. Use iteration instead.") + raise Exception("Index-based access is deprecated. Use iteration instead.") + elif isinstance(value, str): + for dep in self._dependencies: + if dep.name == value or dep.fullname == value: + self.remove(dep) + return dep else: - raise Exception(f"Unable to parse PURL :: {purl}") - - if manager.startswith("pkg:"): - _, dep.manager = manager.split(":", 1) + if value in self._dependencies: + self.remove(value) + return value + else: + raise KeyError(f"Dependency {value} not found in the collection.") + + def __iter__(self): + """Iterator protocol support.""" + return iter(self._dependencies) + + def __len__(self): + """Return count of dependencies.""" + return len(self._dependencies) + + def __contains__(self, dependency: Dependency) -> bool: + """Check if dependency is in the collection.""" + return dependency in self._dependencies + + def __getitem__(self, key): + """Support for index-based access for backward compatibility.""" + if isinstance(key, int): + logger.warning("Index-based access is deprecated. Use iteration instead.") + raise Exception("Index-based access is deprecated. 
Use iteration instead.") + # If it's a dependency object, return the actual instance from the set + for dep in self._dependencies: + if dep == key: + return dep + raise KeyError(f"Dependency {key} not found") + + def __setitem__(self, key, value): + """Support for index-based setting for backward compatibility.""" + if isinstance(key, int): + # This is trickier since sets don't have indexes + # We'll remove the old item at that position and add the new one + items = list(self._dependencies) + self._dependencies.remove(items[key]) + self._dependencies.add(value) else: - dep.manager = manager - - return dep - - @property - def fullname(self) -> str: - """Full Name of the Dependency.""" - if self.namespace: - sep = "/" - if self.manager == "maven": - sep = ":" - return f"{self.namespace}{sep}{self.name}" - return self.name - - def __str__(self) -> str: - """To String (PURL).""" - return self.getPurl() - - def __repr__(self) -> str: - return self.getPurl() - - def __hash__(self) -> int: - return hash(self.getPurl()) - - -class Dependencies(list[Dependency]): - """List of Dependencies.""" + # Not supported + raise TypeError("Setting with non-integer indices not supported") def exportBOM( self, @@ -147,6 +137,65 @@ def exportBOM( } return data + @staticmethod + def loadSpdx( + path: str, + ) -> "Dependencies": + """Load a SPDX file into the Dependencies list.""" + if not os.path.exists(path): + raise ValueError(f"File does not exist: {path}") + if not os.path.isfile(path): + raise ValueError(f"Path is not a file: {path}") + + with open(path, "r") as file: + data = json.load(file) + + return Dependencies.loadSpdxSbom(data) + + @staticmethod + def loadSpdxSbom( + data: dict, + ) -> "Dependencies": + """Load a SBOM into the Dependencies list.""" + if not isinstance(data, dict): + raise ValueError("Data must be a dictionary") + + result = Dependencies() + + for package in data.get("sbom", {}).get("packages", []): + extref = False + dep = Dependency("") + for ref in package.get("externalRefs", []): + if ref.get("referenceType", "") == "purl": + dep = Dependency.fromPurl(ref.get("referenceLocator")) + extref = True + else: + logger.warning(f"Unknown external reference :: {ref}") + + # if get find a PURL or not + if extref: + dep.license = package.get("licenseConcluded") + else: + name = package.get("name", "").lower() + # manager ':' + if ":" in name: + dep.manager, name = name.split(":", 1) + + # HACK: Maven / NuGet + if dep.manager in ["maven", "nuget"]: + if "." in name: + dep.namespace, name = name.rsplit(".", 1) + # Namespace '/' + elif "/" in package: + dep.namespace, name = name.split("/", 1) + + dep.name = name + dep.version = package.get("versionInfo") + dep.license = package.get("licenseConcluded") + + result.append(dep) + return result + def findLicenses(self, licenses: list[str]) -> "Dependencies": """Find dependencies with a given license.""" regex_list = [re.compile(name_filter) for name_filter in licenses] @@ -190,11 +239,19 @@ def applyClearlyDefined(self): dep.license = " OR ".join(licenses) self[i] = dep - def contains(self, dependency: Dependency) -> bool: - """Contains the dependency.""" - purl = dependency.getPurl(version=False) + def contains(self, dependency: Dependency, version: bool = False) -> bool: + """Contains the dependency. 
+ + Arguments: + dependency: Dependency to check + version: Check the version as well + + Returns: + bool: True if the dependency is in the list + """ + purl = dependency.getPurl(version=version) for dep in self: - if dep.name == dependency.name or dep.getPurl(version=False) == purl: + if dep.getPurl(version=version) == purl: return True return False @@ -232,3 +289,30 @@ def updateDependencies(self, dependencies: "Dependencies"): """Update a list of dependencies.""" for dep in dependencies: self.updateDependency(dep) + + def findDirectDependencies(self) -> "Dependencies": + """Find all the direct dependencies.""" + return Dependencies([dep for dep in self if dep.isDirect()]) + + +def uniqueDependencies( + dependencies: Dict[Repository, Dependencies], + version: bool = False, +) -> Dependencies: + """Create a unique list of dependencies, this is useful for merging multiple lists for example + from an organization. + + Arguments: + dependencies: List of dependencies to merge + version: Check the version as well + + Returns: + Dependencies: Unique list of dependencies + """ + unique_deps = Dependencies() + + for repo, deps in dependencies.items(): + for dep in deps: + unique_deps.add(dep, repo) + + return unique_deps diff --git a/vendor/ghastoolkit/supplychain/dependency.py b/vendor/ghastoolkit/supplychain/dependency.py new file mode 100644 index 0000000..d2ecc49 --- /dev/null +++ b/vendor/ghastoolkit/supplychain/dependency.py @@ -0,0 +1,153 @@ +import logging +from dataclasses import dataclass, field +from typing import Optional, Union, Dict + +from ghastoolkit.supplychain.dependencyalert import DependencyAlert +from ghastoolkit.octokit.github import Repository + +logger = logging.getLogger("ghastoolkit.supplychain.dependency") + + +@dataclass +class Dependency: + """Dependency.""" + + name: str + """Name of the Dependency""" + + namespace: Optional[str] = None + """Namespace of the Dependency""" + + version: Optional[str] = None + """Version of the Dependency""" + + manager: Optional[str] = None + """Package Manager""" + + path: Optional[str] = None + """Path to the Dependency""" + + qualifiers: dict[str, str] = field(default_factory=dict) + """Qualifiers""" + + relationship: Optional[str] = None + """Relationship to the Dependency. + + This can be direct or indirect/transitive. + """ + + license: Optional[str] = None + """License information""" + + alerts: list[DependencyAlert] = field(default_factory=list) + """Security Alerts""" + + repository: Optional[Union[str, Repository]] = None + """GitHub Repository for the dependency""" + + repositories: set[Repository] = field(default_factory=set) + """List of repositories for the dependency""" + + def __post_init__(self): + # normalize manager + if self.manager: + self.manager = self.manager.lower() + if self.repository and isinstance(self.repository, str): + self.repository = Repository.parseRepository(self.repository) + + if self.version: + self.version = self.version.strip() + if self.version.startswith("v"): + # normalize version + self.version = self.version[1:] + + def getPurl(self, version: bool = True) -> str: + """Create a PURL from the Dependency. 
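+
+        Example (illustrative package values):
+
+            >>> Dependency("requests", manager="pip", version="2.32.4").getPurl()
+            'pkg:pip/requests@2.32.4'
+            >>> Dependency("rest", namespace="octokit", manager="npm").getPurl()
+            'pkg:npm/octokit/rest'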
+ + https://github.com/package-url/purl-spec + """ + result = f"pkg:" + if self.manager: + result += f"{self.manager.lower()}/" + if self.namespace: + result += f"{self.namespace}/" + result += f"{self.name}" + if version and self.version: + result += f"@{self.version}" + + return result + + @staticmethod + def fromPurl(purl: str) -> "Dependency": + """Create a Dependency from a PURL.""" + dep = Dependency("") + # version (at end) + if "@" in purl: + pkg, dep.version = purl.split("@", 1) + else: + pkg = purl + + slashes = pkg.count("/") + if slashes == 0 and pkg.count(":", 1): + # basic purl `npm:name` + manager, dep.name = pkg.split(":", 1) + elif slashes == 2: + manager, dep.namespace, dep.name = pkg.split("/", 3) + elif slashes == 1: + manager, dep.name = pkg.split("/", 2) + elif slashes > 2: + manager, dep.namespace, dep.name = pkg.split("/", 2) + else: + raise Exception(f"Unable to parse PURL :: {purl}") + + if manager.startswith("pkg:"): + _, dep.manager = manager.split(":", 1) + else: + dep.manager = manager + + return dep + + @property + def fullname(self) -> str: + """Full Name of the Dependency.""" + if self.namespace: + sep = "/" + if self.manager == "maven": + sep = ":" + return f"{self.namespace}{sep}{self.name}" + return self.name + + def isDirect(self) -> bool: + """Is this a direct dependency? + + This is a bit of a hack to determine if this is a direct dependency or not. + In the future we will have this data as part of the API (SBOM). + + **Supports:** + + - `npm` + - `maven` + - `pip` + """ + if self.relationship and self.relationship.lower() == "direct": + return True + if manifest_file := self.path: + # Use the manifest file to determine if this is a direct dependency + if self.manager == "npm" and manifest_file.endswith("package.json"): + return True + elif self.manager == "maven" and manifest_file.endswith("pom.xml"): + return True + elif self.manager == "pip" and manifest_file.endswith("requirements.txt"): + return True + + return False + + def __str__(self) -> str: + """To String (PURL).""" + return self.getPurl() + + def __repr__(self) -> str: + return self.getPurl() + + def __hash__(self) -> int: + return hash(self.getPurl()) diff --git a/vendor/ghastoolkit/utils/cache.py b/vendor/ghastoolkit/utils/cache.py new file mode 100644 index 0000000..f496895 --- /dev/null +++ b/vendor/ghastoolkit/utils/cache.py @@ -0,0 +1,128 @@ +import os +import json +import logging +from typing import Any, Dict, Optional, Union +from datetime import datetime, timedelta + +logger = logging.getLogger("ghastoolkit.utils.cache") + +# A month in minutes +CACHE_MONTH = 30 * 24 * 60 +# A week in minutes +CACHE_WEEK = 7 * 24 * 60 +# A day in minutes +CACHE_DAY = 24 * 60 + + +class Cache: + """Cache class for storing and retrieving data.""" + + cache_age: int = CACHE_DAY + """Default cache age in minutes.""" + + def __init__( + self, + root: Optional[str] = None, + store: Optional[str] = None, + age: Union[int, str] = CACHE_DAY, + ): + """Initialize Cache. + + Args: + root (str, optional): Root directory for cache. Defaults to ~/.ghastoolkit/cache. + store (str, optional): Subdirectory for cache. Defaults to None. + age (int, str): Cache expiration age in hours. Defaults to 1440mins (24hrs). 
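+
+        Example (a minimal sketch; the store and key names are illustrative):
+
+            >>> cache = Cache(store="dependencies", age="WEEK")
+            >>> cache.write("octo-org/octo-repo", {"deps": []}, file_type="json")
+            >>> cache.read("octo-org/octo-repo", file_type="json")
+            '{"deps": []}'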
+ """ + if root is None: + root = os.path.join(os.path.expanduser("~"), ".ghastoolkit", "cache") + self.root = root + self.store = store + self.cache: Dict[str, Any] = {} + + if isinstance(age, str): + if age.upper() == "MONTH": + Cache.cache_age = CACHE_MONTH + elif age.upper() == "WEEK": + Cache.cache_age = CACHE_WEEK + elif age.upper() == "DAY": + Cache.cache_age = CACHE_DAY + else: + Cache.cache_age = CACHE_DAY + else: + Cache.cache_age = age + + logger.debug(f"Cache root: {self.root}") + + if not os.path.exists(self.cache_path): + os.makedirs(self.cache_path, exist_ok=True) + + @property + def cache_path(self) -> str: + if self.store is None: + return self.root + return os.path.join(self.root, self.store) + + def get_file_age(self, path: str) -> Optional[float]: + """Get the age of a file in hours.""" + if not os.path.exists(path): + return None + + file_mtime = os.path.getmtime(path) + file_time = datetime.fromtimestamp(file_mtime) + current_time = datetime.now() + + age_hours = (current_time - file_time).total_seconds() / 3600 + logger.debug(f"Cache file age: {age_hours:.2f} hours for {path}") + + return age_hours + + def is_cache_expired(self, path: str, max_age_hours: float = 24.0) -> bool: + """Check if cache file is expired (older than max_age_hours).""" + age = self.get_file_age(path) + if age is None: + return True + + return age > max_age_hours + + def read( + self, key: str, file_type: Optional[str] = None, max_age_hours: float = 24.0 + ) -> Optional[Any]: + """Read from cache.""" + path = os.path.join(self.cache_path, key) + if file_type: + path = f"{path}.{file_type}" + + if os.path.exists(path): + if self.is_cache_expired(path, max_age_hours): + logger.debug(f"Cache expired ({max_age_hours} hours): {path}") + return None + + logger.debug(f"Cache hit: {path}") + with open(path, "r") as file: + return file.read() + return None + + def write(self, key: str, value: Any, file_type: Optional[str] = None): + """Write to cache.""" + if not isinstance(key, str): + raise ValueError("Key must be a string") + # Convert value to string if it's not already + if isinstance(value, str): + pass + elif isinstance(value, dict): + value = json.dumps(value) + else: + raise ValueError(f"Value is a unsupported type: {type(value)}") + + path = os.path.join(self.cache_path, key) + # the key might be a owner/repo + parent = os.path.dirname(path) + if not os.path.exists(parent): + os.makedirs(parent, exist_ok=True) + + if ftype := file_type: + path = f"{path}.{ftype}" + + logger.debug(f"Cache write: {path}") + with open(path, "w") as file: + file.write(value) diff --git a/vendor/requests/__version__.py b/vendor/requests/__version__.py index 2c105ac..3128a46 100644 --- a/vendor/requests/__version__.py +++ b/vendor/requests/__version__.py @@ -5,8 +5,8 @@ __title__ = "requests" __description__ = "Python HTTP for Humans." __url__ = "https://requests.readthedocs.io" -__version__ = "2.32.3" -__build__ = 0x023203 +__version__ = "2.32.4" +__build__ = 0x023204 __author__ = "Kenneth Reitz" __author_email__ = "me@kennethreitz.org" __license__ = "Apache-2.0" diff --git a/vendor/requests/compat.py b/vendor/requests/compat.py index 095de1b..7f9d754 100644 --- a/vendor/requests/compat.py +++ b/vendor/requests/compat.py @@ -10,6 +10,18 @@ import importlib import sys +# ------- +# urllib3 +# ------- +from urllib3 import __version__ as urllib3_version + +# Detect which major version of urllib3 is being used. 
+try: + is_urllib3_1 = int(urllib3_version.split(".")[0]) == 1 +except (TypeError, AttributeError): + # If we can't discern a version, prefer old functionality. + is_urllib3_1 = True + # ------------------- # Character Detection # ------------------- diff --git a/vendor/requests/models.py b/vendor/requests/models.py index 8f56ca7..c4b25fa 100644 --- a/vendor/requests/models.py +++ b/vendor/requests/models.py @@ -945,7 +945,9 @@ def text(self): return content def json(self, **kwargs): - r"""Returns the json-encoded content of a response, if any. + r"""Decodes the JSON response body (if any) as a Python object. + + This may return a dictionary, list, etc. depending on what is in the response. :param \*\*kwargs: Optional arguments that ``json.loads`` takes. :raises requests.exceptions.JSONDecodeError: If the response body does not diff --git a/vendor/requests/utils.py b/vendor/requests/utils.py index ae6c42f..8ab5585 100644 --- a/vendor/requests/utils.py +++ b/vendor/requests/utils.py @@ -38,6 +38,7 @@ getproxies, getproxies_environment, integer_types, + is_urllib3_1, ) from .compat import parse_http_list as _parse_list_header from .compat import ( @@ -136,7 +137,9 @@ def super_len(o): total_length = None current_position = 0 - if isinstance(o, str): + if not is_urllib3_1 and isinstance(o, str): + # urllib3 2.x+ treats all strings as utf-8 instead + # of latin-1 (iso-8859-1) like http.client. o = o.encode("utf-8") if hasattr(o, "__len__"): @@ -216,14 +219,7 @@ def get_netrc_auth(url, raise_errors=False): netrc_path = None for f in netrc_locations: - try: - loc = os.path.expanduser(f) - except KeyError: - # os.path.expanduser can fail when $HOME is undefined and - # getpwuid fails. See https://bugs.python.org/issue20164 & - # https://github.com/psf/requests/issues/1846 - return - + loc = os.path.expanduser(f) if os.path.exists(loc): netrc_path = loc break @@ -233,13 +229,7 @@ def get_netrc_auth(url, raise_errors=False): return ri = urlparse(url) - - # Strip port numbers from netloc. This weird `if...encode`` dance is - # used for Python 3.2, which doesn't support unicode literals. - splitstr = b":" - if isinstance(url, str): - splitstr = splitstr.decode("ascii") - host = ri.netloc.split(splitstr)[0] + host = ri.hostname try: _netrc = netrc(netrc_path).authenticators(host) diff --git a/vendor/semantic_version/__init__.py b/vendor/semantic_version/__init__.py index 4f4787a..1528bda 100644 --- a/vendor/semantic_version/__init__.py +++ b/vendor/semantic_version/__init__.py @@ -7,4 +7,12 @@ __author__ = "Raphaël Barrois " -__version__ = "2.10.0" +try: + # Python 3.8+ + from importlib.metadata import version + + __version__ = version("semantic_version") +except ImportError: + import pkg_resources + + __version__ = pkg_resources.get_distribution("semantic_version").version diff --git a/vendor/urllib3/_base_connection.py b/vendor/urllib3/_base_connection.py index 29ca334..dc0f318 100644 --- a/vendor/urllib3/_base_connection.py +++ b/vendor/urllib3/_base_connection.py @@ -62,8 +62,7 @@ def __init__( socket_options: _TYPE_SOCKET_OPTIONS | None = ..., proxy: Url | None = None, proxy_config: ProxyConfig | None = None, - ) -> None: - ... + ) -> None: ... def set_tunnel( self, @@ -71,11 +70,9 @@ def set_tunnel( port: int | None = None, headers: typing.Mapping[str, str] | None = None, scheme: str = "http", - ) -> None: - ... + ) -> None: ... - def connect(self) -> None: - ... + def connect(self) -> None: ... 
def request( self, @@ -91,14 +88,11 @@ def request( preload_content: bool = True, decode_content: bool = True, enforce_content_length: bool = True, - ) -> None: - ... + ) -> None: ... - def getresponse(self) -> BaseHTTPResponse: - ... + def getresponse(self) -> BaseHTTPResponse: ... - def close(self) -> None: - ... + def close(self) -> None: ... @property def is_closed(self) -> bool: @@ -168,5 +162,4 @@ def __init__( cert_file: str | None = None, key_file: str | None = None, key_password: str | None = None, - ) -> None: - ... + ) -> None: ... diff --git a/vendor/urllib3/_collections.py b/vendor/urllib3/_collections.py index 8a4409a..1b6c136 100644 --- a/vendor/urllib3/_collections.py +++ b/vendor/urllib3/_collections.py @@ -13,11 +13,9 @@ from typing_extensions import Self class HasGettableStringKeys(Protocol): - def keys(self) -> typing.Iterator[str]: - ... + def keys(self) -> typing.Iterator[str]: ... - def __getitem__(self, key: str) -> str: - ... + def __getitem__(self, key: str) -> str: ... __all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"] @@ -33,7 +31,7 @@ def __getitem__(self, key: str) -> str: ValidHTTPHeaderSource = typing.Union[ "HTTPHeaderDict", typing.Mapping[str, str], - typing.Iterable[typing.Tuple[str, str]], + typing.Iterable[tuple[str, str]], "HasGettableStringKeys", ] @@ -55,7 +53,7 @@ def ensure_can_construct_http_header_dict( # Similarly to Mapping, full runtime checking of the contents of an Iterable is # expensive, so for the purposes of typechecking, we assume that any Iterable # is the right shape. - return typing.cast(typing.Iterable[typing.Tuple[str, str]], potential) + return typing.cast(typing.Iterable[tuple[str, str]], potential) elif hasattr(potential, "keys") and hasattr(potential, "__getitem__"): return typing.cast("HasGettableStringKeys", potential) else: @@ -155,7 +153,7 @@ def keys(self) -> set[_KT]: # type: ignore[override] return set(self._container.keys()) -class HTTPHeaderDictItemView(typing.Set[typing.Tuple[str, str]]): +class HTTPHeaderDictItemView(set[tuple[str, str]]): """ HTTPHeaderDict is unusual for a Mapping[str, str] in that it has two modes of address. @@ -352,7 +350,7 @@ def extend(self, *args: ValidHTTPHeaderSource, **kwargs: str) -> None: for key, val in other.items(): self.add(key, val) elif isinstance(other, typing.Iterable): - other = typing.cast(typing.Iterable[typing.Tuple[str, str]], other) + other = typing.cast(typing.Iterable[tuple[str, str]], other) for key, value in other: self.add(key, value) elif hasattr(other, "keys") and hasattr(other, "__getitem__"): @@ -368,12 +366,10 @@ def extend(self, *args: ValidHTTPHeaderSource, **kwargs: str) -> None: self.add(key, value) @typing.overload - def getlist(self, key: str) -> list[str]: - ... + def getlist(self, key: str) -> list[str]: ... @typing.overload - def getlist(self, key: str, default: _DT) -> list[str] | _DT: - ... + def getlist(self, key: str, default: _DT) -> list[str] | _DT: ... 
def getlist( self, key: str, default: _Sentinel | _DT = _Sentinel.not_passed diff --git a/vendor/urllib3/_request_methods.py b/vendor/urllib3/_request_methods.py index 03186e5..297c271 100644 --- a/vendor/urllib3/_request_methods.py +++ b/vendor/urllib3/_request_methods.py @@ -12,7 +12,7 @@ __all__ = ["RequestMethods"] _TYPE_ENCODE_URL_FIELDS = typing.Union[ - typing.Sequence[typing.Tuple[str, typing.Union[str, bytes]]], + typing.Sequence[tuple[str, typing.Union[str, bytes]]], typing.Mapping[str, typing.Union[str, bytes]], ] diff --git a/vendor/urllib3/_version.py b/vendor/urllib3/_version.py index eb8b5c2..1a3a320 100644 --- a/vendor/urllib3/_version.py +++ b/vendor/urllib3/_version.py @@ -1,8 +1,13 @@ -# file generated by setuptools_scm +# file generated by setuptools-scm # don't change, don't track in version control + +__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"] + TYPE_CHECKING = False if TYPE_CHECKING: - from typing import Tuple, Union + from typing import Tuple + from typing import Union + VERSION_TUPLE = Tuple[Union[int, str], ...] else: VERSION_TUPLE = object @@ -12,5 +17,5 @@ __version_tuple__: VERSION_TUPLE version_tuple: VERSION_TUPLE -__version__ = version = '2.2.3' -__version_tuple__ = version_tuple = (2, 2, 3) +__version__ = version = '2.4.0' +__version_tuple__ = version_tuple = (2, 4, 0) diff --git a/vendor/urllib3/connection.py b/vendor/urllib3/connection.py index 7cbef7d..591ac40 100644 --- a/vendor/urllib3/connection.py +++ b/vendor/urllib3/connection.py @@ -78,8 +78,6 @@ class BaseSSLError(BaseException): # type: ignore[no-redef] _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") -_HAS_SYS_AUDIT = hasattr(sys, "audit") - class HTTPConnection(_HTTPConnection): """ @@ -139,8 +137,9 @@ def __init__( timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, source_address: tuple[str, int] | None = None, blocksize: int = 16384, - socket_options: None - | (connection._TYPE_SOCKET_OPTIONS) = default_socket_options, + socket_options: None | ( + connection._TYPE_SOCKET_OPTIONS + ) = default_socket_options, proxy: Url | None = None, proxy_config: ProxyConfig | None = None, ) -> None: @@ -215,9 +214,7 @@ def _new_conn(self) -> socket.socket: self, f"Failed to establish a new connection: {e}" ) from e - # Audit hooks are only available in Python 3.8+ - if _HAS_SYS_AUDIT: - sys.audit("http.client.connect", self, self.host, self.port) + sys.audit("http.client.connect", self, self.host, self.port) return sock @@ -313,6 +310,13 @@ def proxy_is_forwarding(self) -> bool: """ return bool(self.proxy) and self._tunnel_host is None + @property + def proxy_is_tunneling(self) -> bool: + """ + Return True if a tunneling proxy is configured, else return False + """ + return self._tunnel_host is not None + def close(self) -> None: try: super().close() @@ -503,6 +507,11 @@ def getresponse( # type: ignore[override] # This is needed here to avoid circular import errors from .response import HTTPResponse + # Save a reference to the shutdown function before ownership is passed + # to httplib_response + # TODO should we implement it everywhere? 
+ _shutdown = getattr(self.sock, "shutdown", None) + # Get the response from http.client.HTTPConnection httplib_response = super().getresponse() @@ -531,6 +540,7 @@ def getresponse( # type: ignore[override] enforce_content_length=resp_options.enforce_content_length, request_method=resp_options.request_method, request_url=resp_options.request_url, + sock_shutdown=_shutdown, ) return response @@ -561,8 +571,9 @@ def __init__( timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, source_address: tuple[str, int] | None = None, blocksize: int = 16384, - socket_options: None - | (connection._TYPE_SOCKET_OPTIONS) = HTTPConnection.default_socket_options, + socket_options: None | ( + connection._TYPE_SOCKET_OPTIONS + ) = HTTPConnection.default_socket_options, proxy: Url | None = None, proxy_config: ProxyConfig | None = None, cert_reqs: int | str | None = None, @@ -695,7 +706,7 @@ def connect(self) -> None: tls_in_tls = False # Do we need to establish a tunnel? - if self._tunnel_host is not None: + if self.proxy_is_tunneling: # We're tunneling to an HTTPS origin so need to do TLS-in-TLS. if self._tunnel_scheme == "https": # _connect_tls_proxy will verify and assign proxy_is_verified @@ -709,7 +720,7 @@ def connect(self) -> None: self._tunnel() # Override the host with the one we're requesting data from. - server_hostname = self._tunnel_host + server_hostname = typing.cast(str, self._tunnel_host) if self.server_hostname is not None: server_hostname = self.server_hostname diff --git a/vendor/urllib3/connectionpool.py b/vendor/urllib3/connectionpool.py index a2c3cf6..3a0685b 100644 --- a/vendor/urllib3/connectionpool.py +++ b/vendor/urllib3/connectionpool.py @@ -170,9 +170,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): """ scheme = "http" - ConnectionCls: ( - type[BaseHTTPConnection] | type[BaseHTTPSConnection] - ) = HTTPConnection + ConnectionCls: type[BaseHTTPConnection] | type[BaseHTTPSConnection] = HTTPConnection def __init__( self, @@ -544,13 +542,13 @@ def _make_request( response._pool = self # type: ignore[attr-defined] log.debug( - '%s://%s:%s "%s %s HTTP/%s" %s %s', + '%s://%s:%s "%s %s %s" %s %s', self.scheme, self.host, self.port, method, url, - response.version, + response.version_string, response.status, response.length_remaining, ) @@ -1137,13 +1135,11 @@ def connection_from_url(url: str, **kw: typing.Any) -> HTTPConnectionPool: @typing.overload -def _normalize_host(host: None, scheme: str | None) -> None: - ... +def _normalize_host(host: None, scheme: str | None) -> None: ... @typing.overload -def _normalize_host(host: str, scheme: str | None) -> str: - ... +def _normalize_host(host: str, scheme: str | None) -> str: ... 
def _normalize_host(host: str | None, scheme: str | None) -> str | None: diff --git a/vendor/urllib3/contrib/emscripten/connection.py b/vendor/urllib3/contrib/emscripten/connection.py index 2ceb457..41bfd27 100644 --- a/vendor/urllib3/contrib/emscripten/connection.py +++ b/vendor/urllib3/contrib/emscripten/connection.py @@ -182,8 +182,9 @@ def __init__( timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, source_address: tuple[str, int] | None = None, blocksize: int = 16384, - socket_options: None - | _TYPE_SOCKET_OPTIONS = HTTPConnection.default_socket_options, + socket_options: ( + None | _TYPE_SOCKET_OPTIONS + ) = HTTPConnection.default_socket_options, proxy: Url | None = None, proxy_config: ProxyConfig | None = None, cert_reqs: int | str | None = None, diff --git a/vendor/urllib3/contrib/emscripten/fetch.py b/vendor/urllib3/contrib/emscripten/fetch.py index 8d197ea..a514306 100644 --- a/vendor/urllib3/contrib/emscripten/fetch.py +++ b/vendor/urllib3/contrib/emscripten/fetch.py @@ -3,6 +3,16 @@ A few caveats - +If your browser (or Node.js) has WebAssembly JavaScript Promise Integration enabled +https://github.com/WebAssembly/js-promise-integration/blob/main/proposals/js-promise-integration/Overview.md +*and* you launch pyodide using `pyodide.runPythonAsync`, this will fetch data using the +JavaScript asynchronous fetch api (wrapped via `pyodide.ffi.call_sync`). In this case +timeouts and streaming should just work. + +Otherwise, it uses a combination of XMLHttpRequest and a web-worker for streaming. + +This approach has several caveats: + Firstly, you can't do streaming http in the main UI thread, because atomics.wait isn't allowed. Streaming only works if you're running pyodide in a web worker. @@ -14,15 +24,16 @@ Cross-Origin-Embedder-Policy: require-corp You can tell if cross origin isolation is successfully enabled by looking at the global crossOriginIsolated variable in -javascript console. If it isn't, streaming requests will fallback to XMLHttpRequest, i.e. getting the whole -request into a buffer and then returning it. it shows a warning in the javascript console in this case. +JavaScript console. If it isn't, streaming requests will fallback to XMLHttpRequest, i.e. getting the whole +request into a buffer and then returning it. it shows a warning in the JavaScript console in this case. Finally, the webworker which does the streaming fetch is created on initial import, but will only be started once control is returned to javascript. Call `await wait_for_streaming_ready()` to wait for streaming fetch. -NB: in this code, there are a lot of javascript objects. They are named js_* +NB: in this code, there are a lot of JavaScript objects. They are named js_* to make it clear what type of object they are. 
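+
+A rough usage sketch (only meaningful inside a Pyodide runtime; `has_jspi` and
+`streaming_ready` are the helpers defined below):
+
+    from urllib3.contrib.emscripten.fetch import has_jspi, streaming_ready
+
+    if has_jspi():
+        ...  # JSPI available: async fetch, timeouts and streaming work
+    elif streaming_ready():
+        ...  # web-worker streaming fetch is ready
+    else:
+        ...  # falls back to buffered XMLHttpRequest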
""" + from __future__ import annotations import io @@ -123,17 +134,18 @@ def closed(self) -> bool: return self.is_closed() def close(self) -> None: - if not self.is_closed(): - self.read_len = 0 - self.read_pos = 0 - self.int_buffer = None - self.byte_buffer = None - self._is_closed = True - self.request = None - if self.is_live: - self.worker.postMessage(_obj_from_dict({"close": self.connection_id})) - self.is_live = False - super().close() + if self.is_closed(): + return + self.read_len = 0 + self.read_pos = 0 + self.int_buffer = None + self.byte_buffer = None + self._is_closed = True + self.request = None + if self.is_live: + self.worker.postMessage(_obj_from_dict({"close": self.connection_id})) + self.is_live = False + super().close() def readable(self) -> bool: return True @@ -197,7 +209,8 @@ def __init__(self) -> None: self.streaming_ready = False js_data_blob = js.Blob.new( - [_STREAMING_WORKER_CODE], _obj_from_dict({"type": "application/javascript"}) + to_js([_STREAMING_WORKER_CODE], create_pyproxies=False), + _obj_from_dict({"type": "application/javascript"}), ) def promise_resolver(js_resolve_fn: JsProxy, js_reject_fn: JsProxy) -> None: @@ -288,6 +301,113 @@ def send(self, request: EmscriptenRequest) -> EmscriptenResponse: ) +class _JSPIReadStream(io.RawIOBase): + """ + A read stream that uses pyodide.ffi.run_sync to read from a JavaScript fetch + response. This requires support for WebAssembly JavaScript Promise Integration + in the containing browser, and for pyodide to be launched via runPythonAsync. + + :param js_read_stream: + The JavaScript stream reader + + :param timeout: + Timeout in seconds + + :param request: + The request we're handling + + :param response: + The response this stream relates to + + :param js_abort_controller: + A JavaScript AbortController object, used for timeouts + """ + + def __init__( + self, + js_read_stream: Any, + timeout: float, + request: EmscriptenRequest, + response: EmscriptenResponse, + js_abort_controller: Any, # JavaScript AbortController for timeouts + ): + self.js_read_stream = js_read_stream + self.timeout = timeout + self._is_closed = False + self._is_done = False + self.request: EmscriptenRequest | None = request + self.response: EmscriptenResponse | None = response + self.current_buffer = None + self.current_buffer_pos = 0 + self.js_abort_controller = js_abort_controller + + def __del__(self) -> None: + self.close() + + # this is compatible with _base_connection + def is_closed(self) -> bool: + return self._is_closed + + # for compatibility with RawIOBase + @property + def closed(self) -> bool: + return self.is_closed() + + def close(self) -> None: + if self.is_closed(): + return + self.read_len = 0 + self.read_pos = 0 + self.js_read_stream.cancel() + self.js_read_stream = None + self._is_closed = True + self._is_done = True + self.request = None + self.response = None + super().close() + + def readable(self) -> bool: + return True + + def writable(self) -> bool: + return False + + def seekable(self) -> bool: + return False + + def _get_next_buffer(self) -> bool: + result_js = _run_sync_with_timeout( + self.js_read_stream.read(), + self.timeout, + self.js_abort_controller, + request=self.request, + response=self.response, + ) + if result_js.done: + self._is_done = True + return False + else: + self.current_buffer = result_js.value.to_py() + self.current_buffer_pos = 0 + return True + + def readinto(self, byte_obj: Buffer) -> int: + if self.current_buffer is None: + if not self._get_next_buffer() or self.current_buffer is None: + 
self.close() + return 0 + ret_length = min( + len(byte_obj), len(self.current_buffer) - self.current_buffer_pos + ) + byte_obj[0:ret_length] = self.current_buffer[ + self.current_buffer_pos : self.current_buffer_pos + ret_length + ] + self.current_buffer_pos += ret_length + if self.current_buffer_pos == len(self.current_buffer): + self.current_buffer = None + return ret_length + + # check if we are in a worker or not def is_in_browser_main_thread() -> bool: return hasattr(js, "window") and hasattr(js, "self") and js.self == js.window @@ -321,7 +441,23 @@ def is_worker_available() -> bool: _fetcher = None +NODE_JSPI_ERROR = ( + "urllib3 only works in Node.js with pyodide.runPythonAsync" + " and requires the flag --experimental-wasm-stack-switching in " + " versions of node <24." +) + + def send_streaming_request(request: EmscriptenRequest) -> EmscriptenResponse | None: + if has_jspi(): + return send_jspi_request(request, True) + elif is_in_node(): + raise _RequestError( + message=NODE_JSPI_ERROR, + request=request, + response=None, + ) + if _fetcher and streaming_ready(): return _fetcher.send(request) else: @@ -363,6 +499,14 @@ def _show_streaming_warning() -> None: def send_request(request: EmscriptenRequest) -> EmscriptenResponse: + if has_jspi(): + return send_jspi_request(request, False) + elif is_in_node(): + raise _RequestError( + message=NODE_JSPI_ERROR, + request=request, + response=None, + ) try: js_xhr = js.XMLHttpRequest.new() @@ -403,6 +547,152 @@ def send_request(request: EmscriptenRequest) -> EmscriptenResponse: raise _RequestError(err.message, request=request) +def send_jspi_request( + request: EmscriptenRequest, streaming: bool +) -> EmscriptenResponse: + """ + Send a request using WebAssembly JavaScript Promise Integration + to wrap the asynchronous JavaScript fetch api (experimental). + + :param request: + Request to send + + :param streaming: + Whether to stream the response + + :return: The response object + :rtype: EmscriptenResponse + """ + timeout = request.timeout + js_abort_controller = js.AbortController.new() + headers = {k: v for k, v in request.headers.items() if k not in HEADERS_TO_IGNORE} + req_body = request.body + fetch_data = { + "headers": headers, + "body": to_js(req_body), + "method": request.method, + "signal": js_abort_controller.signal, + } + # Call JavaScript fetch (async api, returns a promise) + fetcher_promise_js = js.fetch(request.url, _obj_from_dict(fetch_data)) + # Now suspend WebAssembly until we resolve that promise + # or time out. + response_js = _run_sync_with_timeout( + fetcher_promise_js, + timeout, + js_abort_controller, + request=request, + response=None, + ) + headers = {} + header_iter = response_js.headers.entries() + while True: + iter_value_js = header_iter.next() + if getattr(iter_value_js, "done", False): + break + else: + headers[str(iter_value_js.value[0])] = str(iter_value_js.value[1]) + status_code = response_js.status + body: bytes | io.RawIOBase = b"" + + response = EmscriptenResponse( + status_code=status_code, headers=headers, body=b"", request=request + ) + if streaming: + # get via inputstream + if response_js.body is not None: + # get a reader from the fetch response + body_stream_js = response_js.body.getReader() + body = _JSPIReadStream( + body_stream_js, timeout, request, response, js_abort_controller + ) + else: + # get directly via arraybuffer + # n.b. this is another async JavaScript call. 
+ body = _run_sync_with_timeout( + response_js.arrayBuffer(), + timeout, + js_abort_controller, + request=request, + response=response, + ).to_py() + response.body = body + return response + + +def _run_sync_with_timeout( + promise: Any, + timeout: float, + js_abort_controller: Any, + request: EmscriptenRequest | None, + response: EmscriptenResponse | None, +) -> Any: + """ + Await a JavaScript promise synchronously with a timeout which is implemented + via the AbortController + + :param promise: + Javascript promise to await + + :param timeout: + Timeout in seconds + + :param js_abort_controller: + A JavaScript AbortController object, used on timeout + + :param request: + The request being handled + + :param response: + The response being handled (if it exists yet) + + :raises _TimeoutError: If the request times out + :raises _RequestError: If the request raises a JavaScript exception + + :return: The result of awaiting the promise. + """ + timer_id = None + if timeout > 0: + timer_id = js.setTimeout( + js_abort_controller.abort.bind(js_abort_controller), int(timeout * 1000) + ) + try: + from pyodide.ffi import run_sync + + # run_sync here uses WebAssembly JavaScript Promise Integration to + # suspend python until the JavaScript promise resolves. + return run_sync(promise) + except JsException as err: + if err.name == "AbortError": + raise _TimeoutError( + message="Request timed out", request=request, response=response + ) + else: + raise _RequestError(message=err.message, request=request, response=response) + finally: + if timer_id is not None: + js.clearTimeout(timer_id) + + +def has_jspi() -> bool: + """ + Return true if jspi can be used. + + This requires both browser support and also WebAssembly + to be in the correct state - i.e. that the javascript + call into python was async not sync. + + :return: True if jspi can be used. + :rtype: bool + """ + try: + from pyodide.ffi import can_run_sync, run_sync # noqa: F401 + + return bool(can_run_sync()) + except ImportError: + return False + + def streaming_ready() -> bool | None: if _fetcher: return _fetcher.streaming_ready diff --git a/vendor/urllib3/contrib/emscripten/response.py b/vendor/urllib3/contrib/emscripten/response.py index cd3d80e..cb1088a 100644 --- a/vendor/urllib3/contrib/emscripten/response.py +++ b/vendor/urllib3/contrib/emscripten/response.py @@ -75,7 +75,7 @@ def retries(self, retries: Retry | None) -> None: def stream( self, amt: int | None = 2**16, decode_content: bool | None = None - ) -> typing.Generator[bytes, None, None]: + ) -> typing.Generator[bytes]: """ A generator wrapper for the read() method. 
A call will block until ``amt`` bytes have been read from the connection or until the @@ -160,32 +160,24 @@ def read( # don't cache partial content cache_content = False data = self._response.body.read(amt) - if self.length_remaining is not None: - self.length_remaining = max(self.length_remaining - len(data), 0) - if (self.length_is_certain and self.length_remaining == 0) or len( - data - ) < amt: - # definitely finished reading, close response stream - self._response.body.close() - return typing.cast(bytes, data) else: # read all we can (and cache it) data = self._response.body.read() if cache_content: self._body = data - if self.length_remaining is not None: - self.length_remaining = max(self.length_remaining - len(data), 0) - if len(data) == 0 or ( - self.length_is_certain and self.length_remaining == 0 - ): - # definitely finished reading, close response stream - self._response.body.close() - return typing.cast(bytes, data) + if self.length_remaining is not None: + self.length_remaining = max(self.length_remaining - len(data), 0) + if len(data) == 0 or ( + self.length_is_certain and self.length_remaining == 0 + ): + # definitely finished reading, close response stream + self._response.body.close() + return typing.cast(bytes, data) def read_chunked( self, amt: int | None = None, decode_content: bool | None = None, - ) -> typing.Generator[bytes, None, None]: + ) -> typing.Generator[bytes]: # chunked is handled by browser while True: bytes = self.read(amt, decode_content) @@ -241,7 +233,7 @@ def close(self) -> None: self._closed = True @contextmanager - def _error_catcher(self) -> typing.Generator[None, None, None]: + def _error_catcher(self) -> typing.Generator[None]: """ Catch Emscripten specific exceptions thrown by fetch.py, instead re-raising urllib3 variants, so that low-level exceptions diff --git a/vendor/urllib3/contrib/pyopenssl.py b/vendor/urllib3/contrib/pyopenssl.py index c12cb31..3714500 100644 --- a/vendor/urllib3/contrib/pyopenssl.py +++ b/vendor/urllib3/contrib/pyopenssl.py @@ -366,9 +366,11 @@ def sendall(self, data: bytes) -> None: ) total_sent += sent - def shutdown(self) -> None: - # FIXME rethrow compatible exceptions should we ever use this - self.connection.shutdown() + def shutdown(self, how: int) -> None: + try: + self.connection.shutdown() + except OpenSSL.SSL.Error as e: + raise ssl.SSLError(f"shutdown error: {e!r}") from e def close(self) -> None: self._closed = True @@ -422,6 +424,7 @@ def __init__(self, protocol: int) -> None: self.check_hostname = False self._minimum_version: int = ssl.TLSVersion.MINIMUM_SUPPORTED self._maximum_version: int = ssl.TLSVersion.MAXIMUM_SUPPORTED + self._verify_flags: int = ssl.VERIFY_X509_TRUSTED_FIRST @property def options(self) -> int: @@ -432,6 +435,15 @@ def options(self, value: int) -> None: self._options = value self._set_ctx_options() + @property + def verify_flags(self) -> int: + return self._verify_flags + + @verify_flags.setter + def verify_flags(self, value: int) -> None: + self._verify_flags = value + self._ctx.get_cert_store().set_flags(self._verify_flags) + @property def verify_mode(self) -> int: return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()] diff --git a/vendor/urllib3/exceptions.py b/vendor/urllib3/exceptions.py index b0792f0..a0de9d6 100644 --- a/vendor/urllib3/exceptions.py +++ b/vendor/urllib3/exceptions.py @@ -23,9 +23,7 @@ class HTTPWarning(Warning): """Base warning used by this module.""" -_TYPE_REDUCE_RESULT = typing.Tuple[ - typing.Callable[..., object], typing.Tuple[object, ...] 
-] +_TYPE_REDUCE_RESULT = tuple[typing.Callable[..., object], tuple[object, ...]] class PoolError(HTTPError): @@ -33,11 +31,12 @@ class PoolError(HTTPError): def __init__(self, pool: ConnectionPool, message: str) -> None: self.pool = pool + self._message = message super().__init__(f"{pool}: {message}") def __reduce__(self) -> _TYPE_REDUCE_RESULT: # For pickling purposes. - return self.__class__, (None, None) + return self.__class__, (None, self._message) class RequestError(PoolError): @@ -49,7 +48,7 @@ def __init__(self, pool: ConnectionPool, url: str, message: str) -> None: def __reduce__(self) -> _TYPE_REDUCE_RESULT: # For pickling purposes. - return self.__class__, (None, self.url, None) + return self.__class__, (None, self.url, self._message) class SSLError(HTTPError): @@ -102,6 +101,10 @@ def __init__( super().__init__(pool, url, message) + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. + return self.__class__, (None, self.url, self.reason) + class HostChangedError(RequestError): """Raised when an existing pool gets a request for a foreign host.""" @@ -141,8 +144,13 @@ class NewConnectionError(ConnectTimeoutError, HTTPError): def __init__(self, conn: HTTPConnection, message: str) -> None: self.conn = conn + self._message = message super().__init__(f"{conn}: {message}") + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. + return self.__class__, (None, self._message) + @property def pool(self) -> HTTPConnection: warnings.warn( @@ -160,8 +168,14 @@ class NameResolutionError(NewConnectionError): def __init__(self, host: str, conn: HTTPConnection, reason: socket.gaierror): message = f"Failed to resolve '{host}' ({reason})" + self._host = host + self._reason = reason super().__init__(conn, message) + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. + return self.__class__, (self._host, None, self._reason) + class EmptyPoolError(PoolError): """Raised when a pool runs out of connections and no more are allowed.""" diff --git a/vendor/urllib3/fields.py b/vendor/urllib3/fields.py index 3e258a5..97c4730 100644 --- a/vendor/urllib3/fields.py +++ b/vendor/urllib3/fields.py @@ -7,8 +7,8 @@ _TYPE_FIELD_VALUE = typing.Union[str, bytes] _TYPE_FIELD_VALUE_TUPLE = typing.Union[ _TYPE_FIELD_VALUE, - typing.Tuple[str, _TYPE_FIELD_VALUE], - typing.Tuple[str, _TYPE_FIELD_VALUE, str], + tuple[str, _TYPE_FIELD_VALUE], + tuple[str, _TYPE_FIELD_VALUE, str], ] diff --git a/vendor/urllib3/filepost.py b/vendor/urllib3/filepost.py index 1c90a21..14f70b0 100644 --- a/vendor/urllib3/filepost.py +++ b/vendor/urllib3/filepost.py @@ -11,7 +11,7 @@ writer = codecs.lookup("utf-8")[3] _TYPE_FIELDS_SEQUENCE = typing.Sequence[ - typing.Union[typing.Tuple[str, _TYPE_FIELD_VALUE_TUPLE], RequestField] + typing.Union[tuple[str, _TYPE_FIELD_VALUE_TUPLE], RequestField] ] _TYPE_FIELDS = typing.Union[ _TYPE_FIELDS_SEQUENCE, diff --git a/vendor/urllib3/http2/connection.py b/vendor/urllib3/http2/connection.py index f486145..d082239 100644 --- a/vendor/urllib3/http2/connection.py +++ b/vendor/urllib3/http2/connection.py @@ -140,7 +140,7 @@ def putrequest( # type: ignore[override] with self._h2_conn as conn: self._h2_stream = conn.get_next_available_stream_id() - def putheader(self, header: str | bytes, *values: str | bytes) -> None: + def putheader(self, header: str | bytes, *values: str | bytes) -> None: # type: ignore[override] # TODO SKIPPABLE_HEADERS from urllib3 are ignored. 
header = header.encode() if isinstance(header, str) else header header = header.lower() # A lot of upstream code uses capitalized headers. diff --git a/vendor/urllib3/response.py b/vendor/urllib3/response.py index a0273d6..66c6a68 100644 --- a/vendor/urllib3/response.py +++ b/vendor/urllib3/response.py @@ -5,6 +5,7 @@ import json as _json import logging import re +import socket import sys import typing import warnings @@ -440,6 +441,9 @@ def release_conn(self) -> None: def drain_conn(self) -> None: raise NotImplementedError() + def shutdown(self) -> None: + raise NotImplementedError() + def close(self) -> None: raise NotImplementedError() @@ -589,6 +593,7 @@ def __init__( request_method: str | None = None, request_url: str | None = None, auto_close: bool = True, + sock_shutdown: typing.Callable[[int], None] | None = None, ) -> None: super().__init__( headers=headers, @@ -618,6 +623,7 @@ def __init__( if hasattr(body, "read"): self._fp = body # type: ignore[assignment] + self._sock_shutdown = sock_shutdown # Are we using the chunked-style of transfer encoding? self.chunk_left: int | None = None @@ -733,7 +739,7 @@ def _init_length(self, request_method: str | None) -> int | None: return length @contextmanager - def _error_catcher(self) -> typing.Generator[None, None, None]: + def _error_catcher(self) -> typing.Generator[None]: """ Catch low-level python exceptions, instead re-raising urllib3 variants, so that low-level exceptions are not leaked in the @@ -812,7 +818,7 @@ def _fp_read( happen. The known cases: - * 3.8 <= CPython < 3.9.7 because of a bug + * CPython < 3.9.7 because of a bug https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900. * urllib3 injected with pyOpenSSL-backed SSL-support. * CPython < 3.10 only when `amt` does not fit 32-bit int. @@ -1037,7 +1043,7 @@ def read1( def stream( self, amt: int | None = 2**16, decode_content: bool | None = None - ) -> typing.Generator[bytes, None, None]: + ) -> typing.Generator[bytes]: """ A generator wrapper for the read() method. A call will block until ``amt`` bytes have been read from the connection or until the @@ -1066,7 +1072,14 @@ def stream( def readable(self) -> bool: return True + def shutdown(self) -> None: + if not self._sock_shutdown: + raise ValueError("Cannot shutdown socket as self._sock_shutdown is not set") + self._sock_shutdown(socket.SHUT_RD) + def close(self) -> None: + self._sock_shutdown = None + if not self.closed and self._fp: self._fp.close() @@ -1159,7 +1172,7 @@ def _handle_chunk(self, amt: int | None) -> bytes: def read_chunked( self, amt: int | None = None, decode_content: bool | None = None - ) -> typing.Generator[bytes, None, None]: + ) -> typing.Generator[bytes]: """ Similar to :meth:`HTTPResponse.read`, but with an additional parameter: ``decode_content``. 
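Note: the sock_shutdown callable threaded through HTTPResponse above backs the
new public shutdown() method, which half-closes the socket for reads so that a
read() blocked on another thread can be interrupted. A minimal usage sketch,
assuming a plain PoolManager streaming request; the URL and the chunk handling
are illustrative and not part of this patch:

    import threading

    import urllib3

    pool = urllib3.PoolManager()
    # Stream the body so the response keeps ownership of the underlying socket.
    resp = pool.request("GET", "https://example.com/slow", preload_content=False)

    def consume() -> None:
        try:
            for chunk in resp.stream(2**16):
                pass  # process each chunk
        except urllib3.exceptions.HTTPError:
            # A shutdown mid-transfer can surface as a protocol error here.
            pass

    reader = threading.Thread(target=consume)
    reader.start()

    # shutdown() invokes the stored sock_shutdown hook with socket.SHUT_RD,
    # unblocking a reader stuck in read(). close() drops the hook, so calling
    # shutdown() after close() raises the ValueError added in this patch.
    resp.shutdown()
    reader.join()
    resp.close()
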
diff --git a/vendor/urllib3/util/connection.py b/vendor/urllib3/util/connection.py index 89bd189..f92519e 100644 --- a/vendor/urllib3/util/connection.py +++ b/vendor/urllib3/util/connection.py @@ -6,7 +6,7 @@ from ..exceptions import LocationParseError from .timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT -_TYPE_SOCKET_OPTIONS = typing.List[typing.Tuple[int, int, typing.Union[int, bytes]]] +_TYPE_SOCKET_OPTIONS = list[tuple[int, int, typing.Union[int, bytes]]] if typing.TYPE_CHECKING: from .._base_connection import BaseHTTPConnection diff --git a/vendor/urllib3/util/request.py b/vendor/urllib3/util/request.py index 82454a0..94392a1 100644 --- a/vendor/urllib3/util/request.py +++ b/vendor/urllib3/util/request.py @@ -68,8 +68,10 @@ def make_headers( :param accept_encoding: Can be a boolean, list, or string. - ``True`` translates to 'gzip,deflate'. If either the ``brotli`` or - ``brotlicffi`` package is installed 'gzip,deflate,br' is used instead. + ``True`` translates to 'gzip,deflate'. If the dependencies for + Brotli (either the ``brotli`` or ``brotlicffi`` package) and/or Zstandard + (the ``zstandard`` package) algorithms are installed, then their encodings are + included in the string ('br' and 'zstd', respectively). List will get joined by comma. String will be used as provided. @@ -116,14 +118,14 @@ def make_headers( headers["connection"] = "keep-alive" if basic_auth: - headers[ - "authorization" - ] = f"Basic {b64encode(basic_auth.encode('latin-1')).decode()}" + headers["authorization"] = ( + f"Basic {b64encode(basic_auth.encode('latin-1')).decode()}" + ) if proxy_basic_auth: - headers[ - "proxy-authorization" - ] = f"Basic {b64encode(proxy_basic_auth.encode('latin-1')).decode()}" + headers["proxy-authorization"] = ( + f"Basic {b64encode(proxy_basic_auth.encode('latin-1')).decode()}" + ) if disable_cache: headers["cache-control"] = "no-cache" diff --git a/vendor/urllib3/util/ssl_.py b/vendor/urllib3/util/ssl_.py index 5e93be9..e43bd8f 100644 --- a/vendor/urllib3/util/ssl_.py +++ b/vendor/urllib3/util/ssl_.py @@ -18,7 +18,7 @@ IS_PYOPENSSL = False ALPN_PROTOCOLS = ["http/1.1"] -_TYPE_VERSION_INFO = typing.Tuple[int, int, int, str, int] +_TYPE_VERSION_INFO = tuple[int, int, int, str, int] # Maps the length of a digest to a possible hash function producing this digest HASHFUNC_MAP = { @@ -32,7 +32,7 @@ def _is_bpo_43522_fixed( version_info: _TYPE_VERSION_INFO, pypy_version_info: _TYPE_VERSION_INFO | None, ) -> bool: - """Return True for CPython 3.8.9+, 3.9.3+ or 3.10+ and PyPy 7.3.8+ where + """Return True for CPython 3.9.3+ or 3.10+ and PyPy 7.3.8+ where setting SSLContext.hostname_checks_common_name to False works. 
Outside of CPython and PyPy we don't know which implementations work @@ -48,11 +48,7 @@ def _is_bpo_43522_fixed( elif implementation_name == "cpython": major_minor = version_info[:2] micro = version_info[2] - return ( - (major_minor == (3, 8) and micro >= 9) - or (major_minor == (3, 9) and micro >= 3) - or major_minor >= (3, 10) - ) + return (major_minor == (3, 9) and micro >= 3) or major_minor >= (3, 10) else: # Defensive: return False @@ -105,6 +101,7 @@ class _TYPE_PEER_CERT_RET_DICT(TypedDict, total=False): OPENSSL_VERSION_NUMBER, PROTOCOL_TLS, PROTOCOL_TLS_CLIENT, + VERIFY_X509_STRICT, OP_NO_SSLv2, OP_NO_SSLv3, SSLContext, @@ -113,15 +110,18 @@ class _TYPE_PEER_CERT_RET_DICT(TypedDict, total=False): PROTOCOL_SSLv23 = PROTOCOL_TLS + # Needed for Python 3.9 which does not define this + VERIFY_X509_PARTIAL_CHAIN = getattr(ssl, "VERIFY_X509_PARTIAL_CHAIN", 0x80000) + # Setting SSLContext.hostname_checks_common_name = False didn't work before CPython - # 3.8.9, 3.9.3, and 3.10 (but OK on PyPy) or OpenSSL 1.1.1l+ + # 3.9.3, and 3.10 (but OK on PyPy) or OpenSSL 1.1.1l+ if HAS_NEVER_CHECK_COMMON_NAME and not _is_has_never_check_common_name_reliable( OPENSSL_VERSION, OPENSSL_VERSION_NUMBER, sys.implementation.name, sys.version_info, sys.pypy_version_info if sys.implementation.name == "pypy" else None, # type: ignore[attr-defined] - ): + ): # Defensive: for Python < 3.9.3 HAS_NEVER_CHECK_COMMON_NAME = False # Need to be careful here in case old TLS versions get @@ -142,6 +142,8 @@ class _TYPE_PEER_CERT_RET_DICT(TypedDict, total=False): OP_NO_SSLv3 = 0x2000000 # type: ignore[assignment] PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 # type: ignore[assignment] PROTOCOL_TLS_CLIENT = 16 # type: ignore[assignment] + VERIFY_X509_PARTIAL_CHAIN = 0x80000 + VERIFY_X509_STRICT = 0x20 # type: ignore[assignment] _TYPE_PEER_CERT_RET = typing.Union["_TYPE_PEER_CERT_RET_DICT", bytes, None] @@ -227,6 +229,7 @@ def create_urllib3_context( ciphers: str | None = None, ssl_minimum_version: int | None = None, ssl_maximum_version: int | None = None, + verify_flags: int | None = None, ) -> ssl.SSLContext: """Creates and configures an :class:`ssl.SSLContext` instance for use with urllib3. @@ -251,6 +254,9 @@ def create_urllib3_context( :param ciphers: Which cipher suites to allow the server to select. Defaults to either system configured ciphers if OpenSSL 1.1.1+, otherwise uses a secure default set of ciphers. + :param verify_flags: + The flags for certificate verification operations. These default to + ``ssl.VERIFY_X509_PARTIAL_CHAIN`` and ``ssl.VERIFY_X509_STRICT`` for Python 3.13+. :returns: Constructed SSLContext object with specified options :rtype: SSLContext @@ -324,6 +330,16 @@ def create_urllib3_context( context.options |= options + if verify_flags is None: + verify_flags = 0 + # In Python 3.13+ ssl.create_default_context() sets VERIFY_X509_PARTIAL_CHAIN + # and VERIFY_X509_STRICT so we do the same + if sys.version_info >= (3, 13): + verify_flags |= VERIFY_X509_PARTIAL_CHAIN + verify_flags |= VERIFY_X509_STRICT + + context.verify_flags |= verify_flags + # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is # necessary for conditional client cert authentication with TLS 1.3. 
# The attribute is None for OpenSSL <= 1.1.0 or does not exist when using @@ -345,15 +361,12 @@ def create_urllib3_context( try: context.hostname_checks_common_name = False - except AttributeError: # Defensive: for CPython < 3.8.9 and 3.9.3; for PyPy < 7.3.8 + except AttributeError: # Defensive: for CPython < 3.9.3; for PyPy < 7.3.8 pass - # Enable logging of TLS session keys via defacto standard environment variable - # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values. - if hasattr(context, "keylog_filename"): - sslkeylogfile = os.environ.get("SSLKEYLOGFILE") - if sslkeylogfile: - context.keylog_filename = sslkeylogfile + sslkeylogfile = os.environ.get("SSLKEYLOGFILE") + if sslkeylogfile: + context.keylog_filename = sslkeylogfile return context @@ -373,8 +386,7 @@ def ssl_wrap_socket( key_password: str | None = ..., ca_cert_data: None | str | bytes = ..., tls_in_tls: typing.Literal[False] = ..., -) -> ssl.SSLSocket: - ... +) -> ssl.SSLSocket: ... @typing.overload @@ -392,8 +404,7 @@ def ssl_wrap_socket( key_password: str | None = ..., ca_cert_data: None | str | bytes = ..., tls_in_tls: bool = ..., -) -> ssl.SSLSocket | SSLTransportType: - ... +) -> ssl.SSLSocket | SSLTransportType: ... def ssl_wrap_socket( diff --git a/vendor/urllib3/util/ssl_match_hostname.py b/vendor/urllib3/util/ssl_match_hostname.py index 453cfd4..25d9100 100644 --- a/vendor/urllib3/util/ssl_match_hostname.py +++ b/vendor/urllib3/util/ssl_match_hostname.py @@ -146,7 +146,7 @@ def match_hostname( if key == "commonName": if _dnsname_match(value, hostname): return - dnsnames.append(value) + dnsnames.append(value) # Defensive: for Python < 3.9.3 if len(dnsnames) > 1: raise CertificateError( diff --git a/vendor/urllib3/util/ssltransport.py b/vendor/urllib3/util/ssltransport.py index d69ccb3..6d59bc3 100644 --- a/vendor/urllib3/util/ssltransport.py +++ b/vendor/urllib3/util/ssltransport.py @@ -174,12 +174,10 @@ def close(self) -> None: @typing.overload def getpeercert( self, binary_form: typing.Literal[False] = ... - ) -> _TYPE_PEER_CERT_RET_DICT | None: - ... + ) -> _TYPE_PEER_CERT_RET_DICT | None: ... @typing.overload - def getpeercert(self, binary_form: typing.Literal[True]) -> bytes | None: - ... + def getpeercert(self, binary_form: typing.Literal[True]) -> bytes | None: ... def getpeercert(self, binary_form: bool = False) -> _TYPE_PEER_CERT_RET: return self.sslobj.getpeercert(binary_form) # type: ignore[return-value] @@ -219,13 +217,11 @@ def _wrap_ssl_read(self, len: int, buffer: bytearray | None = None) -> int | byt # func is sslobj.do_handshake or sslobj.unwrap @typing.overload - def _ssl_io_loop(self, func: typing.Callable[[], None]) -> None: - ... + def _ssl_io_loop(self, func: typing.Callable[[], None]) -> None: ... # func is sslobj.write, arg1 is data @typing.overload - def _ssl_io_loop(self, func: typing.Callable[[bytes], int], arg1: bytes) -> int: - ... + def _ssl_io_loop(self, func: typing.Callable[[bytes], int], arg1: bytes) -> int: ... # func is sslobj.read, arg1 is len, arg2 is buffer @typing.overload @@ -234,8 +230,7 @@ def _ssl_io_loop( func: typing.Callable[[int, bytearray | None], bytes], arg1: int, arg2: bytearray | None, - ) -> bytes: - ... + ) -> bytes: ... 
def _ssl_io_loop( self, diff --git a/vendor/urllib3/util/url.py b/vendor/urllib3/util/url.py index d53ea93..db057f1 100644 --- a/vendor/urllib3/util/url.py +++ b/vendor/urllib3/util/url.py @@ -291,13 +291,11 @@ def _remove_path_dot_segments(path: str) -> str: @typing.overload -def _normalize_host(host: None, scheme: str | None) -> None: - ... +def _normalize_host(host: None, scheme: str | None) -> None: ... @typing.overload -def _normalize_host(host: str, scheme: str | None) -> str: - ... +def _normalize_host(host: str, scheme: str | None) -> str: ... def _normalize_host(host: str | None, scheme: str | None) -> str | None: From caab1df071a55d24e6fd4f28737f366a3ee502f2 Mon Sep 17 00:00:00 2001 From: GeekMasher Date: Tue, 10 Jun 2025 11:13:26 +0100 Subject: [PATCH 3/4] fix(deps): Update Semantic Version --- vendor/semantic_version/__init__.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/vendor/semantic_version/__init__.py b/vendor/semantic_version/__init__.py index 1528bda..421acb6 100644 --- a/vendor/semantic_version/__init__.py +++ b/vendor/semantic_version/__init__.py @@ -7,12 +7,4 @@ __author__ = "Raphaël Barrois " -try: - # Python 3.8+ - from importlib.metadata import version - - __version__ = version("semantic_version") -except ImportError: - import pkg_resources - - __version__ = pkg_resources.get_distribution("semantic_version").version +__version__ = "2.10.0" \ No newline at end of file From 466bba178f86da1d585e9030edb11d19e38af6e9 Mon Sep 17 00:00:00 2001 From: GeekMasher Date: Tue, 10 Jun 2025 11:20:32 +0100 Subject: [PATCH 4/4] feat: Update tests --- tests/test_cocoapods.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_cocoapods.py b/tests/test_cocoapods.py index 3cbb851..1e8442d 100644 --- a/tests/test_cocoapods.py +++ b/tests/test_cocoapods.py @@ -10,7 +10,7 @@ def test_parse_pod(self): deps = createPod(Dependencies(), data) self.assertEqual(len(deps), 1) - dep = deps.pop(0) + dep = deps.find("YogaKit") self.assertEqual(dep.name, "YogaKit") self.assertEqual(dep.version, "1.0.0") @@ -20,11 +20,11 @@ def test_parse_pods(self): self.assertEqual(len(deps), 2) - dep1 = deps.pop(0) + dep1 = deps.find("YogaKit") self.assertEqual(dep1.name, "YogaKit") self.assertEqual(dep1.version, "1.18.1") - dep2 = deps.pop(0) + dep2 = deps.find("Yoga") self.assertEqual(dep2.name, "Yoga") self.assertEqual(dep2.version, "1.14")
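
Note: the test changes track the vendored ghastoolkit 0.17.x API, where parsed
dependency collections are queried by name via Dependencies.find() instead of
being consumed positionally with pop(0). A minimal sketch of the pattern the
updated tests rely on, assuming find(name) returns the matching Dependency or
None; the Podfile.lock payload and the createPod import path below are
illustrative, not taken from this patch:

    # Dependencies ships in the vendored ghastoolkit; createPod is the same
    # helper the tests call -- its import path here is an assumption.
    from ghastoolkit.supplychain.dependencies import Dependencies
    from ghastoolkit import createPod

    # Illustrative Podfile.lock payload mirroring the versions the tests expect.
    data = """
    PODS:
      - Yoga (1.14)
      - YogaKit (1.18.1):
        - Yoga (~> 1.14)
    """

    deps = createPod(Dependencies(), data)

    # Lookups are now by name, so insertion order no longer matters.
    yogakit = deps.find("YogaKit")
    assert yogakit is not None and yogakit.version == "1.18.1"

    yoga = deps.find("Yoga")
    assert yoga is not None and yoga.version == "1.14"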