Skip to content

Commit 18f75c9

Browse files
ci: disable nested workflows; use only root workflows
1 parent c5268a0 commit 18f75c9

File tree

7 files changed

+232
-42
lines changed

7 files changed

+232
-42
lines changed

.github/dependabot.yml

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
# Dependabot configuration: weekly version-bump PRs for the two
# dependency ecosystems used by this repository.
version: 2
updates:
  # GitHub Actions used by the workflows under .github/workflows.
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
  # Python dependencies of the sup-lang package.
  - package-ecosystem: "pip"
    directory: "/sup-lang"
    schedule:
      interval: "weekly"

.github/workflows/fuzz.yml

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
# Lightweight in-CI fuzzing of the SUP parser with atheris.
# (Named ClusterFuzzLite for continuity, but this runs atheris directly.)
name: ClusterFuzzLite

on:
  pull_request:
    branches:
      - main
      - master
  push:
    branches:
      - main
      - master
  schedule:
    - cron: '0 4 * * *'

permissions:
  contents: read

jobs:
  fuzz:
    runs-on: ubuntu-latest
    # Safety net: without this, a hung fuzzer holds the runner for the
    # 6-hour GitHub default.
    timeout-minutes: 30
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'
      - name: Install deps
        working-directory: sup-lang
        run: |
          python -m pip install --upgrade pip
          pip install . pytest hypothesis atheris
      - name: Run fuzzers (parser)
        working-directory: sup-lang
        run: |
          # -max_total_time bounds the libFuzzer loop; without it
          # atheris.Fuzz() never returns and every run only ends when the
          # job is killed. atheris forwards argv to libFuzzer via Setup().
          python - -max_total_time=300 << 'PY'
          import atheris, sys
          with atheris.instrument_imports():
              from sup.parser import Parser

          def TestOneInput(data: bytes):
              try:
                  s = data.decode('utf-8', errors='ignore')
                  Parser().parse(s)
              except Exception:
                  pass

          atheris.Setup(sys.argv, TestOneInput)
          atheris.Fuzz()
          PY

.github/workflows/osv.yml

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
# OSV vulnerability scan of the repository: on pushes to the main
# branches, weekly on Mondays, and on demand.
name: OSV vulnerability scan

on:
  push:
    branches:
      - main
      - master
  schedule:
    - cron: '0 3 * * 1'
  workflow_dispatch:

permissions:
  contents: read

jobs:
  osv-scanner:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # Scan the checked-out tree with Google's OSV scanner.
      # NOTE(review): confirm "--repo" is accepted by this action's
      # scanner invocation — the osv-scanner CLI documents
      # "-r/--recursive <dir>" for directory scans.
      - uses: google/osv-scanner-action@v1
        with:
          scan-args: >-
            --repo .

.github/workflows/release.yml

Lines changed: 49 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -3,51 +3,66 @@ name: Release
33
on:
44
push:
55
tags:
6-
- 'v*.*.*'
6+
- 'v*'
7+
8+
permissions:
9+
contents: write
10+
id-token: write
711

812
jobs:
9-
build-publish:
13+
pypi:
1014
runs-on: ubuntu-latest
11-
permissions:
12-
contents: write
13-
id-token: write
15+
defaults:
16+
run:
17+
working-directory: sup-lang
1418
steps:
1519
- uses: actions/checkout@v4
16-
17-
- name: Set up Python
18-
uses: actions/setup-python@v5
1920
with:
20-
python-version: '3.11'
21-
22-
- name: Build package
21+
fetch-depth: 0
22+
- uses: actions/setup-python@v5
23+
with:
24+
python-version: '3.12'
25+
- name: Install build tools
2326
run: |
24-
python -m pip install --upgrade pip build
25-
cd sup-lang
26-
python -m build
27-
cd ..
28-
29-
- name: Generate SBOM (CycloneDX)
27+
python -m pip install --upgrade pip
28+
pip install build twine hatchling cyclonedx-bom cyclonedx-python-lib
29+
- name: Build (reproducible)
30+
env:
31+
SOURCE_DATE_EPOCH: '1704067200'
32+
run: python -m build
33+
- name: Twine check
34+
run: twine check dist/*
35+
- name: SBOM (CycloneDX)
36+
run: cyclonedx-py environment -o sbom.json
37+
- name: Sign artifacts (if secrets present)
38+
env:
39+
GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
40+
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
3041
run: |
31-
python -m pip install cyclonedx-bom
32-
cyclonedx-py --evidence -r -o sbom.json
33-
34-
- name: Create GitHub Release
35-
id: create_release
36-
uses: softprops/action-gh-release@v2
42+
if [ -n "${GPG_PRIVATE_KEY}" ] && [ -n "${GPG_PASSPHRASE}" ]; then
43+
printf "%s" "$GPG_PRIVATE_KEY" | gpg --batch --import || true
44+
for f in dist/*; do
45+
gpg --batch --yes --pinentry-mode loopback --passphrase "$GPG_PASSPHRASE" --detach-sign --armor "$f" || true
46+
done
47+
else
48+
echo "GPG secrets not set; skipping signing."
49+
fi
50+
- name: Sigstore attest and sign (optional)
51+
if: ${{ github.event_name == 'push' }}
52+
uses: sigstore/gh-action-sigstore-python@v3.0.0
3753
with:
38-
name: ${{ github.ref_name }}
39-
draft: false
40-
prerelease: false
41-
files: |
54+
inputs: |
4255
sup-lang/dist/*
43-
sbom.json
44-
45-
- name: Publish to PyPI
46-
if: startsWith(github.ref, 'refs/tags/v')
47-
uses: pypa/gh-action-pypi-publish@v1.12.2
56+
upload-signing-artifacts: true
57+
- name: SLSA provenance
58+
uses: slsa-framework/slsa-github-generator/actions/generator@v2.0.0
59+
with:
60+
base64-subjects: true
61+
upload-assets: true
62+
- name: Publish to PyPI (token or OIDC)
63+
uses: pypa/gh-action-pypi-publish@release/v1
4864
with:
4965
packages-dir: sup-lang/dist
50-
skip-existing: true
51-
print-hash: true
66+
password: ${{ secrets.PYPI_API_TOKEN }}
5267

5368

.github/workflows/scorecard.yml

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
# OSSF Scorecard supply-chain analysis; results are published and also
# uploaded to GitHub code scanning as SARIF.
name: Scorecard supply-chain analysis

on:
  schedule:
    - cron: '0 2 * * 1'
  push:
    branches:
      - main
      - master

permissions:
  # Needed to upload the SARIF report to code scanning.
  security-events: write
  # Needed by scorecard-action when publish_results is enabled.
  id-token: write
  contents: read

jobs:
  analysis:
    runs-on: ubuntu-latest
    steps:
      # Scorecard requires the token not be persisted in the checkout.
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - uses: ossf/scorecard-action@v2.3.3
        with:
          results_file: results.sarif
          results_format: sarif
          publish_results: true
      - uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: results.sarif

sup-lang/sup/cli.py

Lines changed: 41 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -255,13 +255,36 @@ def main(argv: list[str] | None = None) -> int:
255255
args_i2.registry.rstrip("/")
256256
+ f"/resolve?name={name}&version={ver or '*'}"
257257
)
258-
with _u.urlopen(meta_url) as r:
258+
headers = {}
259+
token = os.environ.get("REGISTRY_TOKEN")
260+
if token:
261+
headers["Authorization"] = f"Bearer {token}"
262+
req = _u.Request(meta_url, headers=headers)
263+
with _u.urlopen(req) as r:
259264
if r.getcode() // 100 != 2:
260265
raise RuntimeError("Registry resolve failed")
261266
meta = _json.loads(r.read().decode("utf-8"))
262267
src_code = meta.get("source", "")
268+
digest = meta.get("sha256")
263269
if not src_code:
264270
raise RuntimeError("Registry returned empty source")
271+
# Verify integrity if digest present
272+
if digest:
273+
import hashlib as _hh
274+
275+
h = _hh.sha256(src_code.encode("utf-8")).hexdigest()
276+
if h != digest:
277+
raise RuntimeError("Integrity check failed: sha256 mismatch")
278+
# Optional HMAC verification if shared secret configured
279+
hmac_given = meta.get("hmac")
280+
hmac_secret = os.environ.get("SUP_REGISTRY_HMAC")
281+
if hmac_given and hmac_secret:
282+
import hmac as _h
283+
import hashlib as _hh2
284+
285+
calc = _h.new(hmac_secret.encode("utf-8"), src_code.encode("utf-8"), _hh2.sha256).hexdigest()
286+
if calc != hmac_given:
287+
raise RuntimeError("Integrity check failed: hmac mismatch")
265288
else:
266289
reg_dir = os.path.abspath(args_i2.registry)
267290
cand = os.path.join(reg_dir, f"{name}.sup")
@@ -409,9 +432,23 @@ def main(argv: list[str] | None = None) -> int:
409432
body = json.dumps(
410433
{"name": name, "version": version, "sha256": digest}
411434
).encode("utf-8")
412-
req = _u.Request(
413-
url, data=body, headers={"Content-Type": "application/json"}
414-
)
435+
headers = {"Content-Type": "application/json"}
436+
token = os.environ.get("REGISTRY_TOKEN")
437+
if token:
438+
headers["Authorization"] = f"Bearer {token}"
439+
# Add optional HMAC of tarball for verification
440+
hmac_secret = os.environ.get("SUP_REGISTRY_HMAC")
441+
if hmac_secret:
442+
import hmac as _h
443+
import hashlib as _hh2
444+
445+
with open(tar_path, "rb") as rf:
446+
tar_bytes = rf.read()
447+
hmac_hex = _h.new(hmac_secret.encode("utf-8"), tar_bytes, _hh2.sha256).hexdigest()
448+
payload = json.loads(body.decode("utf-8"))
449+
payload["hmac"] = hmac_hex
450+
body = json.dumps(payload).encode("utf-8")
451+
req = _u.Request(url, data=body, headers=headers)
415452
with _u.urlopen(req) as r:
416453
if r.getcode() // 100 != 2:
417454
raise RuntimeError("Registry upload failed")

sup-lang/sup/interpreter.py

Lines changed: 31 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -93,6 +93,13 @@ def __init__(self) -> None:
9393
import random as _random
9494

9595
self._rng = _random.Random(self._seed or 0)
96+
# Force C locale for stable formatting/sorting
97+
try:
98+
import locale as _loc
99+
100+
_loc.setlocale(_loc.LC_ALL, "C")
101+
except Exception:
102+
pass
96103
else:
97104
self._rng = None
98105
# Runtime counters
@@ -674,7 +681,7 @@ def _eval_builtin(self, node: AST.BuiltinCall) -> object:
674681
self.last_result = qs_map
675682
return qs_map
676683

677-
# Crypto / base64 / randomness
684+
# Crypto / base64 / randomness / time
678685
if name == "sha256":
679686
import hashlib as _hh
680687

@@ -707,13 +714,33 @@ def _eval_builtin(self, node: AST.BuiltinCall) -> object:
707714
return hmac_hex
708715
if name == "random_bytes":
709716
import base64 as _b64
710-
import secrets as _secrets
711-
712717
n = int(self._num(self.eval(node.args[0]))) if len(node.args) > 0 else 16
713-
data = _secrets.token_bytes(max(1, n))
718+
n = max(1, n)
719+
if self._rng is not None:
720+
# Deterministic bytes derived from PRNG
721+
data = bytes(self._rng.randrange(0, 256) for _ in range(n))
722+
else:
723+
import secrets as _secrets
724+
725+
data = _secrets.token_bytes(n)
714726
b64 = _b64.b64encode(data).decode("ascii")
715727
self.last_result = b64
716728
return b64
729+
if name == "now":
730+
# Return ISO 8601 UTC timestamp. In deterministic mode, derive from seed.
731+
import datetime as _dt
732+
733+
if self._deterministic:
734+
# Stable pseudo-time based on seed (seconds since epoch)
735+
base = int(self._seed or 0)
736+
t = _dt.datetime(1970, 1, 1, tzinfo=_dt.timezone.utc) + _dt.timedelta(
737+
seconds=base
738+
)
739+
else:
740+
t = _dt.datetime.now(tz=_dt.timezone.utc)
741+
iso = t.replace(microsecond=0).isoformat().replace("+00:00", "Z")
742+
self.last_result = iso
743+
return iso
717744
if name == "base64_encode":
718745
import base64 as _b64
719746

0 commit comments

Comments
 (0)