#!/usr/bin/env bash
# Export design-token JSON layers (brand / alias / mapped) to Stylus files.
#
# Usage: export-tokens.sh [ROOT_DIR] [OUT_DIR]
#   ROOT_DIR  directory containing brand/, alias/ and mapped/ (default: .)
#   OUT_DIR   destination for generated .styl files (default: ROOT_DIR/stylus)
set -euo pipefail

ROOT_DIR="${1:-.}"
OUT_DIR="${2:-$ROOT_DIR/stylus}"

mkdir -p "$OUT_DIR"

# The heredoc delimiter is quoted ('PY') so the Python source passes through
# verbatim; the two positional args arrive as sys.argv[1] / sys.argv[2].
python3 - "$ROOT_DIR" "$OUT_DIR" <<'PY'
import json
import os
import re
import sys
from pathlib import Path

ROOT_DIR = Path(sys.argv[1]).resolve()
OUT_DIR = Path(sys.argv[2]).resolve()

# Token layers that must exist as sub-folders of ROOT_DIR,
# each holding exactly one .json file.
EXPECTED_FOLDERS = ["brand", "alias", "mapped"]
def find_single_json(folder: Path) -> Path:
    """Return the sole .json file inside *folder*, aborting on any other count.

    Exits via SystemExit with an [ERROR] message when the folder is missing,
    contains no .json file, or contains more than one.
    """
    # Path.is_dir() is False for a nonexistent path too, so one check suffices.
    if not folder.is_dir():
        raise SystemExit(f"[ERROR] Missing folder: {folder}")
    candidates = sorted(
        entry
        for entry in folder.iterdir()
        if entry.is_file() and entry.suffix.lower() == ".json"
    )
    if not candidates:
        raise SystemExit(f"[ERROR] No .json file found in: {folder}")
    if len(candidates) > 1:
        names = ", ".join(entry.name for entry in candidates)
        raise SystemExit(
            f"[ERROR] Expected exactly 1 .json file in {folder}, found {len(candidates)}: "
            + names
        )
    return candidates[0]
def load_json(path: Path):
    """Parse *path* as UTF-8 JSON, exiting with a readable error on failure.

    Raises SystemExit with an [ERROR] message (the script's convention)
    instead of a raw traceback when the file cannot be read or does not
    contain valid JSON.
    """
    try:
        return json.loads(path.read_text(encoding="utf-8"))
    except OSError as e:
        # e.g. the file vanished between discovery and parsing, or is unreadable.
        raise SystemExit(f"[ERROR] Cannot read {path}: {e}") from e
    except json.JSONDecodeError as e:
        raise SystemExit(f"[ERROR] Invalid JSON in {path}: {e}") from e
def sanitize_part(part: str) -> str:
    """Normalise one token-path segment to lowercase kebab-case.

    "&" becomes the word "and"; every other non-alphanumeric run collapses
    into a single hyphen. Falls back to "token" when nothing survives.
    """
    text = str(part).strip().lower().replace("&", " and ")
    text = re.sub(r"[^a-z0-9]+", "-", text)
    text = re.sub(r"-{2,}", "-", text).strip("-")
    return text if text else "token"

def path_to_var(path_parts):
    """Join sanitised path segments into a single Stylus variable name."""
    segments = map(sanitize_part, path_parts)
    return "-".join(segments)

# Matches a full W3C-style alias reference such as "{color.primary.500}".
REF_RE = re.compile(r"^\{([^{}]+)\}$")

def ref_to_var(ref_text: str) -> str:
    """Translate a "{a.b.c}" alias reference into the variable name a-b-c."""
    body = ref_text.strip()[1:-1]
    parts = [piece.strip() for piece in body.strip().split(".")]
    return path_to_var(parts)
def is_hex_color(s: str) -> bool:
    """True for #RGB, #RRGGBB or #RRGGBBAA hex colour literals."""
    match = re.fullmatch(r"#(?:[0-9a-fA-F]{3}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})", s)
    return match is not None
def is_css_dimension(s: str) -> bool:
    """True for a signed decimal number carrying a px/rem/em/vh/vw/% unit."""
    match = re.fullmatch(r"-?\d+(?:\.\d+)?(?:px|rem|em|vh|vw|%)", s)
    return match is not None
def quote_string(s: str) -> str:
    """Return *s* as a double-quoted, escaped literal (JSON string rules).

    Non-ASCII characters are emitted verbatim rather than \\uXXXX-escaped.
    """
    return json.dumps(s, ensure_ascii=False)
def stylus_scalar(value):
    """Render a JSON leaf as a Stylus scalar, or None when *value* is composite.

    Strings are special-cased: alias references become variable names, hex
    colours and CSS dimensions pass through bare, and everything else is
    emitted as a quoted literal.
    """
    # bool must be tested before int: bool is an int subclass in Python.
    if isinstance(value, bool):
        return "true" if value else "false"
    if value is None:
        return "null"
    if isinstance(value, (int, float)):
        return str(value)
    if not isinstance(value, str):
        return None  # dict / list: the caller renders these structurally
    text = value.strip()
    if REF_RE.fullmatch(text):
        return ref_to_var(text)
    if is_hex_color(text) or is_css_dimension(text):
        return text
    return quote_string(text)
def stylus_value(value, indent=0):
    """Recursively render any token value as Stylus source text.

    Scalars come from stylus_scalar(); dicts become brace-delimited hashes
    indented one extra pad unit per nesting level; lists become comma-joined
    bracketed sequences.
    """
    as_scalar = stylus_scalar(value)
    if as_scalar is not None:
        return as_scalar

    if isinstance(value, dict):
        outer_pad = " " * indent
        inner_pad = " " * (indent + 1)
        body = [
            f"{inner_pad}{sanitize_part(key)}: {stylus_value(item, indent + 1)}"
            for key, item in value.items()
        ]
        return "\n".join(["{"] + body + [outer_pad + "}"])

    if isinstance(value, list):
        joined = ", ".join(stylus_value(item, indent) for item in value)
        return f"[{joined}]"

    # Unknown type: fall back to its quoted string representation.
    return quote_string(str(value))
def flatten_tokens(node, path=None, out=None):
    """Walk a design-token tree and collect every "$value" leaf.

    Returns a dict mapping the tuple of group names leading to a token to
    {"type": <"$type" or None>, "value": <"$value">}. Keys beginning with
    "$" are treated as metadata and never descended into; non-dict nodes
    without a "$value" ancestor are ignored.
    """
    if out is None:
        out = {}
    if path is None:
        path = []

    if not isinstance(node, dict):
        return out

    if "$value" in node:
        out[tuple(path)] = {
            "type": node.get("$type"),
            "value": node.get("$value"),
        }
        return out

    for key, child in node.items():
        if not key.startswith("$"):
            flatten_tokens(child, path + [key], out)
    return out
def collect_all_vars(*flat_maps):
    """Return the set of every variable name derivable from the given maps."""
    return {
        path_to_var(token_path)
        for flat in flat_maps
        for token_path in flat
    }
def _collect_refs(value, refs):
    """Append the variable name of every "{...}" alias reference in *value* to *refs*."""
    if isinstance(value, str) and REF_RE.fullmatch(value):
        refs.append(ref_to_var(value))
    elif isinstance(value, dict):
        for child in value.values():
            _collect_refs(child, refs)
    elif isinstance(value, list):
        for child in value:
            _collect_refs(child, refs)

def render_file(title, flat_map, known_vars):
    """Render one flattened token layer to Stylus source text.

    Parameters:
        title: layer name, used only in the generated header comments.
        flat_map: output of flatten_tokens() for this layer.
        known_vars: every variable name defined across all layers; any
            reference not in this set is reported in a trailing comment block
            rather than failing the build.

    Returns the complete file contents, ending in exactly one newline.
    """
    lines = [
        f"// Auto-generated from {title}.json",
        f"// Source layer: {title}",
        "",
    ]
    unresolved = []

    # Sort on the sanitized path so ordering matches the emitted variable names.
    for token_path in sorted(flat_map.keys(), key=lambda p: [sanitize_part(x) for x in p]):
        token = flat_map[token_path]
        var_name = path_to_var(token_path)
        rendered = stylus_value(token["value"])

        # Previously a closure was redefined on every iteration; the shared
        # module-level helper does the same walk once per token value.
        refs = []
        _collect_refs(token["value"], refs)
        for ref in refs:
            if ref not in known_vars:
                unresolved.append((var_name, ref))

        lines.append(f"{var_name} = {rendered}")
        lines.append("")

    if unresolved:
        lines.append("// Unresolved references detected:")
        for src, ref in unresolved:
            lines.append(f"// {src} -> {ref}")
        lines.append("")

    return "\n".join(lines).rstrip() + "\n"
# ---------------------------------------------------------------------------
# Main pipeline: discover, parse and flatten each expected layer, then render
# one .styl file per layer plus an import index. Driven by EXPECTED_FOLDERS
# (previously declared but unused) so the layer list lives in one place.
# ---------------------------------------------------------------------------
sources = {}    # layer name -> discovered .json path
flattened = {}  # layer name -> flattened token map
for layer in EXPECTED_FOLDERS:
    sources[layer] = find_single_json(ROOT_DIR / layer)
    flattened[layer] = flatten_tokens(load_json(sources[layer]))

# References resolve against the union of all layers, so cross-layer aliases
# (e.g. mapped -> brand) are not reported as unresolved.
known_vars = collect_all_vars(*flattened.values())

written = []
for layer in EXPECTED_FOLDERS:
    target = OUT_DIR / f"{layer}.styl"
    target.write_text(render_file(layer, flattened[layer], known_vars), encoding="utf-8")
    written.append(target)

index_lines = ["// Auto-generated import index"]
index_lines += [f'@import "{layer}.styl"' for layer in EXPECTED_FOLDERS]
index_lines.append("")
index_path = OUT_DIR / "index.styl"
index_path.write_text("\n".join(index_lines), encoding="utf-8")
written.append(index_path)

for layer in EXPECTED_FOLDERS:
    print(f"[OK] {layer} json: {sources[layer]}")
for target in written:
    print(f"[OK] wrote: {target}")
PY
0 commit comments