69 commits
ef08ace
Many test/bug fixes
wallstop Oct 13, 2025
4a46118
PR updates
wallstop Oct 13, 2025
c05f863
Reformatting
wallstop Oct 13, 2025
b7374c2
Auto-complete fixes
wallstop Oct 13, 2025
cd9c824
Remove doc.md
wallstop Oct 13, 2025
18836b6
Add check-eol
wallstop Oct 13, 2025
9704652
Linter updates
wallstop Oct 13, 2025
61d3a31
Linter updates
wallstop Oct 13, 2025
30264ba
Progress
wallstop Oct 13, 2025
02ab4fd
Test fixes
wallstop Oct 13, 2025
30fce83
Add launcher
wallstop Oct 13, 2025
67f1508
Update terminal input
wallstop Oct 13, 2025
fd604e3
More tests
wallstop Oct 14, 2025
2ad47a3
Slightly better launcher
wallstop Oct 14, 2025
4c660f8
Command Terminal updates
wallstop Oct 14, 2025
8d5af2d
Progress
wallstop Oct 14, 2025
765da64
Tons of progress
wallstop Oct 14, 2025
150d8e0
More command goodies
wallstop Oct 14, 2025
fe8f326
Progress
wallstop Oct 14, 2025
bb60de6
Command Terminal progress
wallstop Oct 15, 2025
00a8c0c
Clear-History working
wallstop Oct 15, 2025
2446ec6
More better command terminal
wallstop Oct 15, 2025
506fc92
Progress on launcher
wallstop Oct 15, 2025
ff71f00
Progress
wallstop Oct 15, 2025
fbae294
Updated tests
wallstop Oct 15, 2025
373944a
Much better backends
wallstop Oct 15, 2025
3d9e390
Progress
wallstop Oct 15, 2025
80f81df
Terminal decomposition
wallstop Oct 15, 2025
2c4a696
Progress
wallstop Oct 15, 2025
97e28e2
Progress
wallstop Oct 15, 2025
b00a376
More decomposition
wallstop Oct 15, 2025
8c9fc17
Further decomposition
wallstop Oct 15, 2025
2f9eb36
Progress
wallstop Oct 15, 2025
785367d
More progress
wallstop Oct 15, 2025
91a1ca9
Progress
wallstop Oct 15, 2025
89858be
Progress
wallstop Oct 15, 2025
b8f19c8
Progress
wallstop Oct 15, 2025
f6c3a0f
Progress
wallstop Oct 15, 2025
b27b784
Progress
wallstop Oct 15, 2025
35acff4
Command Terminal improvements
wallstop Oct 15, 2025
c7bb6b5
Progress
wallstop Oct 16, 2025
3ac640f
Progress
wallstop Oct 16, 2025
d02a6d7
Progress
wallstop Oct 16, 2025
0cdc12c
Progress
wallstop Oct 17, 2025
7e4629f
Progress
wallstop Oct 17, 2025
0a3f43b
Updates
wallstop Oct 17, 2025
00f2281
Updates
wallstop Oct 17, 2025
6a80368
Progress
wallstop Oct 17, 2025
038fd49
Updates
wallstop Oct 17, 2025
7f85f24
Updates
wallstop Oct 17, 2025
aacc62a
Progress
wallstop Oct 17, 2025
d2c357d
Progress
wallstop Oct 18, 2025
98b2777
Progress
wallstop Oct 18, 2025
57881a2
Progress
wallstop Oct 18, 2025
7a26264
Progress
wallstop Oct 18, 2025
fe05dfd
Updates
wallstop Oct 18, 2025
b432555
Progress
wallstop Oct 18, 2025
4bd3bb7
Progress
wallstop Oct 18, 2025
9510999
Progress
wallstop Oct 18, 2025
9f86338
Progress
wallstop Oct 18, 2025
6432fce
Progress
wallstop Oct 18, 2025
11600cc
Progress
wallstop Oct 18, 2025
a026546
Progress
wallstop Oct 20, 2025
3bb13c4
Updates
wallstop Oct 20, 2025
7b05216
Updates
wallstop Oct 20, 2025
87e1944
meta fixes
wallstop Oct 20, 2025
8f58b5a
Updates
wallstop Oct 20, 2025
596a52e
Test updates
wallstop Oct 20, 2025
9c8cbe0
Stuff
wallstop Nov 6, 2025
20 changes: 10 additions & 10 deletions .config/dotnet-tools.json
@@ -1,10 +1,10 @@
{
  "version": 1,
  "isRoot": true,
  "tools": {
    "CSharpier": {
      "version": "1.1.2",
      "commands": ["csharpier"]
    }
  }
}
{
  "version": 1,
  "isRoot": true,
  "tools": {
    "CSharpier": {
      "version": "1.1.2",
      "commands": ["csharpier"]
    }
  }
}
1 change: 1 addition & 0 deletions .csharpierignore
@@ -0,0 +1 @@
Runtime/Binaries/*.xml
3 changes: 3 additions & 0 deletions .csharpierrc.json
@@ -0,0 +1,3 @@
{
  "endOfLine": "crlf"
}
368 changes: 184 additions & 184 deletions .editorconfig

Large diffs are not rendered by default.

42 changes: 42 additions & 0 deletions .gitattributes
@@ -0,0 +1,42 @@
# Normalize text and enforce CRLF in working tree for key files
* text=auto

# Enforce CRLF for source and docs (matches Prettier/CSharpier + CI checks)
*.cs text eol=crlf
*.csproj text eol=crlf
*.sln text eol=crlf
*.props text eol=crlf
*.targets text eol=crlf

*.md text eol=crlf
*.markdown text eol=crlf

*.json text eol=crlf
*.asmdef text eol=crlf
*.asmref text eol=crlf

*.yml text eol=crlf
*.yaml text eol=crlf

# Common config treated as text with CRLF
.editorconfig text eol=crlf
.prettierrc text eol=crlf
.prettierrc.json text eol=crlf
.markdownlint.json text eol=crlf
.yamllint.yaml text eol=crlf

# Unity / assets and binaries (do not modify EOL)
*.png binary
*.jpg binary
*.jpeg binary
*.gif binary
*.mp4 binary
*.otf binary
*.ttf binary
*.woff binary
*.woff2 binary
*.unity binary
*.prefab binary
*.mat binary
*.asset binary
*.dll binary
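
The rules above normalize line endings and keep CRLF in the working tree for source and docs, matching the Prettier/CSharpier configuration and the check-eol step added in this PR. As a rough sketch of what such a check could look like (illustrative only, not the repository's actual script; the extension list mirrors the .gitattributes entries, everything else is an assumption):

#!/usr/bin/env python3
# Illustrative sketch only (not the repository's actual check-eol script):
# verifies that files covered by the CRLF rules above contain no bare LF line endings.
import sys
from pathlib import Path

CRLF_EXTENSIONS = {".cs", ".csproj", ".sln", ".props", ".targets", ".md", ".json", ".asmdef", ".yml", ".yaml"}
SKIP_DIRS = {".git", "node_modules", "Library", "Temp"}

def main(root: str) -> int:
    bad = []
    for path in Path(root).rglob("*"):
        if any(part in SKIP_DIRS for part in path.parts):
            continue
        if not path.is_file() or path.suffix.lower() not in CRLF_EXTENSIONS:
            continue
        data = path.read_bytes()
        # After removing every CRLF pair, any remaining \n is a bare LF.
        if b"\n" in data.replace(b"\r\n", b""):
            bad.append(str(path))
    for p in bad:
        print(f"{p}: expected CRLF line endings")
    return 1 if bad else 0

if __name__ == "__main__":
    sys.exit(main(sys.argv[1] if len(sys.argv) > 1 else "."))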
26 changes: 24 additions & 2 deletions .github/dependabot.yml
@@ -1,10 +1,32 @@
version: 2
updates:
  # GitHub Actions workflow updates
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
      interval: "daily"
    assignees:
      - wallstop
    reviewers:
      - wallstop
      - wallstop

  # NuGet: .csproj/props/targets and .config/dotnet-tools.json (local tools)
  - package-ecosystem: "nuget"
    directory: "/"
    schedule:
      interval: "daily"
    assignees:
      - wallstop
    reviewers:
      - wallstop

  # npm/UPM: package.json at repo root (Unity package manifest)
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "daily"
    versioning-strategy: increase
    assignees:
      - wallstop
    reviewers:
      - wallstop
77 changes: 77 additions & 0 deletions .github/scripts/check-markdown-links.ps1
@@ -0,0 +1,77 @@
Param(
    [string]$Root = "."
)

$ErrorActionPreference = 'Stop'

function Normalize-Name {
    param([string]$s)
    if ([string]::IsNullOrWhiteSpace($s)) { return "" }
    # Remove extension (like .md), collapse non-alphanumerics, lowercase
    $noExt = $s -replace '\.[^\.]+$',''
    $normalized = ($noExt -replace '[^A-Za-z0-9]', '')
    return $normalized.ToLowerInvariant()
}

$issueCount = 0

# Exclude typical directories that shouldn't be scanned
$excludeDirs = @('.git', 'node_modules', '.vs')

$mdFiles = Get-ChildItem -Path $Root -Recurse -File -Filter *.md |
    Where-Object { $excludeDirs -notcontains $_.Directory.Name }

# Regex for inline markdown links (exclude images), capture optional title
$pattern = '(?<!\!)\[(?<text>[^\]]+)\]\((?<target>[^)\s]+)(?:\s+"[^"]*")?\)'

foreach ($file in $mdFiles) {
    $lines = Get-Content -LiteralPath $file.FullName -Encoding UTF8
    for ($i = 0; $i -lt $lines.Count; $i++) {
        $line = $lines[$i]
        $linkMatches = [System.Text.RegularExpressions.Regex]::Matches($line, $pattern)
        foreach ($m in $linkMatches) {
            $text = $m.Groups['text'].Value.Trim()
            $targetRaw = $m.Groups['target'].Value.Trim()

            # Skip anchors, external links, and mailto
            if ($targetRaw -match '^(#|https?://|mailto:|tel:|data:)') { continue }

            # Remove query/anchor for file checks
            $targetCore = $targetRaw -replace '[?#].*$',''

            # Decode URL-encoded chars
            try { $targetCore = [uri]::UnescapeDataString($targetCore) } catch { }

            # Only care about links to markdown files
            if (-not ($targetCore -match '\.md$')) { continue }

            $fileName = [System.IO.Path]::GetFileName($targetCore)
            $baseName = [System.IO.Path]::GetFileNameWithoutExtension($targetCore)

            # Fail when the visible link text is the raw file name
            $isExactFileName = $text.Equals($fileName, [System.StringComparison]::OrdinalIgnoreCase)

            # Also fail when the visible text looks like a path or ends with .md:
            # it contains path separators and no whitespace (heuristic for raw paths)
            $looksLikePath = ($text -match '[\\/]' -and -not ($text -match '\s'))
            $looksLikeMarkdownFileName = $text.Trim().ToLowerInvariant().EndsWith('.md')

            if ($isExactFileName -or $looksLikePath -or $looksLikeMarkdownFileName) {
                $issueCount++
                $lineNo = $i + 1
                $msg = "Link text '$text' should be human-readable, not a raw file name or path"
                # GitHub Actions annotation
                Write-Output "::error file=$($file.FullName),line=$lineNo::$msg (target: $targetRaw)"
            }
        }
    }
}

if ($issueCount -gt 0) {
    Write-Host "Found $issueCount documentation link(s) with non-human-readable text." -ForegroundColor Red
    Write-Host "Use a descriptive phrase instead of the raw file name."
    exit 1
}
else {
    Write-Host "All markdown links have human-readable text."
}
89 changes: 89 additions & 0 deletions .github/scripts/check_markdown_links.py
@@ -0,0 +1,89 @@
#!/usr/bin/env python3
import os
import re
import sys
import urllib.parse


EXCLUDE_DIRS = {".git", "node_modules", ".vs"}


def normalize_name(s: str) -> str:
    if not s:
        return ""
    # remove extension, strip non-alphanumerics, lowercase
    base = re.sub(r"\.[^.]+$", "", s)
    return re.sub(r"[^A-Za-z0-9]", "", base).lower()


LINK_RE = re.compile(r"(?<!\!)\[(?P<text>[^\]]+)\]\((?P<target>[^)\s]+)(?:\s+\"[^\"]*\")?\)")


def should_check_target(target: str) -> bool:
    if re.match(r"^(#|https?://|mailto:|tel:|data:)", target):
        return False
    # only check links that end in .md (ignoring anchors/query)
    core = re.sub(r"[?#].*$", "", target)
    try:
        core = urllib.parse.unquote(core)
    except Exception:
        pass
    return core.lower().endswith(".md")


def main(root: str) -> int:
    issues = 0
    for dirpath, dirnames, filenames in os.walk(root):
        # prune excluded directories
        dirnames[:] = [d for d in dirnames if d not in EXCLUDE_DIRS]
        for filename in filenames:
            if not filename.lower().endswith(".md"):
                continue
            path = os.path.join(dirpath, filename)
            try:
                with open(path, "r", encoding="utf-8") as f:
                    lines = f.readlines()
            except Exception:
                continue
            for idx, line in enumerate(lines, start=1):
                for m in LINK_RE.finditer(line):
                    text = m.group("text").strip()
                    target_raw = m.group("target").strip()
                    if not should_check_target(target_raw):
                        continue
                    target_core = re.sub(r"[?#].*$", "", target_raw)
                    try:
                        target_core = urllib.parse.unquote(target_core)
                    except Exception:
                        pass
                    file_name = os.path.basename(target_core)
                    base_name, _ = os.path.splitext(file_name)

                    is_exact_file_name = text.lower() == file_name.lower()
                    looks_like_path = (("/" in text) or ("\\" in text)) and not re.search(r"\s", text)
                    looks_like_markdown = text.strip().lower().endswith(".md")

                    if (
                        is_exact_file_name
                        or looks_like_path
                        or looks_like_markdown
                    ):
                        issues += 1
                        msg = f"{path}:{idx}: Link text '{text}' should be human-readable, not a raw file name or path (target: {target_raw})"
                        print(msg)

    if issues:
        print(
            f"Found {issues} documentation link(s) with non-human-readable text.",
            file=sys.stderr,
        )
        print(
            "Use a descriptive phrase instead of the raw file name.", file=sys.stderr
        )
        return 1
    return 0


if __name__ == "__main__":
    root = sys.argv[1] if len(sys.argv) > 1 else "."
    sys.exit(main(root))
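
The Python version applies the same three heuristics as the PowerShell script. As a quick illustration, here is a hypothetical snippet that re-implements those checks in simplified form against a few invented sample links (the file names and link text below are made up, and the whitespace test is reduced to a plain space check):

# Hypothetical samples showing which links the check_markdown_links.py heuristics would flag.
samples = [
    ("README.md", "docs/README.md"),      # flagged: visible text is the raw file name
    ("docs/Setup.md", "docs/Setup.md"),   # flagged: visible text looks like a path and ends with .md
    ("Setup guide", "docs/Setup.md"),     # ok: descriptive, human-readable phrase
    ("changelog", "https://example.com/CHANGELOG.md"),  # skipped: external targets are ignored
]

for text, target in samples:
    if target.startswith(("http://", "https://", "#", "mailto:", "tel:", "data:")):
        print(f"[{text}]({target}) -> skipped (external or anchor)")
        continue
    file_name = target.rsplit("/", 1)[-1]
    is_exact_file_name = text.lower() == file_name.lower()
    looks_like_path = (("/" in text) or ("\\" in text)) and " " not in text
    looks_like_markdown = text.strip().lower().endswith(".md")
    flagged = is_exact_file_name or looks_like_path or looks_like_markdown
    print(f"[{text}]({target}) -> {'flagged' if flagged else 'ok'}")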
74 changes: 74 additions & 0 deletions .github/scripts/check_markdown_url_encoding.py
@@ -0,0 +1,74 @@
#!/usr/bin/env python3
import os
import re
import sys


EXCLUDE_DIRS = {".git", "node_modules", ".vs", ".vscode", "Library", "Temp"}


# Inline markdown link or image: ![alt](target "title") or [text](target "title")
INLINE_LINK_RE = re.compile(
    r"!?(?P<all>\[(?P<text>[^\]]+)\]\((?P<target>[^)\s]+)(?:\s+\"[^\"]*\")?\))"
)

# Reference-style link definitions: [id]: target "title"
REF_DEF_RE = re.compile(r"^\s*\[[^\]]+\]:\s*(?P<target>\S+)(?:\s+\"[^\"]*\")?\s*$")


def is_external(target: str) -> bool:
    return target.startswith("http://") or target.startswith("https://") or target.startswith("mailto:") or target.startswith("tel:") or target.startswith("data:")


def has_unencoded_chars(target: str) -> bool:
    # Only flag raw spaces or plus signs in the path/query/fragment
    return (" " in target) or ("+" in target)


def scan_file(path: str) -> int:
    issues = 0
    try:
        with open(path, "r", encoding="utf-8") as f:
            lines = f.readlines()
    except Exception:
        return 0

    for idx, line in enumerate(lines, start=1):
        # Inline links/images
        for m in INLINE_LINK_RE.finditer(line):
            target = m.group("target").strip()
            if is_external(target):
                continue
            if has_unencoded_chars(target):
                issues += 1
                print(f"{path}:{idx}: Unencoded character(s) in link target: '{target}'. Encode spaces as %20 and '+' as %2B.")

        # Reference-style link definitions
        m = REF_DEF_RE.match(line)
        if m:
            target = m.group("target").strip()
            if not is_external(target) and has_unencoded_chars(target):
                issues += 1
                print(f"{path}:{idx}: Unencoded character(s) in link definition: '{target}'. Encode spaces as %20 and '+' as %2B.")

    return issues


def main(root: str) -> int:
    issues = 0
    for dirpath, dirnames, filenames in os.walk(root):
        dirnames[:] = [d for d in dirnames if d not in EXCLUDE_DIRS]
        for filename in filenames:
            if filename.lower().endswith(".md"):
                issues += scan_file(os.path.join(dirpath, filename))
    if issues:
        print(f"Found {issues} markdown link(s) with unencoded spaces or plus signs.", file=sys.stderr)
        print("Please URL-encode spaces as %20 and '+' as %2B in relative links.", file=sys.stderr)
        return 1
    return 0


if __name__ == "__main__":
    root = sys.argv[1] if len(sys.argv) > 1 else "."
    sys.exit(main(root))
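
To show what this encoding check accepts and flags, here is a small hypothetical snippet that reuses the same two predicates on invented link targets:

# Hypothetical link targets and how check_markdown_url_encoding.py would treat them.
def is_external(target):
    return target.startswith(("http://", "https://", "mailto:", "tel:", "data:"))

def has_unencoded_chars(target):
    return (" " in target) or ("+" in target)

targets = [
    "docs/Getting Started.md",      # flagged: raw space, should be Getting%20Started.md
    "docs/C+ Notes.md",             # flagged: raw '+' (and space), should use %2B and %20
    "docs/Getting%20Started.md",    # ok: space already encoded
    "https://example.com/a b.md",   # ignored: external targets are not checked
]

for t in targets:
    if is_external(t):
        print(f"{t} -> ignored (external)")
    elif has_unencoded_chars(t):
        print(f"{t} -> flagged (encode spaces as %20 and '+' as %2B)")
    else:
        print(f"{t} -> ok")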
